From 13bac489aeb4723ba25f6ef9455f728ecfbf1ad2 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 28 Feb 2020 15:23:02 +0100 Subject: [PATCH 001/702] Add first draft of gcom-c sgli reader --- satpy/etc/composites/sgli.yaml | 148 +++++++++ satpy/etc/readers/sgli_l1b.yaml | 523 ++++++++++++++++++++++++++++++++ satpy/readers/sgli_l1b.py | 123 ++++++++ 3 files changed, 794 insertions(+) create mode 100644 satpy/etc/composites/sgli.yaml create mode 100644 satpy/etc/readers/sgli_l1b.yaml create mode 100644 satpy/readers/sgli_l1b.py diff --git a/satpy/etc/composites/sgli.yaml b/satpy/etc/composites/sgli.yaml new file mode 100644 index 0000000000..5dfd344422 --- /dev/null +++ b/satpy/etc/composites/sgli.yaml @@ -0,0 +1,148 @@ +sensor_name: visir/sgli + + +modifiers: + + rayleigh_corrected: + compositor: !!python/name:satpy.composites.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: rayleigh_only + prerequisites: + - name: 'VN8' + modifiers: [sunz_corrected] + optional_prerequisites: + - satellite_azimuth_angle + - satellite_zenith_angle + - solar_azimuth_angle + - solar_zenith_angle + + rayleigh_corrected_marine_clean: + compositor: !!python/name:satpy.composites.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: marine_clean_aerosol + prerequisites: + - name: 'VN8' + modifiers: [sunz_corrected] + optional_prerequisites: + - satellite_azimuth_angle + - satellite_zenith_angle + - solar_azimuth_angle + - solar_zenith_angle + + rayleigh_corrected_marine_tropical: + compositor: !!python/name:satpy.composites.PSPRayleighReflectance + atmosphere: tropical + aerosol_type: marine_tropical_aerosol + prerequisites: + - name: 'VN8' + modifiers: [sunz_corrected] + optional_prerequisites: + - satellite_azimuth_angle + - satellite_zenith_angle + - solar_azimuth_angle + - solar_zenith_angle + + rayleigh_corrected_desert: + compositor: !!python/name:satpy.composites.PSPRayleighReflectance + atmosphere: tropical + aerosol_type: desert_aerosol + prerequisites: + - name: 'VN8' + modifiers: [sunz_corrected] + optional_prerequisites: + - satellite_azimuth_angle + - satellite_zenith_angle + - solar_azimuth_angle + - solar_zenith_angle + + rayleigh_corrected_land: + compositor: !!python/name:satpy.composites.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: continental_average_aerosol + prerequisites: + - name: 'VN8' + modifiers: [sunz_corrected] + optional_prerequisites: + - satellite_azimuth_angle + - satellite_zenith_angle + - solar_azimuth_angle + - solar_zenith_angle + + +composites: + true_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'VN8' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'VN5' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'VN3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + standard_name: true_color + + true_color_land: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'Oa08' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] + - name: 'Oa06' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] + - name: 'Oa03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] + standard_name: true_color + + true_color_desert: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'Oa08' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] + - 
name: 'Oa06' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] + - name: 'Oa03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] + standard_name: true_color + + true_color_marine_clean: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'Oa08' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] + - name: 'Oa06' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] + - name: 'Oa03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] + standard_name: true_color + + true_color_marine_tropical: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'Oa08' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] + - name: 'Oa06' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] + - name: 'Oa03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] + standard_name: true_color + + true_color_raw: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'VN8' + modifiers: [effective_solar_pathlength_corrected] + - name: 'VN5' + modifiers: [effective_solar_pathlength_corrected] + - name: 'VN3' + modifiers: [effective_solar_pathlength_corrected] + standard_name: true_color + + ocean_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'Oa08' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'Oa06' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'Oa03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + standard_name: ocean_color diff --git a/satpy/etc/readers/sgli_l1b.yaml b/satpy/etc/readers/sgli_l1b.yaml new file mode 100644 index 0000000000..ce9b2d0f5e --- /dev/null +++ b/satpy/etc/readers/sgli_l1b.yaml @@ -0,0 +1,523 @@ +reader: + description: Reader for SGLI data + reference: https://gportal.jaxa.jp/gpr/assets/mng_upload/GCOM-C/SGLI_Level1_Product_Format_Description_en.pdf + name: sgli_l1b + sensors: [sgli] + default_channels: [] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + +file_types: + gcom-c_l1b_v: + file_reader: !!python/name:satpy.readers.sgli_l1b.HDF5SGLI + # GC1SG1_202002231142M25511_1BSG_VNRDQ_1008.h5 + file_patterns: + - '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_1BS{type:1s}_VNR{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' + #- '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_{level:2s}S{type:1s}_{subsystem:3s}{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' + +datasets: + longitude_v: + name: longitude_v + resolution: [250, 1000] + file_type: gcom-c_l1b + standard_name: longitude + units: degree + file_key: Geometry_data/Longitude + file_type: gcom-c_l1b_v + + latitude_v: + name: latitude_v + resolution: [250, 1000] + file_type: gcom-c_l1b + standard_name: latitude + units: degree + file_key: Geometry_data/Latitude + file_type: gcom-c_l1b_v + + solar_zenith_angle: + name: solar_zenith_angle + sensor: sgli + wavelength: [0.3925,0.4,0.4075] + resolution: [250, 1000] + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Geometry_data/Solar_zenith + + solar_azimuth_angle: + name: solar_azimuth_angle + sensor: sgli + 
wavelength: [0.3925,0.4,0.4075] + resolution: [250, 1000] + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Geometry_data/Solar_azimuth + + satellite_zenith_angle: + name: satellite_zenith_angle + sensor: sgli + wavelength: [0.3925,0.4,0.4075] + resolution: [250, 1000] + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Geometry_data/Sensor_zenith + + satellite_azimuth_angle: + name: satellite_azimuth_angle + sensor: sgli + wavelength: [0.3925,0.4,0.4075] + resolution: [250, 1000] + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Geometry_data/Sensor_azimuth + + + VN1: + name: VN1 + sensor: sgli + wavelength: [0.375,0.38,0.385] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN01 + + VN3: + name: VN3 + sensor: sgli + wavelength: [0.438, 0.443, 0.448] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN03 + + VN5: + name: VN5 + sensor: sgli + wavelength: [0.520,0.530,0.540] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN05 + + VN6: + name: VN6 + sensor: sgli + wavelength: [0.555,0.565,0.575] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN06 + + VN8: + name: VN8 + sensor: sgli + wavelength: [0.6635,0.6735,0.6835] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN08 + + VN11: + name: VN11 + sensor: sgli + wavelength: [0.8585,0.8685,0.8785] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN11 + + # Oa02: + # name: Oa02 + # sensor: olci + # wavelength: [0.4075, 0.4125, 0.4175] + # resolution: 300 + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # coordinates: [longitude, latitude] + # file_type: esa_l1b + + # Oa03: + # name: Oa03 + # sensor: olci + # wavelength: [0.4375,0.4425,0.4475] + # resolution: 300 + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # 
standard_name: toa_bidirectional_reflectance + # units: "%" + # coordinates: [longitude, latitude] + # file_type: esa_l1b + + # Oa04: + # name: Oa04 + # sensor: olci + # wavelength: [0.485,0.49,0.495] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa05: + # name: Oa05 + # sensor: olci + # wavelength: [0.505,0.51,0.515] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa06: + # name: Oa06 + # sensor: olci + # wavelength: [0.555,0.56,0.565] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa07: + # name: Oa07 + # sensor: olci + # wavelength: [0.615,0.62,0.625] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa08: + # name: Oa08 + # sensor: olci + # wavelength: [0.66,0.665,0.67] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa09: + # name: Oa09 + # sensor: olci + # wavelength: [0.67,0.67375,0.6775] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa10: + # name: Oa10 + # sensor: olci + # wavelength: [0.6775,0.68125,0.685] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa11: + # name: Oa11 + # sensor: olci + # wavelength: [0.70375,0.70875,0.71375] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa12: + # name: Oa12 + # sensor: olci + # wavelength: [0.75,0.75375,0.7575] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa13: + # name: Oa13 + # sensor: olci + # wavelength: [0.76,0.76125,0.7625] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # 
radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa14: + # name: Oa14 + # sensor: olci + # wavelength: [0.760625, 0.764375, 0.768125] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa15: + # name: Oa15 + # sensor: olci + # wavelength: [0.76625, 0.7675, 0.76875] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa16: + # name: Oa16 + # sensor: olci + # wavelength: [0.77125, 0.77875, 0.78625] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa17: + # name: Oa17 + # sensor: olci + # wavelength: [0.855, 0.865, 0.875] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa18: + # name: Oa18 + # sensor: olci + # wavelength: [0.88, 0.885, 0.89] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa19: + # name: Oa19 + # sensor: olci + # wavelength: [0.895, 0.9, 0.905] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa20: + # name: Oa20 + # sensor: olci + # wavelength: [0.93, 0.94, 0.95] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa21: + # name: Oa21 + # sensor: olci + # wavelength: [1.0, 1.02, 1.04] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # solar_zenith_angle: + # name: solar_zenith_angle + # sensor: olci + # resolution: 300 + # coordinates: [longitude, latitude] + # file_type: esa_angles + + # solar_azimuth_angle: + # name: solar_azimuth_angle + # sensor: olci + # resolution: 300 + # coordinates: [longitude, latitude] + # file_type: esa_angles + + # satellite_zenith_angle: + # name: satellite_zenith_angle + # sensor: olci + # resolution: 300 + # 
coordinates: [longitude, latitude] + # file_type: esa_angles + + # satellite_azimuth_angle: + # name: satellite_azimuth_angle + # sensor: olci + # resolution: 300 + # coordinates: [longitude, latitude] + # file_type: esa_angles + + # humidity: + # name: humidity + # sensor: olci + # resolution: 300 + # coordinates: [longitude, latitude] + # file_type: esa_meteo + + # sea_level_pressure: + # name: sea_level_pressure + # sensor: olci + # resolution: 300 + # coordinates: [longitude, latitude] + # file_type: esa_meteo + + # total_columnar_water_vapour: + # name: total_columnar_water_vapour + # sensor: olci + # resolution: 300 + # coordinates: [longitude, latitude] + # file_type: esa_meteo + + # total_ozone: + # name: total_ozone + # sensor: olci + # resolution: 300 + # coordinates: [longitude, latitude] + # file_type: esa_meteo diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py new file mode 100644 index 0000000000..4d13be55e2 --- /dev/null +++ b/satpy/readers/sgli_l1b.py @@ -0,0 +1,123 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2020 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""GCOM-C SGLI L1b reader. + +GCOM-C has an imager instrument: SGLI +https://www.wmo-sat.info/oscar/instruments/view/505 + +Test data is available here: +https://suzaku.eorc.jaxa.jp/GCOM_C/data/product_std.html +The live data is available from here: +https://gportal.jaxa.jp/gpr/search?tab=1 +And the format description is here: +https://gportal.jaxa.jp/gpr/assets/mng_upload/GCOM-C/SGLI_Level1_Product_Format_Description_en.pdf + +""" + +from satpy.readers.file_handlers import BaseFileHandler +from datetime import datetime +from satpy import CHUNK_SIZE +import xarray as xr +import dask.array as da +import h5py +import logging +import numpy as np + +logger = logging.getLogger(__name__) + +resolutions = {'Q': 250, + 'K': 1000, + 'L': 1000} + + +def interpolate(arr, sampling, full_shape): + """Interpolate the angles and navigation.""" + # TODO: daskify this! + # TODO: do it in cartesian coordinates ! 
pbs at date line and poles + # possible + tie_x = np.arange(0, arr.shape[0] * sampling, sampling) + tie_y = np.arange(0, arr.shape[1] * sampling, sampling) + full_x = np.arange(0, full_shape[0]) + full_y = np.arange(0, full_shape[1]) + + from scipy.interpolate import RectBivariateSpline + spl = RectBivariateSpline( + tie_x, tie_y, arr) + + values = spl(full_x, full_y) + + return da.from_array(values, chunks=(CHUNK_SIZE, CHUNK_SIZE)) + + +class HDF5SGLI(BaseFileHandler): + """File handler for the SGLI l1b data.""" + + def __init__(self, filename, filename_info, filetype_info): + """Initialize the filehandler.""" + super(HDF5SGLI, self).__init__(filename, filename_info, filetype_info) + self.resolution = resolutions[self.filename_info['resolution']] + self.fh = h5py.File(self.filename, 'r') + + @property + def start_time(self): + """Get the start time.""" + the_time = self.fh['Global_attributes'].attrs['Scene_start_time'].item() + return datetime.strptime(the_time.decode('ascii'), '%Y%m%d %H:%M:%S.%f') + + @property + def end_time(self): + """Get the end time.""" + the_time = self.fh['Global_attributes'].attrs['Scene_end_time'].item() + return datetime.strptime(the_time.decode('ascii'), '%Y%m%d %H:%M:%S.%f') + + def get_dataset(self, key, info): + """Get the dataset.""" + if key.resolution != self.resolution: + return + + h5dataset = self.fh[info['file_key']] + resampling_interval = h5dataset.attrs.get('Resampling_interval', 1) + if resampling_interval != 1: + logger.debug('Interpolating %s.', key.name) + full_shape = (self.fh['Image_data'].attrs['Number_of_lines'], + self.fh['Image_data'].attrs['Number_of_pixels']) + dataset = interpolate(h5dataset, resampling_interval, full_shape) + else: + dataset = da.from_array(h5dataset[:].astype(' Date: Fri, 28 Feb 2020 15:37:23 +0100 Subject: [PATCH 002/702] Apply scale and offset for all datasets --- satpy/readers/sgli_l1b.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 4d13be55e2..80c06ffab5 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -118,6 +118,8 @@ def get_dataset(self, key, info): # equivalent to the two lines above dataset = (dataset * h5dataset.attrs['Slope_reflectance'] + h5dataset.attrs['Offset_reflectance']) * 100 + else: + dataset = dataset * h5dataset.attrs['Slope'] + h5dataset.attrs['Offset'] dataset.attrs['platform_name'] = 'GCOM-C1' return dataset From ba2bc3821e53e8140ddb4cfefae09eb7ef7d583f Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 3 Mar 2020 17:40:09 +0100 Subject: [PATCH 003/702] Fix saturation and missing value --- satpy/etc/composites/sgli.yaml | 22 ++++++++++++++++------ satpy/etc/readers/sgli_l1b.yaml | 16 ++++++++++++++++ satpy/readers/sgli_l1b.py | 9 ++++++++- 3 files changed, 40 insertions(+), 7 deletions(-) diff --git a/satpy/etc/composites/sgli.yaml b/satpy/etc/composites/sgli.yaml index 5dfd344422..f66744c037 100644 --- a/satpy/etc/composites/sgli.yaml +++ b/satpy/etc/composites/sgli.yaml @@ -8,7 +8,7 @@ modifiers: atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - - name: 'VN8' + - name: 'VN9' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle @@ -73,10 +73,18 @@ composites: true_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'VN8' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - - name: 'VN5' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - compositor: 
!!python/name:satpy.composites.Filler + prerequisites: + - name: 'VN8' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'VN3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - compositor: !!python/name:satpy.composites.Filler + prerequisites: + - name: 'VN5' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'VN3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'VN3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: true_color @@ -126,8 +134,10 @@ composites: standard_name: true_color true_color_raw: - compositor: !!python/name:satpy.composites.GenericCompositor + compositor: !!python/name:satpy.composites.FillingCompositor prerequisites: + - name: 'VN3' + modifiers: [effective_solar_pathlength_corrected] - name: 'VN8' modifiers: [effective_solar_pathlength_corrected] - name: 'VN5' diff --git a/satpy/etc/readers/sgli_l1b.yaml b/satpy/etc/readers/sgli_l1b.yaml index ce9b2d0f5e..8249e95513 100644 --- a/satpy/etc/readers/sgli_l1b.yaml +++ b/satpy/etc/readers/sgli_l1b.yaml @@ -150,6 +150,22 @@ datasets: file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN08 + VN9: + name: VN9 + sensor: sgli + wavelength: [0.757,0.763,0.769] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN09 + VN11: name: VN11 sensor: sgli diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 80c06ffab5..0483299088 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -103,7 +103,14 @@ def get_dataset(self, key, info): dataset.attrs.update(info) with xr.set_options(keep_attrs=True): if 'Mask' in h5dataset.attrs: - dataset = dataset & h5dataset.attrs['Mask'].item() + mask_value = h5dataset.attrs['Mask'].item() + dataset = dataset & mask_value + if 'Bit00(LSB)-13' in h5dataset.attrs: + mask_info = h5dataset.attrs['Bit00(LSB)-13'].item() + mask_vals = mask_info.split(b'\n')[1:] + missing = int(mask_vals[0].split(b':')[0].strip()) + saturation = int(mask_vals[1].split(b':')[0].strip()) + dataset = dataset.where(dataset < min(missing, saturation)) if 'Maximum_valid_DN' in h5dataset.attrs: # dataset = dataset.where(dataset <= h5dataset.attrs['Maximum_valid_DN'].item()) pass From 5a0b66ceb385dc967e1d6fc48e8f6251dfdb8b02 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 6 Mar 2020 16:45:29 +0100 Subject: [PATCH 004/702] Add support for polarized and IR channels --- satpy/etc/readers/sgli_l1b.yaml | 255 +++++++++++++++++++++++++++++++- satpy/readers/sgli_l1b.py | 11 +- 2 files changed, 258 insertions(+), 8 deletions(-) diff --git a/satpy/etc/readers/sgli_l1b.yaml b/satpy/etc/readers/sgli_l1b.yaml index 8249e95513..41cb3bed9e 100644 --- a/satpy/etc/readers/sgli_l1b.yaml +++ b/satpy/etc/readers/sgli_l1b.yaml @@ -14,6 +14,19 @@ file_types: - '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_1BS{type:1s}_VNR{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' #- '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_{level:2s}S{type:1s}_{subsystem:3s}{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' + gcom-c_l1b_p: + file_reader: !!python/name:satpy.readers.sgli_l1b.HDF5SGLI + 
file_patterns: + - '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_1BS{type:1s}_POL{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' + #- '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_{level:2s}S{type:1s}_{subsystem:3s}{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' + + gcom-c_l1b_ir: + file_reader: !!python/name:satpy.readers.sgli_l1b.HDF5SGLI + file_patterns: + - '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_1BS{type:1s}_IRS{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' + #- '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_{level:2s}S{type:1s}_{subsystem:3s}{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' + + datasets: longitude_v: name: longitude_v @@ -33,6 +46,44 @@ datasets: file_key: Geometry_data/Latitude file_type: gcom-c_l1b_v + longitude_p: + name: longitude_p + resolution: 1000 + polarization: [0, -60, 60] + file_type: gcom-c_l1b + standard_name: longitude + units: degree + file_key: Geometry_data/Longitude + file_type: gcom-c_l1b_p + + latitude_p: + name: latitude_p + resolution: 1000 + polarization: [0, -60, 60] + file_type: gcom-c_l1b + standard_name: latitude + units: degree + file_key: Geometry_data/Latitude + file_type: gcom-c_l1b_p + + longitude_ir: + name: longitude_ir + resolution: [250, 500, 1000] + file_type: gcom-c_l1b + standard_name: longitude + units: degree + file_key: Geometry_data/Longitude + file_type: gcom-c_l1b_ir + + latitude_ir: + name: latitude_ir + resolution: [250, 500, 1000] + file_type: gcom-c_l1b + standard_name: latitude + units: degree + file_key: Geometry_data/Latitude + file_type: gcom-c_l1b_ir + solar_zenith_angle: name: solar_zenith_angle sensor: sgli @@ -73,7 +124,7 @@ datasets: VN1: name: VN1 sensor: sgli - wavelength: [0.375,0.38,0.385] + wavelength: [0.375, 0.38, 0.385] resolution: [250, 1000] calibration: radiance: @@ -86,6 +137,23 @@ datasets: file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN01 + VN2: + name: VN2 + sensor: sgli + wavelength: [0.407, 0.412, 0.417] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN02 + + VN3: name: VN3 sensor: sgli @@ -102,10 +170,27 @@ datasets: file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN03 + VN4: + name: VN4 + sensor: sgli + wavelength: [0.485,0.49,0.495] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN04 + + VN5: name: VN5 sensor: sgli - wavelength: [0.520,0.530,0.540] + wavelength: [0.520, 0.530, 0.540] resolution: [250, 1000] calibration: radiance: @@ -121,7 +206,7 @@ datasets: VN6: name: VN6 sensor: sgli - wavelength: [0.555,0.565,0.575] + wavelength: [0.555, 0.565, 0.575] resolution: [250, 1000] calibration: radiance: @@ -134,10 +219,26 @@ datasets: file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN06 + VN7: + name: VN7 + sensor: sgli + wavelength: [0.6635, 0.6735, 0.6835] + resolution: [250, 1000] + calibration: + radiance: + 
standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN07 + VN8: name: VN8 sensor: sgli - wavelength: [0.6635,0.6735,0.6835] + wavelength: [0.6635, 0.6735, 0.6835] resolution: [250, 1000] calibration: radiance: @@ -153,7 +254,7 @@ datasets: VN9: name: VN9 sensor: sgli - wavelength: [0.757,0.763,0.769] + wavelength: [0.757, 0.763, 0.769] resolution: [250, 1000] calibration: radiance: @@ -166,10 +267,26 @@ datasets: file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN09 + VN10: + name: VN10 + sensor: sgli + wavelength: [0.8585, 0.8685, 0.8785] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN10 + VN11: name: VN11 sensor: sgli - wavelength: [0.8585,0.8685,0.8785] + wavelength: [0.8585, 0.8685, 0.8785] resolution: [250, 1000] calibration: radiance: @@ -182,6 +299,132 @@ datasets: file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN11 + P1: + name: P1 + sensor: sgli + wavelength: [0.6635, 0.6735, 0.6835] + resolution: 1000 + polarization: [0, -60, 60] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_p, latitude_p] + file_type: gcom-c_l1b_p + file_key: Image_data/Lt_P1_{pol} + + P2: + name: P2 + sensor: sgli + wavelength: [0.8585, 0.8685, 0.8785] + resolution: 1000 + polarization: [0, -60, 60] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_p, latitude_p] + file_type: gcom-c_l1b_p + file_key: Image_data/Lt_P2_{pol} + + SW1: + name: SW1 + sensor: sgli + wavelength: [1.04, 1.05, 1.05] + resolution: 1000 + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_ir, latitude_ir] + file_type: gcom-c_l1b_ir + file_key: Image_data/Lt_SW01 + + SW2: + name: SW2 + sensor: sgli + wavelength: [1.37, 1.38, 1.39] + resolution: 1000 + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_ir, latitude_ir] + file_type: gcom-c_l1b_ir + file_key: Image_data/Lt_SW02 + + SW3: + name: SW3 + sensor: sgli + wavelength: [1.53, 1.63, 1.73] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_ir, latitude_ir] + file_type: gcom-c_l1b_ir + file_key: Image_data/Lt_SW03 + + SW4: + name: SW4 + sensor: sgli + wavelength: [2.185, 2.21, 2.235] + resolution: 1000 + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_ir, 
latitude_ir] + file_type: gcom-c_l1b_ir + file_key: Image_data/Lt_SW04 + + T1: + name: T1 + sensor: sgli + wavelength: [10.45, 10.8, 11.15] + resolution: [250, 500, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + + coordinates: [longitude_ir, latitude_ir] + file_type: gcom-c_l1b_ir + file_key: Image_data/Lt_TI01 + + T2: + name: T2 + sensor: sgli + wavelength: [11.65, 12.0, 12.35] + resolution: [250, 500, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + + coordinates: [longitude_ir, latitude_ir] + file_type: gcom-c_l1b_ir + file_key: Image_data/Lt_TI02 + # Oa02: # name: Oa02 # sensor: olci diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 0483299088..2455c4b63b 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -90,7 +90,14 @@ def get_dataset(self, key, info): if key.resolution != self.resolution: return - h5dataset = self.fh[info['file_key']] + if key.polarization is not None: + pols = {0: '0', -60: 'm60', 60: 'p60'} + file_key = info['file_key'].format(pol=pols[key.polarization]) + else: + file_key = info['file_key'] + + h5dataset = self.fh[file_key] + resampling_interval = h5dataset.attrs.get('Resampling_interval', 1) if resampling_interval != 1: logger.debug('Interpolating %s.', key.name) @@ -114,7 +121,7 @@ def get_dataset(self, key, info): if 'Maximum_valid_DN' in h5dataset.attrs: # dataset = dataset.where(dataset <= h5dataset.attrs['Maximum_valid_DN'].item()) pass - if key.name.startswith('VN'): + if key.name[:2] in ['VN', 'SW', 'P1', 'P2']: if key.calibration == 'counts': pass if key.calibration == 'radiance': From 2280a40418a9de9530532fd822e63e6957c48b96 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 14 Jul 2021 11:04:12 +0200 Subject: [PATCH 005/702] Add a green-sar composite --- satpy/etc/composites/sar.yaml | 13 +++++++++++++ satpy/etc/enhancements/generic.yaml | 16 ++++++++++++++++ 2 files changed, 29 insertions(+) diff --git a/satpy/etc/composites/sar.yaml b/satpy/etc/composites/sar.yaml index 3f9359d4b2..c624bcd7e6 100644 --- a/satpy/etc/composites/sar.yaml +++ b/satpy/etc/composites/sar.yaml @@ -93,3 +93,16 @@ composites: calibration: gamma quantity: dB standard_name: sar-ice-log + + green-sar: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: measurement + polarization: vh + - name: measurement + polarization: vv + quantity: dB + - name: measurement + polarization: vv + quantity: natural + standard_name: green-sar diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 73de008986..9b3e970990 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -331,6 +331,22 @@ enhancements: args: - [true, true, true] + green-sar: + standard_name: green-sar + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: [0, -19.18, 0] + max_stretch: [0.07, -1.294, .43] + #min_stretch: [0, -30, 0] + #max_stretch: [1, 10, 2] + - name: gamma + method: !!python/name:satpy.enhancements.gamma + kwargs: + gamma: [1.82, 0.74, 1] + sar-quick: standard_name: sar-quick operations: From c791c87facc3f85f489dc1e8ed985b9ce7f93da2 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 14 Jul 2021 11:05:30 +0200 Subject: [PATCH 006/702] Clean up the sar-c reader --- satpy/readers/sar_c_safe.py | 58 ++++++++++++++++++++----------------- 
1 file changed, 31 insertions(+), 27 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 1b4e128627..ed725f48f3 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -71,8 +71,7 @@ def _dictify(r): return r.text for x in r.findall("./*"): if x.tag in d and not isinstance(d[x.tag], list): - d[x.tag] = [d[x.tag]] - d[x.tag].append(_dictify(x)) + d[x.tag] = [d[x.tag], _dictify(x)] else: d[x.tag] = _dictify(x) return d @@ -160,8 +159,7 @@ def get_calibration_constant(self): def get_calibration(self, calibration, chunks=None): """Get the calibration array.""" calibration_name = _get_calibration_name(calibration) - calibration_vector = self._get_calibration_vector(calibration_name, chunks) - return calibration_vector + return self._get_calibration_vector(calibration_name, chunks) def _get_calibration_vector(self, calibration_name, chunks): """Get the calibration vector.""" @@ -241,9 +239,7 @@ def __init__(self, root, shape): def read_azimuth_noise_array(self, chunks=CHUNK_SIZE): """Read the azimuth noise vectors.""" self._read_azimuth_noise_blocks(chunks) - populated_array = self._assemble_azimuth_noise_blocks(chunks) - - return populated_array + return self._assemble_azimuth_noise_blocks(chunks) def _read_azimuth_noise_blocks(self, chunks): """Read the azimuth noise blocks.""" @@ -289,16 +285,15 @@ def _create_dask_slice_from_block_line(self, current_line, chunks): pieces = [arr.sel(y=current_y) for arr in current_blocks] dask_pieces = self._get_padded_dask_pieces(pieces, chunks) - new_slice = da.hstack(dask_pieces) - return new_slice + return da.hstack(dask_pieces) def _find_blocks_covering_line(self, current_line): """Find the blocks covering a given line.""" - current_blocks = [] - for block in self.blocks: - if block.coords['y'][0] <= current_line <= block.coords['y'][-1]: - current_blocks.append(block) - return current_blocks + return [ + block + for block in self.blocks + if block.coords['y'][0] <= current_line <= block.coords['y'][-1] + ] def _get_padded_dask_pieces(self, pieces, chunks): """Get the padded pieces of a slice.""" @@ -538,23 +533,34 @@ def get_dataset(self, key, info): if key['name'] in ['longitude', 'latitude', 'altitude']: logger.debug('Constructing coordinate arrays.') - arrays = dict() + arrays = {} arrays['longitude'], arrays['latitude'], arrays['altitude'] = self.get_lonlatalts() data = arrays[key['name']] data.attrs.update(info) else: - data = rioxarray.open_rasterio(self.filename, lock=False, chunks=(1, CHUNK_SIZE, CHUNK_SIZE)).squeeze() - data = data.assign_coords(x=np.arange(len(data.coords['x'])), - y=np.arange(len(data.coords['y']))) - data = self._calibrate_and_denoise(data, key) - data.attrs.update(info) - data.attrs.update({'platform_name': self._mission_id}) + data = self.get_measurement(key, info) + return data - data = self._change_quantity(data, key['quantity']) + def get_measurement(self, key, info): + """Get the measurement data.""" + result = rioxarray.open_rasterio( + self.filename, lock=False, chunks=(1, CHUNK_SIZE, CHUNK_SIZE) + ).squeeze() - return data + result = result.assign_coords( + x=np.arange(len(result.coords['x'])), + y=np.arange(len(result.coords['y'])), + ) + + result = self._calibrate_and_denoise(result, key) + result.attrs.update(info) + result.attrs.update({'platform_name': self._mission_id}) + + result = self._change_quantity(result, key['quantity']) + + return result @staticmethod def _change_quantity(data, quantity): @@ -581,8 +587,7 @@ def _get_digital_number(self, 
data): """Get the digital numbers (uncalibrated data).""" data = data.where(data > 0) data = data.astype(np.float64) - dn = data * data - return dn + return data * data def _denoise(self, dn, chunks): """Denoise the data.""" @@ -597,8 +602,7 @@ def _calibrate(self, dn, chunks, key): cal = self.calibration.get_calibration(key['calibration'], chunks=chunks) cal_constant = self.calibration.get_calibration_constant() logger.debug('Calibrating.') - data = ((dn + cal_constant) / (cal ** 2)).clip(min=0) - return data + return ((dn + cal_constant) / (cal ** 2)).clip(min=0) @lru_cache(maxsize=2) def get_lonlatalts(self): From 4f080e4344a023d12ef7c9a985c1362a027f54d2 Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 11:19:17 +0000 Subject: [PATCH 007/702] Add reader for VIIRS level 2 products produced by the NOAA enterprise suite. --- satpy/etc/readers/viirs_jrr.yaml | 37 ++++++++++++++ satpy/readers/viirs_jrr.py | 85 ++++++++++++++++++++++++++++++++ 2 files changed, 122 insertions(+) create mode 100644 satpy/etc/readers/viirs_jrr.yaml create mode 100644 satpy/readers/viirs_jrr.py diff --git a/satpy/etc/readers/viirs_jrr.yaml b/satpy/etc/readers/viirs_jrr.yaml new file mode 100644 index 0000000000..fe896c1cf8 --- /dev/null +++ b/satpy/etc/readers/viirs_jrr.yaml @@ -0,0 +1,37 @@ +reader: + description: VIIRS NOAA Enterprise L2 product reader + name: viirs_jrr + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [viirs] + + +file_types: + jrr_cloudmask: + file_reader: !!python/name:satpy.readers.viirs_jrr.VIIRSJRRFileHandler + variable_prefix: "" + file_patterns: + - 'JRR-CloudMask_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + + +datasets: + longitude: + name: longitude + standard_name: longitude + file_type: [jrr_cloudmask] + file_key: "Longitude" + units: 'degrees_east' + latitude: + name: latitude + standard_name: latitude + file_type: [jrr_cloudmask] + file_key: "Latitude" + units: 'degrees_north' + cloud_mask: + name: cloud_mask + file_type: [jrr_cloudmask] + file_key: "CloudMask" + coordinates: [longitude, latitude] + units: '1' + flag_meanings: ['Clear', 'Probably Clear', 'Probably Cloudy', 'Cloudy'] + flag_values: [0, 1, 2, 3] + _FillValue: -128 \ No newline at end of file diff --git a/satpy/readers/viirs_jrr.py b/satpy/readers/viirs_jrr.py new file mode 100644 index 0000000000..2ae344bd95 --- /dev/null +++ b/satpy/readers/viirs_jrr.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""VIIRS NOAA enterprise L2 product reader. + +This module implements readers for the NOAA enterprise level 2 products for the +VIIRS instrument. These replace the 'old' EDR products. 
+""" + +import logging + +from satpy import CHUNK_SIZE +from satpy.readers.file_handlers import BaseFileHandler + +import dask.array as da +import numpy as np +import xarray as xr + +# map platform attributes to Oscar standard name +PLATFORM_MAP = { + "NPP": "Suomi-NPP", + "J01": "NOAA-20", + "J02": "NOAA-21" +} + +LOG = logging.getLogger(__name__) + + +class VIIRSJRRFileHandler(BaseFileHandler): + """NetCDF4 reader for VIIRS Active Fires.""" + + def __init__(self, filename, filename_info, filetype_info): + """Initialize the geo filehandler.""" + super(VIIRSJRRFileHandler, self).__init__(filename, filename_info, + filetype_info) + self.nc = xr.open_dataset(self.filename, + decode_cf=True, + mask_and_scale=True, + chunks={'Columns': CHUNK_SIZE, + 'Rows': CHUNK_SIZE}) + self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) + if 'Latitude' in self.nc: + self.nc['Latitude'].attrs.update({'standard_name': 'latitude'}) + if 'Longitude' in self.nc: + self.nc['Longitude'].attrs.update({'standard_name': 'longitude'}) + + def get_dataset(self, dataset_id, info): + """Get the dataset.""" + ds = self.nc[info['file_key']] + + return ds + + @property + def start_time(self): + """Get first date/time when observations were recorded.""" + return self.filename_info['start_time'] + + @property + def end_time(self): + """Get last date/time when observations were recorded.""" + return self.filename_info.get('end_time', self.start_time) + + @property + def sensor_name(self): + """Name of sensor for this file.""" + return self["sensor"] + + @property + def platform_name(self): + """Name of platform/satellite for this file.""" + return self["platform_name"] From 439cbdfbc6f60a20f4c1cf8650406d7a23238aeb Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 11:28:38 +0000 Subject: [PATCH 008/702] Add reader for VIIRS level 2 products produced by the NOAA enterprise suite. --- satpy/readers/viirs_jrr.py | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/satpy/readers/viirs_jrr.py b/satpy/readers/viirs_jrr.py index 2ae344bd95..0704834da4 100644 --- a/satpy/readers/viirs_jrr.py +++ b/satpy/readers/viirs_jrr.py @@ -34,7 +34,7 @@ PLATFORM_MAP = { "NPP": "Suomi-NPP", "J01": "NOAA-20", - "J02": "NOAA-21" + "J02": "NOAA-21", } LOG = logging.getLogger(__name__) @@ -53,11 +53,16 @@ def __init__(self, filename, filename_info, filetype_info): chunks={'Columns': CHUNK_SIZE, 'Rows': CHUNK_SIZE}) self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) + + # For some reason, no 'standard_name' is defined in the netCDF files, so + # here we manually make the definitions. 
if 'Latitude' in self.nc: self.nc['Latitude'].attrs.update({'standard_name': 'latitude'}) if 'Longitude' in self.nc: self.nc['Longitude'].attrs.update({'standard_name': 'longitude'}) + self.algorithm_version = filename_info['platform_shortname'] + def get_dataset(self, dataset_id, info): """Get the dataset.""" ds = self.nc[info['file_key']] @@ -74,12 +79,13 @@ def end_time(self): """Get last date/time when observations were recorded.""" return self.filename_info.get('end_time', self.start_time) - @property - def sensor_name(self): - """Name of sensor for this file.""" - return self["sensor"] - @property def platform_name(self): - """Name of platform/satellite for this file.""" - return self["platform_name"] + """Get platform name.""" + platform_path = self.filetype_info['platform_name'] + platform_dict = {'NPP': 'Suomi-NPP', + 'JPSS-1': 'NOAA-20', + 'J01': 'NOAA-20', + 'JPSS-2': 'NOAA-21', + 'J02': 'NOAA-21'} + return platform_dict[platform_path] From d53e6b146e2da2d7a22960b452f4ee7015a9632d Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 11:42:24 +0000 Subject: [PATCH 009/702] Complete JRR cloudmask product list --- satpy/etc/readers/viirs_jrr.yaml | 43 ++++++++++++++++++++++++++++++++ satpy/readers/viirs_jrr.py | 7 ++---- 2 files changed, 45 insertions(+), 5 deletions(-) diff --git a/satpy/etc/readers/viirs_jrr.yaml b/satpy/etc/readers/viirs_jrr.yaml index fe896c1cf8..d9326460e7 100644 --- a/satpy/etc/readers/viirs_jrr.yaml +++ b/satpy/etc/readers/viirs_jrr.yaml @@ -34,4 +34,47 @@ datasets: units: '1' flag_meanings: ['Clear', 'Probably Clear', 'Probably Cloudy', 'Cloudy'] flag_values: [0, 1, 2, 3] + _FillValue: -128 + cloud_mask_binary: + name: cloud_mask_binary + file_type: [jrr_cloudmask] + file_key: "CloudMaskBinary" + coordinates: [longitude, latitude] + units: '1' + flag_meanings: ['Clear', 'Cloudy'] + flag_values: [0, 1] + _FillValue: -128 + cloud_probability: + name: cloud_probability + file_type: [jrr_cloudmask] + file_key: "CloudProbability" + coordinates: [longitude, latitude] + units: '1' + _FillValue: -999. + dust_mask: + name: dust_mask + file_type: [jrr_cloudmask] + file_key: "Dust_Mask" + coordinates: [longitude, latitude] + units: '1' + flag_meanings: ['Clear', 'Dusty'] + flag_values: [0, 1] + _FillValue: -128 + fire_mask: + name: fire_mask + file_type: [jrr_cloudmask] + file_key: "Fire_Mask" + coordinates: [longitude, latitude] + units: '1' + flag_meanings: ['No fire', 'Fire'] + flag_values: [0, 1] + _FillValue: -128 + smoke_mask: + name: smoke_mask + file_type: [jrr_cloudmask] + file_key: "Smoke_Mask" + coordinates: [longitude, latitude] + units: '1' + flag_meanings: ['Clear', 'Smoky'] + flag_values: [0, 1] _FillValue: -128 \ No newline at end of file diff --git a/satpy/readers/viirs_jrr.py b/satpy/readers/viirs_jrr.py index 0704834da4..42f13547f0 100644 --- a/satpy/readers/viirs_jrr.py +++ b/satpy/readers/viirs_jrr.py @@ -21,14 +21,11 @@ VIIRS instrument. These replace the 'old' EDR products. 
""" -import logging -from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler - -import dask.array as da -import numpy as np +from satpy import CHUNK_SIZE import xarray as xr +import logging # map platform attributes to Oscar standard name PLATFORM_MAP = { From c1523d1b108122c23ba28b34686fc083bbd45e46 Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 12:21:36 +0000 Subject: [PATCH 010/702] Add JRR aerosol product list --- satpy/etc/readers/viirs_jrr.yaml | 117 +++++++++++++++++++++++++++++-- 1 file changed, 113 insertions(+), 4 deletions(-) diff --git a/satpy/etc/readers/viirs_jrr.yaml b/satpy/etc/readers/viirs_jrr.yaml index d9326460e7..70a49ed041 100644 --- a/satpy/etc/readers/viirs_jrr.yaml +++ b/satpy/etc/readers/viirs_jrr.yaml @@ -11,24 +11,32 @@ file_types: variable_prefix: "" file_patterns: - 'JRR-CloudMask_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_aerosol_product: + file_reader: !!python/name:satpy.readers.viirs_jrr.VIIRSJRRFileHandler + variable_prefix: "" + file_patterns: + - 'JRR-ADP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' datasets: + # Common datasets longitude: name: longitude standard_name: longitude - file_type: [jrr_cloudmask] + file_type: [jrr_cloudmask, jrr_aerosol_product] file_key: "Longitude" units: 'degrees_east' latitude: name: latitude standard_name: latitude - file_type: [jrr_cloudmask] + file_type: [jrr_cloudmask, jrr_aerosol_product] file_key: "Latitude" units: 'degrees_north' + + # Cloudmask product datasets cloud_mask: name: cloud_mask - file_type: [jrr_cloudmask] + file_type: jrr_cloudmask file_key: "CloudMask" coordinates: [longitude, latitude] units: '1' @@ -77,4 +85,105 @@ datasets: units: '1' flag_meanings: ['Clear', 'Smoky'] flag_values: [0, 1] - _FillValue: -128 \ No newline at end of file + _FillValue: -128 + + # Aerosol optical depth product datasets + ash_mask: + name: ash_mask + file_type: [jrr_aerosol_product] + file_key: "Ash" + coordinates: [longitude, latitude] + units: '1' + flag_meanings: ['Clear', 'Ash'] + flag_values: [0, 1] + _FillValue: -128 + cloud_mask_adp: + name: cloud_mask_adp + file_type: [jrr_aerosol_product] + file_key: "Cloud" + coordinates: [longitude, latitude] + units: '1' + flag_meanings: ['Clear', 'Probably Clear', 'Probably Cloudy', 'Cloudy'] + flag_values: [0, 1, 2, 3] + _FillValue: -128 + dust_smoke_discrimination_index: + name: dust_smoke_discrimination_index + file_type: [jrr_aerosol_product] + file_key: "DSDI" + coordinates: [longitude, latitude] + units: '1' + _FillValue: -999 + nuc: + name: nuc + file_type: [jrr_aerosol_product] + file_key: "NUC" + coordinates: [longitude, latitude] + units: '1' + flag_meanings: ['No', 'Yes'] + flag_values: [0, 1] + _FillValue: -128 + pqi1: + name: pqi1 + file_type: [jrr_aerosol_product] + file_key: "PQI1" + coordinates: [longitude, latitude] + units: '1' + _FillValue: -128 + pqi2: + name: pqi2 + file_type: [jrr_aerosol_product] + file_key: "PQI2" + coordinates: [longitude, latitude] + units: '1' + _FillValue: -128 + pqi3: + name: pqi3 + file_type: [jrr_aerosol_product] + file_key: "PQI3" + coordinates: [longitude, latitude] + units: '1' + _FillValue: -128 + pqi4: + name: pqi4 + file_type: [jrr_aerosol_product] + file_key: "PQI4" + coordinates: [longitude, latitude] + units: '1' + _FillValue: -128 + qcflag: + name: qcflag + file_type: [jrr_aerosol_product] + file_key: "QC_Flag" + coordinates: [longitude, 
latitude] + units: '1' + _FillValue: -128 + saai: + name: saai + file_type: [jrr_aerosol_product] + file_key: "SAAI" + coordinates: [longitude, latitude] + units: '1' + _FillValue: -999 + smoke: + name: smoke + file_type: [jrr_aerosol_product] + file_key: "Smoke" + coordinates: [longitude, latitude] + units: '1' + _FillValue: -999 + smoke_concentration: + name: smoke_concentration + file_type: [jrr_aerosol_product] + file_key: "SmokeCon" + coordinates: [longitude, latitude] + units: 'ug/m^3' + _FillValue: -999 + snow_ice: + name: snow_ice + file_type: [jrr_aerosol_product] + file_key: "SnowIce" + coordinates: [longitude, latitude] + units: '1' + flag_meanings: ['No', 'Yes'] + flag_values: [0, 1] + _FillValue: -128 From d5f3ba573231ddaf4bab397b539a4680f7b30fec Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 12:54:09 +0000 Subject: [PATCH 011/702] Add surface reflectance JRR product --- satpy/etc/readers/viirs_jrr.yaml | 166 ++++++++++++++++++++++++++++++- satpy/readers/viirs_jrr.py | 8 +- 2 files changed, 171 insertions(+), 3 deletions(-) diff --git a/satpy/etc/readers/viirs_jrr.yaml b/satpy/etc/readers/viirs_jrr.yaml index 70a49ed041..81468aa313 100644 --- a/satpy/etc/readers/viirs_jrr.yaml +++ b/satpy/etc/readers/viirs_jrr.yaml @@ -16,10 +16,15 @@ file_types: variable_prefix: "" file_patterns: - 'JRR-ADP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_surfref_product: + file_reader: !!python/name:satpy.readers.viirs_jrr.VIIRSJRRFileHandler + variable_prefix: "" + file_patterns: + - 'SurfRefl_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' datasets: - # Common datasets + # Geolocation datasets longitude: name: longitude standard_name: longitude @@ -32,6 +37,30 @@ datasets: file_type: [jrr_cloudmask, jrr_aerosol_product] file_key: "Latitude" units: 'degrees_north' + longitude_375: + name: longitude_375 + standard_name: longitude + file_type: jrr_surfref_product + file_key: "Longitude_at_375m_resolution" + units: 'degrees_east' + latitude_375: + name: latitude_375 + standard_name: latitude + file_type: jrr_surfref_product + file_key: "Latitude_at_375m_resolution" + units: 'degrees_north' + longitude_750: + name: longitude_750 + standard_name: longitude + file_type: jrr_surfref_product + file_key: "Longitude_at_750m_resolution" + units: 'degrees_east' + latitude_750: + name: latitude_750 + standard_name: latitude + file_type: jrr_surfref_product + file_key: "Latitude_at_750m_resolution" + units: 'degrees_north' # Cloudmask product datasets cloud_mask: @@ -187,3 +216,138 @@ datasets: flag_meanings: ['No', 'Yes'] flag_values: [0, 1] _FillValue: -128 + + # Surface reflectance products + surf_refl_I01: + name: surf_refl_I01 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band I1" + coordinates: [longitude_375, latitude_375] + units: '1' + _FillValue: -9999 + surf_refl_I02: + name: surf_refl_I02 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band I2" + coordinates: [longitude_375, latitude_375] + units: '1' + _FillValue: -9999 + surf_refl_I03: + name: surf_refl_I03 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band I3" + coordinates: [longitude_375, latitude_375] + units: '1' + _FillValue: -9999 + surf_refl_M01: + name: surf_refl_M01 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band M1" + coordinates: [longitude_750, latitude_750] + units: '1' + 
_FillValue: -9999 + surf_refl_M02: + name: surf_refl_M02 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band M2" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_M03: + name: surf_refl_M03 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band M3" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_M04: + name: surf_refl_M04 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band M4" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_M05: + name: surf_refl_M05 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band M5" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_M06: + name: surf_refl_M06 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band M6" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_M07: + name: surf_refl_M07 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band M7" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_M08: + name: surf_refl_M08 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band M8" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_M10: + name: surf_refl_M10 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band M10" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_qf1: + name: surf_refl_qf1 + file_type: [jrr_surfref_product] + file_key: "QF1 Surface Reflectance" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_qf2: + name: surf_refl_qf2 + file_type: [jrr_surfref_product] + file_key: "QF2 Surface Reflectance" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_qf3: + name: surf_refl_qf3 + file_type: [jrr_surfref_product] + file_key: "QF3 Surface Reflectance" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_qf4: + name: surf_refl_qf4 + file_type: [jrr_surfref_product] + file_key: "QF4 Surface Reflectance" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_qf5: + name: surf_refl_qf5 + file_type: [jrr_surfref_product] + file_key: "QF5 Surface Reflectance" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_qf6: + name: surf_refl_qf6 + file_type: [jrr_surfref_product] + file_key: "QF6 Surface Reflectance" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_qf7: + name: surf_refl_qf7 + file_type: [jrr_surfref_product] + file_key: "QF7 Surface Reflectance" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 diff --git a/satpy/readers/viirs_jrr.py b/satpy/readers/viirs_jrr.py index 42f13547f0..2d4944ae3d 100644 --- a/satpy/readers/viirs_jrr.py +++ b/satpy/readers/viirs_jrr.py @@ -49,9 +49,13 @@ def __init__(self, filename, filename_info, filetype_info): mask_and_scale=True, chunks={'Columns': CHUNK_SIZE, 'Rows': CHUNK_SIZE}) - self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) + if 'columns' in self.nc.dims: + self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) + elif 'Along_Track_375m' in self.nc.dims: + self.nc = self.nc.rename({'Along_Scan_375m': 'x', 'Along_Track_375m': 'y'}) + self.nc = 
self.nc.rename({'Along_Scan_750m': 'x', 'Along_Track_750m': 'y'}) - # For some reason, no 'standard_name' is defined in the netCDF files, so + # For some reason, no 'standard_name' is defined in some netCDF files, so # here we manually make the definitions. if 'Latitude' in self.nc: self.nc['Latitude'].attrs.update({'standard_name': 'latitude'}) From 9f4954da02b325b3b6d340d7dd9575b23e0c2d1f Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 13:35:11 +0000 Subject: [PATCH 012/702] Add VIIRS JRR composites, update JRR dataset keys for resolution. --- satpy/etc/composites/viirs.yaml | 24 ++++++++++ satpy/etc/readers/viirs_jrr.yaml | 76 ++++++++++++++++++++++++++++---- satpy/readers/viirs_jrr.py | 1 + 3 files changed, 92 insertions(+), 9 deletions(-) diff --git a/satpy/etc/composites/viirs.yaml b/satpy/etc/composites/viirs.yaml index 4fbc3a6b3c..da324d58b0 100644 --- a/satpy/etc/composites/viirs.yaml +++ b/satpy/etc/composites/viirs.yaml @@ -306,6 +306,30 @@ composites: modifiers: [sunz_corrected_iband] standard_name: natural_color + natural_color_iband_surf_nocorr: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: surf_refl_I03 + - name: surf_refl_I02 + - name: surf_refl_I01 + standard_name: natural_color + + natural_color_mband_surf_nocorr: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: surf_refl_M10 + - name: surf_refl_M07 + - name: surf_refl_M05 + standard_name: natural_color + + true_color_mband_nocorr: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: surf_refl_M05 + - name: surf_refl_M04 + - name: surf_refl_M03 + standard_name: true_color + natural_color_sun_lowres: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: diff --git a/satpy/etc/readers/viirs_jrr.yaml b/satpy/etc/readers/viirs_jrr.yaml index 81468aa313..7255756418 100644 --- a/satpy/etc/readers/viirs_jrr.yaml +++ b/satpy/etc/readers/viirs_jrr.yaml @@ -3,6 +3,8 @@ reader: name: viirs_jrr reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs] + group_keys: ['platform_shortname'] + default_datasets: file_types: @@ -31,40 +33,47 @@ datasets: file_type: [jrr_cloudmask, jrr_aerosol_product] file_key: "Longitude" units: 'degrees_east' + resolution: 750 latitude: name: latitude standard_name: latitude file_type: [jrr_cloudmask, jrr_aerosol_product] file_key: "Latitude" units: 'degrees_north' + resolution: 750 longitude_375: name: longitude_375 standard_name: longitude file_type: jrr_surfref_product file_key: "Longitude_at_375m_resolution" units: 'degrees_east' + resolution: 375 latitude_375: name: latitude_375 standard_name: latitude file_type: jrr_surfref_product file_key: "Latitude_at_375m_resolution" units: 'degrees_north' + resolution: 375 longitude_750: name: longitude_750 standard_name: longitude file_type: jrr_surfref_product file_key: "Longitude_at_750m_resolution" units: 'degrees_east' + resolution: 750 latitude_750: name: latitude_750 standard_name: latitude file_type: jrr_surfref_product file_key: "Latitude_at_750m_resolution" units: 'degrees_north' + resolution: 750 # Cloudmask product datasets cloud_mask: name: cloud_mask + resolution: 750 file_type: jrr_cloudmask file_key: "CloudMask" coordinates: [longitude, latitude] @@ -74,6 +83,7 @@ datasets: _FillValue: -128 cloud_mask_binary: name: cloud_mask_binary + resolution: 750 file_type: [jrr_cloudmask] file_key: "CloudMaskBinary" coordinates: [longitude, latitude] @@ -83,6 +93,7 @@ datasets: _FillValue: 
-128 cloud_probability: name: cloud_probability + resolution: 750 file_type: [jrr_cloudmask] file_key: "CloudProbability" coordinates: [longitude, latitude] @@ -90,6 +101,7 @@ datasets: _FillValue: -999. dust_mask: name: dust_mask + resolution: 750 file_type: [jrr_cloudmask] file_key: "Dust_Mask" coordinates: [longitude, latitude] @@ -99,6 +111,7 @@ datasets: _FillValue: -128 fire_mask: name: fire_mask + resolution: 750 file_type: [jrr_cloudmask] file_key: "Fire_Mask" coordinates: [longitude, latitude] @@ -108,6 +121,7 @@ datasets: _FillValue: -128 smoke_mask: name: smoke_mask + resolution: 750 file_type: [jrr_cloudmask] file_key: "Smoke_Mask" coordinates: [longitude, latitude] @@ -119,6 +133,7 @@ datasets: # Aerosol optical depth product datasets ash_mask: name: ash_mask + resolution: 750 file_type: [jrr_aerosol_product] file_key: "Ash" coordinates: [longitude, latitude] @@ -128,6 +143,7 @@ datasets: _FillValue: -128 cloud_mask_adp: name: cloud_mask_adp + resolution: 750 file_type: [jrr_aerosol_product] file_key: "Cloud" coordinates: [longitude, latitude] @@ -137,6 +153,7 @@ datasets: _FillValue: -128 dust_smoke_discrimination_index: name: dust_smoke_discrimination_index + resolution: 750 file_type: [jrr_aerosol_product] file_key: "DSDI" coordinates: [longitude, latitude] @@ -144,6 +161,7 @@ datasets: _FillValue: -999 nuc: name: nuc + resolution: 750 file_type: [jrr_aerosol_product] file_key: "NUC" coordinates: [longitude, latitude] @@ -153,6 +171,7 @@ datasets: _FillValue: -128 pqi1: name: pqi1 + resolution: 750 file_type: [jrr_aerosol_product] file_key: "PQI1" coordinates: [longitude, latitude] @@ -160,6 +179,7 @@ datasets: _FillValue: -128 pqi2: name: pqi2 + resolution: 750 file_type: [jrr_aerosol_product] file_key: "PQI2" coordinates: [longitude, latitude] @@ -167,6 +187,7 @@ datasets: _FillValue: -128 pqi3: name: pqi3 + resolution: 750 file_type: [jrr_aerosol_product] file_key: "PQI3" coordinates: [longitude, latitude] @@ -174,6 +195,7 @@ datasets: _FillValue: -128 pqi4: name: pqi4 + resolution: 750 file_type: [jrr_aerosol_product] file_key: "PQI4" coordinates: [longitude, latitude] @@ -181,6 +203,7 @@ datasets: _FillValue: -128 qcflag: name: qcflag + resolution: 750 file_type: [jrr_aerosol_product] file_key: "QC_Flag" coordinates: [longitude, latitude] @@ -188,6 +211,7 @@ datasets: _FillValue: -128 saai: name: saai + resolution: 750 file_type: [jrr_aerosol_product] file_key: "SAAI" coordinates: [longitude, latitude] @@ -195,6 +219,7 @@ datasets: _FillValue: -999 smoke: name: smoke + resolution: 750 file_type: [jrr_aerosol_product] file_key: "Smoke" coordinates: [longitude, latitude] @@ -202,6 +227,7 @@ datasets: _FillValue: -999 smoke_concentration: name: smoke_concentration + resolution: 750 file_type: [jrr_aerosol_product] file_key: "SmokeCon" coordinates: [longitude, latitude] @@ -209,6 +235,7 @@ datasets: _FillValue: -999 snow_ice: name: snow_ice + resolution: 750 file_type: [jrr_aerosol_product] file_key: "SnowIce" coordinates: [longitude, latitude] @@ -220,6 +247,8 @@ datasets: # Surface reflectance products surf_refl_I01: name: surf_refl_I01 + resolution: 375 + wavelength: [0.600, 0.640, 0.680] file_type: [jrr_surfref_product] file_key: "375m Surface Reflectance Band I1" coordinates: [longitude_375, latitude_375] @@ -227,6 +256,8 @@ datasets: _FillValue: -9999 surf_refl_I02: name: surf_refl_I02 + resolution: 375 + wavelength: [0.845, 0.865, 0.884] file_type: [jrr_surfref_product] file_key: "375m Surface Reflectance Band I2" coordinates: [longitude_375, latitude_375] @@ -234,6 
+265,8 @@ datasets: _FillValue: -9999 surf_refl_I03: name: surf_refl_I03 + resolution: 375 + wavelength: [1.580, 1.610, 1.640] file_type: [jrr_surfref_product] file_key: "375m Surface Reflectance Band I3" coordinates: [longitude_375, latitude_375] @@ -241,69 +274,88 @@ datasets: _FillValue: -9999 surf_refl_M01: name: surf_refl_M01 + resolution: 750 + wavelength: [0.402, 0.412, 0.422] file_type: [jrr_surfref_product] - file_key: "375m Surface Reflectance Band M1" + file_key: "750m Surface Reflectance Band M1" coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 surf_refl_M02: name: surf_refl_M02 + resolution: 750 + wavelength: [0.436, 0.445, 0.454] file_type: [jrr_surfref_product] - file_key: "375m Surface Reflectance Band M2" + file_key: "750m Surface Reflectance Band M2" coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 surf_refl_M03: name: surf_refl_M03 + resolution: 750 + wavelength: [0.478, 0.488, 0.498] file_type: [jrr_surfref_product] - file_key: "375m Surface Reflectance Band M3" + file_key: "750m Surface Reflectance Band M3" coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 surf_refl_M04: name: surf_refl_M04 + resolution: 750 + wavelength: [0.545, 0.555, 0.565] file_type: [jrr_surfref_product] - file_key: "375m Surface Reflectance Band M4" + file_key: "750m Surface Reflectance Band M4" coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 surf_refl_M05: name: surf_refl_M05 + resolution: 750 + wavelength: [0.662, 0.672, 0.682] file_type: [jrr_surfref_product] - file_key: "375m Surface Reflectance Band M5" + file_key: "750m Surface Reflectance Band M5" coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 surf_refl_M06: name: surf_refl_M06 + resolution: 750 + wavelength: [0.739, 0.746, 0.754] file_type: [jrr_surfref_product] - file_key: "375m Surface Reflectance Band M6" + file_key: "750m Surface Reflectance Band M6" coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 surf_refl_M07: name: surf_refl_M07 + resolution: 750 + wavelength: [0.846, 0.865, 0.885] file_type: [jrr_surfref_product] - file_key: "375m Surface Reflectance Band M7" + file_key: "750m Surface Reflectance Band M7" coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 surf_refl_M08: name: surf_refl_M08 + resolution: 750 + wavelength: [1.230, 1.240, 1.250] file_type: [jrr_surfref_product] - file_key: "375m Surface Reflectance Band M8" + file_key: "750m Surface Reflectance Band M8" coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 surf_refl_M10: name: surf_refl_M10 + resolution: 750 + wavelength: [1.580, 1.610, 1.640] file_type: [jrr_surfref_product] - file_key: "375m Surface Reflectance Band M10" + file_key: "750m Surface Reflectance Band M10" coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 surf_refl_qf1: name: surf_refl_qf1 + resolution: 750 file_type: [jrr_surfref_product] file_key: "QF1 Surface Reflectance" coordinates: [longitude_750, latitude_750] @@ -311,6 +363,7 @@ datasets: _FillValue: -9999 surf_refl_qf2: name: surf_refl_qf2 + resolution: 750 file_type: [jrr_surfref_product] file_key: "QF2 Surface Reflectance" coordinates: [longitude_750, latitude_750] @@ -318,6 +371,7 @@ datasets: _FillValue: -9999 surf_refl_qf3: name: surf_refl_qf3 + resolution: 750 file_type: [jrr_surfref_product] file_key: "QF3 Surface Reflectance" coordinates: [longitude_750, latitude_750] @@ -325,6 +379,7 @@ datasets: _FillValue: -9999 surf_refl_qf4: name: 
surf_refl_qf4 + resolution: 750 file_type: [jrr_surfref_product] file_key: "QF4 Surface Reflectance" coordinates: [longitude_750, latitude_750] @@ -332,6 +387,7 @@ datasets: _FillValue: -9999 surf_refl_qf5: name: surf_refl_qf5 + resolution: 750 file_type: [jrr_surfref_product] file_key: "QF5 Surface Reflectance" coordinates: [longitude_750, latitude_750] @@ -339,6 +395,7 @@ datasets: _FillValue: -9999 surf_refl_qf6: name: surf_refl_qf6 + resolution: 750 file_type: [jrr_surfref_product] file_key: "QF6 Surface Reflectance" coordinates: [longitude_750, latitude_750] @@ -346,6 +403,7 @@ datasets: _FillValue: -9999 surf_refl_qf7: name: surf_refl_qf7 + resolution: 750 file_type: [jrr_surfref_product] file_key: "QF7 Surface Reflectance" coordinates: [longitude_750, latitude_750] diff --git a/satpy/readers/viirs_jrr.py b/satpy/readers/viirs_jrr.py index 2d4944ae3d..a04886d852 100644 --- a/satpy/readers/viirs_jrr.py +++ b/satpy/readers/viirs_jrr.py @@ -63,6 +63,7 @@ def __init__(self, filename, filename_info, filetype_info): self.nc['Longitude'].attrs.update({'standard_name': 'longitude'}) self.algorithm_version = filename_info['platform_shortname'] + self.sensor_name = 'viirs' def get_dataset(self, dataset_id, info): """Get the dataset.""" From dea90d659e1c652e2a71875ab270d2e297dc3557 Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 13:41:39 +0000 Subject: [PATCH 013/702] Update VIIRS JRR module docstring. --- satpy/readers/viirs_jrr.py | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/satpy/readers/viirs_jrr.py b/satpy/readers/viirs_jrr.py index a04886d852..f8a502e583 100644 --- a/satpy/readers/viirs_jrr.py +++ b/satpy/readers/viirs_jrr.py @@ -17,8 +17,28 @@ # satpy. If not, see . """VIIRS NOAA enterprise L2 product reader. -This module implements readers for the NOAA enterprise level 2 products for the -VIIRS instrument. These replace the 'old' EDR products. +This module defines the :class:`VIIRSJRRFileHandler` file handler, to +be used for reading VIIRS Level 2 products generated by the NOAA enterprise +suite, which are downloadable via NOAA CLASS. +A wide variety of such products exist and, at present, only three are +supported here, showing example filenames: + - Cloud mask: JRR-CloudMask_v2r3_j01_s202112250807275_e202112250808520_c202112250837300.nc + - Aerosol properties: JRR-ADP_v2r3_j01_s202112250807275_e202112250808520_c202112250839550.nc + - Surface reflectance: SurfRefl_v1r1_j01_s202112250807275_e202112250808520_c202112250845080.nc +All products use the same base reader `viirs_jrr` and can be read through satpy with:: + + import satpy + import glob + + filenames = glob.glob('JRR-ADP*.nc') + scene = satpy.Scene(filenames, + reader='viirs_jrr') + scene.load(['smoke_concentration']) + +NOTE: + Multiple products contain datasets with the same name! For example, both the cloud mask + and aerosol files contain a cloud mask, but these are not identical. + For clarity, the aerosol file cloudmask is named `cloud_mask_adp` in this reader. """ From 2a9cf101065374123a0c620c917ac59ec9103ff8 Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 14:52:17 +0000 Subject: [PATCH 014/702] Update VIIRS JRR reader name and add tests. 
--- .../{viirs_jrr.yaml => viirs_l2_jrr.yaml} | 6 +- .../readers/{viirs_jrr.py => viirs_l2_jrr.py} | 14 +-- satpy/tests/reader_tests/test_viirs_l2_jrr.py | 93 +++++++++++++++++++ 3 files changed, 100 insertions(+), 13 deletions(-) rename satpy/etc/readers/{viirs_jrr.yaml => viirs_l2_jrr.yaml} (98%) rename satpy/readers/{viirs_jrr.py => viirs_l2_jrr.py} (93%) create mode 100644 satpy/tests/reader_tests/test_viirs_l2_jrr.py diff --git a/satpy/etc/readers/viirs_jrr.yaml b/satpy/etc/readers/viirs_l2_jrr.yaml similarity index 98% rename from satpy/etc/readers/viirs_jrr.yaml rename to satpy/etc/readers/viirs_l2_jrr.yaml index 7255756418..98bcd9253c 100644 --- a/satpy/etc/readers/viirs_jrr.yaml +++ b/satpy/etc/readers/viirs_l2_jrr.yaml @@ -9,17 +9,17 @@ reader: file_types: jrr_cloudmask: - file_reader: !!python/name:satpy.readers.viirs_jrr.VIIRSJRRFileHandler + file_reader: !!python/name:satpy.readers.viirs_l2_jrr.VIIRSJRRFileHandler variable_prefix: "" file_patterns: - 'JRR-CloudMask_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_aerosol_product: - file_reader: !!python/name:satpy.readers.viirs_jrr.VIIRSJRRFileHandler + file_reader: !!python/name:satpy.readers.viirs_l2_jrr.VIIRSJRRFileHandler variable_prefix: "" file_patterns: - 'JRR-ADP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_surfref_product: - file_reader: !!python/name:satpy.readers.viirs_jrr.VIIRSJRRFileHandler + file_reader: !!python/name:satpy.readers.viirs_l2_jrr.VIIRSJRRFileHandler variable_prefix: "" file_patterns: - 'SurfRefl_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' diff --git a/satpy/readers/viirs_jrr.py b/satpy/readers/viirs_l2_jrr.py similarity index 93% rename from satpy/readers/viirs_jrr.py rename to satpy/readers/viirs_l2_jrr.py index f8a502e583..4c0694fcd5 100644 --- a/satpy/readers/viirs_jrr.py +++ b/satpy/readers/viirs_l2_jrr.py @@ -43,17 +43,11 @@ from satpy.readers.file_handlers import BaseFileHandler +from datetime import datetime from satpy import CHUNK_SIZE import xarray as xr import logging -# map platform attributes to Oscar standard name -PLATFORM_MAP = { - "NPP": "Suomi-NPP", - "J01": "NOAA-20", - "J02": "NOAA-21", -} - LOG = logging.getLogger(__name__) @@ -99,15 +93,15 @@ def start_time(self): @property def end_time(self): """Get last date/time when observations were recorded.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info['end_time'] @property def platform_name(self): """Get platform name.""" - platform_path = self.filetype_info['platform_name'] + platform_path = self.filename_info['platform_shortname'] platform_dict = {'NPP': 'Suomi-NPP', 'JPSS-1': 'NOAA-20', 'J01': 'NOAA-20', 'JPSS-2': 'NOAA-21', 'J02': 'NOAA-21'} - return platform_dict[platform_path] + return platform_dict[platform_path.upper()] diff --git a/satpy/tests/reader_tests/test_viirs_l2_jrr.py b/satpy/tests/reader_tests/test_viirs_l2_jrr.py new file mode 100644 index 0000000000..c482319721 --- /dev/null +++ b/satpy/tests/reader_tests/test_viirs_l2_jrr.py @@ -0,0 +1,93 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Satpy developers +# +# This file is part of satpy. 
+# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Module for testing the satpy.readers.viirs_jrr module. +Note: This is adapted from the test_slstr_l2.py code. +""" + +from unittest.mock import MagicMock +from datetime import datetime +from unittest import mock +import xarray as xr +import unittest + + +from satpy.readers.viirs_l2_jrr import VIIRSJRRFileHandler + + +class TestVIIRSJRRReader(unittest.TestCase): + """Test the VIIRS JRR L2 reader.""" + @mock.patch('xarray.open_dataset') + def test_instantiate(self, mocked_dataset): + """Test initialization of file handlers.""" + filename_info = {'platform_shortname': 'npp'} + tmp = MagicMock(start_time='20191120T125002Z', stop_time='20191120T125002Z') + tmp.rename.return_value = tmp + xr.open_dataset.return_value = tmp + VIIRSJRRFileHandler('somedir/somefile.nc', filename_info, None) + mocked_dataset.assert_called() + mocked_dataset.reset_mock() + + @mock.patch('xarray.open_dataset') + def test_get_dataset(self, mocked_dataset): + """Test retrieval of datasets.""" + filename_info = {'platform_shortname': 'npp'} + tmp = MagicMock(start_time='20191120T125002Z', stop_time='20191120T125002Z') + xr.open_dataset.return_value = tmp + test = VIIRSJRRFileHandler('somedir/somefile.nc', filename_info, None) + test.nc = {'Longitude': xr.Dataset(), + 'Latitude': xr.Dataset(), + 'smoke_concentration': xr.Dataset(), + 'fire_mask': xr.Dataset(), + 'surf_refl_I01': xr.Dataset(), + 'surf_refl_M05': xr.Dataset(), + } + test.get_dataset('longitude', {'file_key': 'Longitude'}) + test.get_dataset('latitude', {'file_key': 'Latitude'}) + test.get_dataset('smoke_concentration', {'file_key': 'smoke_concentration'}) + test.get_dataset('fire_mask', {'file_key': 'fire_mask'}) + with self.assertRaises(KeyError): + test.get_dataset('erroneous dataset', {'file_key': 'erroneous dataset'}) + mocked_dataset.assert_called() + mocked_dataset.reset_mock() + test.get_dataset('surf_refl_I01', {'file_key': 'surf_refl_I01'}) + + @mock.patch('xarray.open_dataset') + def test_get_startend_times(self, mocked_dataset): + """Test finding start and end times of granules.""" + filename_info = {'platform_shortname': 'npp', + 'start_time': datetime(2021, 4, 3, 12, 0, 10), + 'end_time': datetime(2021, 4, 3, 12, 4, 28)} + tmp = MagicMock() + tmp.rename.return_value = tmp + xr.open_dataset.return_value = tmp + hdl = VIIRSJRRFileHandler('somedir/somefile.nc', filename_info, None) + self.assertEqual(hdl.start_time, datetime(2021, 4, 3, 12, 0, 10)) + self.assertEqual(hdl.end_time, datetime(2021, 4, 3, 12, 4, 28)) + + @mock.patch('xarray.open_dataset') + def test_get_platformname(self, mocked_dataset): + """Test finding start and end times of granules.""" + tmp = MagicMock() + tmp.rename.return_value = tmp + xr.open_dataset.return_value = tmp + hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'npp'}, None) + self.assertEqual(hdl.platform_name, 'Suomi-NPP') + hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'JPSS-1'}, 
None) + self.assertEqual(hdl.platform_name, 'NOAA-20') + hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'J01'}, None) + self.assertEqual(hdl.platform_name, 'NOAA-20') From 1fa0aecd0203f77a7680d1b33a2553edc0c9fd70 Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 14:57:01 +0000 Subject: [PATCH 015/702] Remove unused import. --- satpy/readers/viirs_l2_jrr.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/readers/viirs_l2_jrr.py b/satpy/readers/viirs_l2_jrr.py index 4c0694fcd5..73de4d499d 100644 --- a/satpy/readers/viirs_l2_jrr.py +++ b/satpy/readers/viirs_l2_jrr.py @@ -43,7 +43,6 @@ from satpy.readers.file_handlers import BaseFileHandler -from datetime import datetime from satpy import CHUNK_SIZE import xarray as xr import logging From 213d7358cdde0419b537ea9cf28bb7c635f56fb1 Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 15:04:21 +0000 Subject: [PATCH 016/702] Add blank lines. --- satpy/tests/reader_tests/test_viirs_l2_jrr.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/satpy/tests/reader_tests/test_viirs_l2_jrr.py b/satpy/tests/reader_tests/test_viirs_l2_jrr.py index c482319721..4364b5c43c 100644 --- a/satpy/tests/reader_tests/test_viirs_l2_jrr.py +++ b/satpy/tests/reader_tests/test_viirs_l2_jrr.py @@ -16,6 +16,7 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.viirs_jrr module. + Note: This is adapted from the test_slstr_l2.py code. """ @@ -31,6 +32,7 @@ class TestVIIRSJRRReader(unittest.TestCase): """Test the VIIRS JRR L2 reader.""" + @mock.patch('xarray.open_dataset') def test_instantiate(self, mocked_dataset): """Test initialization of file handlers.""" From e194830020e6e7dc31ee5bdfcdce7d4a5e4a911e Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 15:12:39 +0000 Subject: [PATCH 017/702] Fix indentation. --- satpy/readers/viirs_l2_jrr.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/viirs_l2_jrr.py b/satpy/readers/viirs_l2_jrr.py index 73de4d499d..09eac0cb17 100644 --- a/satpy/readers/viirs_l2_jrr.py +++ b/satpy/readers/viirs_l2_jrr.py @@ -36,9 +36,9 @@ scene.load(['smoke_concentration']) NOTE: - Multiple products contain datasets with the same name! For example, both the cloud mask - and aerosol files contain a cloud mask, but these are not identical. - For clarity, the aerosol file cloudmask is named `cloud_mask_adp` in this reader. +Multiple products contain datasets with the same name! For example, both the cloud mask +and aerosol files contain a cloud mask, but these are not identical. +For clarity, the aerosol file cloudmask is named `cloud_mask_adp` in this reader. """ From caadd86139d5b43b509598eaab4b5380e9faa350 Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 15:37:53 +0000 Subject: [PATCH 018/702] Update some reader name changes that were missed previously. 
--- satpy/etc/readers/viirs_l2_jrr.yaml | 2 +- satpy/readers/viirs_l2_jrr.py | 4 ++-- satpy/tests/reader_tests/test_viirs_l2_jrr.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/etc/readers/viirs_l2_jrr.yaml b/satpy/etc/readers/viirs_l2_jrr.yaml index 98bcd9253c..f337909134 100644 --- a/satpy/etc/readers/viirs_l2_jrr.yaml +++ b/satpy/etc/readers/viirs_l2_jrr.yaml @@ -1,6 +1,6 @@ reader: description: VIIRS NOAA Enterprise L2 product reader - name: viirs_jrr + name: viirs_l2_jrr reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs] group_keys: ['platform_shortname'] diff --git a/satpy/readers/viirs_l2_jrr.py b/satpy/readers/viirs_l2_jrr.py index 09eac0cb17..e1e01c9de7 100644 --- a/satpy/readers/viirs_l2_jrr.py +++ b/satpy/readers/viirs_l2_jrr.py @@ -25,14 +25,14 @@ - Cloud mask: JRR-CloudMask_v2r3_j01_s202112250807275_e202112250808520_c202112250837300.nc - Aerosol properties: JRR-ADP_v2r3_j01_s202112250807275_e202112250808520_c202112250839550.nc - Surface reflectance: SurfRefl_v1r1_j01_s202112250807275_e202112250808520_c202112250845080.nc -All products use the same base reader `viirs_jrr` and can be read through satpy with:: +All products use the same base reader `viirs_l2_jrr` and can be read through satpy with:: import satpy import glob filenames = glob.glob('JRR-ADP*.nc') scene = satpy.Scene(filenames, - reader='viirs_jrr') + reader='viirs_l2_jrr') scene.load(['smoke_concentration']) NOTE: diff --git a/satpy/tests/reader_tests/test_viirs_l2_jrr.py b/satpy/tests/reader_tests/test_viirs_l2_jrr.py index 4364b5c43c..c572481adb 100644 --- a/satpy/tests/reader_tests/test_viirs_l2_jrr.py +++ b/satpy/tests/reader_tests/test_viirs_l2_jrr.py @@ -15,7 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . -"""Module for testing the satpy.readers.viirs_jrr module. +"""Module for testing the satpy.readers.viirs_l2_jrr module. Note: This is adapted from the test_slstr_l2.py code. """ From a65f58dfff0b71a6a92d5dff4b1a3c7d918f4c4f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 11 Jan 2022 15:40:40 +0000 Subject: [PATCH 019/702] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/readers/viirs_l2_jrr.py | 8 +++++--- satpy/tests/reader_tests/test_viirs_l2_jrr.py | 6 +++--- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/satpy/readers/viirs_l2_jrr.py b/satpy/readers/viirs_l2_jrr.py index e1e01c9de7..a27e9c30dc 100644 --- a/satpy/readers/viirs_l2_jrr.py +++ b/satpy/readers/viirs_l2_jrr.py @@ -42,11 +42,13 @@ """ -from satpy.readers.file_handlers import BaseFileHandler -from satpy import CHUNK_SIZE -import xarray as xr import logging +import xarray as xr + +from satpy import CHUNK_SIZE +from satpy.readers.file_handlers import BaseFileHandler + LOG = logging.getLogger(__name__) diff --git a/satpy/tests/reader_tests/test_viirs_l2_jrr.py b/satpy/tests/reader_tests/test_viirs_l2_jrr.py index c572481adb..a462ec1416 100644 --- a/satpy/tests/reader_tests/test_viirs_l2_jrr.py +++ b/satpy/tests/reader_tests/test_viirs_l2_jrr.py @@ -20,12 +20,12 @@ Note: This is adapted from the test_slstr_l2.py code. 
""" -from unittest.mock import MagicMock +import unittest from datetime import datetime from unittest import mock -import xarray as xr -import unittest +from unittest.mock import MagicMock +import xarray as xr from satpy.readers.viirs_l2_jrr import VIIRSJRRFileHandler From 6e5d46d8611195984796cccafae5f5844b77163b Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 16:26:35 +0000 Subject: [PATCH 020/702] Remove unnecessary indentation. --- satpy/readers/viirs_l2_jrr.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/readers/viirs_l2_jrr.py b/satpy/readers/viirs_l2_jrr.py index a27e9c30dc..8d07b3a7c5 100644 --- a/satpy/readers/viirs_l2_jrr.py +++ b/satpy/readers/viirs_l2_jrr.py @@ -27,13 +27,13 @@ - Surface reflectance: SurfRefl_v1r1_j01_s202112250807275_e202112250808520_c202112250845080.nc All products use the same base reader `viirs_l2_jrr` and can be read through satpy with:: - import satpy - import glob + import satpy + import glob - filenames = glob.glob('JRR-ADP*.nc') - scene = satpy.Scene(filenames, - reader='viirs_l2_jrr') - scene.load(['smoke_concentration']) + filenames = glob.glob('JRR-ADP*.nc') + scene = satpy.Scene(filenames, + reader='viirs_l2_jrr') + scene.load(['smoke_concentration']) NOTE: Multiple products contain datasets with the same name! For example, both the cloud mask From 076748c348467aded6b7ca038c8f591ee824c7af Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 10 Mar 2022 19:27:38 -0600 Subject: [PATCH 021/702] Remove unused navigations section in modis_l1b reader --- satpy/etc/readers/modis_l1b.yaml | 9 --------- 1 file changed, 9 deletions(-) diff --git a/satpy/etc/readers/modis_l1b.yaml b/satpy/etc/readers/modis_l1b.yaml index 3670a1af7d..033677739f 100644 --- a/satpy/etc/readers/modis_l1b.yaml +++ b/satpy/etc/readers/modis_l1b.yaml @@ -5,15 +5,6 @@ reader: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [modis] -navigations: - hdf_eos_geo: - description: MODIS navigation - file_type: hdf_eos_geo - latitude_key: Latitude - longitude_key: Longitude - nadir_resolution: [1000] - rows_per_scan: 10 - datasets: '1': name: '1' From 20bf879233a22990e886dc3751d8cab21961e12c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 10 Mar 2022 19:33:03 -0600 Subject: [PATCH 022/702] Add resolution-dependent chunk sizing to 'modis_l1b' reader --- satpy/readers/hdfeos_base.py | 44 +++++++++++++++++++++- satpy/readers/modis_l1b.py | 9 +++-- satpy/tests/reader_tests/test_modis_l1b.py | 21 +++++++++-- 3 files changed, 65 insertions(+), 9 deletions(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 731611765c..af24a855f1 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -25,12 +25,13 @@ from contextlib import suppress from datetime import datetime +import dask.array.core import numpy as np import xarray as xr from pyhdf.error import HDF4Error from pyhdf.SD import SD -from satpy import CHUNK_SIZE, DataID +from satpy import DataID from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) @@ -216,7 +217,8 @@ def load_dataset(self, dataset_name, is_category=False): from satpy.readers.hdf4_utils import from_sds dataset = self._read_dataset_in_file(dataset_name) - dask_arr = from_sds(dataset, chunks=CHUNK_SIZE) + chunks = self._chunks_for_variable(dataset) + dask_arr = from_sds(dataset, chunks=chunks) dims = ('y', 'x') if dask_arr.ndim == 2 else None data = xr.DataArray(dask_arr, dims=dims, attrs=dataset.attributes()) 
@@ -224,6 +226,39 @@ def load_dataset(self, dataset_name, is_category=False): return data + def _chunks_for_variable(self, hdf_dataset): + scan_length_250m = 40 + var_shape = hdf_dataset.info()[2] + res_multiplier = self._get_res_multiplier(var_shape) + non_yx_chunks = tuple() + if len(var_shape) == 3: + # assume (band, y, x) + non_yx_chunks = ((1,) * var_shape[0],) + var_shape = var_shape[-2:] + elif len(var_shape) != 2: + # don't guess + return dask.array.core.normalize_chunks("auto", shape=var_shape, dtype=np.float32) + shape_for_250m = tuple(dim_size * res_multiplier for dim_size in var_shape) + chunks_for_250m = dask.array.core.normalize_chunks(("auto", -1), shape=shape_for_250m, dtype=np.float32) + row_chunks_for_250m = chunks_for_250m[0][0] + scanbased_row_chunks_for_250m = np.round(row_chunks_for_250m / scan_length_250m) * scan_length_250m + var_row_chunks = scanbased_row_chunks_for_250m / res_multiplier + var_row_chunks = max(var_row_chunks, scan_length_250m / res_multiplier) # avoid getting 0 chunk size + return non_yx_chunks + (var_row_chunks, -1) + + @staticmethod + def _get_res_multiplier(var_shape): + num_columns_to_multiplier = { + 271: 20, # 5km + 1354: 4, # 1km + 2708: 2, # 500m + 5416: 1, # 250m + } + for max_columns, res_multiplier in num_columns_to_multiplier.items(): + if var_shape[-1] <= max_columns: + return res_multiplier + return 1 + def _scale_and_mask_data_array(self, data, is_category=False): good_mask, new_fill = self._get_good_data_mask(data, is_category=is_category) scale_factor = data.attrs.pop('scale_factor', None) @@ -356,14 +391,19 @@ def get_interpolated_dataset(self, name1, name2, resolution, offset=0): result2 = self._load_ds_by_name(name2) - offset try: sensor_zenith = self._load_ds_by_name('satellite_zenith_angle') + print("sensor_zenith: ", sensor_zenith.chunks) except KeyError: # no sensor zenith angle, do "simple" interpolation sensor_zenith = None + print("get_interpolated_dataset: ", name1, resolution, result1.data.chunks) + print("get_interpolated_dataset: ", name2, resolution, result2.data.chunks) result1, result2 = interpolate( result1, result2, sensor_zenith, self.geo_resolution, resolution ) + print("get_interpolated_dataset after: ", name1, result1.data.chunks) + print("get_interpolated_dataset after: ", name2, result2.data.chunks) self.cache[(name1, resolution)] = result1 self.cache[(name2, resolution)] = result2 + offset diff --git a/satpy/readers/modis_l1b.py b/satpy/readers/modis_l1b.py index f0b5a74e8a..00e0b59c10 100644 --- a/satpy/readers/modis_l1b.py +++ b/satpy/readers/modis_l1b.py @@ -48,7 +48,6 @@ import numpy as np import xarray as xr -from satpy import CHUNK_SIZE from satpy.readers.hdf4_utils import from_sds from satpy.readers.hdfeos_base import HDFEOSBaseFileReader, HDFEOSGeoReader @@ -95,8 +94,8 @@ def get_dataset(self, key, info): index = band_names.index(key['name']) except ValueError: continue - uncertainty = self.sd.select(dataset + "_Uncert_Indexes") - array = xr.DataArray(from_sds(subdata, chunks=CHUNK_SIZE)[index, :, :], + chunks = self._chunks_for_variable(subdata) + array = xr.DataArray(from_sds(subdata, chunks=chunks)[index, :, :], dims=['y', 'x']).astype(np.float32) valid_range = var_attrs['valid_range'] @@ -122,7 +121,9 @@ def get_dataset(self, key, info): array = array.where(array >= np.float32(valid_range[0])) array = array.where(array <= np.float32(valid_range[1])) - array = array.where(from_sds(uncertainty, chunks=CHUNK_SIZE)[index, :, :] < 15) + uncertainty = self.sd.select(dataset + "_Uncert_Indexes") + 
uncertainty_chunks = self._chunks_for_variable(uncertainty) + array = array.where(from_sds(uncertainty, chunks=uncertainty_chunks)[index, :, :] < 15) if key['calibration'] == 'brightness_temperature': projectable = calibrate_bt(array, var_attrs, index, key['name']) diff --git a/satpy/tests/reader_tests/test_modis_l1b.py b/satpy/tests/reader_tests/test_modis_l1b.py index 981fac39bb..6cb9fd47a0 100644 --- a/satpy/tests/reader_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/test_modis_l1b.py @@ -41,6 +41,18 @@ def _check_shared_metadata(data_arr): assert "rows_per_scan" in data_arr.attrs assert isinstance(data_arr.attrs["rows_per_scan"], int) assert data_arr.attrs['reader'] == 'modis_l1b' + assert "resolution" in data_arr.attrs + res = data_arr.attrs["resolution"] + if res == 5000: + assert data_arr.chunks == ((2, 2, 2), (data_arr.shape[1],)) + elif res == 1000: + assert data_arr.chunks == ((10, 10, 10), (data_arr.shape[1],)) + elif res == 500: + assert data_arr.chunks == ((20, 20, 20), (data_arr.shape[1],)) + elif res == 250: + assert data_arr.chunks == ((40, 40, 40), (data_arr.shape[1],)) + else: + raise ValueError(f"Unexpected resolution: {res}") def _load_and_check_geolocation(scene, resolution, exp_res, exp_shape, has_res, @@ -137,7 +149,8 @@ def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, d shape_500m = _shape_for_resolution(500) shape_250m = _shape_for_resolution(250) default_shape = _shape_for_resolution(default_res) - with dask.config.set(scheduler=CustomScheduler(max_computes=1 + has_5km + has_500 + has_250)): + scheduler = CustomScheduler(max_computes=1 + has_5km + has_500 + has_250) + with dask.config.set({'scheduler': scheduler, 'array.chunk-size': '1 MiB'}): _load_and_check_geolocation(scene, "*", default_res, default_shape, True) _load_and_check_geolocation(scene, 5000, 5000, shape_5km, has_5km) _load_and_check_geolocation(scene, 500, 500, shape_500m, has_500) @@ -147,7 +160,8 @@ def test_load_sat_zenith_angle(self, modis_l1b_nasa_mod021km_file): """Test loading satellite zenith angle band.""" scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file) dataset_name = 'satellite_zenith_angle' - scene.load([dataset_name]) + with dask.config.set({'array.chunk-size': '1 MiB'}): + scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset.shape == _shape_for_resolution(1000) assert dataset.attrs['resolution'] == 1000 @@ -157,7 +171,8 @@ def test_load_vis(self, modis_l1b_nasa_mod021km_file): """Test loading visible band.""" scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file) dataset_name = '1' - scene.load([dataset_name]) + with dask.config.set({'array.chunk-size': '1 MiB'}): + scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset.shape == _shape_for_resolution(1000) assert dataset.attrs['resolution'] == 1000 From 876c69a5060f7df8fb9b63498f3b1f9ce4464810 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 11 Mar 2022 05:52:09 -0600 Subject: [PATCH 023/702] Remove debug print statements --- satpy/readers/hdfeos_base.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index af24a855f1..134e2926b5 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -391,19 +391,14 @@ def get_interpolated_dataset(self, name1, name2, resolution, offset=0): result2 = self._load_ds_by_name(name2) - offset try: sensor_zenith = self._load_ds_by_name('satellite_zenith_angle') - print("sensor_zenith: ", 
sensor_zenith.chunks) except KeyError: # no sensor zenith angle, do "simple" interpolation sensor_zenith = None - print("get_interpolated_dataset: ", name1, resolution, result1.data.chunks) - print("get_interpolated_dataset: ", name2, resolution, result2.data.chunks) result1, result2 = interpolate( result1, result2, sensor_zenith, self.geo_resolution, resolution ) - print("get_interpolated_dataset after: ", name1, result1.data.chunks) - print("get_interpolated_dataset after: ", name2, result2.data.chunks) self.cache[(name1, resolution)] = result1 self.cache[(name2, resolution)] = result2 + offset From fa7942d78f85a6680feca4b8202bed708486df06 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 11:00:56 +0200 Subject: [PATCH 024/702] to_hvplot function New function to plot Scene datasets as Hvplot Overlay --- AUTHORS.md | 1 + satpy/scene.py | 65 ++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 66 insertions(+) diff --git a/AUTHORS.md b/AUTHORS.md index dd2b24750d..e2aa4be396 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -47,6 +47,7 @@ The following people have made contributions to this project: - [Andrea Meraner (ameraner)](https://github.com/ameraner) - [Aronne Merrelli (aronnem)](https://github.com/aronnem) - [Lucas Meyer (LTMeyer)](https://github.com/LTMeyer) +- [Luca Merucci (lmeru)](https://github.com/lmeru) - [Ondrej Nedelcev (nedelceo)](https://github.com/nedelceo) - [Oana Nicola](https://github.com/) - [Esben S. Nielsen (storpipfugl)](https://github.com/storpipfugl) diff --git a/satpy/scene.py b/satpy/scene.py index 261d84ea81..6931c5bc4d 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1021,6 +1021,71 @@ def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynami gview = gvds.to(gvtype, kdims=["x", "y"], vdims=vdims, dynamic=dynamic) return gview + + def to_hvplot(self,datasets=None, *args,**kwargs): + """ + Convert satpy Scene to Hvplot. + Args: + datasets (list): Limit included products to these datasets. + kwargs: hvplot options list. + + Returns: hvplot object that contains within it the plots of datasets list. + As default it contains all Scene datasets plots and a plot title is shown. 
+ + Example usage: + scene_list = ['ash','IR_108'] + plot = scn.to_hvplot(datasets=scene_list) + + plot.ash+plot.IR_108 + """ + + import hvplot.xarray + from holoviews import Overlay + from satpy import composites + from cartopy import crs + + def _get_crs(xarray_ds): + return xarray_ds.area.to_cartopy_crs() + + def _get_timestamp(xarray_ds): + time = xarray_ds.attrs['start_time'] + return time.strftime('%Y %m %d -- %H:%M UTC') + + def _get_units(xarray_ds,variable): + return xarray_ds[variable].attrs['units'] + + def _plot_rgb(xarray_ds, variable,**defaults): + img = composites.enhance2dataset(xarray_ds[variable]) + return img.hvplot.rgb(bands='bands',title=title, + clabel='',**defaults) + + def _plot_quadmesh(xarray_ds,variable,**defaults): + return xarray_ds[variable].hvplot.quadmesh( + clabel=f'[{_get_units(xarray_ds,variable)}]', + title=title,**defaults) + + plot = Overlay() + xarray_ds = self.to_xarray_dataset(datasets) + ccrs = _get_crs(xarray_ds) + + if datasets is None: datasets = list(xarray_ds.keys()) + + defaults = dict(x='x',y='y',data_aspect=1,project=True,geo=True, + crs=ccrs,projection=ccrs,rasterize=True, + coastline='110m',cmap='Plasma',responsive=True, + dynamic=False,framewise=True,colorbar=False, + global_extent=False,xlabel='Longitude',ylabel='Latitude') + + defaults.update(kwargs) + + for element in datasets: + title = f'{element} @ {_get_timestamp(xarray_ds)}' + if xarray_ds[element].shape[0] == 3: + plot[element] =_plot_rgb(xarray_ds,element,**defaults) + else: + plot[element]=_plot_quadmesh(xarray_ds,element,**defaults) + + return plot def to_xarray_dataset(self, datasets=None): """Merge all xr.DataArrays of a scene to a xr.DataSet. From 88d40023dd250bf4317ddee4d618a9ecf4fdfb66 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 11:14:34 +0200 Subject: [PATCH 025/702] Add to_hvplot function to_hvplot function plot the Scene datasets as Hvplot Overlay. Added Luca Merucci in authors.md (we create this function together ) --- AUTHORS.md | 1 + satpy/scene.py | 65 ++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 66 insertions(+) diff --git a/AUTHORS.md b/AUTHORS.md index dd2b24750d..e2aa4be396 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -47,6 +47,7 @@ The following people have made contributions to this project: - [Andrea Meraner (ameraner)](https://github.com/ameraner) - [Aronne Merrelli (aronnem)](https://github.com/aronnem) - [Lucas Meyer (LTMeyer)](https://github.com/LTMeyer) +- [Luca Merucci (lmeru)](https://github.com/lmeru) - [Ondrej Nedelcev (nedelceo)](https://github.com/nedelceo) - [Oana Nicola](https://github.com/) - [Esben S. Nielsen (storpipfugl)](https://github.com/storpipfugl) diff --git a/satpy/scene.py b/satpy/scene.py index 261d84ea81..6931c5bc4d 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1021,6 +1021,71 @@ def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynami gview = gvds.to(gvtype, kdims=["x", "y"], vdims=vdims, dynamic=dynamic) return gview + + def to_hvplot(self,datasets=None, *args,**kwargs): + """ + Convert satpy Scene to Hvplot. + Args: + datasets (list): Limit included products to these datasets. + kwargs: hvplot options list. + + Returns: hvplot object that contains within it the plots of datasets list. + As default it contains all Scene datasets plots and a plot title is shown. 
+ + Example usage: + scene_list = ['ash','IR_108'] + plot = scn.to_hvplot(datasets=scene_list) + + plot.ash+plot.IR_108 + """ + + import hvplot.xarray + from holoviews import Overlay + from satpy import composites + from cartopy import crs + + def _get_crs(xarray_ds): + return xarray_ds.area.to_cartopy_crs() + + def _get_timestamp(xarray_ds): + time = xarray_ds.attrs['start_time'] + return time.strftime('%Y %m %d -- %H:%M UTC') + + def _get_units(xarray_ds,variable): + return xarray_ds[variable].attrs['units'] + + def _plot_rgb(xarray_ds, variable,**defaults): + img = composites.enhance2dataset(xarray_ds[variable]) + return img.hvplot.rgb(bands='bands',title=title, + clabel='',**defaults) + + def _plot_quadmesh(xarray_ds,variable,**defaults): + return xarray_ds[variable].hvplot.quadmesh( + clabel=f'[{_get_units(xarray_ds,variable)}]', + title=title,**defaults) + + plot = Overlay() + xarray_ds = self.to_xarray_dataset(datasets) + ccrs = _get_crs(xarray_ds) + + if datasets is None: datasets = list(xarray_ds.keys()) + + defaults = dict(x='x',y='y',data_aspect=1,project=True,geo=True, + crs=ccrs,projection=ccrs,rasterize=True, + coastline='110m',cmap='Plasma',responsive=True, + dynamic=False,framewise=True,colorbar=False, + global_extent=False,xlabel='Longitude',ylabel='Latitude') + + defaults.update(kwargs) + + for element in datasets: + title = f'{element} @ {_get_timestamp(xarray_ds)}' + if xarray_ds[element].shape[0] == 3: + plot[element] =_plot_rgb(xarray_ds,element,**defaults) + else: + plot[element]=_plot_quadmesh(xarray_ds,element,**defaults) + + return plot def to_xarray_dataset(self, datasets=None): """Merge all xr.DataArrays of a scene to a xr.DataSet. From 2fa9b87ef437e63c6c2e37ddec00f7aaf91b2dd5 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 12:42:35 +0200 Subject: [PATCH 026/702] trying to follow and correct stickler-ci messages --- satpy/scene.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index 6931c5bc4d..83361588dc 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1022,9 +1022,8 @@ def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynami return gview - def to_hvplot(self,datasets=None, *args,**kwargs): - """ - Convert satpy Scene to Hvplot. + def to_hvplot(self,datasets=None,*args,**kwargs): + """Convert satpy Scene to Hvplot. Args: datasets (list): Limit included products to these datasets. kwargs: hvplot options list. 
@@ -1037,8 +1036,8 @@ def to_hvplot(self,datasets=None, *args,**kwargs): plot = scn.to_hvplot(datasets=scene_list) plot.ash+plot.IR_108 + """ - import hvplot.xarray from holoviews import Overlay from satpy import composites @@ -1054,7 +1053,7 @@ def _get_timestamp(xarray_ds): def _get_units(xarray_ds,variable): return xarray_ds[variable].attrs['units'] - def _plot_rgb(xarray_ds, variable,**defaults): + def _plot_rgb(xarray_ds,variable,**defaults): img = composites.enhance2dataset(xarray_ds[variable]) return img.hvplot.rgb(bands='bands',title=title, clabel='',**defaults) From fcfd481516b6ebfbe31f80f8395ecb9b67aee71a Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 13:24:46 +0200 Subject: [PATCH 027/702] correction of whitespaces --- satpy/scene.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index 83361588dc..61d85e3599 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1021,7 +1021,7 @@ def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynami gview = gvds.to(gvtype, kdims=["x", "y"], vdims=vdims, dynamic=dynamic) return gview - + def to_hvplot(self,datasets=None,*args,**kwargs): """Convert satpy Scene to Hvplot. Args: @@ -1030,29 +1030,29 @@ def to_hvplot(self,datasets=None,*args,**kwargs): Returns: hvplot object that contains within it the plots of datasets list. As default it contains all Scene datasets plots and a plot title is shown. - + Example usage: scene_list = ['ash','IR_108'] plot = scn.to_hvplot(datasets=scene_list) - + plot.ash+plot.IR_108 """ import hvplot.xarray from holoviews import Overlay from satpy import composites - from cartopy import crs - + from cartopy import crs + def _get_crs(xarray_ds): return xarray_ds.area.to_cartopy_crs() def _get_timestamp(xarray_ds): time = xarray_ds.attrs['start_time'] return time.strftime('%Y %m %d -- %H:%M UTC') - + def _get_units(xarray_ds,variable): return xarray_ds[variable].attrs['units'] - + def _plot_rgb(xarray_ds,variable,**defaults): img = composites.enhance2dataset(xarray_ds[variable]) return img.hvplot.rgb(bands='bands',title=title, @@ -1062,11 +1062,11 @@ def _plot_quadmesh(xarray_ds,variable,**defaults): return xarray_ds[variable].hvplot.quadmesh( clabel=f'[{_get_units(xarray_ds,variable)}]', title=title,**defaults) - + plot = Overlay() xarray_ds = self.to_xarray_dataset(datasets) ccrs = _get_crs(xarray_ds) - + if datasets is None: datasets = list(xarray_ds.keys()) defaults = dict(x='x',y='y',data_aspect=1,project=True,geo=True, @@ -1074,18 +1074,18 @@ def _plot_quadmesh(xarray_ds,variable,**defaults): coastline='110m',cmap='Plasma',responsive=True, dynamic=False,framewise=True,colorbar=False, global_extent=False,xlabel='Longitude',ylabel='Latitude') - + defaults.update(kwargs) - + for element in datasets: title = f'{element} @ {_get_timestamp(xarray_ds)}' if xarray_ds[element].shape[0] == 3: plot[element] =_plot_rgb(xarray_ds,element,**defaults) else: plot[element]=_plot_quadmesh(xarray_ds,element,**defaults) - - return plot + return plot + def to_xarray_dataset(self, datasets=None): """Merge all xr.DataArrays of a scene to a xr.DataSet. 
From dfd93ec74f6f0e1658400f78e46fec2ec03ebedc Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 13:51:41 +0200 Subject: [PATCH 028/702] correction whitespaces --- satpy/scene.py | 29 ++++++++++++++--------------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index 61d85e3599..097c19e521 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1022,7 +1022,7 @@ def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynami return gview - def to_hvplot(self,datasets=None,*args,**kwargs): + def to_hvplot(self, datasets=None, *args, **kwargs): """Convert satpy Scene to Hvplot. Args: datasets (list): Limit included products to these datasets. @@ -1050,18 +1050,18 @@ def _get_timestamp(xarray_ds): time = xarray_ds.attrs['start_time'] return time.strftime('%Y %m %d -- %H:%M UTC') - def _get_units(xarray_ds,variable): + def _get_units(xarray_ds, variable): return xarray_ds[variable].attrs['units'] - def _plot_rgb(xarray_ds,variable,**defaults): + def _plot_rgb(xarray_ds, variable, **defaults): img = composites.enhance2dataset(xarray_ds[variable]) - return img.hvplot.rgb(bands='bands',title=title, - clabel='',**defaults) + return img.hvplot.rgb(bands='bands', title=title, + clabel='', **defaults) - def _plot_quadmesh(xarray_ds,variable,**defaults): + def _plot_quadmesh(xarray_ds, variable, **defaults): return xarray_ds[variable].hvplot.quadmesh( - clabel=f'[{_get_units(xarray_ds,variable)}]', - title=title,**defaults) + clabel=f'[{_get_units(xarray_ds,variable)}]', title=title, + **defaults) plot = Overlay() xarray_ds = self.to_xarray_dataset(datasets) @@ -1069,20 +1069,19 @@ def _plot_quadmesh(xarray_ds,variable,**defaults): if datasets is None: datasets = list(xarray_ds.keys()) - defaults = dict(x='x',y='y',data_aspect=1,project=True,geo=True, - crs=ccrs,projection=ccrs,rasterize=True, - coastline='110m',cmap='Plasma',responsive=True, - dynamic=False,framewise=True,colorbar=False, - global_extent=False,xlabel='Longitude',ylabel='Latitude') + defaults = dict(x='x', y='y', data_aspect=1, project=True, geo=True, + crs=ccrs, projection=ccrs, rasterize=True, coastline='110m', + cmap='Plasma', responsive=True, dynamic=False, framewise=True, + colorbar=False, global_extent=False, xlabel='Longitude', ylabel='Latitude') defaults.update(kwargs) for element in datasets: title = f'{element} @ {_get_timestamp(xarray_ds)}' if xarray_ds[element].shape[0] == 3: - plot[element] =_plot_rgb(xarray_ds,element,**defaults) + plot[element] = _plot_rgb(xarray_ds, element, **defaults) else: - plot[element]=_plot_quadmesh(xarray_ds,element,**defaults) + plot[element] = _plot_quadmesh(xarray_ds, element, **defaults) return plot From c0022f3da8f24ca9b410c4d0b3159ef4e1d3e929 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 14:05:17 +0200 Subject: [PATCH 029/702] correction whitespaces --- satpy/scene.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index 097c19e521..2ea3c041c3 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1069,10 +1069,11 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): if datasets is None: datasets = list(xarray_ds.keys()) - defaults = dict(x='x', y='y', data_aspect=1, project=True, geo=True, - crs=ccrs, projection=ccrs, rasterize=True, coastline='110m', - cmap='Plasma', responsive=True, dynamic=False, framewise=True, - colorbar=False, global_extent=False, xlabel='Longitude', ylabel='Latitude') + defaults = dict(x='x', y='y', 
data_aspect=1, project=True, geo=True, + crs=ccrs, projection=ccrs, rasterize=True, coastline='110m', + cmap='Plasma', responsive=True, dynamic=False, framewise=True, + colorbar=False, global_extent=False, xlabel='Longitude', + ylabel='Latitude') defaults.update(kwargs) From 249c4209c61bdf81f81ebfc81694e0f16fa25e11 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 19:46:29 +0200 Subject: [PATCH 030/702] function correction for pull request correction whitespaces, import libraries at beginning --- AUTHORS.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AUTHORS.md b/AUTHORS.md index e2aa4be396..85adb23559 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -46,8 +46,8 @@ The following people have made contributions to this project: - [Lu Liu (yukaribbba)](https://github.com/yukaribbba) - [Andrea Meraner (ameraner)](https://github.com/ameraner) - [Aronne Merrelli (aronnem)](https://github.com/aronnem) -- [Lucas Meyer (LTMeyer)](https://github.com/LTMeyer) - [Luca Merucci (lmeru)](https://github.com/lmeru) +- [Lucas Meyer (LTMeyer)](https://github.com/LTMeyer) - [Ondrej Nedelcev (nedelceo)](https://github.com/nedelceo) - [Oana Nicola](https://github.com/) - [Esben S. Nielsen (storpipfugl)](https://github.com/storpipfugl) From 2f2e8a84eb05b07a8a7464e1766873efb90052fc Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 21:10:21 +0200 Subject: [PATCH 031/702] Add to_hvplot functon --- satpy/scene.py | 27 +++++++++++++-------------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index 2ea3c041c3..ecaa58bfed 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -23,12 +23,14 @@ import warnings from typing import Callable +import hvplot.xarray # noqa import numpy as np import xarray as xr +from holoviews import Overlay from pyresample.geometry import AreaDefinition, BaseDefinition, SwathDefinition from xarray import DataArray -from satpy.composites import IncompatibleAreas +from satpy.composites import IncompatibleAreas, enhance2dataset from satpy.composites.config_loader import load_compositor_configs_for_sensors from satpy.dataset import DataID, DataQuery, DatasetDict, combine_metadata, dataset_walker, replace_anc from satpy.dependency_tree import DependencyTree @@ -1024,11 +1026,12 @@ def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynami def to_hvplot(self, datasets=None, *args, **kwargs): """Convert satpy Scene to Hvplot. - Args: + + Args: datasets (list): Limit included products to these datasets. kwargs: hvplot options list. - Returns: hvplot object that contains within it the plots of datasets list. + Returns: hvplot object that contains within it the plots of datasets list. As default it contains all Scene datasets plots and a plot title is shown. 
Example usage: @@ -1038,11 +1041,6 @@ def to_hvplot(self, datasets=None, *args, **kwargs): plot.ash+plot.IR_108 """ - import hvplot.xarray - from holoviews import Overlay - from satpy import composites - from cartopy import crs - def _get_crs(xarray_ds): return xarray_ds.area.to_cartopy_crs() @@ -1054,22 +1052,23 @@ def _get_units(xarray_ds, variable): return xarray_ds[variable].attrs['units'] def _plot_rgb(xarray_ds, variable, **defaults): - img = composites.enhance2dataset(xarray_ds[variable]) + img = enhance2dataset(xarray_ds[variable]) return img.hvplot.rgb(bands='bands', title=title, clabel='', **defaults) def _plot_quadmesh(xarray_ds, variable, **defaults): return xarray_ds[variable].hvplot.quadmesh( - clabel=f'[{_get_units(xarray_ds,variable)}]', title=title, + clabel=f'[{_get_units(xarray_ds,variable)}]', title=title, **defaults) - plot = Overlay() + plot = Overlay() xarray_ds = self.to_xarray_dataset(datasets) ccrs = _get_crs(xarray_ds) - if datasets is None: datasets = list(xarray_ds.keys()) + if datasets is None: + datasets = list(xarray_ds.keys()) - defaults = dict(x='x', y='y', data_aspect=1, project=True, geo=True, + defaults = dict(x='x', y='y', data_aspect=1, project=True, geo=True, crs=ccrs, projection=ccrs, rasterize=True, coastline='110m', cmap='Plasma', responsive=True, dynamic=False, framewise=True, colorbar=False, global_extent=False, xlabel='Longitude', @@ -1085,7 +1084,7 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): plot[element] = _plot_quadmesh(xarray_ds, element, **defaults) return plot - + def to_xarray_dataset(self, datasets=None): """Merge all xr.DataArrays of a scene to a xr.DataSet. From ac8a36167736df51b5f3659be2270520848dd97d Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 21:26:39 +0200 Subject: [PATCH 032/702] add hvplot in extras require --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 0f10d9d15d..2ca2def303 100644 --- a/setup.py +++ b/setup.py @@ -78,6 +78,7 @@ 'doc': ['sphinx', 'sphinx_rtd_theme', 'sphinxcontrib-apidoc'], # Other 'geoviews': ['geoviews'], + 'hvplot': ['hvplot'], 'overlays': ['pycoast', 'pydecorate'], 'tests': test_requires, } From d2c80fb9876f9aa82527a17a33ed324b2c8d677f Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 23:24:07 +0200 Subject: [PATCH 033/702] add hvplot in test require --- setup.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 2ca2def303..099a3f63b7 100644 --- a/setup.py +++ b/setup.py @@ -37,7 +37,9 @@ test_requires = ['behave', 'h5py', 'netCDF4', 'pyhdf', 'imageio', 'pylibtiff', 'rasterio', 'geoviews', 'trollimage', 'fsspec', 'bottleneck', - 'rioxarray', 'pytest', 'pytest-lazy-fixture', 'defusedxml'] + 'rioxarray', 'pytest', 'pytest-lazy-fixture', 'defusedxml', + 'hvplot'] + extras_require = { # Readers: From 09b8f6ee3252ca52e9ba8fd14ecf69cf7f4b49f9 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Sat, 7 May 2022 19:47:01 +0200 Subject: [PATCH 034/702] Answer to #issuecomment-1120099909 --- continuous_integration/environment.yaml | 1 + satpy/scene.py | 10 +++++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index d131bad1e0..84c5b4be68 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -35,6 +35,7 @@ dependencies: - mock - libtiff - geoviews + - hvplot - zarr - python-eccodes # 2.19.1 seems to cause library linking issues 
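For orientation, the new `Scene.to_hvplot` helper is intended to be used along the lines of the docstring example above; a minimal sketch (the file glob and reader name are placeholders for whatever input data is at hand)::

    from glob import glob
    from satpy import Scene

    scn = Scene(filenames=glob("/path/to/seviri/*"), reader="seviri_l1b_hrit")
    scn.load(["ash", "IR_108"])
    plot = scn.to_hvplot(datasets=["ash", "IR_108"])
    plot.ash + plot.IR_108  # display the two hvplot panels side by side in a notebook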
diff --git a/satpy/scene.py b/satpy/scene.py index ecaa58bfed..c013957c6e 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -23,7 +23,6 @@ import warnings from typing import Callable -import hvplot.xarray # noqa import numpy as np import xarray as xr from holoviews import Overlay @@ -39,6 +38,12 @@ from satpy.resample import get_area_def, prepare_resampler, resample_dataset from satpy.writers import load_writer +try: + import hvplot.xarray # noqa +except ImportError: + hvplot.xarray = None + + LOG = logging.getLogger(__name__) @@ -1061,6 +1066,9 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): clabel=f'[{_get_units(xarray_ds,variable)}]', title=title, **defaults) + if hvplot.xarray is None: + raise ImportError("'hvplot' must be installed to use this feature") + plot = Overlay() xarray_ds = self.to_xarray_dataset(datasets) ccrs = _get_crs(xarray_ds) From d0299dd98254e0a37406e8dee7c3fec6d041c227 Mon Sep 17 00:00:00 2001 From: bornagain Date: Sat, 7 May 2022 21:35:43 +0200 Subject: [PATCH 035/702] Update satpy/scene.py Co-authored-by: Panu Lahtinen --- satpy/scene.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/scene.py b/satpy/scene.py index c013957c6e..4a73f99a14 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1034,7 +1034,7 @@ def to_hvplot(self, datasets=None, *args, **kwargs): Args: datasets (list): Limit included products to these datasets. - kwargs: hvplot options list. + kwargs: hvplot options dictionary. Returns: hvplot object that contains within it the plots of datasets list. As default it contains all Scene datasets plots and a plot title is shown. From aed6a9173fd8bf5dd33e7c8a5ca1ee3755e577a5 Mon Sep 17 00:00:00 2001 From: bornagain Date: Sat, 7 May 2022 21:35:58 +0200 Subject: [PATCH 036/702] Update satpy/scene.py Co-authored-by: Panu Lahtinen --- satpy/scene.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/scene.py b/satpy/scene.py index 4a73f99a14..1468487da6 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1039,7 +1039,8 @@ def to_hvplot(self, datasets=None, *args, **kwargs): Returns: hvplot object that contains within it the plots of datasets list. As default it contains all Scene datasets plots and a plot title is shown. 
- Example usage: + Example usage:: + scene_list = ['ash','IR_108'] plot = scn.to_hvplot(datasets=scene_list) From 4687ba437dc029514afe518b96caf7b0bcd83709 Mon Sep 17 00:00:00 2001 From: bornagain Date: Sat, 7 May 2022 21:37:07 +0200 Subject: [PATCH 037/702] Update satpy/scene.py Co-authored-by: Panu Lahtinen --- satpy/scene.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index 1468487da6..7501ee54b7 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -39,9 +39,9 @@ from satpy.writers import load_writer try: - import hvplot.xarray # noqa + import hvplot.xarray as hvplot_xarray # noqa except ImportError: - hvplot.xarray = None + hvplot_xarray = None LOG = logging.getLogger(__name__) From 1aff451cd1481077090bc6da047b96693ad4b588 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Sat, 7 May 2022 21:41:55 +0200 Subject: [PATCH 038/702] Update --- satpy/scene.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/scene.py b/satpy/scene.py index 7501ee54b7..1fcd7fc057 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1067,7 +1067,7 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): clabel=f'[{_get_units(xarray_ds,variable)}]', title=title, **defaults) - if hvplot.xarray is None: + if hvplot_xarray is None: raise ImportError("'hvplot' must be installed to use this feature") plot = Overlay() From 546bb5f938d44ca1231227cd343bd13507c8885b Mon Sep 17 00:00:00 2001 From: bornagain Date: Tue, 2 Aug 2022 08:09:48 +0200 Subject: [PATCH 039/702] Update setup.py Co-authored-by: Panu Lahtinen --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 099a3f63b7..af6819d6e5 100644 --- a/setup.py +++ b/setup.py @@ -80,7 +80,7 @@ 'doc': ['sphinx', 'sphinx_rtd_theme', 'sphinxcontrib-apidoc'], # Other 'geoviews': ['geoviews'], - 'hvplot': ['hvplot'], + 'hvplot': ['hvplot', 'geoviews', 'cartopy'], 'overlays': ['pycoast', 'pydecorate'], 'tests': test_requires, } From 05f745a0f3827743f2eca512290f3f07a5906b76 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 8 Nov 2022 09:19:56 +0100 Subject: [PATCH 040/702] Fix new datasetid format and add an backend --- satpy/etc/composites/sgli.yaml | 10 +-- satpy/readers/sgli_l1b.py | 75 ++++++++++++++++++----- satpy/tests/reader_tests/test_sgli_l1b.py | 16 +++++ 3 files changed, 79 insertions(+), 22 deletions(-) create mode 100644 satpy/tests/reader_tests/test_sgli_l1b.py diff --git a/satpy/etc/composites/sgli.yaml b/satpy/etc/composites/sgli.yaml index f66744c037..58f52a1124 100644 --- a/satpy/etc/composites/sgli.yaml +++ b/satpy/etc/composites/sgli.yaml @@ -4,7 +4,7 @@ sensor_name: visir/sgli modifiers: rayleigh_corrected: - compositor: !!python/name:satpy.composites.PSPRayleighReflectance + compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: @@ -17,7 +17,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_marine_clean: - compositor: !!python/name:satpy.composites.PSPRayleighReflectance + compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_clean_aerosol prerequisites: @@ -30,7 +30,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_marine_tropical: - compositor: !!python/name:satpy.composites.PSPRayleighReflectance + compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: marine_tropical_aerosol prerequisites: @@ -43,7 +43,7 @@ modifiers: - 
solar_zenith_angle rayleigh_corrected_desert: - compositor: !!python/name:satpy.composites.PSPRayleighReflectance + compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: desert_aerosol prerequisites: @@ -56,7 +56,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_land: - compositor: !!python/name:satpy.composites.PSPRayleighReflectance + compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 2455c4b63b..97db07a756 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. @@ -29,14 +27,19 @@ """ -from satpy.readers.file_handlers import BaseFileHandler +import logging from datetime import datetime -from satpy import CHUNK_SIZE -import xarray as xr + import dask.array as da import h5py -import logging import numpy as np +import xarray as xr +from xarray import Dataset, Variable +from xarray.backends import BackendArray, BackendEntrypoint +from xarray.core import indexing + +from satpy import CHUNK_SIZE +from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) @@ -69,7 +72,7 @@ class HDF5SGLI(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info): """Initialize the filehandler.""" - super(HDF5SGLI, self).__init__(filename, filename_info, filetype_info) + super().__init__(filename, filename_info, filetype_info) self.resolution = resolutions[self.filename_info['resolution']] self.fh = h5py.File(self.filename, 'r') @@ -87,12 +90,12 @@ def end_time(self): def get_dataset(self, key, info): """Get the dataset.""" - if key.resolution != self.resolution: + if key["resolution"] != self.resolution: return - if key.polarization is not None: + if key["polarization"] is not None: pols = {0: '0', -60: 'm60', 60: 'p60'} - file_key = info['file_key'].format(pol=pols[key.polarization]) + file_key = info['file_key'].format(pol=pols[key["polarization"]]) else: file_key = info['file_key'] @@ -100,7 +103,7 @@ def get_dataset(self, key, info): resampling_interval = h5dataset.attrs.get('Resampling_interval', 1) if resampling_interval != 1: - logger.debug('Interpolating %s.', key.name) + logger.debug('Interpolating %s.', key["name"]) full_shape = (self.fh['Image_data'].attrs['Number_of_lines'], self.fh['Image_data'].attrs['Number_of_pixels']) dataset = interpolate(h5dataset, resampling_interval, full_shape) @@ -108,6 +111,12 @@ def get_dataset(self, key, info): dataset = da.from_array(h5dataset[:].astype(' Date: Fri, 24 Feb 2023 13:59:19 +0100 Subject: [PATCH 041/702] add holoviews to continuous_integration --- continuous_integration/environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 57728e2c73..67c007a798 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -35,12 +35,12 @@ dependencies: - mock - libtiff - geoviews + - holoviews - hvplot - zarr - python-eccodes # 2.19.1 seems to cause library linking issues - eccodes>=2.20 - - geoviews - pytest - pytest-cov - pytest-lazy-fixture From 24273d57e8c1d05b62c5104e53f0383e1e29ad40 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 20 Mar 2023 13:46:53 +0100 Subject: [PATCH 042/702] Add 
compositor for high-level clouds following GeoColor implementation. --- satpy/composites/__init__.py | 68 ++++++++++++++++++++++++++++++++++++ 1 file changed, 68 insertions(+) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 0948d543ab..248aa3b8e6 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1018,6 +1018,74 @@ def __call__(self, projectables, **kwargs): return res +class HighCloudCompositor(CloudCompositor): + """Detect high clouds based on latitude-dependent thresholding and use it as a mask for compositing. + + This compositor aims at identifying high clouds and assigning them a transparency based on the brightness + temperature (cloud opacity). In contrast to the `CloudCompositor`, the brightness temperature threshold at + the lower end, used to identify opaque clouds, is made a function of the latitude in order to have tropopause + level clouds appear as opaque at both high and low latitudes. This follows the Geocolor implementation of + high clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`). + + The idea is to define a tuple of two brightness temperature thresholds in transisiton_min and two corresponding + latitude thresholds in latitude_min. + + + TODO improve docstring: + The modified and latitude-dependent transition_min, sent to `CloudCopositor`, + will then be computed such that transition_min[0] is used if abs(latitude) < latitude_min[0]. + + if abs(latitude) < latitude_min(0): + tr_min_lat = transition_min[0] + elif abs(latitude) > latitude_min(1): + tr_min_lat = transition_min[1] + else: + tr_min_lat = linear intterpolation of + + tr_min_lat = transition_min[0] where abs(latitude) < latitude_min(0) + tr_min_lat = transition_min[1] where abs(latitude) > latitude_min(0) + tr_min_lat = linear interpolation between transition_min[0] and transition_min[1] where abs(latitude). + + """ + + def __init__(self, name, transition_min=(200., 220.), transition_max=280, latitude_min=(30., 60.), + transition_gamma=1.0, **kwargs): + """Collect custom configuration values. + + Args: + transition_min (tuple): Brightness temperature values used to identify opaque white + clouds at different latitudes + transition_max (float): Brightness temperatures above this value are not considered to + be high clouds -> transparent + latitude_min (tuple): Latitude values defining the intervals for computing latitude-dependent + transition_min values. 
+ transition_gamma (float): Gamma correction to apply at the end + + """ + self.latitude_min = latitude_min + super().__init__(name, transition_min=transition_min, transition_max=transition_max, + transition_gamma=transition_gamma, **kwargs) + + def __call__(self, projectables, **kwargs): + """Generate the composite.""" + data = projectables[0] + _, lats = data.attrs["area"].get_lonlats() + lats = np.abs(lats) + + slope = (self.transition_min[1] - self.transition_min[0]) / (self.latitude_min[1] - self.latitude_min[0]) + offset = self.transition_min[0] - slope * self.latitude_min[0] + + tr_min_lat = xr.DataArray(name='tr_min_lat', coords=data.coords, dims=data.dims) + tr_min_lat = tr_min_lat.where(lats >= self.latitude_min[0], self.transition_min[0]) + tr_min_lat = tr_min_lat.where(lats <= self.latitude_min[1], self.transition_min[1]) + tr_min_lat = tr_min_lat.where((lats < self.latitude_min[0]) | (lats > self.latitude_min[1]), + slope * lats + offset) + + self.transition_min = tr_min_lat + + return super().__call__(projectables, **kwargs) + + class RatioSharpenedRGB(GenericCompositor): """Sharpen RGB bands with ratio of a high resolution band to a lower resolution version. From d01760c0da297c3bd8dfc984a4edb7d19b3f8960 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 20 Mar 2023 13:47:53 +0100 Subject: [PATCH 043/702] Add dedicated enhancement recipe for GeoColor high cloud composites. --- satpy/etc/enhancements/generic.yaml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index ce1ce1bb94..b575d776df 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -838,6 +838,20 @@ enhancements: kwargs: weight: 1.0 + ir_high_cloud: + standard_name: ir_high_cloud + operations: + - name: inverse + method: !!python/name:satpy.enhancements.invert + args: + - [True, false] + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: linear + - name: 3d + method: !!python/name:satpy.enhancements.three_d_effect + colorized_ir_clouds: standard_name: colorized_ir_clouds operations: From 84ae8071c956006a49422bd5e419b7c012681a0d Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 20 Mar 2023 13:49:46 +0100 Subject: [PATCH 044/702] Add AHI recipe for GeoColor high-level cloud composite nighttime layer. --- satpy/etc/composites/ahi.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index a2e80a4ac1..0785ad80c2 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -489,3 +489,10 @@ composites: prerequisites: - night_ir_alpha - _night_background_hires + + # GeoColor + GeoColor_HighClouds: + standard_name: ir_high_cloud + compositor: !!python/name:satpy.composites.HighCloudCompositor + prerequisites: + - name: B13 From fdd084359657c36c21d4c619c004eea8a07e0743 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 22 Mar 2023 12:00:29 +0100 Subject: [PATCH 045/702] Add first version of low-level cloud composite. 
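Before the low-level counterpart introduced below, note that the latitude dependence of the high-cloud threshold reduces to a piecewise-linear ramp between the two `transition_min` values; a standalone NumPy sketch of the same arithmetic (the function name and the example call are illustrative only)::

    import numpy as np

    def latitude_dependent_transition_min(lats, transition_min=(200., 220.), latitude_min=(30., 60.)):
        """Brightness temperature threshold as a piecewise-linear function of abs(latitude)."""
        lats = np.abs(lats)
        slope = (transition_min[1] - transition_min[0]) / (latitude_min[1] - latitude_min[0])
        offset = transition_min[0] - slope * latitude_min[0]
        # Constant below latitude_min[0], constant above latitude_min[1], linear in between.
        return np.clip(slope * lats + offset, transition_min[0], transition_min[1])

    print(latitude_dependent_transition_min(np.array([0., 45., 75.])))  # [200. 210. 220.]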
--- satpy/composites/__init__.py | 72 ++++++++++++++++++++++++++++++++++++ 1 file changed, 72 insertions(+) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 248aa3b8e6..5c16e0c83f 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1086,6 +1086,78 @@ def __call__(self, projectables, **kwargs): return super().__call__(projectables, **kwargs) +# class LowCloudCompositor(CloudCompositor): +class LowCloudCompositor(GenericCompositor): + """Class information. + + TODO: Rewrite docstring + + Detect low clouds based on latitude-dependent thresholding and use it as a mask for compositing. + + This compositor aims at identifying high clouds and assigning them a transparency based on the brightness + temperature (cloud opacity). In contrast to the `CloudCompositor`, the brightness temperature threshold at + the lower end, used to identify opaque clouds, is made a function of the latitude in order to have tropopause + level clouds appear as opaque at both high and low latitudes. This follows the Geocolor implementation of + high clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`). + + The idea is to define a tuple of two brightness temperature thresholds in transisiton_min and two corresponding + latitude thresholds in latitude_min. + + """ + + def __init__(self, name, range_land=(1.0, 4.5), range_water=(0.0, 4.0), transition_gamma=1.0, + color=(140.25, 191.25, 249.9), **kwargs): + """Init info. + + TODO: Rewrite docstring + Collect custom configuration values. + + Args: + transition_min (tuple): Brightness temperature values used to identify opaque white + clouds at different latitudes + transition_max (float): Brightness temperatures above this value are not considered to + be high clouds -> transparent + latitude_min (tuple): Latitude values defining the intervals for computing latitude-dependent + transition_min values. + transition_gamma (float): Gamma correction to apply at the end + + """ + self.range_land = range_land + self.range_water = range_water + self.transition_gamma = transition_gamma + self.color = color + super().__init__(name, **kwargs) + # super().__init__(name, transition_gamma=transition_gamma, **kwargs) + + def __call__(self, projectables, **kwargs): + """Generate the composite.""" + diff_comp = DifferenceCompositor(name='ir_difference') + btd = diff_comp.__call__(projectables) + + # Avoid spurious false alarms caused by noise in the 3.9um channel that can occur for very cold cloud tops + btd = btd.where(projectables[0] >= 230, 0.0) + + # self.transition_min, self.transition_max = self.range_land + # res_land = super().__call__((btd), **kwargs) + + # self.transition_min, self.transition_max = self.range_water + # res_water = super().__call__(btd, **kwargs) + + tr_min = self.range_land[0] + tr_max = self.range_land[1] + + slope = 1 / (tr_max - tr_min) + offset = 0 - slope * tr_min + + alpha = btd.where(btd > tr_min, 0.0) + alpha = alpha.where(btd <= tr_max, 1.0) + alpha = alpha.where((btd <= tr_min) | (btd > tr_max), slope * btd + offset) + + alpha **= self.transition_gamma + res = super().__call__((btd, alpha), low_cloud_color=self.color, **kwargs) + return res + + class RatioSharpenedRGB(GenericCompositor): """Sharpen RGB bands with ratio of a high resolution band to a lower resolution version. From 92607d6f07e353494244b5143d9c585466203dcc Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 22 Mar 2023 12:02:21 +0100 Subject: [PATCH 046/702] Add support for monochromatic colorization enhancement. 
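The BTD-to-opacity mapping used in the low-level cloud compositor above is a plain linear ramp with a cold-cloud noise guard; the same arithmetic in isolation (plain NumPy, defaults follow the `range_land` values, names chosen here for illustration)::

    import numpy as np

    def low_cloud_alpha(bt_win, bt_39, tr_min=1.0, tr_max=4.5, gamma=1.0):
        """Opacity from the window-minus-3.9um BTD; zeroed where the window BT is very cold (noise guard)."""
        btd = np.where(bt_win >= 230.0, bt_win - bt_39, 0.0)
        alpha = np.clip((btd - tr_min) / (tr_max - tr_min), 0.0, 1.0)
        return alpha ** gamma

    # Night-time low stratus/fog typically shows a BTD of a few kelvin:
    print(low_cloud_alpha(np.array([285., 280., 220.]), np.array([282.25, 279.5, 215.])))  # [0.5 0.  0. ]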
--- satpy/enhancements/__init__.py | 43 ++++++++++++++++++++++------- satpy/etc/enhancements/generic.yaml | 14 ++++++++++ 2 files changed, 47 insertions(+), 10 deletions(-) diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py index 29f2cbdf54..0ae8fd45c0 100644 --- a/satpy/enhancements/__init__.py +++ b/satpy/enhancements/__init__.py @@ -358,16 +358,39 @@ def _merge_colormaps(kwargs, img=None): from trollimage.colormap import Colormap full_cmap = None - palette = kwargs['palettes'] - if isinstance(palette, Colormap): - full_cmap = palette - else: - for itm in palette: - cmap = create_colormap(itm, img) - if full_cmap is None: - full_cmap = cmap - else: - full_cmap = full_cmap + cmap + # TODO + # - Improve check if both palettes and monochromatic are set + # - Improve exception handling for monochromatic cases + # - Resolve RunTimeWarnings + + if 'palettes' in kwargs: + palette = kwargs['palettes'] + if isinstance(palette, Colormap): + full_cmap = palette + else: + for itm in palette: + cmap = create_colormap(itm, img) + if full_cmap is None: + full_cmap = cmap + else: + full_cmap = full_cmap + cmap + + if 'monochromatic' in kwargs: + palette = {} + color = kwargs['monochromatic'].get('color', None) + if color is None: + # TODO: add error + pass + elif isinstance(color, (list, tuple)): + palette['colors'] = [color] + elif isinstance(color, str): + var = img.data.attrs.get(color, None) + if not isinstance(var, (tuple, list)): + # TODO: add error + pass + palette['colors'] = [var] + + full_cmap = create_colormap(palette, img) return full_cmap diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index b575d776df..43b40e2fcd 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -852,6 +852,20 @@ enhancements: - name: 3d method: !!python/name:satpy.enhancements.three_d_effect + ir_low_cloud: + standard_name: ir_low_cloud + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: linear + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + monochromatic: + color: low_cloud_color +# color: [0, 255, 0] + colorized_ir_clouds: standard_name: colorized_ir_clouds operations: From 6ee859b9f0d0b2c6a2e10994072dbc6cdae04523 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 22 Mar 2023 12:03:13 +0100 Subject: [PATCH 047/702] Add GeoColor low-level cloud composite for AHI. --- satpy/etc/composites/ahi.yaml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 0785ad80c2..964e37b0a3 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -491,8 +491,16 @@ composites: - _night_background_hires # GeoColor - GeoColor_HighClouds: + geo_color_high_clouds: standard_name: ir_high_cloud compositor: !!python/name:satpy.composites.HighCloudCompositor prerequisites: - name: B13 + + geo_color_low_clouds: + standard_name: ir_low_cloud + compositor: !!python/name:satpy.composites.LowCloudCompositor + color: [140.25, 191.25, 249.9] + prerequisites: + - name: B13 + - name: B07 From c8ae060e8db938108e2d8e4de6c0eba24abe0f91 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Fri, 31 Mar 2023 18:16:35 +0200 Subject: [PATCH 048/702] Implement usage of land-sea-mask for the brightness temperature difference used for the Geocolor low-level cloud layer. 
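Splitting the BTD thresholds by surface type, as the following change does with a land-sea mask, amounts to selecting between two such ramps per pixel; a compact NumPy sketch (the mask flag values here are illustrative only)::

    import numpy as np

    def dual_surface_alpha(btd, lsm, range_land=(1.0, 4.5), range_sea=(0.0, 4.0), land_value=1):
        """Pick the land or sea BTD->opacity ramp per pixel from a land-sea mask."""
        def ramp(x, lo, hi):
            return np.clip((x - lo) / (hi - lo), 0.0, 1.0)
        return np.where(lsm == land_value, ramp(btd, *range_land), ramp(btd, *range_sea))

    btd = np.array([2.0, 2.0])
    lsm = np.array([1, 0])  # 1 = land, 0 = sea (actual flag values come from the mask file)
    print(dual_surface_alpha(btd, lsm))  # the same BTD maps to different opacities over land and sea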
--- satpy/composites/__init__.py | 47 ++++++++++++++--------------- satpy/etc/composites/ahi.yaml | 10 +++++- satpy/etc/enhancements/generic.yaml | 5 ++- 3 files changed, 35 insertions(+), 27 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 5c16e0c83f..de7cf04613 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1086,8 +1086,7 @@ def __call__(self, projectables, **kwargs): return super().__call__(projectables, **kwargs) -# class LowCloudCompositor(CloudCompositor): -class LowCloudCompositor(GenericCompositor): +class LowCloudCompositor(CloudCompositor): """Class information. TODO: Rewrite docstring @@ -1105,8 +1104,10 @@ class LowCloudCompositor(GenericCompositor): """ - def __init__(self, name, range_land=(1.0, 4.5), range_water=(0.0, 4.0), transition_gamma=1.0, - color=(140.25, 191.25, 249.9), **kwargs): + def __init__(self, name, land_sea_mask=None, value_land=1, value_sea=0, + range_land=(1.0, 4.5), + range_sea=(0.0, 4.0), + transition_gamma=1.0, color=(140.25, 191.25, 249.9), **kwargs): """Init info. TODO: Rewrite docstring @@ -1122,39 +1123,35 @@ def __init__(self, name, range_land=(1.0, 4.5), range_water=(0.0, 4.0), transiti transition_gamma (float): Gamma correction to apply at the end """ + self.land_sea_mask = land_sea_mask + self.val_land = value_land + self.val_sea = value_sea self.range_land = range_land - self.range_water = range_water + self.range_sea = range_sea self.transition_gamma = transition_gamma self.color = color - super().__init__(name, **kwargs) - # super().__init__(name, transition_gamma=transition_gamma, **kwargs) + self.transition_min = None + self.transition_max = None + super().__init__(name, transition_gamma=transition_gamma, **kwargs) def __call__(self, projectables, **kwargs): """Generate the composite.""" - diff_comp = DifferenceCompositor(name='ir_difference') - btd = diff_comp.__call__(projectables) + projectables = self.match_data_arrays(projectables) + btd, lsm, win_bt = projectables + lsm = lsm.squeeze(drop=True) + lsm = lsm.round() # Make sure to have whole numbers in case of smearing from resampling # Avoid spurious false alarms caused by noise in the 3.9um channel that can occur for very cold cloud tops - btd = btd.where(projectables[0] >= 230, 0.0) - - # self.transition_min, self.transition_max = self.range_land - # res_land = super().__call__((btd), **kwargs) - - # self.transition_min, self.transition_max = self.range_water - # res_water = super().__call__(btd, **kwargs) + btd = btd.where(win_bt >= 230, 0.0) - tr_min = self.range_land[0] - tr_max = self.range_land[1] + self.transition_min, self.transition_max = self.range_land + res = super().__call__([btd.where(lsm == self.val_land)], low_cloud_color=self.color, **kwargs) - slope = 1 / (tr_max - tr_min) - offset = 0 - slope * tr_min + self.transition_min, self.transition_max = self.range_sea + res_sea = super().__call__([btd.where(lsm == self.val_sea)], low_cloud_color=self.color, **kwargs) - alpha = btd.where(btd > tr_min, 0.0) - alpha = alpha.where(btd <= tr_max, 1.0) - alpha = alpha.where((btd <= tr_min) | (btd > tr_max), slope * btd + offset) + res = res.where(lsm == self.val_land, res_sea) - alpha **= self.transition_gamma - res = super().__call__((btd, alpha), low_cloud_color=self.color, **kwargs) return res diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 964e37b0a3..2730547f56 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -500,7 +500,15 @@ 
composites: geo_color_low_clouds: standard_name: ir_low_cloud compositor: !!python/name:satpy.composites.LowCloudCompositor + value_sea: 0 + value_land: 254 color: [140.25, 191.25, 249.9] prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: B13 + - name: B07 + - compositor: !!python/name:satpy.composites.StaticImageCompositor + standard_name: land_sea_mask + filename: "/tcenas/scratch/strandgren/GeoColor/land_sea_mask_3km_i_.tif" - name: B13 - - name: B07 diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 43b40e2fcd..2094327918 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -855,6 +855,10 @@ enhancements: ir_low_cloud: standard_name: ir_low_cloud operations: + - name: inverse + method: !!python/name:satpy.enhancements.invert + args: + - [False, True] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: @@ -864,7 +868,6 @@ enhancements: kwargs: monochromatic: color: low_cloud_color -# color: [0, 255, 0] colorized_ir_clouds: standard_name: colorized_ir_clouds From c3636f3b3f60a46a6032bdca82a6a8e08b6a50ea Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 3 Apr 2023 11:33:49 +0200 Subject: [PATCH 049/702] Add GeoColor composite recipes including rayleigh correction modifier. --- satpy/etc/composites/ahi.yaml | 86 ++++++++++++++++++++++++++--------- 1 file changed, 64 insertions(+), 22 deletions(-) diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 2730547f56..a2fccee84b 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -14,6 +14,22 @@ modifiers: - solar_azimuth_angle - solar_zenith_angle + geo_color_rayleigh_corrected: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: rayleigh_only + reduce_lim_low: 70 + reduce_lim_high: 105 + reduce_strength: 1.5 + prerequisites: + - name: B03 + modifiers: [sunz_corrected] + optional_prerequisites: + - satellite_azimuth_angle + - satellite_zenith_angle + - solar_azimuth_angle + - solar_zenith_angle + composites: green: deprecation_warning: "'green' is a deprecated composite. Use the equivalent 'hybrid_green' instead." 
@@ -103,17 +119,6 @@ composites: - wavelength: 0.85 standard_name: toa_reflectance - ndvi_hybrid_green: - compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen - prerequisites: - - name: B02 - modifiers: [sunz_corrected, rayleigh_corrected] - - name: B03 - modifiers: [sunz_corrected, rayleigh_corrected] - - name: B04 - modifiers: [sunz_corrected] - standard_name: toa_bidirectional_reflectance - airmass: # PDF slides: https://www.eumetsat.int/website/home/News/ConferencesandEvents/DAT_2833302.html # Under session 2 by Akihiro Shimizu (JMA) @@ -271,17 +276,6 @@ composites: high_resolution_band: red standard_name: true_color - true_color_ndvi_green: - compositor: !!python/name:satpy.composites.SelfSharpenedRGB - prerequisites: - - name: B03 - modifiers: [sunz_corrected, rayleigh_corrected] - - name: ndvi_hybrid_green - - name: B01 - modifiers: [sunz_corrected, rayleigh_corrected] - high_resolution_band: red - standard_name: true_color - natural_color_nocorr: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: @@ -491,6 +485,40 @@ composites: - _night_background_hires # GeoColor + + geo_color: + compositor: !!python/name:satpy.composites.DayNightCompositor + lim_low: 80 + lim_high: 88 + standard_name: true_color_with_night_ir + prerequisites: + - geo_color_true_color + - geo_color_night + + # GeoColor Daytime + geo_color_green: + compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen + prerequisites: + - name: B02 + modifiers: [ sunz_corrected, geo_color_rayleigh_corrected ] + - name: B03 + modifiers: [ sunz_corrected, geo_color_rayleigh_corrected ] + - name: B04 + modifiers: [ sunz_corrected ] + standard_name: toa_bidirectional_reflectance + + geo_color_true_color: + compositor: !!python/name:satpy.composites.SelfSharpenedRGB + prerequisites: + - name: B03 + modifiers: [ sunz_corrected, geo_color_rayleigh_corrected ] + - name: geo_color_green + - name: B01 + modifiers: [ sunz_corrected, geo_color_rayleigh_corrected ] + high_resolution_band: red + standard_name: true_color + + # GeoColor Nighttime geo_color_high_clouds: standard_name: ir_high_cloud compositor: !!python/name:satpy.composites.HighCloudCompositor @@ -512,3 +540,17 @@ composites: standard_name: land_sea_mask filename: "/tcenas/scratch/strandgren/GeoColor/land_sea_mask_3km_i_.tif" - name: B13 + + geo_color_bl: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - geo_color_low_clouds + - _night_background_hires + + geo_color_night: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - geo_color_high_clouds + - geo_color_bl From 258099073b32003402f308b1518f308b70e65334 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 3 Apr 2023 17:08:32 +0200 Subject: [PATCH 050/702] Add AHI GeoColor composite without background layer. 
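With the recipes above in place, the blended GeoColor product is generated like any other Satpy composite; a sketch (paths are placeholders, and the land-sea-mask GeoTIFF referenced by the recipe must exist locally)::

    from glob import glob
    from satpy import Scene

    scn = Scene(filenames=glob("/path/to/ahi/hsd/*FLDK*"), reader="ahi_hsd")
    scn.load(["geo_color"])
    scn_r = scn.resample(resampler="native")  # bring the 0.5/1/2 km AHI bands onto a common grid
    scn_r.save_dataset("geo_color", filename="geo_color.png")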
--- satpy/etc/composites/ahi.yaml | 23 ++++++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index a2fccee84b..98e3a028c7 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -495,6 +495,15 @@ composites: - geo_color_true_color - geo_color_night + geo_color_without_background: + compositor: !!python/name:satpy.composites.DayNightCompositor + lim_low: 80 + lim_high: 88 + standard_name: true_color_with_night_ir + prerequisites: + - geo_color_true_color + - geo_color_night_without_background + # GeoColor Daytime geo_color_green: compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen @@ -518,7 +527,7 @@ composites: high_resolution_band: red standard_name: true_color - # GeoColor Nighttime + # GeoColor Night-time geo_color_high_clouds: standard_name: ir_high_cloud compositor: !!python/name:satpy.composites.HighCloudCompositor @@ -538,10 +547,11 @@ composites: - name: B07 - compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: land_sea_mask + # TODO Change filename filename: "/tcenas/scratch/strandgren/GeoColor/land_sea_mask_3km_i_.tif" - name: B13 - geo_color_bl: + geo_color_background_with_low_clouds: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background prerequisites: @@ -553,4 +563,11 @@ composites: standard_name: night_ir_with_background prerequisites: - geo_color_high_clouds - - geo_color_bl + - geo_color_background_with_low_clouds + + geo_color_night_without_background: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - geo_color_low_clouds + - geo_color_high_clouds From 608233fb1fa98850ff070848441750139baec55f Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 3 Apr 2023 18:56:09 +0200 Subject: [PATCH 051/702] Improve docstrings and exception handling of HighCloudCompositor and LowCloudCompositor. --- satpy/composites/__init__.py | 142 +++++++++++++++++++++-------------- 1 file changed, 85 insertions(+), 57 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index de7cf04613..b20d057ca2 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1023,29 +1023,20 @@ class HighCloudCompositor(CloudCompositor): This compositor aims at identifying high clouds and assigning them a transparency based on the brightness temperature (cloud opacity). In contrast to the `CloudCompositor`, the brightness temperature threshold at - the lower end, used to identify opaque clouds, is made a function of the latitude in order to have tropopause - level clouds appear as opaque at both high and low latitudes. This follows the Geocolor implementation of - high clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`). - - The idea is to define a tuple of two brightness temperature thresholds in transisiton_min and two corresponding - latitude thresholds in latitude_min. - - - TODO improve docstring: - The modified and latitude-dependent transition_min, sent to `CloudCopositor`, - will then be computed such that transition_min[0] is used if abs(latitude) < latitude_min[0]. 
- - if abs(latitude) < latitude_min(0): - tr_min_lat = transition_min[0] - elif abs(latitude) > latitude_min(1): - tr_min_lat = transition_min[1] - else: - tr_min_lat = linear intterpolation of - - tr_min_lat = transition_min[0] where abs(latitude) < latitude_min(0) - tr_min_lat = transition_min[1] where abs(latitude) > latitude_min(0) - tr_min_lat = linear interpolation between transition_min[0] and transition_min[1] where abs(latitude). - + the lower end, used to identify high opaque clouds, is made a function of the latitude in order to have + tropopause level clouds appear opaque at both high and low latitudes. This follows the Geocolor + implementation of high clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`). + + The two brightness temperature thresholds in `transition_min` are used together with the corresponding + latitude limits in `latitude_min` to compute a modified version of `transition_min` that is later used + when calling `CloudCompositor`. The modified version of `transition_min` will be an array with the same + shape as the input projectable dataset, where the actual values of threshold_min are a function of the + dataset `latitude`: + + - transition_min = transition_min[0] where abs(latitude) < latitude_min(0) + - transition_min = transition_min[1] where abs(latitude) > latitude_min(0) + - transition_min = linear interpolation between transition_min[0] and transition_min[1] as a funtion + of where abs(latitude). """ def __init__(self, name, transition_min=(200., 220.), transition_max=280, latitude_min=(30., 60.), @@ -1054,20 +1045,35 @@ def __init__(self, name, transition_min=(200., 220.), transition_max=280, latitu Args: transition_min (tuple): Brightness temperature values used to identify opaque white - clouds at different latitudes + clouds at different latitudes transition_max (float): Brightness temperatures above this value are not considered to be high clouds -> transparent latitude_min (tuple): Latitude values defining the intervals for computing latitude-dependent transition_min values. - transition_gamma (float): Gamma correction to apply at the end + transition_gamma (float): Gamma correction to apply to the alpha channel within the brightness + temperature range (`transition_min` to `transition_max`). """ + if len(transition_min) != 2: + raise ValueError(f"Expected 2 `transition_min` values, got {len(transition_min)}") + if len(latitude_min) != 2: + raise ValueError(f"Expected 2 `latitude_min` values, got {len(latitude_min)}") + if type(transition_max) in [list, tuple]: + raise ValueError(f"Expected `transition_max` to be of type float, is of type {type(transition_max)}") + self.latitude_min = latitude_min super().__init__(name, transition_min=transition_min, transition_max=transition_max, transition_gamma=transition_gamma, **kwargs) def __call__(self, projectables, **kwargs): - """Generate the composite.""" + """Generate the composite. + + `projectables` is expected to be a list or tuple with a single element: + - index 0: Brightness temperature of a thermal infrared window channel (e.g. 10.5 microns). + """ + if len(projectables) != 1: + raise ValueError(f"Expected 1 dataset, got {len(projectables)}") + data = projectables[0] _, lats = data.attrs["area"].get_lonlats() lats = np.abs(lats) @@ -1087,70 +1093,92 @@ def __call__(self, projectables, **kwargs): class LowCloudCompositor(CloudCompositor): - """Class information. + """Detect low-level clouds based on thresholding and use it as a mask for compositing during night-time. 
- TODO: Rewrite docstring + This compsitor computes the brightness temperature difference between a window channel (e.g. 10.5 micron) + and the near-infrared channel e.g. (3.8 micron) and uses this brightness temperature difference, `BTD`, to + create a partially transparent mask for compositing. - Detect low clouds based on latitude-dependent thresholding and use it as a mask for compositing. - - This compositor aims at identifying high clouds and assigning them a transparency based on the brightness - temperature (cloud opacity). In contrast to the `CloudCompositor`, the brightness temperature threshold at - the lower end, used to identify opaque clouds, is made a function of the latitude in order to have tropopause - level clouds appear as opaque at both high and low latitudes. This follows the Geocolor implementation of - high clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`). - - The idea is to define a tuple of two brightness temperature thresholds in transisiton_min and two corresponding - latitude thresholds in latitude_min. + Pixels with `BTD` values below a given threshold will be transparent, whereas pixels with `BTD` values + above another threshold will be opaque. The transparency of all other `BTD` values will be a linear + function of the `BTD` value itself. Two sets of thresholds are used, one set for land surface types + (`range_land`) and another one for sea/water surface types (`range_sea`), respectively. Hence, + this compositor requires a land-sea-mask as a prerequisite input. This follows the GeoColor + implementation of night-time low-level clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`). + Please note that the spectral test and thus the output of the compositor (using the expected input data) is + only applicable during night-time. """ - def __init__(self, name, land_sea_mask=None, value_land=1, value_sea=0, + def __init__(self, name, values_land=(1), values_sea=(0), range_land=(1.0, 4.5), range_sea=(0.0, 4.0), transition_gamma=1.0, color=(140.25, 191.25, 249.9), **kwargs): """Init info. - TODO: Rewrite docstring Collect custom configuration values. Args: - transition_min (tuple): Brightness temperature values used to identify opaque white - clouds at different latitudes - transition_max (float): Brightness temperatures above this value are not considered to - be high clouds -> transparent + value_land (list): List of values used to identify land surface pixels in the land-sea-mask. + value_sea (list): List of values used to identify sea/water surface pixels in the land-sea-mask. + range_land (tuple): Threshold values used for masking low-level clouds from the brightness temperature + difference over land surface types. + range_sea (tuple): Threshold values used for masking low-level clouds from the brightness temperature + difference over sea/water. latitude_min (tuple): Latitude values defining the intervals for computing latitude-dependent transition_min values. - transition_gamma (float): Gamma correction to apply at the end - + transition_gamma (float): Gamma correction to apply to the alpha channel within the brightness + temperature difference range. + color (list): RGB definition of color to use for the low-level clouds in the composite (the final + color will be a function of the corresponding trasnparency/alpha channel). 
""" - self.land_sea_mask = land_sea_mask - self.val_land = value_land - self.val_sea = value_sea + if len(range_land) != 2: + raise ValueError(f"Expected 2 `range_land` values, got {len(range_land)}") + if len(range_sea) != 2: + raise ValueError(f"Expected 2 `range_sea` values, got {len(range_sea)}") + if type(color) not in [list, tuple] or len(color) != 3: + raise ValueError("Expected list/tuple with the red, green and blue color components.") + + self.values_land = values_land if type(values_land) in [list, tuple] else [values_land] + self.values_sea = values_sea if type(values_sea) in [list, tuple] else [values_sea] self.range_land = range_land self.range_sea = range_sea self.transition_gamma = transition_gamma self.color = color - self.transition_min = None - self.transition_max = None + self.transition_min = None # Placeholder for later use in CloudCompositor + self.transition_max = None # Placeholder for later use in CloudCompositor super().__init__(name, transition_gamma=transition_gamma, **kwargs) def __call__(self, projectables, **kwargs): - """Generate the composite.""" + """Generate the composite. + + `projectables` is expected to be a list or tuple with the following three elements: + - index 0: Brightness temperature difference between a window channel (e.g. 10.5 micron) and a + near-infrared channel e.g. (3.8 micron). + - index 1. Brightness temperature of the window channel (used to filter out noise-induced false alarms). + - index 2: Land-Sea-Mask. + """ + if len(projectables) != 3: + raise ValueError(f"Expected 3 datasets, got {len(projectables)}") + projectables = self.match_data_arrays(projectables) - btd, lsm, win_bt = projectables + btd, bt_win, lsm = projectables lsm = lsm.squeeze(drop=True) lsm = lsm.round() # Make sure to have whole numbers in case of smearing from resampling # Avoid spurious false alarms caused by noise in the 3.9um channel that can occur for very cold cloud tops - btd = btd.where(win_bt >= 230, 0.0) + btd = btd.where(bt_win >= 230, 0.0) + # Call CloudCompositor for land surface pixels self.transition_min, self.transition_max = self.range_land - res = super().__call__([btd.where(lsm == self.val_land)], low_cloud_color=self.color, **kwargs) + res = super().__call__([btd.where(lsm.isin(self.values_land))], low_cloud_color=self.color, **kwargs) + # Call CloudCompositor for sea/water surface pixels self.transition_min, self.transition_max = self.range_sea - res_sea = super().__call__([btd.where(lsm == self.val_sea)], low_cloud_color=self.color, **kwargs) + res_sea = super().__call__([btd.where(lsm.isin(self.values_sea))], low_cloud_color=self.color, **kwargs) - res = res.where(lsm == self.val_land, res_sea) + # Compine resutls for land and sea/water surface pixels + res = res.where(lsm.isin(self.values_land), res_sea) return res From 489069551915d6c4744856abf65f9e288619215a Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 3 Apr 2023 18:57:15 +0200 Subject: [PATCH 052/702] Modify low-level cloud composite recipe to account for modified order of projectables and new land-sea mask data. 
--- satpy/etc/composites/ahi.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 98e3a028c7..f0543dd7d5 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -537,19 +537,19 @@ composites: geo_color_low_clouds: standard_name: ir_low_cloud compositor: !!python/name:satpy.composites.LowCloudCompositor - value_sea: 0 - value_land: 254 + values_sea: 0 + values_land: 100 color: [140.25, 191.25, 249.9] prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B13 - name: B07 + - name: B13 - compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: land_sea_mask # TODO Change filename - filename: "/tcenas/scratch/strandgren/GeoColor/land_sea_mask_3km_i_.tif" - - name: B13 + filename: "/tcenas/scratch/strandgren/GeoColor/gshhs_land_sea_mask_3km_i.tif" geo_color_background_with_low_clouds: compositor: !!python/name:satpy.composites.BackgroundCompositor From 40ded69a74d51790ba581e0cf30d4155162a23a3 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 3 Apr 2023 19:00:39 +0200 Subject: [PATCH 053/702] Fix syntax of single-element tuples. --- satpy/composites/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index b20d057ca2..995ef7d317 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1110,7 +1110,7 @@ class LowCloudCompositor(CloudCompositor): only applicable during night-time. """ - def __init__(self, name, values_land=(1), values_sea=(0), + def __init__(self, name, values_land=(1,), values_sea=(0,), range_land=(1.0, 4.5), range_sea=(0.0, 4.0), transition_gamma=1.0, color=(140.25, 191.25, 249.9), **kwargs): From bd8494e1931d8f967fdb170be218c1a49860b545 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 5 Apr 2023 13:01:11 +0200 Subject: [PATCH 054/702] Implement non-linearity term for NDVI-weighted hybrid-green correction when converting NDVI to blend factor. --- satpy/composites/spectral.py | 24 ++++++++++++++++--- satpy/tests/compositor_tests/test_spectral.py | 13 ++++++++++ 2 files changed, 34 insertions(+), 3 deletions(-) diff --git a/satpy/composites/spectral.py b/satpy/composites/spectral.py index c0ccaff64f..744637551a 100644 --- a/satpy/composites/spectral.py +++ b/satpy/composites/spectral.py @@ -114,8 +114,8 @@ class NDVIHybridGreen(SpectralBlender): This green band correction follows the same approach as the HybridGreen compositor, but with a dynamic blend factor `f` that depends on the pixel-level Normalized Differece Vegetation Index (NDVI). The higher the NDVI, the - smaller the contribution from the nir channel will be, following a liner relationship between the two ranges - `[ndvi_min, ndvi_max]` and `limits`. + smaller the contribution from the nir channel will be, following a liner (default) or non-linear relationship + between the two ranges `[ndvi_min, ndvi_max]` and `limits`. As an example, a new green channel using e.g. 
FCI data and the NDVIHybridGreen compositor can be defined like:: @@ -124,6 +124,7 @@ class NDVIHybridGreen(SpectralBlender): ndvi_min: 0.0 ndvi_max: 1.0 limits: [0.15, 0.05] + strength: 1.0 prerequisites: - name: vis_05 modifiers: [sunz_corrected, rayleigh_corrected] @@ -138,17 +139,29 @@ class NDVIHybridGreen(SpectralBlender): pixels with NDVI=1.0 will be a weighted average with 5% contribution from the near-infrared vis_08 channel and the remaining 95% from the native green vis_05 channel. For other values of NDVI a linear interpolation between these values will be performed. + + A strength larger or smaller than 1.0 will introduce a non-linear relationship between the two ranges + `[ndvi_min, ndvi_max]` and `limits`. Hence, a higher strength (> 1.0) will result in a slower transition + to higher/lower fractions at the NDVI extremes. Similarly, a lower strength (< 1.0) will result in a + faster transition to higher/lower fractions at the NDVI extremes. """ - def __init__(self, *args, ndvi_min=0.0, ndvi_max=1.0, limits=(0.15, 0.05), **kwargs): + def __init__(self, *args, ndvi_min=0.0, ndvi_max=1.0, limits=(0.15, 0.05), strength=1.0, **kwargs): """Initialize class and set the NDVI limits and the corresponding blending fraction limits.""" + if strength <= 0.0: + raise ValueError(f"Expected stength greater than 0.0, got {strength}.") + self.ndvi_min = ndvi_min self.ndvi_max = ndvi_max self.limits = limits + self.strength = strength super().__init__(*args, **kwargs) def __call__(self, projectables, optional_datasets=None, **attrs): """Construct the hybrid green channel weighted by NDVI.""" + LOG.info(f"Applying NDVI-weighted hybrid-green correction with limits [{self.limits[0]}, " + f"{self.limits[1]}] and stength {self.strength}.") + ndvi_input = self.match_data_arrays([projectables[1], projectables[2]]) ndvi = (ndvi_input[1] - ndvi_input[0]) / (ndvi_input[1] + ndvi_input[0]) @@ -156,6 +169,11 @@ def __call__(self, projectables, optional_datasets=None, **attrs): ndvi.data = da.where(ndvi > self.ndvi_min, ndvi, self.ndvi_min) ndvi.data = da.where(ndvi < self.ndvi_max, ndvi, self.ndvi_max) + # Apply non-linearity to the ndvi for a non-linear conversion from ndvi to fraction. This can be used for a + # slower transision to higher/lower fractions at the ndvi extremes. If strength equals 1.0, this operation has + # no effect on ndvi. 
+ ndvi = ndvi ** self.strength / (ndvi ** self.strength + (1 - ndvi) ** self.strength) + fraction = (ndvi - self.ndvi_min) / (self.ndvi_max - self.ndvi_min) * (self.limits[1] - self.limits[0]) \ + self.limits[0] self.fractions = (1 - fraction, fraction) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 2e9f59c13f..03e51a5043 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -77,6 +77,7 @@ def test_ndvi_hybrid_green(self): comp = NDVIHybridGreen('ndvi_hybrid_green', limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), standard_name='toa_bidirectional_reflectance') + # Test General functionality with linear strength (=1.0) res = comp((self.c01, self.c02, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) @@ -85,6 +86,18 @@ def test_ndvi_hybrid_green(self): data = res.values np.testing.assert_array_almost_equal(data, np.array([[0.2633, 0.3071], [0.2115, 0.3420]]), decimal=4) + # Test invalid strength + with pytest.raises(ValueError): + _ = NDVIHybridGreen('ndvi_hybrid_green', strength=0.0, prerequisites=(0.51, 0.65, 0.85), + standard_name='toa_bidirectional_reflectance') + + # Test non-linear strength + comp = NDVIHybridGreen('ndvi_hybrid_green', limits=(0.15, 0.05), strength=2.0, prerequisites=(0.51, 0.65, 0.85), + standard_name='toa_bidirectional_reflectance') + + res = comp((self.c01, self.c02, self.c03)) + np.testing.assert_array_almost_equal(res.values, np.array([[0.2646, 0.3075], [0.2120, 0.3471]]), decimal=4) + def test_green_corrector(self): """Test the deprecated class for green corrections.""" comp = GreenCorrector('blended_channel', fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), From 5dc3d3edc53f2005cee3a3251e92c8fd913f6c28 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 6 Apr 2023 11:54:04 +0200 Subject: [PATCH 055/702] Remove monochromatic colorization and use available palettes functionality instead. --- satpy/composites/__init__.py | 11 ++------ satpy/enhancements/__init__.py | 43 +++++++---------------------- satpy/etc/composites/ahi.yaml | 3 +- satpy/etc/enhancements/generic.yaml | 4 +-- 4 files changed, 16 insertions(+), 45 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 995ef7d317..db120293ec 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1113,7 +1113,7 @@ class LowCloudCompositor(CloudCompositor): def __init__(self, name, values_land=(1,), values_sea=(0,), range_land=(1.0, 4.5), range_sea=(0.0, 4.0), - transition_gamma=1.0, color=(140.25, 191.25, 249.9), **kwargs): + transition_gamma=1.0, **kwargs): """Init info. Collect custom configuration values. @@ -1129,22 +1129,17 @@ def __init__(self, name, values_land=(1,), values_sea=(0,), transition_min values. transition_gamma (float): Gamma correction to apply to the alpha channel within the brightness temperature difference range. - color (list): RGB definition of color to use for the low-level clouds in the composite (the final - color will be a function of the corresponding trasnparency/alpha channel). 
""" if len(range_land) != 2: raise ValueError(f"Expected 2 `range_land` values, got {len(range_land)}") if len(range_sea) != 2: raise ValueError(f"Expected 2 `range_sea` values, got {len(range_sea)}") - if type(color) not in [list, tuple] or len(color) != 3: - raise ValueError("Expected list/tuple with the red, green and blue color components.") self.values_land = values_land if type(values_land) in [list, tuple] else [values_land] self.values_sea = values_sea if type(values_sea) in [list, tuple] else [values_sea] self.range_land = range_land self.range_sea = range_sea self.transition_gamma = transition_gamma - self.color = color self.transition_min = None # Placeholder for later use in CloudCompositor self.transition_max = None # Placeholder for later use in CloudCompositor super().__init__(name, transition_gamma=transition_gamma, **kwargs) @@ -1171,11 +1166,11 @@ def __call__(self, projectables, **kwargs): # Call CloudCompositor for land surface pixels self.transition_min, self.transition_max = self.range_land - res = super().__call__([btd.where(lsm.isin(self.values_land))], low_cloud_color=self.color, **kwargs) + res = super().__call__([btd.where(lsm.isin(self.values_land))], **kwargs) # Call CloudCompositor for sea/water surface pixels self.transition_min, self.transition_max = self.range_sea - res_sea = super().__call__([btd.where(lsm.isin(self.values_sea))], low_cloud_color=self.color, **kwargs) + res_sea = super().__call__([btd.where(lsm.isin(self.values_sea))], **kwargs) # Compine resutls for land and sea/water surface pixels res = res.where(lsm.isin(self.values_land), res_sea) diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py index 0ae8fd45c0..29f2cbdf54 100644 --- a/satpy/enhancements/__init__.py +++ b/satpy/enhancements/__init__.py @@ -358,39 +358,16 @@ def _merge_colormaps(kwargs, img=None): from trollimage.colormap import Colormap full_cmap = None - # TODO - # - Improve check if both palettes and monochromatic are set - # - Improve exception handling for monochromatic cases - # - Resolve RunTimeWarnings - - if 'palettes' in kwargs: - palette = kwargs['palettes'] - if isinstance(palette, Colormap): - full_cmap = palette - else: - for itm in palette: - cmap = create_colormap(itm, img) - if full_cmap is None: - full_cmap = cmap - else: - full_cmap = full_cmap + cmap - - if 'monochromatic' in kwargs: - palette = {} - color = kwargs['monochromatic'].get('color', None) - if color is None: - # TODO: add error - pass - elif isinstance(color, (list, tuple)): - palette['colors'] = [color] - elif isinstance(color, str): - var = img.data.attrs.get(color, None) - if not isinstance(var, (tuple, list)): - # TODO: add error - pass - palette['colors'] = [var] - - full_cmap = create_colormap(palette, img) + palette = kwargs['palettes'] + if isinstance(palette, Colormap): + full_cmap = palette + else: + for itm in palette: + cmap = create_colormap(itm, img) + if full_cmap is None: + full_cmap = cmap + else: + full_cmap = full_cmap + cmap return full_cmap diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index f0543dd7d5..5c73eea7e9 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -539,7 +539,6 @@ composites: compositor: !!python/name:satpy.composites.LowCloudCompositor values_sea: 0 values_land: 100 - color: [140.25, 191.25, 249.9] prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: @@ -567,7 +566,7 @@ composites: geo_color_night_without_background: compositor: 
!!python/name:satpy.composites.BackgroundCompositor - standard_name: night_ir_with_background + standard_name: night_ir prerequisites: - geo_color_low_clouds - geo_color_high_clouds diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 2094327918..362625fa15 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -866,8 +866,8 @@ enhancements: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: - monochromatic: - color: low_cloud_color + palettes: + - {colors: [[140.25, 191.25, 249.9]]} colorized_ir_clouds: standard_name: colorized_ir_clouds From 6bf7ffa640f2c9e6796db60913404f899b2e7d2e Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 16 May 2023 15:33:19 +0200 Subject: [PATCH 056/702] Fix enhancement --- satpy/etc/enhancements/generic.yaml | 13 +++---- satpy/readers/sar_c_safe.py | 60 ++++++++++++++--------------- 2 files changed, 34 insertions(+), 39 deletions(-) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 42eb36a1a6..fae957b160 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -338,14 +338,11 @@ enhancements: method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude - min_stretch: [0, -19.18, 0] - max_stretch: [0.07, -1.294, .43] - #min_stretch: [0, -30, 0] - #max_stretch: [1, 10, 2] - - name: gamma - method: !!python/name:satpy.enhancements.gamma - kwargs: - gamma: [1.82, 0.74, 1] + # R -- VH: 0.00109 to 0.0594 + # G -- VV_db: -17.57 to -3.3 + # B -- VV: 0.00332 to 0.3 + min_stretch: [0.00109, -17.57, 0.00332] + max_stretch: [0.0594, -3.3, .3] sar-quick: standard_name: sar-quick diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 7a8002a95b..4b2d214187 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -71,7 +71,8 @@ def _dictify(r): return r.text for x in r.findall("./*"): if x.tag in d and not isinstance(d[x.tag], list): - d[x.tag] = [d[x.tag], _dictify(x)] + d[x.tag] = [d[x.tag]] + d[x.tag].append(_dictify(x)) else: d[x.tag] = _dictify(x) return d @@ -173,7 +174,8 @@ def get_calibration_constant(self): def _get_calibration_uncached(self, calibration, chunks=None): """Get the calibration array.""" calibration_name = _get_calibration_name(calibration) - return self._get_calibration_vector(calibration_name, chunks) + calibration_vector = self._get_calibration_vector(calibration_name, chunks) + return calibration_vector def _get_calibration_vector(self, calibration_name, chunks): """Get the calibration vector.""" @@ -255,7 +257,9 @@ def __init__(self, root, shape): def read_azimuth_noise_array(self, chunks=CHUNK_SIZE): """Read the azimuth noise vectors.""" self._read_azimuth_noise_blocks(chunks) - return self._assemble_azimuth_noise_blocks(chunks) + populated_array = self._assemble_azimuth_noise_blocks(chunks) + + return populated_array def _read_azimuth_noise_blocks(self, chunks): """Read the azimuth noise blocks.""" @@ -295,7 +299,9 @@ def _create_dask_slice_from_block_line(self, current_line, chunks): """Create a dask slice from the blocks at the current line.""" pieces = self._get_array_pieces_for_current_line(current_line) dask_pieces = self._get_padded_dask_pieces(pieces, chunks) - return da.hstack(dask_pieces) + new_slice = da.hstack(dask_pieces) + + return new_slice def _get_array_pieces_for_current_line(self, current_line): """Get the array pieces that cover the current line.""" @@ -308,11 +314,11 @@ def 
_get_array_pieces_for_current_line(self, current_line): def _find_blocks_covering_line(self, current_line): """Find the blocks covering a given line.""" - return [ - block - for block in self.blocks - if block.coords['y'][0] <= current_line <= block.coords['y'][-1] - ] + current_blocks = [] + for block in self.blocks: + if block.coords['y'][0] <= current_line <= block.coords['y'][-1]: + current_blocks.append(block) + return current_blocks def _get_next_start_line(self, current_blocks, current_line): next_line = min((arr.coords['y'][-1] for arr in current_blocks)) + 1 @@ -572,34 +578,24 @@ def get_dataset(self, key, info): if key['name'] in ['longitude', 'latitude', 'altitude']: logger.debug('Constructing coordinate arrays.') - arrays = {} + arrays = dict() arrays['longitude'], arrays['latitude'], arrays['altitude'] = self.get_lonlatalts() data = arrays[key['name']] data.attrs.update(info) else: - data = self.get_measurement(key, info) - return data - - def get_measurement(self, key, info): - """Get the measurement data.""" - result = rioxarray.open_rasterio( - self.filename, lock=False, chunks=(1, CHUNK_SIZE, CHUNK_SIZE) - ).squeeze() - - result = result.assign_coords( - x=np.arange(len(result.coords['x'])), - y=np.arange(len(result.coords['y'])), - ) - - result = self._calibrate_and_denoise(result, key) - result.attrs.update(info) - result.attrs.update({'platform_name': self._mission_id}) + data = xr.open_dataset(self.filename, engine="rasterio", + chunks={"band": 1, "y": CHUNK_SIZE, "x": CHUNK_SIZE})["band_data"].squeeze() + data = data.assign_coords(x=np.arange(len(data.coords['x'])), + y=np.arange(len(data.coords['y']))) + data = self._calibrate_and_denoise(data, key) + data.attrs.update(info) + data.attrs.update({'platform_name': self._mission_id}) - result = self._change_quantity(result, key['quantity']) + data = self._change_quantity(data, key['quantity']) - return result + return data @staticmethod def _change_quantity(data, quantity): @@ -626,7 +622,8 @@ def _get_digital_number(self, data): """Get the digital numbers (uncalibrated data).""" data = data.where(data > 0) data = data.astype(np.float64) - return data * data + dn = data * data + return dn def _denoise(self, dn, chunks): """Denoise the data.""" @@ -641,7 +638,8 @@ def _calibrate(self, dn, chunks, key): cal = self.calibration.get_calibration(key['calibration'], chunks=chunks) cal_constant = self.calibration.get_calibration_constant() logger.debug('Calibrating.') - return ((dn + cal_constant) / (cal ** 2)).clip(min=0) + data = ((dn + cal_constant) / (cal ** 2)).clip(min=0) + return data def _get_lonlatalts_uncached(self): """Obtain GCPs and construct latitude and longitude arrays. 
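
The sar_c_safe.py changes above replace the rioxarray call with xarray's "rasterio" engine (provided by rioxarray) and keep the squared-digital-number calibration. Below is a minimal, standalone sketch of that loading and calibration pattern; the file name, chunk size, and the flat calibration array/constant are placeholders for illustration, not values taken from the reader.

import numpy as np
import xarray as xr

CHUNK = 1024  # placeholder chunk size, not Satpy's CHUNK_SIZE

# Open a single-band measurement GeoTIFF lazily, as the patched reader does.
data = xr.open_dataset("measurement.tiff", engine="rasterio",
                       chunks={"band": 1, "y": CHUNK, "x": CHUNK})["band_data"].squeeze()
data = data.assign_coords(x=np.arange(data.sizes["x"]),
                          y=np.arange(data.sizes["y"]))

# Digital numbers: mask non-positive values and square, as in _get_digital_number.
dn = data.where(data > 0).astype(np.float64) ** 2

# Stand-ins for the interpolated calibration vector and the calibration constant.
cal = xr.ones_like(dn)
cal_constant = 0.0

# Same calibration step as _calibrate: (dn + constant) / cal**2, clipped at zero.
sigma0 = ((dn + cal_constant) / (cal ** 2)).clip(min=0)
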
From 6d8eed79f4d273193b4b7d1a5177c3809bdd5434 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 22 May 2023 13:12:14 +0300 Subject: [PATCH 057/702] Add a blend method to create temporal RGB from MultiScene --- satpy/multiscene/__init__.py | 2 +- satpy/multiscene/_blend_funcs.py | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/satpy/multiscene/__init__.py b/satpy/multiscene/__init__.py index 3cfa907017..0338f47d77 100644 --- a/satpy/multiscene/__init__.py +++ b/satpy/multiscene/__init__.py @@ -1,4 +1,4 @@ """Functions and classes related to MultiScene functionality.""" -from ._blend_funcs import stack, timeseries # noqa +from ._blend_funcs import stack, temporal_rgb, timeseries # noqa from ._multiscene import MultiScene # noqa diff --git a/satpy/multiscene/_blend_funcs.py b/satpy/multiscene/_blend_funcs.py index 0210cef5cc..e8d25e5f0a 100644 --- a/satpy/multiscene/_blend_funcs.py +++ b/satpy/multiscene/_blend_funcs.py @@ -178,3 +178,22 @@ def timeseries(datasets): res = xr.concat(expanded_ds, dim="time") res.attrs = combine_metadata(*[x.attrs for x in expanded_ds]) return res + + +def temporal_rgb( + data_arrays: Sequence[xr.DataArray], + weights: Optional[Sequence[xr.DataArray]] = None, + combine_times: bool = True, + blend_type: str = 'select_with_weights' +) -> xr.DataArray: + """Combine a series of datasets as a temporal RGB. + + The first dataset is used as the Red component of the new composite, the second as Green and the third as Blue. + All the other datasets are discarded. + """ + from satpy.composites import GenericCompositor + + compositor = GenericCompositor("temporal_composite") + composite = compositor((data_arrays[0], data_arrays[1], data_arrays[2]), attrs=data_arrays[2].attrs) + + return composite From 72ccf1f439178fa8d629701b64c8608906ff9def Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 22 May 2023 13:42:05 +0300 Subject: [PATCH 058/702] Remove extra parameters --- satpy/multiscene/_blend_funcs.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/satpy/multiscene/_blend_funcs.py b/satpy/multiscene/_blend_funcs.py index e8d25e5f0a..ff2749db81 100644 --- a/satpy/multiscene/_blend_funcs.py +++ b/satpy/multiscene/_blend_funcs.py @@ -182,9 +182,6 @@ def timeseries(datasets): def temporal_rgb( data_arrays: Sequence[xr.DataArray], - weights: Optional[Sequence[xr.DataArray]] = None, - combine_times: bool = True, - blend_type: str = 'select_with_weights' ) -> xr.DataArray: """Combine a series of datasets as a temporal RGB. 
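
The new temporal_rgb blend function above uses the first three datasets as the Red, Green and Blue components and ignores the rest. A short usage sketch mirroring the test data added later in this series (the array values and timestamps are illustrative only):

from datetime import datetime

import xarray as xr

from satpy.multiscene import temporal_rgb

red = xr.DataArray([1, 0, 0], attrs={"start_time": datetime(2023, 5, 22, 9, 0)})
green = xr.DataArray([0, 1, 0], attrs={"start_time": datetime(2023, 5, 22, 10, 0)})
blue = xr.DataArray([0, 0, 1], attrs={"start_time": datetime(2023, 5, 22, 11, 0)})

# First array -> Red, second -> Green, third -> Blue; extra arrays are ignored.
rgb = temporal_rgb([red, green, blue])

# After the attribute fix later in this series, the composite carries the
# attributes (and hence the start_time) of the most recent input.
print(rgb.attrs["start_time"])

Like stack and timeseries, temporal_rgb can also be handed to MultiScene.blend() as the blending function, as the docstring update later in this series notes.
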
From 3c48c53a1c51a966b6cbd9628f510380cfb491af Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 22 May 2023 13:49:51 +0300 Subject: [PATCH 059/702] Fix using the attributes from the latest scene --- satpy/multiscene/_blend_funcs.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/multiscene/_blend_funcs.py b/satpy/multiscene/_blend_funcs.py index ff2749db81..82597aa3fc 100644 --- a/satpy/multiscene/_blend_funcs.py +++ b/satpy/multiscene/_blend_funcs.py @@ -191,6 +191,7 @@ def temporal_rgb( from satpy.composites import GenericCompositor compositor = GenericCompositor("temporal_composite") - composite = compositor((data_arrays[0], data_arrays[1], data_arrays[2]), attrs=data_arrays[2].attrs) + composite = compositor((data_arrays[0], data_arrays[1], data_arrays[2])) + composite.attrs = data_arrays[2].attrs return composite From ef355b4e9c9c88dfd968540d7a29a6a3a54906d0 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 22 May 2023 14:49:15 +0300 Subject: [PATCH 060/702] Add tests for temporal RGB blending --- satpy/tests/multiscene_tests/test_blend.py | 43 +++++++++++++++++++++- 1 file changed, 42 insertions(+), 1 deletion(-) diff --git a/satpy/tests/multiscene_tests/test_blend.py b/satpy/tests/multiscene_tests/test_blend.py index 6b85dd9d79..a4aca901d2 100644 --- a/satpy/tests/multiscene_tests/test_blend.py +++ b/satpy/tests/multiscene_tests/test_blend.py @@ -234,7 +234,6 @@ def test_blend_two_scenes_bad_blend_type(self, multi_scene_and_weights, groups): simple_groups = {DataQuery(name='CloudType'): groups[DataQuery(name='CloudType')]} multi_scene.group(simple_groups) - weights = [weights[0][0], weights[1][0]] stack_func = partial(stack, weights=weights, blend_type="i_dont_exist") with pytest.raises(ValueError): @@ -390,3 +389,45 @@ def _check_stacked_metadata(data_arr: xr.DataArray, exp_name: str) -> None: assert 'sensor' not in data_arr.attrs assert 'platform_name' not in data_arr.attrs assert 'long_name' not in data_arr.attrs + + +class TestTemporalRGB: + """Test the temporal RGB blending method.""" + + @pytest.fixture + def nominal_data(self): + """Return the input arrays for the nominal use case.""" + da1 = xr.DataArray([1, 0, 0], attrs={'start_time': datetime(2023, 5, 22, 9, 0, 0)}) + da2 = xr.DataArray([0, 1, 0], attrs={'start_time': datetime(2023, 5, 22, 10, 0, 0)}) + da3 = xr.DataArray([0, 0, 1], attrs={'start_time': datetime(2023, 5, 22, 11, 0, 0)}) + + return [da1, da2, da3] + + @pytest.fixture + def expected_result(self): + """Return the expected result arrays.""" + return [[1, 0, 0], [0, 1, 0], [0, 0, 1]] + + @staticmethod + def _assert_results(res, expected_start_time, expected_result): + assert res.attrs['start_time'] == expected_start_time + for i in range(3): + np.testing.assert_equal(res.data[i, :], expected_result[i]) + + def test_nominal(self, nominal_data, expected_result): + """Test that nominal usage with 3 datasets works.""" + from satpy.multiscene import temporal_rgb + + res = temporal_rgb(nominal_data) + + self._assert_results(res, nominal_data[-1].attrs['start_time'], expected_result) + + def test_extra_datasets(self, nominal_data, expected_result): + """Test that only the first three arrays affect the usage.""" + from satpy.multiscene import temporal_rgb + + da4 = xr.DataArray([0, 0, 1], attrs={'start_time': datetime(2023, 5, 22, 12, 0, 0)}) + + res = temporal_rgb(nominal_data + da4) + + self._assert_results(res, nominal_data[-1].attrs['start_time'], expected_result) From 02b971c4d7156853aebe831dfc479d7eab1a964c Mon Sep 17 
00:00:00 2001 From: Panu Lahtinen Date: Mon, 22 May 2023 14:55:40 +0300 Subject: [PATCH 061/702] Update MultiScene.blend() docstring --- satpy/multiscene/_multiscene.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/multiscene/_multiscene.py b/satpy/multiscene/_multiscene.py index d803758b88..48c8db6b99 100644 --- a/satpy/multiscene/_multiscene.py +++ b/satpy/multiscene/_multiscene.py @@ -338,7 +338,7 @@ def blend( then assigns those datasets to the blended scene. Blending functions provided in this module are :func:`stack` - (the default) and :func:`timeseries`, but the Python built-in + (the default), :func:`timeseries` and :func:`temporal_rgb`, but the Python built-in function :func:`sum` also works and may be appropriate for some types of data. From 652e32515ac24b524df9c3edde86ca94d820898a Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 23 May 2023 08:18:32 +0300 Subject: [PATCH 062/702] Update satpy/multiscene/_multiscene.py Co-authored-by: David Hoese --- satpy/multiscene/_multiscene.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/multiscene/_multiscene.py b/satpy/multiscene/_multiscene.py index 48c8db6b99..c93f5706bc 100644 --- a/satpy/multiscene/_multiscene.py +++ b/satpy/multiscene/_multiscene.py @@ -338,7 +338,7 @@ def blend( then assigns those datasets to the blended scene. Blending functions provided in this module are :func:`stack` - (the default), :func:`timeseries` and :func:`temporal_rgb`, but the Python built-in + (the default), :func:`timeseries`, and :func:`temporal_rgb`, but the Python built-in function :func:`sum` also works and may be appropriate for some types of data. From 6c41a99c7b0f76b92330b09a3b2ba7df1ed1245b Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 23 May 2023 08:21:41 +0300 Subject: [PATCH 063/702] Use explicit indices in test assertions --- satpy/tests/multiscene_tests/test_blend.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/satpy/tests/multiscene_tests/test_blend.py b/satpy/tests/multiscene_tests/test_blend.py index a4aca901d2..2fca990f21 100644 --- a/satpy/tests/multiscene_tests/test_blend.py +++ b/satpy/tests/multiscene_tests/test_blend.py @@ -411,8 +411,9 @@ def expected_result(self): @staticmethod def _assert_results(res, expected_start_time, expected_result): assert res.attrs['start_time'] == expected_start_time - for i in range(3): - np.testing.assert_equal(res.data[i, :], expected_result[i]) + np.testing.assert_equal(res.data[0, :], expected_result[0]) + np.testing.assert_equal(res.data[1, :], expected_result[1]) + np.testing.assert_equal(res.data[2, :], expected_result[2]) def test_nominal(self, nominal_data, expected_result): """Test that nominal usage with 3 datasets works.""" From ec911dbe090691246431bcf81a588bf811f47f9f Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 25 May 2023 10:17:29 +0300 Subject: [PATCH 064/702] Fix syntax error adding an item to a list --- satpy/tests/multiscene_tests/test_blend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/multiscene_tests/test_blend.py b/satpy/tests/multiscene_tests/test_blend.py index 2fca990f21..7140d98c8e 100644 --- a/satpy/tests/multiscene_tests/test_blend.py +++ b/satpy/tests/multiscene_tests/test_blend.py @@ -429,6 +429,6 @@ def test_extra_datasets(self, nominal_data, expected_result): da4 = xr.DataArray([0, 0, 1], attrs={'start_time': datetime(2023, 5, 22, 12, 0, 0)}) - res = temporal_rgb(nominal_data + da4) + res = temporal_rgb(nominal_data + [da4,]) 
self._assert_results(res, nominal_data[-1].attrs['start_time'], expected_result) From 7ff8e1c4d6de971962701d983dac6d7f4fe4c22c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 9 Jun 2023 21:10:42 -0500 Subject: [PATCH 065/702] Convert ABI L2 AOD test to pytest --- satpy/tests/reader_tests/test_abi_l2_nc.py | 46 ++++++++++++++-------- 1 file changed, 29 insertions(+), 17 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py index 63014685f9..5cdfb2e776 100644 --- a/satpy/tests/reader_tests/test_abi_l2_nc.py +++ b/satpy/tests/reader_tests/test_abi_l2_nc.py @@ -15,7 +15,7 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . """The abi_l2_nc reader tests package.""" - +import contextlib import unittest from unittest import mock @@ -281,13 +281,11 @@ def test_get_area_def_latlon(self, adef): np.testing.assert_allclose(call_args[6], (-85.0, -20.0, -65.0, 20)) -class Test_NC_ABI_L2_area_AOD(unittest.TestCase): +class Test_NC_ABI_L2_area_AOD: """Test the NC_ABI_L2 reader for the AOD product.""" - @mock.patch('satpy.readers.abi_base.xr') - def setUp(self, xr_): + def setup_method(self, xr_): """Create fake data for the tests.""" - from satpy.readers.abi_l2_nc import NC_ABI_L2 proj = xr.DataArray( [], attrs={'semi_major_axis': 1., @@ -326,22 +324,36 @@ def setUp(self, xr_): 'RSR': xr.DataArray(np.ones((2, 2)), dims=('y', 'x')), }, ) - xr_.open_dataset.return_value = fake_dataset - - self.reader = NC_ABI_L2('filename', - {'platform_shortname': 'G16', 'observation_type': 'RSR', - 'scene_abbr': 'C', 'scan_mode': 'M3'}, - {'filetype': 'info'}) + self.fake_dataset = fake_dataset @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_xy(self, adef): """Test the area generation.""" - self.reader.get_area_def(None) + with _create_reader_for_fake_data(self.fake_dataset) as reader: + reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - self.assertDictEqual(call_args[3], {'proj': 'latlong', 'a': 1.0, 'b': 1.0, 'fi': 1.0, 'pm': 0.0, - 'lon_0': -75.0, 'lat_0': 0.0}) - self.assertEqual(call_args[4], self.reader.ncols) - self.assertEqual(call_args[5], self.reader.nlines) + assert call_args[3] == {'proj': 'latlong', 'a': 1.0, 'b': 1.0, 'fi': 1.0, 'pm': 0.0, + 'lon_0': -75.0, 'lat_0': 0.0} + assert call_args[4] == reader.ncols + assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-85.0, -20.0, -65.0, 20)) + + +@contextlib.contextmanager +def _create_reader_for_fake_data(fake_dataset: xr.Dataset): + from satpy.readers.abi_l2_nc import NC_ABI_L2 + + reader_args = ( + "filename", + { + 'platform_shortname': 'G16', 'observation_type': 'RSR', + 'scene_abbr': 'C', 'scan_mode': 'M3' + }, + {'filetype': 'info'}, + ) + with mock.patch('satpy.readers.abi_base.xr') as xr_: + xr_.open_dataset.return_value = fake_dataset + reader = NC_ABI_L2(*reader_args) + yield reader From 6415c583b0579cf43136a6213fa53ed862b88aca Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 9 Jun 2023 21:21:33 -0500 Subject: [PATCH 066/702] Rewrite all ABI L2 tests with pytest --- satpy/tests/reader_tests/test_abi_l2_nc.py | 54 +++++++++------------- 1 file changed, 22 insertions(+), 32 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py index 5cdfb2e776..b08b0a2f62 100644 --- a/satpy/tests/reader_tests/test_abi_l2_nc.py +++ 
b/satpy/tests/reader_tests/test_abi_l2_nc.py @@ -16,7 +16,6 @@ # along with this program. If not, see . """The abi_l2_nc reader tests package.""" import contextlib -import unittest from unittest import mock import numpy as np @@ -94,10 +93,10 @@ def _create_mcmip_dataset(): return fake_dataset -class Test_NC_ABI_L2_base(unittest.TestCase): - """Test the NC_ABI_L2 reader.""" +class Test_NC_ABI_L2_get_dataset: + """Test get dataset function of the NC_ABI_L2 reader.""" - def setUp(self): + def setup_method(self): """Create fake data for the tests.""" from satpy.readers.abi_l2_nc import NC_ABI_L2 fake_cmip_dataset = _create_cmip_dataset() @@ -116,10 +115,6 @@ def setUp(self): }, ) - -class Test_NC_ABI_L2_get_dataset(Test_NC_ABI_L2_base): - """Test get dataset function of the NC_ABI_L2 reader.""" - def test_get_dataset(self): """Test basic L2 load.""" from satpy.tests.utils import make_dataid @@ -144,7 +139,7 @@ def test_get_dataset(self): 'timeline_ID': None, 'units': 'm'} - self.assertTrue(np.allclose(res.data, exp_data, equal_nan=True)) + np.testing.assert_allclose(res.data, exp_data, equal_nan=True) _compare_subdict(res.attrs, exp_attrs) _assert_orbital_parameters(res.attrs['orbital_parameters']) @@ -198,30 +193,29 @@ def test_mcmip_get_dataset(self, xr_): _assert_orbital_parameters(res.attrs['orbital_parameters']) -class Test_NC_ABI_L2_area_fixedgrid(Test_NC_ABI_L2_base): +class Test_NC_ABI_L2_area_fixedgrid: """Test the NC_ABI_L2 reader.""" @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_fixedgrid(self, adef): """Test the area generation.""" - self.reader.get_area_def(None) + with _create_reader_for_fake_data(_create_cmip_dataset()) as reader: + reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - self.assertDictEqual(call_args[3], {'a': 1.0, 'b': 1.0, 'h': 1.0, 'lon_0': -90.0, - 'proj': 'geos', 'sweep': 'x', 'units': 'm'}) - self.assertEqual(call_args[4], self.reader.ncols) - self.assertEqual(call_args[5], self.reader.nlines) + assert call_args[3] == {'a': 1.0, 'b': 1.0, 'h': 1.0, 'lon_0': -90.0, + 'proj': 'geos', 'sweep': 'x', 'units': 'm'} + assert call_args[4] == reader.ncols + assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-2., -2., 2., 2.)) -class Test_NC_ABI_L2_area_latlon(unittest.TestCase): +class Test_NC_ABI_L2_area_latlon: """Test the NC_ABI_L2 reader.""" - @mock.patch('satpy.readers.abi_base.xr') - def setUp(self, xr_): + def setup_method(self): """Create fake data for the tests.""" - from satpy.readers.abi_l2_nc import NC_ABI_L2 proj = xr.DataArray( [], attrs={'semi_major_axis': 1., @@ -260,24 +254,20 @@ def setUp(self, xr_): 'RSR': xr.DataArray(np.ones((2, 2)), dims=('lat', 'lon')), }, ) - xr_.open_dataset.return_value = fake_dataset - - self.reader = NC_ABI_L2('filename', - {'platform_shortname': 'G16', 'observation_type': 'RSR', - 'scene_abbr': 'C', 'scan_mode': 'M3'}, - {'filetype': 'info'}) + self.fake_dataset = fake_dataset @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_latlon(self, adef): """Test the area generation.""" - self.reader.get_area_def(None) + with _create_reader_for_fake_data(self.fake_dataset) as reader: + reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - self.assertDictEqual(call_args[3], {'proj': 'latlong', 'a': 1.0, 'b': 1.0, 'fi': 1.0, 'pm': 0.0, - 'lon_0': -75.0, 'lat_0': 0.0}) - 
self.assertEqual(call_args[4], self.reader.ncols) - self.assertEqual(call_args[5], self.reader.nlines) + assert call_args[3] == {'proj': 'latlong', 'a': 1.0, 'b': 1.0, 'fi': 1.0, 'pm': 0.0, + 'lon_0': -75.0, 'lat_0': 0.0} + assert call_args[4] == reader.ncols + assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-85.0, -20.0, -65.0, 20)) From 91494eec23548af754c4b69402d285c9de879875 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 9 Jun 2023 21:48:14 -0500 Subject: [PATCH 067/702] More ABI L2 test cleanup --- satpy/tests/reader_tests/test_abi_l2_nc.py | 36 ++++++---------------- 1 file changed, 9 insertions(+), 27 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py index b08b0a2f62..2d566770ae 100644 --- a/satpy/tests/reader_tests/test_abi_l2_nc.py +++ b/satpy/tests/reader_tests/test_abi_l2_nc.py @@ -96,30 +96,12 @@ def _create_mcmip_dataset(): class Test_NC_ABI_L2_get_dataset: """Test get dataset function of the NC_ABI_L2 reader.""" - def setup_method(self): - """Create fake data for the tests.""" - from satpy.readers.abi_l2_nc import NC_ABI_L2 - fake_cmip_dataset = _create_cmip_dataset() - with mock.patch('satpy.readers.abi_base.xr') as xr_: - xr_.open_dataset.return_value = fake_cmip_dataset - self.reader = NC_ABI_L2( - 'filename', - { - 'platform_shortname': 'G16', - 'scan_mode': 'M3', - 'scene_abbr': 'M1', - }, - { - 'file_type': 'info', - 'observation_type': 'ACHA', - }, - ) - def test_get_dataset(self): """Test basic L2 load.""" from satpy.tests.utils import make_dataid key = make_dataid(name='HT') - res = self.reader.get_dataset(key, {'file_key': 'HT'}) + with _create_reader_for_fake_data("ACHA", _create_cmip_dataset()) as reader: + res = reader.get_dataset(key, {'file_key': 'HT'}) exp_data = np.array([[2 * 0.3052037, np.nan], [32768 * 0.3052037, 32767 * 0.3052037]]) @@ -133,7 +115,7 @@ def test_get_dataset(self): 'platform_shortname': 'G16', 'production_site': None, 'scan_mode': 'M3', - 'scene_abbr': 'M1', + 'scene_abbr': 'C', 'scene_id': None, 'sensor': 'abi', 'timeline_ID': None, @@ -199,7 +181,7 @@ class Test_NC_ABI_L2_area_fixedgrid: @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_fixedgrid(self, adef): """Test the area generation.""" - with _create_reader_for_fake_data(_create_cmip_dataset()) as reader: + with _create_reader_for_fake_data("RSR", _create_cmip_dataset()) as reader: reader.get_area_def(None) assert adef.call_count == 1 @@ -259,7 +241,7 @@ def setup_method(self): @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_latlon(self, adef): """Test the area generation.""" - with _create_reader_for_fake_data(self.fake_dataset) as reader: + with _create_reader_for_fake_data("RSR", self.fake_dataset) as reader: reader.get_area_def(None) assert adef.call_count == 1 @@ -319,7 +301,7 @@ def setup_method(self, xr_): @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_xy(self, adef): """Test the area generation.""" - with _create_reader_for_fake_data(self.fake_dataset) as reader: + with _create_reader_for_fake_data("RSR", self.fake_dataset) as reader: reader.get_area_def(None) assert adef.call_count == 1 @@ -332,16 +314,16 @@ def test_get_area_def_xy(self, adef): @contextlib.contextmanager -def _create_reader_for_fake_data(fake_dataset: xr.Dataset): +def _create_reader_for_fake_data(observation_type: str, fake_dataset: xr.Dataset): from satpy.readers.abi_l2_nc import NC_ABI_L2 
reader_args = ( "filename", { - 'platform_shortname': 'G16', 'observation_type': 'RSR', + 'platform_shortname': 'G16', 'scene_abbr': 'C', 'scan_mode': 'M3' }, - {'filetype': 'info'}, + {'file_type': 'info', 'observation_type': observation_type}, ) with mock.patch('satpy.readers.abi_base.xr') as xr_: xr_.open_dataset.return_value = fake_dataset From b0cabedd0d0575d4794ca9d4e6d5fe7cca50f0cf Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 13 Jun 2023 20:58:25 -0500 Subject: [PATCH 068/702] Add Low Cloud and Fog products to ABI L2 reader --- satpy/etc/readers/abi_l2_nc.yaml | 31 +++++++++++++++++++++ satpy/readers/abi_base.py | 3 ++ satpy/readers/abi_l2_nc.py | 2 +- satpy/tests/reader_tests/test_abi_l2_nc.py | 32 ++++++++++++++-------- 4 files changed, 55 insertions(+), 13 deletions(-) diff --git a/satpy/etc/readers/abi_l2_nc.yaml b/satpy/etc/readers/abi_l2_nc.yaml index 9bf69a1b7f..7c0f1a75ac 100644 --- a/satpy/etc/readers/abi_l2_nc.yaml +++ b/satpy/etc/readers/abi_l2_nc.yaml @@ -331,6 +331,27 @@ datasets: file_type: abi_l2_nav file_key: Latitude +# ---Low Cloud and Fog (GFLS) Products --- + mvfr_prob: + name: MVFR_Fog_Prob + file_type: abi_l2_gfls + file_key: MVFR_Fog_Prob + + ifr_prob: + name: IFR_Fog_Prob + file_type: abi_l2_gfls + file_key: IFR_Fog_Prob + + lifr_prob: + name: LIFR_Fog_Prob + file_type: abi_l2_gfls + file_key: LIFR_Fog_Prob + + fog_depth: + name: Fog_Depth + file_type: abi_l2_gfls + file_key: Fog_Depth + # ---- file_types: @@ -550,3 +571,13 @@ file_types: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-NAV{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "NAV" + + # Low Cloud and Fog + abi_l2_gfls: + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: + # AIT scheme: GOES16_ABI_2KM_MESO_2019147_1800_48_AVIATION_FOG_EN.nc + - '{platform_shortname:s}_{mission_id:3s}_2KM_{scene_abbr:s}_{start_time:%Y%j_%H%M}_{file_product:s}_{algorithm_type:2s}.nc' + # NDE scheme: ABI-L2-GFLSC-M6_v3r1_g16_s202306071931181_e202306071933554_c202306071934440.nc + - '{mission_id:3s}-L2-GFLS{scene_abbr:s}-{scan_mode:2s}_v{sw_version:d}r{sw_revision:d}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' + observation_type: "GFLS" diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 5f55170b6f..5f4cf506fe 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -38,6 +38,9 @@ 'G17': 'GOES-17', 'G18': 'GOES-18', 'G19': 'GOES-19', + 'GOES16': 'GOES-16', + 'GOES17': 'GOES-17', + 'GOES18': 'GOES-18', } diff --git a/satpy/readers/abi_l2_nc.py b/satpy/readers/abi_l2_nc.py index 1e83d2e7ef..a152790197 100644 --- a/satpy/readers/abi_l2_nc.py +++ b/satpy/readers/abi_l2_nc.py @@ -61,7 +61,7 @@ def _update_data_arr_with_filename_attrs(self, variable): # add in information from the filename that may be useful to the user for attr in ('scene_abbr', 'scan_mode', 'platform_shortname'): - variable.attrs[attr] = self.filename_info[attr] + variable.attrs[attr] = self.filename_info.get(attr) # add in information hardcoded in the filetype YAML for attr in ('observation_type',): diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py index 2d566770ae..b05ceb0f64 100644 --- a/satpy/tests/reader_tests/test_abi_l2_nc.py +++ b/satpy/tests/reader_tests/test_abi_l2_nc.py @@ 
-16,13 +16,14 @@ # along with this program. If not, see . """The abi_l2_nc reader tests package.""" import contextlib +from typing import Optional from unittest import mock import numpy as np import xarray as xr -def _create_cmip_dataset(): +def _create_cmip_dataset(data_variable: str = "HT"): proj = xr.DataArray( [], attrs={ @@ -57,7 +58,7 @@ def _create_cmip_dataset(): 'goes_imager_projection': proj, 'x': x__, 'y': y__, - 'HT': ht_da, + data_variable: ht_da, "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02), @@ -86,11 +87,7 @@ def _assert_orbital_parameters(orb_params): def _create_mcmip_dataset(): - fake_dataset = _create_cmip_dataset() - fake_dataset = fake_dataset.copy(deep=True) - fake_dataset['CMI_C14'] = fake_dataset['HT'] - del fake_dataset['HT'] - return fake_dataset + return _create_cmip_dataset("CMI_C14") class Test_NC_ABI_L2_get_dataset: @@ -125,6 +122,15 @@ def test_get_dataset(self): _compare_subdict(res.attrs, exp_attrs) _assert_orbital_parameters(res.attrs['orbital_parameters']) + def test_get_dataset_gfls(self): + """Test that Low Cloud and Fog filenames work.""" + from satpy.tests.utils import make_dataid + filename_info = {'platform_shortname': 'GOES16', 'scene_abbr': 'FD'} + key = make_dataid(name='MVFR_Fog_Prob') + with _create_reader_for_fake_data("GFLS", _create_cmip_dataset("MVFR_Fog_Prob"), filename_info) as reader: + res = reader.get_dataset(key, {'file_key': 'MVFR_Fog_Prob'}) + assert res.attrs["platform_name"] == "GOES-16" + class TestMCMIPReading: """Test cases of the MCMIP file format.""" @@ -314,15 +320,17 @@ def test_get_area_def_xy(self, adef): @contextlib.contextmanager -def _create_reader_for_fake_data(observation_type: str, fake_dataset: xr.Dataset): +def _create_reader_for_fake_data(observation_type: str, fake_dataset: xr.Dataset, filename_info: Optional[dict] = None): from satpy.readers.abi_l2_nc import NC_ABI_L2 - reader_args = ( - "filename", - { + if filename_info is None: + filename_info = { 'platform_shortname': 'G16', 'scene_abbr': 'C', 'scan_mode': 'M3' - }, + } + reader_args = ( + "filename", + filename_info, {'file_type': 'info', 'observation_type': observation_type}, ) with mock.patch('satpy.readers.abi_base.xr') as xr_: From 6d881054f1e3a799c95e2fec27d5765c26027599 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 21 Jun 2023 11:19:46 -0500 Subject: [PATCH 069/702] Add default enhancements for low cloud/fog products --- satpy/etc/enhancements/abi.yaml | 38 +++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/satpy/etc/enhancements/abi.yaml b/satpy/etc/enhancements/abi.yaml index c1ef573d07..b89d2d0785 100644 --- a/satpy/etc/enhancements/abi.yaml +++ b/satpy/etc/enhancements/abi.yaml @@ -235,3 +235,41 @@ enhancements: min_value: 0.0, max_value: 1.0, } + + # L2 low cloud/fog products + mvfr_prob: + name: MVFR_Fog_Prob + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: 0 + max_stretch: 100 + ifr_prob: + name: IFR_Fog_Prob + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: 0 + max_stretch: 100 + lifr_prob: + name: LIFR_Fog_Prob + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: 0 + max_stretch: 100 + fog_depth: + name: Fog_Depth + operations: + - name: stretch + method: 
!!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: 0 + max_stretch: 500 From c3205c68fba7cbebf368d5d0c0506d69201494fd Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 21 Jun 2023 21:44:17 +0100 Subject: [PATCH 070/702] Include documentation of how to add text to an image. --- doc/source/writers.rst | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/doc/source/writers.rst b/doc/source/writers.rst index f453f4d5a5..64be014461 100644 --- a/doc/source/writers.rst +++ b/doc/source/writers.rst @@ -136,3 +136,34 @@ and save them all at once. ... compute=False) >>> results = [res1, res2] >>> compute_writer_results(results) + + +Adding text to images +===================== + +Satpy, via `pydecorate`, can add text to images when they're being saved. +To use this functionality, you must create a dictionary describing the text +to be added. + +:: + + >>> decodict = {'decorate': [{'text': {'txt': f' {my_text}', + >>> 'align': {'top_bottom': 'top', 'left_right': 'left'}, + >>> 'font': , + >>> 'font_size': 48, + >>> 'line': 'white', + >>> 'bg_opacity': 255, + >>> 'bg': 'black', + >>> 'height': 30, + >>> }}]} + +Where `my_text` is the text you wish to add and `` is the +location of the font file you wish to use, often in `/usr/share/fonts/` + +This dictionary can then be passed to the `save_dataset` or `save_datasets` command. + +:: + + >>> scene.save_dataset(my_dataset, writer='simple_image', fill_value=False, + >>> decorate=decodict) + From be2bd8756c18bb6d9fbf33339d7282454fe0b3cc Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 22 Jun 2023 10:29:38 +0100 Subject: [PATCH 071/702] Update docs describing text addition to saved images. --- doc/source/writers.rst | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/doc/source/writers.rst b/doc/source/writers.rst index 64be014461..12ee786f56 100644 --- a/doc/source/writers.rst +++ b/doc/source/writers.rst @@ -141,29 +141,28 @@ and save them all at once. Adding text to images ===================== -Satpy, via `pydecorate`, can add text to images when they're being saved. +Satpy, via :doc:`pydecorate `, can add text to images when they're being saved. To use this functionality, you must create a dictionary describing the text to be added. -:: +.. code-block:: python >>> decodict = {'decorate': [{'text': {'txt': f' {my_text}', - >>> 'align': {'top_bottom': 'top', 'left_right': 'left'}, - >>> 'font': , - >>> 'font_size': 48, - >>> 'line': 'white', - >>> 'bg_opacity': 255, - >>> 'bg': 'black', - >>> 'height': 30, - >>> }}]} + ... 'align': {'top_bottom': 'top', 'left_right': 'left'}, + ... 'font': , + ... 'font_size': 48, + ... 'line': 'white', + ... 'bg_opacity': 255, + ... 'bg': 'black', + ... 'height': 30, + ... }}]} Where `my_text` is the text you wish to add and `` is the location of the font file you wish to use, often in `/usr/share/fonts/` This dictionary can then be passed to the `save_dataset` or `save_datasets` command. -:: +.. code-block:: python >>> scene.save_dataset(my_dataset, writer='simple_image', fill_value=False, - >>> decorate=decodict) - + ... 
decorate=decodict) From 5ac92a774ce9f1cf84ae9e35d81e823c82ff906f Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 22:17:22 +0200 Subject: [PATCH 072/702] Refactor area-related functions --- satpy/tests/writer_tests/cf_tests/__init__.py | 18 + .../tests/writer_tests/cf_tests/test_area.py | 401 ++++++++++++++++++ satpy/tests/writer_tests/test_cf.py | 379 +---------------- satpy/writers/cf/area.py | 192 +++++++++ satpy/writers/cf_writer.py | 178 +------- 5 files changed, 621 insertions(+), 547 deletions(-) create mode 100644 satpy/tests/writer_tests/cf_tests/__init__.py create mode 100644 satpy/tests/writer_tests/cf_tests/test_area.py create mode 100644 satpy/writers/cf/area.py diff --git a/satpy/tests/writer_tests/cf_tests/__init__.py b/satpy/tests/writer_tests/cf_tests/__init__.py new file mode 100644 index 0000000000..e654e26dcc --- /dev/null +++ b/satpy/tests/writer_tests/cf_tests/__init__.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""The CF dataset tests package.""" diff --git a/satpy/tests/writer_tests/cf_tests/test_area.py b/satpy/tests/writer_tests/cf_tests/test_area.py new file mode 100644 index 0000000000..e293ff39a6 --- /dev/null +++ b/satpy/tests/writer_tests/cf_tests/test_area.py @@ -0,0 +1,401 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
+"""Tests for the CF Area.""" +import dask.array as da +import numpy as np +import pytest +import xarray as xr +from pyresample import AreaDefinition, SwathDefinition + + +class TestCFArea: + """Test case for CF Area.""" + + def test_assert_xy_unique(self): + """Test that the x and y coordinates are unique.""" + from satpy.writers.cf.area import assert_xy_unique + + dummy = [[1, 2], [3, 4]] + datas = {'a': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), + 'b': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), + 'n': xr.DataArray(data=dummy, dims=('v', 'w'), coords={'v': [1, 2], 'w': [3, 4]})} + assert_xy_unique(datas) + + datas['c'] = xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 3], 'x': [3, 4]}) + with pytest.raises(ValueError): + assert_xy_unique(datas) + + def test_link_coords(self): + """Check that coordinates link has been established correctly.""" + from satpy.writers.cf.area import link_coords + + data = [[1, 2], [3, 4]] + lon = np.zeros((2, 2)) + lon2 = np.zeros((1, 2, 2)) + lat = np.ones((2, 2)) + datasets = { + 'var1': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon lat'}), + 'var2': xr.DataArray(data=data, dims=('y', 'x')), + 'var3': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon2 lat'}), + 'var4': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'not_exist lon lat'}), + 'lon': xr.DataArray(data=lon, dims=('y', 'x')), + 'lon2': xr.DataArray(data=lon2, dims=('time', 'y', 'x')), + 'lat': xr.DataArray(data=lat, dims=('y', 'x')) + } + + link_coords(datasets) + + # Check that link has been established correctly and 'coordinate' atrribute has been dropped + assert 'lon' in datasets['var1'].coords + assert 'lat' in datasets['var1'].coords + np.testing.assert_array_equal(datasets['var1']['lon'].data, lon) + np.testing.assert_array_equal(datasets['var1']['lat'].data, lat) + assert 'coordinates' not in datasets['var1'].attrs + + # There should be no link if there was no 'coordinate' attribute + assert 'lon' not in datasets['var2'].coords + assert 'lat' not in datasets['var2'].coords + + # The non-existent dimension or coordinate should be dropped + assert 'time' not in datasets['var3'].coords + assert 'not_exist' not in datasets['var4'].coords + + def test_make_alt_coords_unique(self): + """Test that created coordinate variables are unique.""" + from satpy.writers.cf.area import make_alt_coords_unique + + data = [[1, 2], [3, 4]] + y = [1, 2] + x = [1, 2] + time1 = [1, 2] + time2 = [3, 4] + datasets = {'var1': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x, 'acq_time': ('y', time1)}), + 'var2': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x, 'acq_time': ('y', time2)})} + + # Test that dataset names are prepended to alternative coordinates + res = make_alt_coords_unique(datasets) + np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) + np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) + assert 'acq_time' not in res['var1'].coords + assert 'acq_time' not in res['var2'].coords + + # Make sure nothing else is modified + np.testing.assert_array_equal(res['var1']['x'], x) + np.testing.assert_array_equal(res['var1']['y'], y) + np.testing.assert_array_equal(res['var2']['x'], x) + np.testing.assert_array_equal(res['var2']['y'], y) + + # Coords not unique -> Dataset names must be prepended, even if pretty=True + with pytest.warns(UserWarning, match='Cannot pretty-format "acq_time"'): + res = 
make_alt_coords_unique(datasets, pretty=True) + np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) + np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) + assert 'acq_time' not in res['var1'].coords + assert 'acq_time' not in res['var2'].coords + + # Coords unique and pretty=True -> Don't modify coordinate names + datasets['var2']['acq_time'] = ('y', time1) + res = make_alt_coords_unique(datasets, pretty=True) + np.testing.assert_array_equal(res['var1']['acq_time'], time1) + np.testing.assert_array_equal(res['var2']['acq_time'], time1) + assert 'var1_acq_time' not in res['var1'].coords + assert 'var2_acq_time' not in res['var2'].coords + + def test_area2cf(self): + """Test the conversion of an area to CF standards.""" + from satpy.writers.cf.area import area2cf + + ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, + attrs={'name': 'var1'}) + + # a) Area Definition and strict=False + geos = AreaDefinition( + area_id='geos', + description='geos', + proj_id='geos', + projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, + width=2, height=2, + area_extent=[-1, -1, 1, 1]) + ds = ds_base.copy(deep=True) + ds.attrs['area'] = geos + + res = area2cf(ds, include_lonlats=False) + assert len(res) == 2 + assert res[0].size == 1 # grid mapping variable + assert res[0].name == res[1].attrs['grid_mapping'] + + # b) Area Definition and include_lonlats=False + ds = ds_base.copy(deep=True) + ds.attrs['area'] = geos + res = area2cf(ds, include_lonlats=True) + # same as above + assert len(res) == 2 + assert res[0].size == 1 # grid mapping variable + assert res[0].name == res[1].attrs['grid_mapping'] + # but now also have the lon/lats + assert 'longitude' in res[1].coords + assert 'latitude' in res[1].coords + + # c) Swath Definition + swath = SwathDefinition(lons=[[1, 1], [2, 2]], lats=[[1, 2], [1, 2]]) + ds = ds_base.copy(deep=True) + ds.attrs['area'] = swath + + res = area2cf(ds, include_lonlats=False) + assert len(res) == 1 + assert 'longitude' in res[0].coords + assert 'latitude' in res[0].coords + assert 'grid_mapping' not in res[0].attrs + + def test__add_grid_mapping(self): + """Test the conversion from pyresample area object to CF grid mapping.""" + from satpy.writers.cf.area import _add_grid_mapping + + def _gm_matches(gmapping, expected): + """Assert that all keys in ``expected`` match the values in ``gmapping``.""" + for attr_key, attr_val in expected.attrs.items(): + test_val = gmapping.attrs[attr_key] + if attr_val is None or isinstance(attr_val, str): + assert test_val == attr_val + else: + np.testing.assert_almost_equal(test_val, attr_val, decimal=3) + + ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, + attrs={'name': 'var1'}) + + # a) Projection has a corresponding CF representation (e.g. geos) + a = 6378169. + b = 6356583.8 + h = 35785831. 
+ geos = AreaDefinition( + area_id='geos', + description='geos', + proj_id='geos', + projection={'proj': 'geos', 'h': h, 'a': a, 'b': b, + 'lat_0': 0, 'lon_0': 0}, + width=2, height=2, + area_extent=[-1, -1, 1, 1]) + geos_expected = xr.DataArray(data=0, + attrs={'perspective_point_height': h, + 'latitude_of_projection_origin': 0, + 'longitude_of_projection_origin': 0, + 'grid_mapping_name': 'geostationary', + 'semi_major_axis': a, + 'semi_minor_axis': b, + # 'sweep_angle_axis': None, + }) + + ds = ds_base.copy() + ds.attrs['area'] = geos + new_ds, grid_mapping = _add_grid_mapping(ds) + if 'sweep_angle_axis' in grid_mapping.attrs: + # older versions of pyproj might not include this + assert grid_mapping.attrs['sweep_angle_axis'] == 'y' + + assert new_ds.attrs['grid_mapping'] == 'geos' + _gm_matches(grid_mapping, geos_expected) + # should not have been modified + assert 'grid_mapping' not in ds.attrs + + # b) Projection does not have a corresponding CF representation (COSMO) + cosmo7 = AreaDefinition( + area_id='cosmo7', + description='cosmo7', + proj_id='cosmo7', + projection={'proj': 'ob_tran', 'ellps': 'WGS84', 'lat_0': 46, 'lon_0': 4.535, + 'o_proj': 'stere', 'o_lat_p': 90, 'o_lon_p': -5.465}, + width=597, height=510, + area_extent=[-1812933, -1003565, 814056, 1243448] + ) + + ds = ds_base.copy() + ds.attrs['area'] = cosmo7 + + new_ds, grid_mapping = _add_grid_mapping(ds) + assert 'crs_wkt' in grid_mapping.attrs + wkt = grid_mapping.attrs['crs_wkt'] + assert 'ELLIPSOID["WGS 84"' in wkt + assert 'PARAMETER["lat_0",46' in wkt + assert 'PARAMETER["lon_0",4.535' in wkt + assert 'PARAMETER["o_lat_p",90' in wkt + assert 'PARAMETER["o_lon_p",-5.465' in wkt + assert new_ds.attrs['grid_mapping'] == 'cosmo7' + + # c) Projection Transverse Mercator + lat_0 = 36.5 + lon_0 = 15.0 + + tmerc = AreaDefinition( + area_id='tmerc', + description='tmerc', + proj_id='tmerc', + projection={'proj': 'tmerc', 'ellps': 'WGS84', 'lat_0': 36.5, 'lon_0': 15.0}, + width=2, height=2, + area_extent=[-1, -1, 1, 1]) + + tmerc_expected = xr.DataArray(data=0, + attrs={'latitude_of_projection_origin': lat_0, + 'longitude_of_central_meridian': lon_0, + 'grid_mapping_name': 'transverse_mercator', + 'reference_ellipsoid_name': 'WGS 84', + 'false_easting': 0., + 'false_northing': 0., + }) + + ds = ds_base.copy() + ds.attrs['area'] = tmerc + new_ds, grid_mapping = _add_grid_mapping(ds) + assert new_ds.attrs['grid_mapping'] == 'tmerc' + _gm_matches(grid_mapping, tmerc_expected) + + # d) Projection that has a representation but no explicit a/b + h = 35785831. 
+ geos = AreaDefinition( + area_id='geos', + description='geos', + proj_id='geos', + projection={'proj': 'geos', 'h': h, 'datum': 'WGS84', 'ellps': 'GRS80', + 'lat_0': 0, 'lon_0': 0}, + width=2, height=2, + area_extent=[-1, -1, 1, 1]) + geos_expected = xr.DataArray(data=0, + attrs={'perspective_point_height': h, + 'latitude_of_projection_origin': 0, + 'longitude_of_projection_origin': 0, + 'grid_mapping_name': 'geostationary', + # 'semi_major_axis': 6378137.0, + # 'semi_minor_axis': 6356752.314, + # 'sweep_angle_axis': None, + }) + + ds = ds_base.copy() + ds.attrs['area'] = geos + new_ds, grid_mapping = _add_grid_mapping(ds) + + assert new_ds.attrs['grid_mapping'] == 'geos' + _gm_matches(grid_mapping, geos_expected) + + # e) oblique Mercator + area = AreaDefinition( + area_id='omerc_otf', + description='On-the-fly omerc area', + proj_id='omerc', + projection={'alpha': '9.02638777018478', 'ellps': 'WGS84', 'gamma': '0', 'k': '1', + 'lat_0': '-0.256794486098476', 'lonc': '13.7888658224205', + 'proj': 'omerc', 'units': 'm'}, + width=2837, + height=5940, + area_extent=[-1460463.0893, 3455291.3877, 1538407.1158, 9615788.8787] + ) + + omerc_dict = {'azimuth_of_central_line': 9.02638777018478, + 'false_easting': 0., + 'false_northing': 0., + # 'gamma': 0, # this is not CF compliant + 'grid_mapping_name': "oblique_mercator", + 'latitude_of_projection_origin': -0.256794486098476, + 'longitude_of_projection_origin': 13.7888658224205, + # 'prime_meridian_name': "Greenwich", + 'reference_ellipsoid_name': "WGS 84"} + omerc_expected = xr.DataArray(data=0, attrs=omerc_dict) + + ds = ds_base.copy() + ds.attrs['area'] = area + new_ds, grid_mapping = _add_grid_mapping(ds) + + assert new_ds.attrs['grid_mapping'] == 'omerc_otf' + _gm_matches(grid_mapping, omerc_expected) + + # f) Projection that has a representation but no explicit a/b + h = 35785831. 
+ geos = AreaDefinition( + area_id='geos', + description='geos', + proj_id='geos', + projection={'proj': 'geos', 'h': h, 'datum': 'WGS84', 'ellps': 'GRS80', + 'lat_0': 0, 'lon_0': 0}, + width=2, height=2, + area_extent=[-1, -1, 1, 1]) + geos_expected = xr.DataArray(data=0, + attrs={'perspective_point_height': h, + 'latitude_of_projection_origin': 0, + 'longitude_of_projection_origin': 0, + 'grid_mapping_name': 'geostationary', + 'reference_ellipsoid_name': 'WGS 84', + }) + + ds = ds_base.copy() + ds.attrs['area'] = geos + new_ds, grid_mapping = _add_grid_mapping(ds) + + assert new_ds.attrs['grid_mapping'] == 'geos' + _gm_matches(grid_mapping, geos_expected) + + def test_add_lonlat_coords(self): + """Test the conversion from areas to lon/lat.""" + from satpy.writers.cf.area import add_lonlat_coords + + area = AreaDefinition( + 'seviri', + 'Native SEVIRI grid', + 'geos', + "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", + 2, 2, + [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] + ) + lons_ref, lats_ref = area.get_lonlats() + dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), attrs={'area': area}) + + res = add_lonlat_coords(dataarray) + + # original should be unmodified + assert 'longitude' not in dataarray.coords + assert set(res.coords) == {'longitude', 'latitude'} + lat = res['latitude'] + lon = res['longitude'] + np.testing.assert_array_equal(lat.data, lats_ref) + np.testing.assert_array_equal(lon.data, lons_ref) + assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() + assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() + + area = AreaDefinition( + 'seviri', + 'Native SEVIRI grid', + 'geos', + "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", + 10, 10, + [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] + ) + lons_ref, lats_ref = area.get_lonlats() + dataarray = xr.DataArray(data=da.from_array(np.arange(3 * 10 * 10).reshape(3, 10, 10), chunks=(1, 5, 5)), + dims=('bands', 'y', 'x'), attrs={'area': area}) + res = add_lonlat_coords(dataarray) + + # original should be unmodified + assert 'longitude' not in dataarray.coords + assert set(res.coords) == {'longitude', 'latitude'} + lat = res['latitude'] + lon = res['longitude'] + np.testing.assert_array_equal(lat.data, lats_ref) + np.testing.assert_array_equal(lon.data, lons_ref) + assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() + assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 2b0a5dfc6c..005509f165 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -25,7 +25,6 @@ from collections import OrderedDict from datetime import datetime -import dask.array as da import numpy as np import pyresample.geometry import pytest @@ -720,380 +719,6 @@ def test_collect_cf_dataarrays(self): assert 'grid_mapping' not in da_var2.attrs assert da_var2.attrs['long_name'] == 'variable 2' - def test_assert_xy_unique(self): - """Test that the x and y coordinates are unique.""" - from satpy.writers.cf_writer import assert_xy_unique - - dummy = [[1, 2], [3, 4]] - datas = {'a': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), - 'b': xr.DataArray(data=dummy, dims=('y', 'x'), 
coords={'y': [1, 2], 'x': [3, 4]}), - 'n': xr.DataArray(data=dummy, dims=('v', 'w'), coords={'v': [1, 2], 'w': [3, 4]})} - assert_xy_unique(datas) - - datas['c'] = xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 3], 'x': [3, 4]}) - with pytest.raises(ValueError): - assert_xy_unique(datas) - - def test_link_coords(self): - """Check that coordinates link has been established correctly.""" - from satpy.writers.cf_writer import link_coords - - data = [[1, 2], [3, 4]] - lon = np.zeros((2, 2)) - lon2 = np.zeros((1, 2, 2)) - lat = np.ones((2, 2)) - datasets = { - 'var1': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon lat'}), - 'var2': xr.DataArray(data=data, dims=('y', 'x')), - 'var3': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon2 lat'}), - 'var4': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'not_exist lon lat'}), - 'lon': xr.DataArray(data=lon, dims=('y', 'x')), - 'lon2': xr.DataArray(data=lon2, dims=('time', 'y', 'x')), - 'lat': xr.DataArray(data=lat, dims=('y', 'x')) - } - - link_coords(datasets) - - # Check that link has been established correctly and 'coordinate' atrribute has been dropped - assert 'lon' in datasets['var1'].coords - assert 'lat' in datasets['var1'].coords - np.testing.assert_array_equal(datasets['var1']['lon'].data, lon) - np.testing.assert_array_equal(datasets['var1']['lat'].data, lat) - assert 'coordinates' not in datasets['var1'].attrs - - # There should be no link if there was no 'coordinate' attribute - assert 'lon' not in datasets['var2'].coords - assert 'lat' not in datasets['var2'].coords - - # The non-existent dimension or coordinate should be dropped - assert 'time' not in datasets['var3'].coords - assert 'not_exist' not in datasets['var4'].coords - - def test_make_alt_coords_unique(self): - """Test that created coordinate variables are unique.""" - from satpy.writers.cf_writer import make_alt_coords_unique - - data = [[1, 2], [3, 4]] - y = [1, 2] - x = [1, 2] - time1 = [1, 2] - time2 = [3, 4] - datasets = {'var1': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x, 'acq_time': ('y', time1)}), - 'var2': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x, 'acq_time': ('y', time2)})} - - # Test that dataset names are prepended to alternative coordinates - res = make_alt_coords_unique(datasets) - np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) - np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) - assert 'acq_time' not in res['var1'].coords - assert 'acq_time' not in res['var2'].coords - - # Make sure nothing else is modified - np.testing.assert_array_equal(res['var1']['x'], x) - np.testing.assert_array_equal(res['var1']['y'], y) - np.testing.assert_array_equal(res['var2']['x'], x) - np.testing.assert_array_equal(res['var2']['y'], y) - - # Coords not unique -> Dataset names must be prepended, even if pretty=True - with pytest.warns(UserWarning, match='Cannot pretty-format "acq_time"'): - res = make_alt_coords_unique(datasets, pretty=True) - np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) - np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) - assert 'acq_time' not in res['var1'].coords - assert 'acq_time' not in res['var2'].coords - - # Coords unique and pretty=True -> Don't modify coordinate names - datasets['var2']['acq_time'] = ('y', time1) - res = make_alt_coords_unique(datasets, pretty=True) - np.testing.assert_array_equal(res['var1']['acq_time'], time1) - 
np.testing.assert_array_equal(res['var2']['acq_time'], time1) - assert 'var1_acq_time' not in res['var1'].coords - assert 'var2_acq_time' not in res['var2'].coords - - def test_area2cf(self): - """Test the conversion of an area to CF standards.""" - from satpy.writers.cf_writer import area2cf - - ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, - attrs={'name': 'var1'}) - - # a) Area Definition and strict=False - geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, - width=2, height=2, - area_extent=[-1, -1, 1, 1]) - ds = ds_base.copy(deep=True) - ds.attrs['area'] = geos - - res = area2cf(ds, include_lonlats=False) - assert len(res) == 2 - assert res[0].size == 1 # grid mapping variable - assert res[0].name == res[1].attrs['grid_mapping'] - - # b) Area Definition and include_lonlats=False - ds = ds_base.copy(deep=True) - ds.attrs['area'] = geos - res = area2cf(ds, include_lonlats=True) - # same as above - assert len(res) == 2 - assert res[0].size == 1 # grid mapping variable - assert res[0].name == res[1].attrs['grid_mapping'] - # but now also have the lon/lats - assert 'longitude' in res[1].coords - assert 'latitude' in res[1].coords - - # c) Swath Definition - swath = pyresample.geometry.SwathDefinition(lons=[[1, 1], [2, 2]], lats=[[1, 2], [1, 2]]) - ds = ds_base.copy(deep=True) - ds.attrs['area'] = swath - - res = area2cf(ds, include_lonlats=False) - assert len(res) == 1 - assert 'longitude' in res[0].coords - assert 'latitude' in res[0].coords - assert 'grid_mapping' not in res[0].attrs - - def test__add_grid_mapping(self): - """Test the conversion from pyresample area object to CF grid mapping.""" - from satpy.writers.cf_writer import _add_grid_mapping - - def _gm_matches(gmapping, expected): - """Assert that all keys in ``expected`` match the values in ``gmapping``.""" - for attr_key, attr_val in expected.attrs.items(): - test_val = gmapping.attrs[attr_key] - if attr_val is None or isinstance(attr_val, str): - assert test_val == attr_val - else: - np.testing.assert_almost_equal(test_val, attr_val, decimal=3) - - ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, - attrs={'name': 'var1'}) - - # a) Projection has a corresponding CF representation (e.g. geos) - a = 6378169. - b = 6356583.8 - h = 35785831. 
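For orientation while reading the grid-mapping assertions in this removed test: the attributes being checked are what pyproj emits through area.crs.to_cf(), which is also the mechanism behind the relocated _add_grid_mapping. A minimal, illustrative sketch (not part of the patch; the exact set of keys can vary slightly between pyproj versions):

    from pyresample.geometry import AreaDefinition

    area = AreaDefinition(
        area_id='geos', description='geos', proj_id='geos',
        projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8,
                    'lat_0': 0, 'lon_0': 0},
        width=2, height=2, area_extent=[-1, -1, 1, 1])

    cf_attrs = area.crs.to_cf()
    assert cf_attrs['grid_mapping_name'] == 'geostationary'
    # perspective_point_height, semi_major_axis and semi_minor_axis are also
    # present, matching the geos_expected attributes asserted in these tests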
- geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': h, 'a': a, 'b': b, - 'lat_0': 0, 'lon_0': 0}, - width=2, height=2, - area_extent=[-1, -1, 1, 1]) - geos_expected = xr.DataArray(data=0, - attrs={'perspective_point_height': h, - 'latitude_of_projection_origin': 0, - 'longitude_of_projection_origin': 0, - 'grid_mapping_name': 'geostationary', - 'semi_major_axis': a, - 'semi_minor_axis': b, - # 'sweep_angle_axis': None, - }) - - ds = ds_base.copy() - ds.attrs['area'] = geos - new_ds, grid_mapping = _add_grid_mapping(ds) - if 'sweep_angle_axis' in grid_mapping.attrs: - # older versions of pyproj might not include this - assert grid_mapping.attrs['sweep_angle_axis'] == 'y' - - assert new_ds.attrs['grid_mapping'] == 'geos' - _gm_matches(grid_mapping, geos_expected) - # should not have been modified - assert 'grid_mapping' not in ds.attrs - - # b) Projection does not have a corresponding CF representation (COSMO) - cosmo7 = pyresample.geometry.AreaDefinition( - area_id='cosmo7', - description='cosmo7', - proj_id='cosmo7', - projection={'proj': 'ob_tran', 'ellps': 'WGS84', 'lat_0': 46, 'lon_0': 4.535, - 'o_proj': 'stere', 'o_lat_p': 90, 'o_lon_p': -5.465}, - width=597, height=510, - area_extent=[-1812933, -1003565, 814056, 1243448] - ) - - ds = ds_base.copy() - ds.attrs['area'] = cosmo7 - - new_ds, grid_mapping = _add_grid_mapping(ds) - assert 'crs_wkt' in grid_mapping.attrs - wkt = grid_mapping.attrs['crs_wkt'] - assert 'ELLIPSOID["WGS 84"' in wkt - assert 'PARAMETER["lat_0",46' in wkt - assert 'PARAMETER["lon_0",4.535' in wkt - assert 'PARAMETER["o_lat_p",90' in wkt - assert 'PARAMETER["o_lon_p",-5.465' in wkt - assert new_ds.attrs['grid_mapping'] == 'cosmo7' - - # c) Projection Transverse Mercator - lat_0 = 36.5 - lon_0 = 15.0 - - tmerc = pyresample.geometry.AreaDefinition( - area_id='tmerc', - description='tmerc', - proj_id='tmerc', - projection={'proj': 'tmerc', 'ellps': 'WGS84', 'lat_0': 36.5, 'lon_0': 15.0}, - width=2, height=2, - area_extent=[-1, -1, 1, 1]) - - tmerc_expected = xr.DataArray(data=0, - attrs={'latitude_of_projection_origin': lat_0, - 'longitude_of_central_meridian': lon_0, - 'grid_mapping_name': 'transverse_mercator', - 'reference_ellipsoid_name': 'WGS 84', - 'false_easting': 0., - 'false_northing': 0., - }) - - ds = ds_base.copy() - ds.attrs['area'] = tmerc - new_ds, grid_mapping = _add_grid_mapping(ds) - assert new_ds.attrs['grid_mapping'] == 'tmerc' - _gm_matches(grid_mapping, tmerc_expected) - - # d) Projection that has a representation but no explicit a/b - h = 35785831. 
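A side note on the cosmo7 case b) above: projections without a dedicated CF grid mapping fall back to a plain crs_wkt attribute taken directly from pyproj. Rough sketch, illustrative only; the exact WKT text depends on the installed pyproj/PROJ version:

    from pyproj import CRS

    crs = CRS(proj='ob_tran', ellps='WGS84', lat_0=46, lon_0=4.535,
              o_proj='stere', o_lat_p=90, o_lon_p=-5.465)

    cf_attrs = crs.to_cf()
    assert 'crs_wkt' in cf_attrs
    # the WKT string carries the rotated parameters (o_lat_p, o_lon_p) that the
    # test above checks for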
- geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': h, 'datum': 'WGS84', 'ellps': 'GRS80', - 'lat_0': 0, 'lon_0': 0}, - width=2, height=2, - area_extent=[-1, -1, 1, 1]) - geos_expected = xr.DataArray(data=0, - attrs={'perspective_point_height': h, - 'latitude_of_projection_origin': 0, - 'longitude_of_projection_origin': 0, - 'grid_mapping_name': 'geostationary', - # 'semi_major_axis': 6378137.0, - # 'semi_minor_axis': 6356752.314, - # 'sweep_angle_axis': None, - }) - - ds = ds_base.copy() - ds.attrs['area'] = geos - new_ds, grid_mapping = _add_grid_mapping(ds) - - assert new_ds.attrs['grid_mapping'] == 'geos' - _gm_matches(grid_mapping, geos_expected) - - # e) oblique Mercator - area = pyresample.geometry.AreaDefinition( - area_id='omerc_otf', - description='On-the-fly omerc area', - proj_id='omerc', - projection={'alpha': '9.02638777018478', 'ellps': 'WGS84', 'gamma': '0', 'k': '1', - 'lat_0': '-0.256794486098476', 'lonc': '13.7888658224205', - 'proj': 'omerc', 'units': 'm'}, - width=2837, - height=5940, - area_extent=[-1460463.0893, 3455291.3877, 1538407.1158, 9615788.8787] - ) - - omerc_dict = {'azimuth_of_central_line': 9.02638777018478, - 'false_easting': 0., - 'false_northing': 0., - # 'gamma': 0, # this is not CF compliant - 'grid_mapping_name': "oblique_mercator", - 'latitude_of_projection_origin': -0.256794486098476, - 'longitude_of_projection_origin': 13.7888658224205, - # 'prime_meridian_name': "Greenwich", - 'reference_ellipsoid_name': "WGS 84"} - omerc_expected = xr.DataArray(data=0, attrs=omerc_dict) - - ds = ds_base.copy() - ds.attrs['area'] = area - new_ds, grid_mapping = _add_grid_mapping(ds) - - assert new_ds.attrs['grid_mapping'] == 'omerc_otf' - _gm_matches(grid_mapping, omerc_expected) - - # f) Projection that has a representation but no explicit a/b - h = 35785831. 
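Related to cases d) and f): when the projection is defined through a datum rather than explicit semi-axes, the exported grid mapping names the ellipsoid instead of listing semi_major_axis/semi_minor_axis. Illustrative sketch, not part of the patch:

    from pyproj import CRS

    crs = CRS(proj='geos', h=35785831., datum='WGS84', lat_0=0, lon_0=0)

    cf_attrs = crs.to_cf()
    assert cf_attrs['grid_mapping_name'] == 'geostationary'
    assert cf_attrs['reference_ellipsoid_name'] == 'WGS 84'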
- geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': h, 'datum': 'WGS84', 'ellps': 'GRS80', - 'lat_0': 0, 'lon_0': 0}, - width=2, height=2, - area_extent=[-1, -1, 1, 1]) - geos_expected = xr.DataArray(data=0, - attrs={'perspective_point_height': h, - 'latitude_of_projection_origin': 0, - 'longitude_of_projection_origin': 0, - 'grid_mapping_name': 'geostationary', - 'reference_ellipsoid_name': 'WGS 84', - }) - - ds = ds_base.copy() - ds.attrs['area'] = geos - new_ds, grid_mapping = _add_grid_mapping(ds) - - assert new_ds.attrs['grid_mapping'] == 'geos' - _gm_matches(grid_mapping, geos_expected) - - def test_add_lonlat_coords(self): - """Test the conversion from areas to lon/lat.""" - from satpy.writers.cf_writer import add_lonlat_coords - - area = pyresample.geometry.AreaDefinition( - 'seviri', - 'Native SEVIRI grid', - 'geos', - "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", - 2, 2, - [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] - ) - lons_ref, lats_ref = area.get_lonlats() - dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), attrs={'area': area}) - - res = add_lonlat_coords(dataarray) - - # original should be unmodified - assert 'longitude' not in dataarray.coords - assert set(res.coords) == {'longitude', 'latitude'} - lat = res['latitude'] - lon = res['longitude'] - np.testing.assert_array_equal(lat.data, lats_ref) - np.testing.assert_array_equal(lon.data, lons_ref) - assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() - assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() - - area = pyresample.geometry.AreaDefinition( - 'seviri', - 'Native SEVIRI grid', - 'geos', - "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", - 10, 10, - [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] - ) - lons_ref, lats_ref = area.get_lonlats() - dataarray = xr.DataArray(data=da.from_array(np.arange(3 * 10 * 10).reshape(3, 10, 10), chunks=(1, 5, 5)), - dims=('bands', 'y', 'x'), attrs={'area': area}) - res = add_lonlat_coords(dataarray) - - # original should be unmodified - assert 'longitude' not in dataarray.coords - assert set(res.coords) == {'longitude', 'latitude'} - lat = res['latitude'] - lon = res['longitude'] - np.testing.assert_array_equal(lat.data, lats_ref) - np.testing.assert_array_equal(lon.data, lons_ref) - assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() - assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() - def test_load_module_with_old_pyproj(self): """Test that cf_writer can still be loaded with pyproj 1.9.6.""" import importlib @@ -1188,14 +813,14 @@ def datasets(self): def test_is_lon_or_lat_dataarray(self, datasets): """Test the is_lon_or_lat_dataarray function.""" - from satpy.writers.cf_writer import is_lon_or_lat_dataarray + from satpy.writers.cf.area import is_lon_or_lat_dataarray assert is_lon_or_lat_dataarray(datasets['lat']) assert not is_lon_or_lat_dataarray(datasets['var1']) def test_has_projection_coords(self, datasets): """Test the has_projection_coords function.""" - from satpy.writers.cf_writer import has_projection_coords + from satpy.writers.cf.area import has_projection_coords assert has_projection_coords(datasets) 
datasets['lat'].attrs['standard_name'] = 'dummy' diff --git a/satpy/writers/cf/area.py b/satpy/writers/cf/area.py new file mode 100644 index 0000000000..68113c1ee2 --- /dev/null +++ b/satpy/writers/cf/area.py @@ -0,0 +1,192 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""CF processing of pyresample area information.""" +import logging +import warnings +from collections import defaultdict + +import xarray as xr +from dask.base import tokenize +from packaging.version import Version +from pyresample.geometry import AreaDefinition, SwathDefinition + +logger = logging.getLogger(__name__) + + +def add_lonlat_coords(dataarray): + """Add 'longitude' and 'latitude' coordinates to DataArray.""" + dataarray = dataarray.copy() + area = dataarray.attrs['area'] + ignore_dims = {dim: 0 for dim in dataarray.dims if dim not in ['x', 'y']} + chunks = getattr(dataarray.isel(**ignore_dims), 'chunks', None) + lons, lats = area.get_lonlats(chunks=chunks) + dataarray['longitude'] = xr.DataArray(lons, dims=['y', 'x'], + attrs={'name': "longitude", + 'standard_name': "longitude", + 'units': 'degrees_east'}, + name='longitude') + dataarray['latitude'] = xr.DataArray(lats, dims=['y', 'x'], + attrs={'name': "latitude", + 'standard_name': "latitude", + 'units': 'degrees_north'}, + name='latitude') + return dataarray + + +def _create_grid_mapping(area): + """Create the grid mapping instance for `area`.""" + import pyproj + + if Version(pyproj.__version__) < Version('2.4.1'): + # technically 2.2, but important bug fixes in 2.4.1 + raise ImportError("'cf' writer requires pyproj 2.4.1 or greater") + # let pyproj do the heavily lifting (pyproj 2.0+ required) + grid_mapping = area.crs.to_cf() + return area.area_id, grid_mapping + + +def _add_grid_mapping(dataarray): + """Convert an area to at CF grid mapping.""" + dataarray = dataarray.copy() + area = dataarray.attrs['area'] + gmapping_var_name, attrs = _create_grid_mapping(area) + dataarray.attrs['grid_mapping'] = gmapping_var_name + return dataarray, xr.DataArray(0, attrs=attrs, name=gmapping_var_name) + + +def area2cf(dataarray, include_lonlats=False, got_lonlats=False): + """Convert an area to at CF grid mapping or lon and lats.""" + res = [] + if not got_lonlats and (isinstance(dataarray.attrs['area'], SwathDefinition) or include_lonlats): + dataarray = add_lonlat_coords(dataarray) + if isinstance(dataarray.attrs['area'], AreaDefinition): + dataarray, gmapping = _add_grid_mapping(dataarray) + res.append(gmapping) + res.append(dataarray) + return res + + +def is_lon_or_lat_dataarray(dataarray): + """Check if the DataArray represents the latitude or longitude coordinate.""" + if 'standard_name' in dataarray.attrs and dataarray.attrs['standard_name'] in ['longitude', 'latitude']: + return True + return False + + +def has_projection_coords(ds_collection): + """Check if DataArray collection has a "longitude" 
or "latitude" DataArray.""" + for dataarray in ds_collection.values(): + if is_lon_or_lat_dataarray(dataarray): + return True + return False + + +def make_alt_coords_unique(datas, pretty=False): + """Make non-dimensional coordinates unique among all datasets. + + Non-dimensional (or alternative) coordinates, such as scanline timestamps, + may occur in multiple datasets with the same name and dimension + but different values. + + In order to avoid conflicts, prepend the dataset name to the coordinate name. + If a non-dimensional coordinate is unique among all datasets and ``pretty=True``, + its name will not be modified. + + Since all datasets must have the same projection coordinates, + this is not applied to latitude and longitude. + + Args: + datas (dict): + Dictionary of (dataset name, dataset) + pretty (bool): + Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. + + Returns: + Dictionary holding the updated datasets + + """ + # Determine which non-dimensional coordinates are unique + tokens = defaultdict(set) + for dataset in datas.values(): + for coord_name in dataset.coords: + if not is_lon_or_lat_dataarray(dataset[coord_name]) and coord_name not in dataset.dims: + tokens[coord_name].add(tokenize(dataset[coord_name].data)) + coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) + + # Prepend dataset name, if not unique or no pretty-format desired + new_datas = datas.copy() + for coord_name, unique in coords_unique.items(): + if not pretty or not unique: + if pretty: + warnings.warn( + 'Cannot pretty-format "{}" coordinates because they are ' + 'not identical among the given datasets'.format(coord_name), + stacklevel=2 + ) + for ds_name, dataset in datas.items(): + if coord_name in dataset.coords: + rename = {coord_name: '{}_{}'.format(ds_name, coord_name)} + new_datas[ds_name] = new_datas[ds_name].rename(rename) + + return new_datas + + +def assert_xy_unique(datas): + """Check that all datasets share the same projection coordinates x/y.""" + unique_x = set() + unique_y = set() + for dataset in datas.values(): + if 'y' in dataset.dims: + token_y = tokenize(dataset['y'].data) + unique_y.add(token_y) + if 'x' in dataset.dims: + token_x = tokenize(dataset['x'].data) + unique_x.add(token_x) + if len(unique_x) > 1 or len(unique_y) > 1: + raise ValueError('Datasets to be saved in one file (or one group) must have identical projection coordinates. ' + 'Please group them by area or save them in separate files.') + + +def link_coords(datas): + """Link dataarrays and coordinates. + + If the `coordinates` attribute of a data array links to other dataarrays in the scene, for example + `coordinates='lon lat'`, add them as coordinates to the data array and drop that attribute. In the final call to + `xr.Dataset.to_netcdf()` all coordinate relations will be resolved and the `coordinates` attributes be set + automatically. 
+ + """ + for da_name, data in datas.items(): + declared_coordinates = data.attrs.get('coordinates', []) + if isinstance(declared_coordinates, str): + declared_coordinates = declared_coordinates.split(' ') + for coord in declared_coordinates: + if coord not in data.coords: + try: + dimensions_not_in_data = list(set(datas[coord].dims) - set(data.dims)) + data[coord] = datas[coord].squeeze(dimensions_not_in_data, drop=True) + except KeyError: + warnings.warn( + 'Coordinate "{}" referenced by dataarray {} does not ' + 'exist, dropping reference.'.format(coord, da_name), + stacklevel=2 + ) + continue + + # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() + data.attrs.pop('coordinates', None) diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index b9a24b9292..4c672b70b6 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -164,9 +164,7 @@ import numpy as np import xarray as xr -from dask.base import tokenize from packaging.version import Version -from pyresample.geometry import AreaDefinition, SwathDefinition from xarray.coding.times import CFDatetimeCoder from satpy.writers import Writer @@ -228,174 +226,6 @@ def get_extra_ds(dataarray, keys=None): return ds_collection -# ###--------------------------------------------------------------------------. -# ### CF-Area - - -def add_lonlat_coords(dataarray): - """Add 'longitude' and 'latitude' coordinates to DataArray.""" - dataarray = dataarray.copy() - area = dataarray.attrs['area'] - ignore_dims = {dim: 0 for dim in dataarray.dims if dim not in ['x', 'y']} - chunks = getattr(dataarray.isel(**ignore_dims), 'chunks', None) - lons, lats = area.get_lonlats(chunks=chunks) - dataarray['longitude'] = xr.DataArray(lons, dims=['y', 'x'], - attrs={'name': "longitude", - 'standard_name': "longitude", - 'units': 'degrees_east'}, - name='longitude') - dataarray['latitude'] = xr.DataArray(lats, dims=['y', 'x'], - attrs={'name': "latitude", - 'standard_name': "latitude", - 'units': 'degrees_north'}, - name='latitude') - return dataarray - - -def _create_grid_mapping(area): - """Create the grid mapping instance for `area`.""" - import pyproj - - if Version(pyproj.__version__) < Version('2.4.1'): - # technically 2.2, but important bug fixes in 2.4.1 - raise ImportError("'cf' writer requires pyproj 2.4.1 or greater") - # let pyproj do the heavily lifting (pyproj 2.0+ required) - grid_mapping = area.crs.to_cf() - return area.area_id, grid_mapping - - -def _add_grid_mapping(dataarray): - """Convert an area to at CF grid mapping.""" - dataarray = dataarray.copy() - area = dataarray.attrs['area'] - gmapping_var_name, attrs = _create_grid_mapping(area) - dataarray.attrs['grid_mapping'] = gmapping_var_name - return dataarray, xr.DataArray(0, attrs=attrs, name=gmapping_var_name) - - -def area2cf(dataarray, include_lonlats=False, got_lonlats=False): - """Convert an area to at CF grid mapping or lon and lats.""" - res = [] - if not got_lonlats and (isinstance(dataarray.attrs['area'], SwathDefinition) or include_lonlats): - dataarray = add_lonlat_coords(dataarray) - if isinstance(dataarray.attrs['area'], AreaDefinition): - dataarray, gmapping = _add_grid_mapping(dataarray) - res.append(gmapping) - res.append(dataarray) - return res - - -def is_lon_or_lat_dataarray(dataarray): - """Check if the DataArray represents the latitude or longitude coordinate.""" - if 'standard_name' in dataarray.attrs and dataarray.attrs['standard_name'] in ['longitude', 'latitude']: - return True - return False - - -def 
has_projection_coords(ds_collection): - """Check if DataArray collection has a "longitude" or "latitude" DataArray.""" - for dataarray in ds_collection.values(): - if is_lon_or_lat_dataarray(dataarray): - return True - return False - - -def make_alt_coords_unique(datas, pretty=False): - """Make non-dimensional coordinates unique among all datasets. - - Non-dimensional (or alternative) coordinates, such as scanline timestamps, - may occur in multiple datasets with the same name and dimension - but different values. - - In order to avoid conflicts, prepend the dataset name to the coordinate name. - If a non-dimensional coordinate is unique among all datasets and ``pretty=True``, - its name will not be modified. - - Since all datasets must have the same projection coordinates, - this is not applied to latitude and longitude. - - Args: - datas (dict): - Dictionary of (dataset name, dataset) - pretty (bool): - Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. - - Returns: - Dictionary holding the updated datasets - - """ - # Determine which non-dimensional coordinates are unique - tokens = defaultdict(set) - for dataset in datas.values(): - for coord_name in dataset.coords: - if not is_lon_or_lat_dataarray(dataset[coord_name]) and coord_name not in dataset.dims: - tokens[coord_name].add(tokenize(dataset[coord_name].data)) - coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) - - # Prepend dataset name, if not unique or no pretty-format desired - new_datas = datas.copy() - for coord_name, unique in coords_unique.items(): - if not pretty or not unique: - if pretty: - warnings.warn( - 'Cannot pretty-format "{}" coordinates because they are ' - 'not identical among the given datasets'.format(coord_name), - stacklevel=2 - ) - for ds_name, dataset in datas.items(): - if coord_name in dataset.coords: - rename = {coord_name: '{}_{}'.format(ds_name, coord_name)} - new_datas[ds_name] = new_datas[ds_name].rename(rename) - - return new_datas - - -def assert_xy_unique(datas): - """Check that all datasets share the same projection coordinates x/y.""" - unique_x = set() - unique_y = set() - for dataset in datas.values(): - if 'y' in dataset.dims: - token_y = tokenize(dataset['y'].data) - unique_y.add(token_y) - if 'x' in dataset.dims: - token_x = tokenize(dataset['x'].data) - unique_x.add(token_x) - if len(unique_x) > 1 or len(unique_y) > 1: - raise ValueError('Datasets to be saved in one file (or one group) must have identical projection coordinates. ' - 'Please group them by area or save them in separate files.') - - -def link_coords(datas): - """Link dataarrays and coordinates. - - If the `coordinates` attribute of a data array links to other dataarrays in the scene, for example - `coordinates='lon lat'`, add them as coordinates to the data array and drop that attribute. In the final call to - `xr.Dataset.to_netcdf()` all coordinate relations will be resolved and the `coordinates` attributes be set - automatically. 
- - """ - for da_name, data in datas.items(): - declared_coordinates = data.attrs.get('coordinates', []) - if isinstance(declared_coordinates, str): - declared_coordinates = declared_coordinates.split(' ') - for coord in declared_coordinates: - if coord not in data.coords: - try: - dimensions_not_in_data = list(set(datas[coord].dims) - set(data.dims)) - data[coord] = datas[coord].squeeze(dimensions_not_in_data, drop=True) - except KeyError: - warnings.warn( - 'Coordinate "{}" referenced by dataarray {} does not ' - 'exist, dropping reference.'.format(coord, da_name), - stacklevel=2 - ) - continue - - # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() - data.attrs.pop('coordinates', None) - - # ###--------------------------------------------------------------------------. # ### CF-Time def add_time_bounds_dimension(ds, time="time"): @@ -864,6 +694,14 @@ def _collect_cf_dataset(list_dataarrays, ds : xr.Dataset A partially CF-compliant xr.Dataset """ + from satpy.writers.cf.area import ( + area2cf, + assert_xy_unique, + has_projection_coords, + link_coords, + make_alt_coords_unique, + ) + # Create dictionary of input datarrays # --> Since keys=None, it doesn't never retrieve ancillary variables !!! ds_collection = {} From 4b195664431d0eb4a0bcb7271afacabcdd6a5573 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 23:10:01 +0200 Subject: [PATCH 073/702] Refactor attrs-related functions --- .../tests/writer_tests/cf_tests/test_attrs.py | 144 ++++++++++ .../writer_tests/cf_tests/test_time_coords.py | 44 +++ satpy/tests/writer_tests/test_cf.py | 44 +-- satpy/writers/cf/attrs.py | 220 +++++++++++++++ satpy/writers/cf/time.py | 62 +++++ satpy/writers/cf_writer.py | 254 +----------------- 6 files changed, 479 insertions(+), 289 deletions(-) create mode 100644 satpy/tests/writer_tests/cf_tests/test_attrs.py create mode 100644 satpy/tests/writer_tests/cf_tests/test_time_coords.py create mode 100644 satpy/writers/cf/attrs.py create mode 100644 satpy/writers/cf/time.py diff --git a/satpy/tests/writer_tests/cf_tests/test_attrs.py b/satpy/tests/writer_tests/cf_tests/test_attrs.py new file mode 100644 index 0000000000..a7b36837b4 --- /dev/null +++ b/satpy/tests/writer_tests/cf_tests/test_attrs.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Tests for CF-compatible attributes encoding.""" +import datetime +import json +from collections import OrderedDict + +import numpy as np + + +class TestCFAttributeEncoding: + """Test case for CF attribute encodings.""" + + def get_test_attrs(self): + """Create some dataset attributes for testing purpose. 
+ + Returns: + Attributes, encoded attributes, encoded and flattened attributes + + """ + # TODO: this is also used by test_da2cf + attrs = {'name': 'IR_108', + 'start_time': datetime(2018, 1, 1, 0), + 'end_time': datetime(2018, 1, 1, 0, 15), + 'int': 1, + 'float': 1.0, + 'none': None, # should be dropped + 'numpy_int': np.uint8(1), + 'numpy_float': np.float32(1), + 'numpy_bool': True, + 'numpy_void': np.void(0), + 'numpy_bytes': np.bytes_('test'), + 'numpy_string': np.string_('test'), + 'list': [1, 2, np.float64(3)], + 'nested_list': ["1", ["2", [3]]], + 'bool': True, + 'array': np.array([1, 2, 3], dtype='uint8'), + 'array_bool': np.array([True, False, True]), + 'array_2d': np.array([[1, 2], [3, 4]]), + 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), + 'dict': {'a': 1, 'b': 2}, + 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, + 'raw_metadata': OrderedDict([ + ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), + ('flag', np.bool_(True)), + ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) + ])} + encoded = {'name': 'IR_108', + 'start_time': '2018-01-01 00:00:00', + 'end_time': '2018-01-01 00:15:00', + 'int': 1, + 'float': 1.0, + 'numpy_int': np.uint8(1), + 'numpy_float': np.float32(1), + 'numpy_bool': 'true', + 'numpy_void': '[]', + 'numpy_bytes': 'test', + 'numpy_string': 'test', + 'list': [1, 2, np.float64(3)], + 'nested_list': '["1", ["2", [3]]]', + 'bool': 'true', + 'array': np.array([1, 2, 3], dtype='uint8'), + 'array_bool': ['true', 'false', 'true'], + 'array_2d': '[[1, 2], [3, 4]]', + 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', + 'dict': '{"a": 1, "b": 2}', + 'nested_dict': '{"l1": {"l2": {"l3": [1, 2, 3]}}}', + 'raw_metadata': '{"recarray": [[0, 0], [0, 0], [0, 0]], ' + '"flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}'} + encoded_flat = {'name': 'IR_108', + 'start_time': '2018-01-01 00:00:00', + 'end_time': '2018-01-01 00:15:00', + 'int': 1, + 'float': 1.0, + 'numpy_int': np.uint8(1), + 'numpy_float': np.float32(1), + 'numpy_bool': 'true', + 'numpy_void': '[]', + 'numpy_bytes': 'test', + 'numpy_string': 'test', + 'list': [1, 2, np.float64(3)], + 'nested_list': '["1", ["2", [3]]]', + 'bool': 'true', + 'array': np.array([1, 2, 3], dtype='uint8'), + 'array_bool': ['true', 'false', 'true'], + 'array_2d': '[[1, 2], [3, 4]]', + 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', + 'dict_a': 1, + 'dict_b': 2, + 'nested_dict_l1_l2_l3': np.array([1, 2, 3], dtype='uint8'), + 'raw_metadata_recarray': '[[0, 0], [0, 0], [0, 0]]', + 'raw_metadata_flag': 'true', + 'raw_metadata_dict_a': 1, + 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} + return attrs, encoded, encoded_flat + + def assertDictWithArraysEqual(self, d1, d2): + """Check that dicts containing arrays are equal.""" + # TODO: this is also used by test_da2cf + assert set(d1.keys()) == set(d2.keys()) + for key, val1 in d1.items(): + val2 = d2[key] + if isinstance(val1, np.ndarray): + np.testing.assert_array_equal(val1, val2) + assert val1.dtype == val2.dtype + else: + assert val1 == val2 + if isinstance(val1, (np.floating, np.integer, np.bool_)): + assert isinstance(val2, np.generic) + assert val1.dtype == val2.dtype + + def test_encode_attrs_nc(self): + """Test attributes encoding.""" + from satpy.writers.cf.attrs import encode_attrs_nc + + attrs, expected, _ = self.get_test_attrs() + + # Test encoding + encoded = encode_attrs_nc(attrs) + self.assertDictWithArraysEqual(expected, encoded) + + # Test decoding of json-encoded attributes + 
raw_md_roundtrip = {'recarray': [[0, 0], [0, 0], [0, 0]], + 'flag': 'true', + 'dict': {'a': 1, 'b': [1, 2, 3]}} + assert json.loads(encoded['raw_metadata']) == raw_md_roundtrip + assert json.loads(encoded['array_3d']) == [[[1, 2], [3, 4]], [[1, 2], [3, 4]]] + assert json.loads(encoded['nested_dict']) == {"l1": {"l2": {"l3": [1, 2, 3]}}} + assert json.loads(encoded['nested_list']) == ["1", ["2", [3]]] diff --git a/satpy/tests/writer_tests/cf_tests/test_time_coords.py b/satpy/tests/writer_tests/cf_tests/test_time_coords.py new file mode 100644 index 0000000000..ce7845dcca --- /dev/null +++ b/satpy/tests/writer_tests/cf_tests/test_time_coords.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""CF processing of time information (coordinates and dimensions).""" +import numpy as np +import xarray as xr + + +class TestCFtime: + """Test cases for CF time dimension and coordinates.""" + + def test_add_time_bounds_dimension(self): + """Test addition of CF-compliant time attributes.""" + from satpy.writers.cf.time import add_time_bounds_dimension + + test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) + times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01', + '2018-05-30T10:05:02', '2018-05-30T10:05:03'], dtype=np.datetime64) + dataarray = xr.DataArray(test_array, + dims=['y', 'x'], + coords={'time': ('y', times)}, + attrs=dict(start_time=times[0], end_time=times[-1])) + ds = dataarray.to_dataset(name='test-array') + ds = add_time_bounds_dimension(ds) + + assert "bnds_1d" in ds.dims + assert ds.dims['bnds_1d'] == 2 + assert "time_bnds" in list(ds.data_vars) + assert "bounds" in ds["time"].attrs + assert "standard_name" in ds["time"].attrs diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 005509f165..ae55dcc1a2 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -17,7 +17,6 @@ # satpy. If not, see . 
"""Tests for the CF writer.""" -import json import logging import os import tempfile @@ -155,28 +154,6 @@ def test_preprocess_dataarray_name(): assert "original_name" not in out_da.attrs -def test_add_time_cf_attrs(): - """Test addition of CF-compliant time attributes.""" - from satpy import Scene - from satpy.writers.cf_writer import add_time_bounds_dimension - - scn = Scene() - test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) - times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01', - '2018-05-30T10:05:02', '2018-05-30T10:05:03'], dtype=np.datetime64) - scn['test-array'] = xr.DataArray(test_array, - dims=['y', 'x'], - coords={'time': ('y', times)}, - attrs=dict(start_time=times[0], end_time=times[-1])) - ds = scn['test-array'].to_dataset(name='test-array') - ds = add_time_bounds_dimension(ds) - assert "bnds_1d" in ds.dims - assert ds.dims['bnds_1d'] == 2 - assert "time_bnds" in list(ds.data_vars) - assert "bounds" in ds["time"].attrs - assert "standard_name" in ds["time"].attrs - - def test_empty_collect_cf_datasets(): """Test that if no DataArrays, collect_cf_datasets raise error.""" from satpy.writers.cf_writer import collect_cf_datasets @@ -525,6 +502,7 @@ def get_test_attrs(self): Attributes, encoded attributes, encoded and flattened attributes """ + # TODO: also used by cf/test_attrs.py attrs = {'name': 'IR_108', 'start_time': datetime(2018, 1, 1, 0), 'end_time': datetime(2018, 1, 1, 0, 15), @@ -602,6 +580,7 @@ def get_test_attrs(self): def assertDictWithArraysEqual(self, d1, d2): """Check that dicts containing arrays are equal.""" + # TODO: also used by cf/test_attrs.py assert set(d1.keys()) == set(d2.keys()) for key, val1 in d1.items(): val2 = d2[key] @@ -614,25 +593,6 @@ def assertDictWithArraysEqual(self, d1, d2): assert isinstance(val2, np.generic) assert val1.dtype == val2.dtype - def test_encode_attrs_nc(self): - """Test attributes encoding.""" - from satpy.writers.cf_writer import encode_attrs_nc - - attrs, expected, _ = self.get_test_attrs() - - # Test encoding - encoded = encode_attrs_nc(attrs) - self.assertDictWithArraysEqual(expected, encoded) - - # Test decoding of json-encoded attributes - raw_md_roundtrip = {'recarray': [[0, 0], [0, 0], [0, 0]], - 'flag': 'true', - 'dict': {'a': 1, 'b': [1, 2, 3]}} - assert json.loads(encoded['raw_metadata']) == raw_md_roundtrip - assert json.loads(encoded['array_3d']) == [[[1, 2], [3, 4]], [[1, 2], [3, 4]]] - assert json.loads(encoded['nested_dict']) == {"l1": {"l2": {"l3": [1, 2, 3]}}} - assert json.loads(encoded['nested_list']) == ["1", ["2", [3]]] - def test_da2cf(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" from satpy.writers.cf_writer import CFWriter diff --git a/satpy/writers/cf/attrs.py b/satpy/writers/cf/attrs.py new file mode 100644 index 0000000000..7a9ecc33c8 --- /dev/null +++ b/satpy/writers/cf/attrs.py @@ -0,0 +1,220 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""CF processing of attributes.""" + +import datetime +import json +from collections import OrderedDict + +import numpy as np + +from satpy.writers.utils import flatten_dict + + +class AttributeEncoder(json.JSONEncoder): + """JSON encoder for dataset attributes.""" + + def default(self, obj): + """Return a json-serializable object for *obj*. + + In order to facilitate decoding, elements in dictionaries, lists/tuples and multi-dimensional arrays are + encoded recursively. + """ + if isinstance(obj, dict): + serialized = {} + for key, val in obj.items(): + serialized[key] = self.default(val) + return serialized + elif isinstance(obj, (list, tuple, np.ndarray)): + return [self.default(item) for item in obj] + return self._encode(obj) + + def _encode(self, obj): + """Encode the given object as a json-serializable datatype.""" + if isinstance(obj, (bool, np.bool_)): + # Bool has to be checked first, because it is a subclass of int + return str(obj).lower() + elif isinstance(obj, (int, float, str)): + return obj + elif isinstance(obj, np.integer): + return int(obj) + elif isinstance(obj, np.floating): + return float(obj) + elif isinstance(obj, np.void): + return tuple(obj) + elif isinstance(obj, np.ndarray): + return obj.tolist() + + return str(obj) + + +def _encode_nc(obj): + """Try to encode `obj` as a netCDF/Zarr compatible datatype which most closely resembles the object's nature. + + Raises: + ValueError if no such datatype could be found + """ + from satpy.writers.cf_writer import NC4_DTYPES + + if isinstance(obj, int) and not isinstance(obj, (bool, np.bool_)): + return obj + elif isinstance(obj, (float, str, np.integer, np.floating)): + return obj + elif isinstance(obj, np.ndarray): + # Only plain 1-d arrays are supported. Skip record arrays and multi-dimensional arrays. + is_plain_1d = not obj.dtype.fields and len(obj.shape) <= 1 + if is_plain_1d: + if obj.dtype in NC4_DTYPES: + return obj + elif obj.dtype == np.bool_: + # Boolean arrays are not supported, convert to array of strings. + return [s.lower() for s in obj.astype(str)] + return obj.tolist() + raise ValueError('Unable to encode') + + +def encode_nc(obj): + """Encode the given object as a netcdf compatible datatype.""" + try: + return obj.to_cf() + except AttributeError: + return _encode_python_objects(obj) + + +def _encode_python_objects(obj): + """Try to find the datatype which most closely resembles the object's nature. + + If on failure, encode as a string. Plain lists are encoded recursively. + """ + if isinstance(obj, (list, tuple)) and all([not isinstance(item, (list, tuple)) for item in obj]): + return [encode_nc(item) for item in obj] + try: + dump = _encode_nc(obj) + except ValueError: + try: + # Decode byte-strings + decoded = obj.decode() + except AttributeError: + decoded = obj + dump = json.dumps(decoded, cls=AttributeEncoder).strip('"') + return dump + + +def encode_attrs_nc(attrs): + """Encode dataset attributes in a netcdf compatible datatype. 
+ + Args: + attrs (dict): + Attributes to be encoded + Returns: + dict: Encoded (and sorted) attributes + + """ + encoded_attrs = [] + for key, val in sorted(attrs.items()): + if val is not None: + encoded_attrs.append((key, encode_nc(val))) + return OrderedDict(encoded_attrs) + + +def _add_ancillary_variables_attrs(dataarray): + """Replace ancillary_variables DataArray with a list of their name.""" + list_ancillary_variable_names = [da_ancillary.attrs['name'] + for da_ancillary in dataarray.attrs.get('ancillary_variables', [])] + if list_ancillary_variable_names: + dataarray.attrs['ancillary_variables'] = ' '.join(list_ancillary_variable_names) + else: + dataarray.attrs.pop("ancillary_variables", None) + return dataarray + + +def _drop_exclude_attrs(dataarray, exclude_attrs): + """Remove user-specified list of attributes.""" + if exclude_attrs is None: + exclude_attrs = [] + for key in exclude_attrs: + dataarray.attrs.pop(key, None) + return dataarray + + +def _remove_satpy_attrs(new_data): + """Remove _satpy attribute.""" + satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')] + for satpy_attr in satpy_attrs: + new_data.attrs.pop(satpy_attr) + new_data.attrs.pop('_last_resampler', None) + return new_data + + +def _format_prerequisites_attrs(dataarray): + """Reformat prerequisites attribute value to string.""" + if 'prerequisites' in dataarray.attrs: + dataarray.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in dataarray.attrs['prerequisites']] + return dataarray + + +def _remove_none_attrs(dataarray): + """Remove attribute keys with None value.""" + for key, val in dataarray.attrs.copy().items(): + if val is None: + dataarray.attrs.pop(key) + return dataarray + + +def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs): + """Preprocess DataArray attributes to be written into CF-compliant netCDF/Zarr.""" + dataarray = _remove_satpy_attrs(dataarray) + dataarray = _add_ancillary_variables_attrs(dataarray) + dataarray = _drop_exclude_attrs(dataarray, exclude_attrs) + dataarray = _format_prerequisites_attrs(dataarray) + dataarray = _remove_none_attrs(dataarray) + _ = dataarray.attrs.pop("area", None) + + if 'long_name' not in dataarray.attrs and 'standard_name' not in dataarray.attrs: + dataarray.attrs['long_name'] = dataarray.name + + if flatten_attrs: + dataarray.attrs = flatten_dict(dataarray.attrs) + + dataarray.attrs = encode_attrs_nc(dataarray.attrs) + + return dataarray + + +def _add_history(attrs): + """Add 'history' attribute to dictionary.""" + _history_create = 'Created by pytroll/satpy on {}'.format(datetime.utcnow()) + if 'history' in attrs: + if isinstance(attrs['history'], list): + attrs['history'] = ''.join(attrs['history']) + attrs['history'] += '\n' + _history_create + else: + attrs['history'] = _history_create + return attrs + + +def preprocess_header_attrs(header_attrs, flatten_attrs=False): + """Prepare file header attributes.""" + if header_attrs is not None: + if flatten_attrs: + header_attrs = flatten_dict(header_attrs) + header_attrs = encode_attrs_nc(header_attrs) # OrderedDict + else: + header_attrs = {} + header_attrs = _add_history(header_attrs) + return header_attrs diff --git a/satpy/writers/cf/time.py b/satpy/writers/cf/time.py new file mode 100644 index 0000000000..6308f42364 --- /dev/null +++ b/satpy/writers/cf/time.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. 
+# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""CF processing of time dimension and coordinates.""" +import numpy as np +import xarray as xr + + +def add_time_bounds_dimension(ds, time="time"): + """Add time bound dimension to xr.Dataset.""" + start_times = [] + end_times = [] + for _var_name, data_array in ds.items(): + start_times.append(data_array.attrs.get("start_time", None)) + end_times.append(data_array.attrs.get("end_time", None)) + + start_time = min(start_time for start_time in start_times + if start_time is not None) + end_time = min(end_time for end_time in end_times + if end_time is not None) + ds['time_bnds'] = xr.DataArray([[np.datetime64(start_time), + np.datetime64(end_time)]], + dims=['time', 'bnds_1d']) + ds[time].attrs['bounds'] = "time_bnds" + ds[time].attrs['standard_name'] = "time" + return ds + + +def _process_time_coord(dataarray, epoch): + """Process the 'time' coordinate, if existing. + + It expand the DataArray with a time dimension if does not yet exists. + + The function assumes + + - that x and y dimensions have at least shape > 1 + - the time coordinate has size 1 + + """ + if 'time' in dataarray.coords: + dataarray['time'].encoding['units'] = epoch + dataarray['time'].attrs['standard_name'] = 'time' + dataarray['time'].attrs.pop('bounds', None) + + if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape: + dataarray = dataarray.expand_dims('time') + + return dataarray diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 4c672b70b6..a2edd70ab2 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -154,13 +154,10 @@ .. _xarray encoding documentation: http://xarray.pydata.org/en/stable/user-guide/io.html?highlight=encoding#writing-encoded-data """ - import copy -import json import logging import warnings -from collections import OrderedDict, defaultdict -from datetime import datetime +from collections import defaultdict import numpy as np import xarray as xr @@ -169,7 +166,6 @@ from satpy.writers import Writer from satpy.writers.cf.coords_attrs import add_xy_coords_attrs -from satpy.writers.utils import flatten_dict logger = logging.getLogger(__name__) @@ -226,236 +222,6 @@ def get_extra_ds(dataarray, keys=None): return ds_collection -# ###--------------------------------------------------------------------------. 
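The CF-Time block removed from cf_writer.py below now lives in satpy/writers/cf/time.py (added above). For orientation, a minimal sketch of the relocated internal helper _process_time_coord, illustrative only and relying just on the behaviour shown in this patch:

    import numpy as np
    import xarray as xr

    from satpy.writers.cf.time import _process_time_coord

    # single-timestep swath: scalar 'time' coordinate, no 'time' dimension yet
    arr = xr.DataArray(np.zeros((2, 2)), dims=('y', 'x'),
                       coords={'time': np.datetime64('2018-05-30T10:05:00')})
    out = _process_time_coord(arr, epoch='seconds since 1970-01-01 00:00:00')

    assert 'time' in out.dims  # a singleton time dimension has been added
    # the epoch string ends up in out['time'].encoding['units'] and
    # out['time'].attrs['standard_name'] is set to 'time'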
-# ### CF-Time -def add_time_bounds_dimension(ds, time="time"): - """Add time bound dimension to xr.Dataset.""" - start_times = [] - end_times = [] - for _var_name, data_array in ds.items(): - start_times.append(data_array.attrs.get("start_time", None)) - end_times.append(data_array.attrs.get("end_time", None)) - - start_time = min(start_time for start_time in start_times - if start_time is not None) - end_time = min(end_time for end_time in end_times - if end_time is not None) - ds['time_bnds'] = xr.DataArray([[np.datetime64(start_time), - np.datetime64(end_time)]], - dims=['time', 'bnds_1d']) - ds[time].attrs['bounds'] = "time_bnds" - ds[time].attrs['standard_name'] = "time" - return ds - - -def _process_time_coord(dataarray, epoch): - """Process the 'time' coordinate, if existing. - - If expand the DataArray with a time dimension if does not yet exists. - - The function assumes - - - that x and y dimensions have at least shape > 1 - - the time coordinate has size 1 - - """ - if 'time' in dataarray.coords: - dataarray['time'].encoding['units'] = epoch - dataarray['time'].attrs['standard_name'] = 'time' - dataarray['time'].attrs.pop('bounds', None) - - if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape: - dataarray = dataarray.expand_dims('time') - - return dataarray - - -# --------------------------------------------------------------------------. -# ### Attributes - - -class AttributeEncoder(json.JSONEncoder): - """JSON encoder for dataset attributes.""" - - def default(self, obj): - """Return a json-serializable object for *obj*. - - In order to facilitate decoding, elements in dictionaries, lists/tuples and multi-dimensional arrays are - encoded recursively. - """ - if isinstance(obj, dict): - serialized = {} - for key, val in obj.items(): - serialized[key] = self.default(val) - return serialized - elif isinstance(obj, (list, tuple, np.ndarray)): - return [self.default(item) for item in obj] - return self._encode(obj) - - def _encode(self, obj): - """Encode the given object as a json-serializable datatype.""" - if isinstance(obj, (bool, np.bool_)): - # Bool has to be checked first, because it is a subclass of int - return str(obj).lower() - elif isinstance(obj, (int, float, str)): - return obj - elif isinstance(obj, np.integer): - return int(obj) - elif isinstance(obj, np.floating): - return float(obj) - elif isinstance(obj, np.void): - return tuple(obj) - elif isinstance(obj, np.ndarray): - return obj.tolist() - - return str(obj) - - -def _encode_nc(obj): - """Try to encode `obj` as a netcdf compatible datatype which most closely resembles the object's nature. - - Raises: - ValueError if no such datatype could be found - - """ - if isinstance(obj, int) and not isinstance(obj, (bool, np.bool_)): - return obj - elif isinstance(obj, (float, str, np.integer, np.floating)): - return obj - elif isinstance(obj, np.ndarray): - # Only plain 1-d arrays are supported. Skip record arrays and multi-dimensional arrays. - is_plain_1d = not obj.dtype.fields and len(obj.shape) <= 1 - if is_plain_1d: - if obj.dtype in NC4_DTYPES: - return obj - elif obj.dtype == np.bool_: - # Boolean arrays are not supported, convert to array of strings. 
- return [s.lower() for s in obj.astype(str)] - return obj.tolist() - - raise ValueError('Unable to encode') - - -def encode_nc(obj): - """Encode the given object as a netcdf compatible datatype.""" - try: - return obj.to_cf() - except AttributeError: - return _encode_python_objects(obj) - - -def _encode_python_objects(obj): - """Try to find the datatype which most closely resembles the object's nature. - - If on failure, encode as a string. Plain lists are encoded recursively. - """ - if isinstance(obj, (list, tuple)) and all([not isinstance(item, (list, tuple)) for item in obj]): - return [encode_nc(item) for item in obj] - try: - dump = _encode_nc(obj) - except ValueError: - try: - # Decode byte-strings - decoded = obj.decode() - except AttributeError: - decoded = obj - dump = json.dumps(decoded, cls=AttributeEncoder).strip('"') - return dump - - -def encode_attrs_nc(attrs): - """Encode dataset attributes in a netcdf compatible datatype. - - Args: - attrs (dict): - Attributes to be encoded - Returns: - dict: Encoded (and sorted) attributes - - """ - encoded_attrs = [] - for key, val in sorted(attrs.items()): - if val is not None: - encoded_attrs.append((key, encode_nc(val))) - return OrderedDict(encoded_attrs) - - -def _add_ancillary_variables_attrs(dataarray): - """Replace ancillary_variables DataArray with a list of their name.""" - list_ancillary_variable_names = [da_ancillary.attrs['name'] - for da_ancillary in dataarray.attrs.get('ancillary_variables', [])] - if list_ancillary_variable_names: - dataarray.attrs['ancillary_variables'] = ' '.join(list_ancillary_variable_names) - else: - dataarray.attrs.pop("ancillary_variables", None) - return dataarray - - -def _drop_exclude_attrs(dataarray, exclude_attrs): - """Remove user-specified list of attributes.""" - if exclude_attrs is None: - exclude_attrs = [] - for key in exclude_attrs: - dataarray.attrs.pop(key, None) - return dataarray - - -def _remove_satpy_attrs(new_data): - """Remove _satpy attribute.""" - satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')] - for satpy_attr in satpy_attrs: - new_data.attrs.pop(satpy_attr) - new_data.attrs.pop('_last_resampler', None) - return new_data - - -def _format_prerequisites_attrs(dataarray): - """Reformat prerequisites attribute value to string.""" - if 'prerequisites' in dataarray.attrs: - dataarray.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in dataarray.attrs['prerequisites']] - return dataarray - - -def _remove_none_attrs(dataarray): - """Remove attribute keys with None value.""" - for key, val in dataarray.attrs.copy().items(): - if val is None: - dataarray.attrs.pop(key) - return dataarray - - -def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs): - """Preprocess DataArray attributes to be written into CF-compliant netCDF/Zarr.""" - dataarray = _remove_satpy_attrs(dataarray) - dataarray = _add_ancillary_variables_attrs(dataarray) - dataarray = _drop_exclude_attrs(dataarray, exclude_attrs) - dataarray = _format_prerequisites_attrs(dataarray) - dataarray = _remove_none_attrs(dataarray) - _ = dataarray.attrs.pop("area", None) - - if 'long_name' not in dataarray.attrs and 'standard_name' not in dataarray.attrs: - dataarray.attrs['long_name'] = dataarray.name - - if flatten_attrs: - dataarray.attrs = flatten_dict(dataarray.attrs) - - dataarray.attrs = encode_attrs_nc(dataarray.attrs) - - return dataarray - - -def preprocess_header_attrs(header_attrs, flatten_attrs=False): - """Prepare file header attributes.""" - if header_attrs is not 
None: - if flatten_attrs: - header_attrs = flatten_dict(header_attrs) - header_attrs = encode_attrs_nc(header_attrs) # OrderedDict - else: - header_attrs = {} - header_attrs = _add_history(header_attrs) - return header_attrs - - # ###--------------------------------------------------------------------------. # ### netCDF encodings @@ -582,18 +348,6 @@ def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name return dataarray -def _add_history(attrs): - """Add 'history' attribute to dictionary.""" - _history_create = 'Created by pytroll/satpy on {}'.format(datetime.utcnow()) - if 'history' in attrs: - if isinstance(attrs['history'], list): - attrs['history'] = ''.join(attrs['history']) - attrs['history'] += '\n' + _history_create - else: - attrs['history'] = _history_create - return attrs - - def _get_groups(groups, list_datarrays): """Return a dictionary with the list of xr.DataArray associated to each group. @@ -645,6 +399,9 @@ def make_cf_dataarray(dataarray, CF-compliant xr.DataArray. """ + from satpy.writers.cf.attrs import preprocess_datarray_attrs + from satpy.writers.cf.time import _process_time_coord + dataarray = _preprocess_dataarray_name(dataarray=dataarray, numeric_name_prefix=numeric_name_prefix, include_orig_name=include_orig_name) @@ -822,6 +579,9 @@ def collect_cf_datasets(list_dataarrays, header_attrs : dict Global attributes to be attached to the xr.Dataset / netCDF4. """ + from satpy.writers.cf.attrs import preprocess_header_attrs + from satpy.writers.cf.time import add_time_bounds_dimension + if not list_dataarrays: raise RuntimeError("None of the requested datasets have been " "generated or could not be loaded. Requested " From 6c27d86ed22ffd5a697449c57a0ee0a616081cbc Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 23:13:10 +0200 Subject: [PATCH 074/702] Fix datetime import --- satpy/tests/writer_tests/cf_tests/test_attrs.py | 4 ++-- satpy/writers/cf/attrs.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/tests/writer_tests/cf_tests/test_attrs.py b/satpy/tests/writer_tests/cf_tests/test_attrs.py index a7b36837b4..87cdfd173d 100644 --- a/satpy/tests/writer_tests/cf_tests/test_attrs.py +++ b/satpy/tests/writer_tests/cf_tests/test_attrs.py @@ -35,8 +35,8 @@ def get_test_attrs(self): """ # TODO: this is also used by test_da2cf attrs = {'name': 'IR_108', - 'start_time': datetime(2018, 1, 1, 0), - 'end_time': datetime(2018, 1, 1, 0, 15), + 'start_time': datetime.datetime(2018, 1, 1, 0), + 'end_time': datetime.datetime(2018, 1, 1, 0, 15), 'int': 1, 'float': 1.0, 'none': None, # should be dropped diff --git a/satpy/writers/cf/attrs.py b/satpy/writers/cf/attrs.py index 7a9ecc33c8..aac0f5f289 100644 --- a/satpy/writers/cf/attrs.py +++ b/satpy/writers/cf/attrs.py @@ -198,7 +198,7 @@ def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs): def _add_history(attrs): """Add 'history' attribute to dictionary.""" - _history_create = 'Created by pytroll/satpy on {}'.format(datetime.utcnow()) + _history_create = 'Created by pytroll/satpy on {}'.format(datetime.datetime.utcnow()) if 'history' in attrs: if isinstance(attrs['history'], list): attrs['history'] = ''.join(attrs['history']) From 59f73ea592231a8379788cd7999273cf9be9dae1 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 23:18:04 +0200 Subject: [PATCH 075/702] Replace deprecated CFWriter.da2cf with make_cf_dataarray --- satpy/tests/writer_tests/test_cf.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git 
a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index ae55dcc1a2..35b454c87f 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -87,10 +87,10 @@ def test_lonlat_storage(tmp_path): np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563) -def test_da2cf_lonlat(): - """Test correct da2cf encoding for area with lon/lat units.""" +def test_make_cf_dataarray_lonlat(): + """Test correct CF encoding for area with lon/lat units.""" from satpy.resample import add_crs_xy_coords - from satpy.writers.cf_writer import CFWriter + from satpy.writers.cf_writer import make_cf_dataarray area = create_area_def("mavas", 4326, shape=(5, 5), center=(0, 0), resolution=(1, 1)) @@ -99,7 +99,7 @@ def test_da2cf_lonlat(): dims=("y", "x"), attrs={"area": area}) da = add_crs_xy_coords(da, area) - new_da = CFWriter.da2cf(da) + new_da = make_cf_dataarray(da) assert new_da["x"].attrs["units"] == "degrees_east" assert new_da["y"].attrs["units"] == "degrees_north" @@ -593,9 +593,9 @@ def assertDictWithArraysEqual(self, d1, d2): assert isinstance(val2, np.generic) assert val1.dtype == val2.dtype - def test_da2cf(self): + def test_make_cf_dataarray(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" - from satpy.writers.cf_writer import CFWriter + from satpy.writers.cf_writer import make_cf_dataarray # Create set of test attributes attrs, attrs_expected, attrs_expected_flat = self.get_test_attrs() @@ -618,7 +618,7 @@ def test_da2cf(self): coords={'y': [0, 1], 'x': [1, 2], 'acq_time': ('y', [3, 4])}) # Test conversion to something cf-compliant - res = CFWriter.da2cf(arr) + res = make_cf_dataarray(arr) np.testing.assert_array_equal(res['x'], arr['x']) np.testing.assert_array_equal(res['y'], arr['y']) np.testing.assert_array_equal(res['acq_time'], arr['acq_time']) @@ -627,17 +627,17 @@ def test_da2cf(self): self.assertDictWithArraysEqual(res.attrs, attrs_expected) # Test attribute kwargs - res_flat = CFWriter.da2cf(arr, flatten_attrs=True, exclude_attrs=['int']) + res_flat = make_cf_dataarray(arr, flatten_attrs=True, exclude_attrs=['int']) attrs_expected_flat.pop('int') self.assertDictWithArraysEqual(res_flat.attrs, attrs_expected_flat) - def test_da2cf_one_dimensional_array(self): + def test_make_cf_dataarray_one_dimensional_array(self): """Test the conversion of an 1d DataArray to a CF-compatible DataArray.""" - from satpy.writers.cf_writer import CFWriter + from satpy.writers.cf_writer import make_cf_dataarray arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=('y',), coords={'y': [0, 1, 2, 3], 'acq_time': ('y', [0, 1, 2, 3])}) - _ = CFWriter.da2cf(arr) + _ = make_cf_dataarray(arr) def test_collect_cf_dataarrays(self): """Test collecting CF datasets from a DataArray objects.""" From 396700f0c04aeee05b381eeb16f0480d9ee657ac Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 23:35:24 +0200 Subject: [PATCH 076/702] Refactor Dataset encodings --- .../writer_tests/cf_tests/test_encoding.py | 123 ++++++++++++++++++ satpy/tests/writer_tests/test_cf.py | 107 +-------------- satpy/writers/cf/encoding.py | 110 ++++++++++++++++ satpy/writers/cf_writer.py | 99 +------------- 4 files changed, 241 insertions(+), 198 deletions(-) create mode 100644 satpy/tests/writer_tests/cf_tests/test_encoding.py create mode 100644 satpy/writers/cf/encoding.py diff --git a/satpy/tests/writer_tests/cf_tests/test_encoding.py b/satpy/tests/writer_tests/cf_tests/test_encoding.py new file mode 100644 index 
0000000000..66f7c72a48 --- /dev/null +++ b/satpy/tests/writer_tests/cf_tests/test_encoding.py @@ -0,0 +1,123 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Tests for compatible netCDF/Zarr DataArray encodings.""" +import datetime + +import pytest +import xarray as xr + + +class TestUpdateDatasetEncodings: + """Test update of Dataset encodings.""" + + @pytest.fixture + def fake_ds(self): + """Create fake data for testing.""" + ds = xr.Dataset({'foo': (('y', 'x'), [[1, 2], [3, 4]]), + 'bar': (('y', 'x'), [[3, 4], [5, 6]])}, + coords={'y': [1, 2], + 'x': [3, 4], + 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) + return ds + + @pytest.fixture + def fake_ds_digit(self): + """Create fake data for testing.""" + ds_digit = xr.Dataset({'CHANNEL_1': (('y', 'x'), [[1, 2], [3, 4]]), + 'CHANNEL_2': (('y', 'x'), [[3, 4], [5, 6]])}, + coords={'y': [1, 2], + 'x': [3, 4], + 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) + return ds_digit + + def test_dataset_name_digit(self, fake_ds_digit): + """Test data with dataset name staring with a digit.""" + from satpy.writers.cf.encoding import update_encoding + + # Dataset with name staring with digit + ds_digit = fake_ds_digit + kwargs = {'encoding': {'1': {'dtype': 'float32'}, + '2': {'dtype': 'float32'}}, + 'other': 'kwargs'} + enc, other_kwargs = update_encoding(ds_digit, kwargs, numeric_name_prefix='CHANNEL_') + expected_dict = { + 'y': {'_FillValue': None}, + 'x': {'_FillValue': None}, + 'CHANNEL_1': {'dtype': 'float32'}, + 'CHANNEL_2': {'dtype': 'float32'} + } + assert enc == expected_dict + assert other_kwargs == {'other': 'kwargs'} + + def test_without_time(self, fake_ds): + """Test data with no time dimension.""" + from satpy.writers.cf.encoding import update_encoding + + # Without time dimension + ds = fake_ds.chunk(2) + kwargs = {'encoding': {'bar': {'chunksizes': (1, 1)}}, + 'other': 'kwargs'} + enc, other_kwargs = update_encoding(ds, kwargs) + expected_dict = { + 'y': {'_FillValue': None}, + 'x': {'_FillValue': None}, + 'lon': {'chunksizes': (2, 2)}, + 'foo': {'chunksizes': (2, 2)}, + 'bar': {'chunksizes': (1, 1)} + } + assert enc == expected_dict + assert other_kwargs == {'other': 'kwargs'} + + # Chunksize may not exceed shape + ds = fake_ds.chunk(8) + kwargs = {'encoding': {}, 'other': 'kwargs'} + enc, other_kwargs = update_encoding(ds, kwargs) + expected_dict = { + 'y': {'_FillValue': None}, + 'x': {'_FillValue': None}, + 'lon': {'chunksizes': (2, 2)}, + 'foo': {'chunksizes': (2, 2)}, + 'bar': {'chunksizes': (2, 2)} + } + assert enc == expected_dict + + def test_with_time(self, fake_ds): + """Test data with a time dimension.""" + from satpy.writers.cf.encoding import update_encoding + + # With time dimension + ds = fake_ds.chunk(8).expand_dims({'time': [datetime.datetime(2009, 7, 1, 12, 15)]}) + kwargs = {'encoding': {'bar': {'chunksizes': (1, 1, 1)}}, + 'other': 'kwargs'} + enc, other_kwargs 
= update_encoding(ds, kwargs) + expected_dict = { + 'y': {'_FillValue': None}, + 'x': {'_FillValue': None}, + 'lon': {'chunksizes': (2, 2)}, + 'foo': {'chunksizes': (1, 2, 2)}, + 'bar': {'chunksizes': (1, 1, 1)}, + 'time': {'_FillValue': None, + 'calendar': 'proleptic_gregorian', + 'units': 'days since 2009-07-01 12:15:00'}, + 'time_bnds': {'_FillValue': None, + 'calendar': 'proleptic_gregorian', + 'units': 'days since 2009-07-01 12:15:00'} + } + assert enc == expected_dict + # User-defined encoding may not be altered + assert kwargs['encoding'] == {'bar': {'chunksizes': (1, 1, 1)}} diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 35b454c87f..1d9e9c7650 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -808,109 +808,8 @@ def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): assert ds2['var1']['longitude'].attrs['name'] == 'longitude' -class EncodingUpdateTest: - """Test update of netCDF encoding.""" - - @pytest.fixture - def fake_ds(self): - """Create fake data for testing.""" - ds = xr.Dataset({'foo': (('y', 'x'), [[1, 2], [3, 4]]), - 'bar': (('y', 'x'), [[3, 4], [5, 6]])}, - coords={'y': [1, 2], - 'x': [3, 4], - 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) - return ds - - @pytest.fixture - def fake_ds_digit(self): - """Create fake data for testing.""" - ds_digit = xr.Dataset({'CHANNEL_1': (('y', 'x'), [[1, 2], [3, 4]]), - 'CHANNEL_2': (('y', 'x'), [[3, 4], [5, 6]])}, - coords={'y': [1, 2], - 'x': [3, 4], - 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) - return ds_digit - - def test_dataset_name_digit(self, fake_ds_digit): - """Test data with dataset name staring with a digit.""" - from satpy.writers.cf_writer import update_encoding - - # Dataset with name staring with digit - ds_digit = fake_ds_digit - kwargs = {'encoding': {'1': {'dtype': 'float32'}, - '2': {'dtype': 'float32'}}, - 'other': 'kwargs'} - enc, other_kwargs = update_encoding(ds_digit, kwargs, numeric_name_prefix='CHANNEL_') - expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'CHANNEL_1': {'dtype': 'float32'}, - 'CHANNEL_2': {'dtype': 'float32'} - } - assert enc == expected_dict - assert other_kwargs == {'other': 'kwargs'} - - def test_without_time(self, fake_ds): - """Test data with no time dimension.""" - from satpy.writers.cf_writer import update_encoding - - # Without time dimension - ds = fake_ds.chunk(2) - kwargs = {'encoding': {'bar': {'chunksizes': (1, 1)}}, - 'other': 'kwargs'} - enc, other_kwargs = update_encoding(ds, kwargs) - expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'lon': {'chunksizes': (2, 2)}, - 'foo': {'chunksizes': (2, 2)}, - 'bar': {'chunksizes': (1, 1)} - } - assert enc == expected_dict - assert other_kwargs == {'other': 'kwargs'} - - # Chunksize may not exceed shape - ds = fake_ds.chunk(8) - kwargs = {'encoding': {}, 'other': 'kwargs'} - enc, other_kwargs = update_encoding(ds, kwargs) - expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'lon': {'chunksizes': (2, 2)}, - 'foo': {'chunksizes': (2, 2)}, - 'bar': {'chunksizes': (2, 2)} - } - assert enc == expected_dict - - def test_with_time(self, fake_ds): - """Test data with a time dimension.""" - from satpy.writers.cf_writer import update_encoding - - # With time dimension - ds = fake_ds.chunk(8).expand_dims({'time': [datetime(2009, 7, 1, 12, 15)]}) - kwargs = {'encoding': {'bar': {'chunksizes': (1, 1, 1)}}, - 'other': 'kwargs'} - enc, other_kwargs = update_encoding(ds, 
kwargs) - expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'lon': {'chunksizes': (2, 2)}, - 'foo': {'chunksizes': (1, 2, 2)}, - 'bar': {'chunksizes': (1, 1, 1)}, - 'time': {'_FillValue': None, - 'calendar': 'proleptic_gregorian', - 'units': 'days since 2009-07-01 12:15:00'}, - 'time_bnds': {'_FillValue': None, - 'calendar': 'proleptic_gregorian', - 'units': 'days since 2009-07-01 12:15:00'} - } - assert enc == expected_dict - # User-defined encoding may not be altered - assert kwargs['encoding'] == {'bar': {'chunksizes': (1, 1, 1)}} - - -class TestEncodingKwarg: - """Test CF writer with 'encoding' keyword argument.""" +class TestNETCDFEncodingKwargs: + """Test netCDF compression encodings.""" @pytest.fixture def scene(self): @@ -1001,7 +900,7 @@ def test_no_warning_if_backends_match(self, scene, filename, monkeypatch): warnings.simplefilter("error") -class TestEncodingAttribute(TestEncodingKwarg): +class TestEncodingAttribute(TestNETCDFEncodingKwargs): """Test CF writer with 'encoding' dataset attribute.""" @pytest.fixture diff --git a/satpy/writers/cf/encoding.py b/satpy/writers/cf/encoding.py new file mode 100644 index 0000000000..c8ea0f25f4 --- /dev/null +++ b/satpy/writers/cf/encoding.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""CF encoding.""" + +import numpy as np +import xarray as xr +from xarray.coding.times import CFDatetimeCoder + + +def _set_default_chunks(encoding, dataset): + """Update encoding to preserve current dask chunks. + + Existing user-defined chunks take precedence. + """ + for var_name, variable in dataset.variables.items(): + if variable.chunks: + chunks = tuple( + np.stack([variable.data.chunksize, + variable.shape]).min(axis=0) + ) # Chunksize may not exceed shape + encoding.setdefault(var_name, {}) + encoding[var_name].setdefault('chunksizes', chunks) + return encoding + + +def _set_default_fill_value(encoding, dataset): + """Set default fill values. + + Avoid _FillValue attribute being added to coordinate variables + (https://github.com/pydata/xarray/issues/1865). + """ + coord_vars = [] + for data_array in dataset.values(): + coord_vars.extend(set(data_array.dims).intersection(data_array.coords)) + for coord_var in coord_vars: + encoding.setdefault(coord_var, {}) + encoding[coord_var].update({'_FillValue': None}) + return encoding + + +def _set_default_time_encoding(encoding, dataset): + """Set default time encoding. + + Make sure time coordinates and bounds have the same units. + Default is xarray's CF datetime encoding, which can be overridden + by user-defined encoding. 
+ """ + if 'time' in dataset: + try: + dtnp64 = dataset['time'].data[0] + except IndexError: + dtnp64 = dataset['time'].data + + default = CFDatetimeCoder().encode(xr.DataArray(dtnp64)) + time_enc = {'units': default.attrs['units'], 'calendar': default.attrs['calendar']} + time_enc.update(encoding.get('time', {})) + bounds_enc = {'units': time_enc['units'], + 'calendar': time_enc['calendar'], + '_FillValue': None} + encoding['time'] = time_enc + encoding['time_bnds'] = bounds_enc # FUTURE: Not required anymore with xarray-0.14+ + return encoding + + +def _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix): + """Ensure variable names of the encoding dictionary account for numeric_name_prefix. + + A lot of channel names in satpy starts with a digit. + When preparing CF-compliant datasets, these channels are prefixed with numeric_name_prefix. + + If variables names in the encoding dictionary are numeric digits, their name is prefixed + with numeric_name_prefix + """ + for var_name in list(dataset.variables): + if not numeric_name_prefix or not var_name.startswith(numeric_name_prefix): + continue + orig_var_name = var_name.replace(numeric_name_prefix, '') + if orig_var_name in encoding: + encoding[var_name] = encoding.pop(orig_var_name) + return encoding + + +def update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix='CHANNEL_'): + """Update encoding. + + Preserve dask chunks, avoid fill values in coordinate variables and make sure that + time & time bounds have the same units. + """ + other_to_netcdf_kwargs = to_netcdf_kwargs.copy() + encoding = other_to_netcdf_kwargs.pop('encoding', {}).copy() + encoding = _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix) + encoding = _set_default_chunks(encoding, dataset) + encoding = _set_default_fill_value(encoding, dataset) + encoding = _set_default_time_encoding(encoding, dataset) + return encoding, other_to_netcdf_kwargs diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index a2edd70ab2..63f57f2e63 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -162,7 +162,6 @@ import numpy as np import xarray as xr from packaging.version import Version -from xarray.coding.times import CFDatetimeCoder from satpy.writers import Writer from satpy.writers.cf.coords_attrs import add_xy_coords_attrs @@ -222,98 +221,6 @@ def get_extra_ds(dataarray, keys=None): return ds_collection -# ###--------------------------------------------------------------------------. -# ### netCDF encodings - - -def _set_default_chunks(encoding, dataset): - """Update encoding to preserve current dask chunks. - - Existing user-defined chunks take precedence. - """ - for var_name, variable in dataset.variables.items(): - if variable.chunks: - chunks = tuple( - np.stack([variable.data.chunksize, - variable.shape]).min(axis=0) - ) # Chunksize may not exceed shape - encoding.setdefault(var_name, {}) - encoding[var_name].setdefault('chunksizes', chunks) - return encoding - - -def _set_default_fill_value(encoding, dataset): - """Set default fill values. - - Avoid _FillValue attribute being added to coordinate variables - (https://github.com/pydata/xarray/issues/1865). 
- """ - coord_vars = [] - for data_array in dataset.values(): - coord_vars.extend(set(data_array.dims).intersection(data_array.coords)) - for coord_var in coord_vars: - encoding.setdefault(coord_var, {}) - encoding[coord_var].update({'_FillValue': None}) - return encoding - - -def _set_default_time_encoding(encoding, dataset): - """Set default time encoding. - - Make sure time coordinates and bounds have the same units. - Default is xarray's CF datetime encoding, which can be overridden - by user-defined encoding. - """ - if 'time' in dataset: - try: - dtnp64 = dataset['time'].data[0] - except IndexError: - dtnp64 = dataset['time'].data - - default = CFDatetimeCoder().encode(xr.DataArray(dtnp64)) - time_enc = {'units': default.attrs['units'], 'calendar': default.attrs['calendar']} - time_enc.update(encoding.get('time', {})) - bounds_enc = {'units': time_enc['units'], - 'calendar': time_enc['calendar'], - '_FillValue': None} - encoding['time'] = time_enc - encoding['time_bnds'] = bounds_enc # FUTURE: Not required anymore with xarray-0.14+ - return encoding - - -def _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix): - """Ensure variable names of the encoding dictionary account for numeric_name_prefix. - - A lot of channel names in satpy starts with a digit. - When preparing CF-compliant datasets, these channels are prefixed with numeric_name_prefix. - - If variables names in the encoding dictionary are numeric digits, their name is prefixed - with numeric_name_prefix - """ - for var_name in list(dataset.variables): - if not numeric_name_prefix or not var_name.startswith(numeric_name_prefix): - continue - orig_var_name = var_name.replace(numeric_name_prefix, '') - if orig_var_name in encoding: - encoding[var_name] = encoding.pop(orig_var_name) - return encoding - - -def update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix='CHANNEL_'): - """Update encoding. - - Preserve dask chunks, avoid fill values in coordinate variables and make sure that - time & time bounds have the same units. - """ - other_to_netcdf_kwargs = to_netcdf_kwargs.copy() - encoding = other_to_netcdf_kwargs.pop('encoding', {}).copy() - encoding = _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix) - encoding = _set_default_chunks(encoding, dataset) - encoding = _set_default_fill_value(encoding, dataset) - encoding = _set_default_time_encoding(encoding, dataset) - return encoding, other_to_netcdf_kwargs - - # ###--------------------------------------------------------------------------. # ### CF-conversion @@ -681,8 +588,10 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, @staticmethod def update_encoding(dataset, to_netcdf_kwargs): """Update encoding info (deprecated).""" + from satpy.writers.cf.encoding import update_encoding + warnings.warn('CFWriter.update_encoding is deprecated. ' - 'Use satpy.writers.cf_writer.update_encoding instead.', + 'Use satpy.writers.cf.encoding.update_encoding instead.', DeprecationWarning, stacklevel=3) return update_encoding(dataset, to_netcdf_kwargs) @@ -728,6 +637,8 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. 
""" + from satpy.writers.cf.encoding import update_encoding + logger.info('Saving datasets to NetCDF4/CF.') _check_backend_versions() From f664c60925e7edf844c1e66c8bf7bf3e810f9db6 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 23:56:48 +0200 Subject: [PATCH 077/702] Refactor CF-compliant DataArray creation --- .../writer_tests/cf_tests/test_dataaarray.py | 197 ++++++++++++++++++ satpy/tests/writer_tests/test_cf.py | 174 +--------------- satpy/writers/cf/dataarray.py | 97 +++++++++ satpy/writers/cf/time.py | 2 + satpy/writers/cf_writer.py | 119 ++--------- 5 files changed, 320 insertions(+), 269 deletions(-) create mode 100644 satpy/tests/writer_tests/cf_tests/test_dataaarray.py create mode 100644 satpy/writers/cf/dataarray.py diff --git a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py new file mode 100644 index 0000000000..20c893d0a6 --- /dev/null +++ b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py @@ -0,0 +1,197 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Tests CF-compliant DataArray creation.""" + +import datetime +from collections import OrderedDict + +import numpy as np +import xarray as xr + +from satpy.tests.utils import make_dsq + + +def test_preprocess_dataarray_name(): + """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix include orig name.""" + from satpy import Scene + from satpy.writers.cf.dataarray import _preprocess_dataarray_name + + scn = Scene() + scn['1'] = xr.DataArray([1, 2, 3]) + dataarray = scn['1'] + # If numeric_name_prefix is a string, test add the original_name attributes + out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="TEST", include_orig_name=True) + assert out_da.attrs['original_name'] == '1' + + # If numeric_name_prefix is empty string, False or None, test do not add original_name attributes + out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="", include_orig_name=True) + assert "original_name" not in out_da.attrs + + out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix=False, include_orig_name=True) + assert "original_name" not in out_da.attrs + + out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix=None, include_orig_name=True) + assert "original_name" not in out_da.attrs + + +class TestCFWriter: + """Test creation of CF DataArray.""" + + def get_test_attrs(self): + """Create some dataset attributes for testing purpose. 
+ + Returns: + Attributes, encoded attributes, encoded and flattened attributes + + """ + # TODO: also used by cf/test_attrs.py + attrs = {'name': 'IR_108', + 'start_time': datetime.datetime(2018, 1, 1, 0), + 'end_time': datetime.datetime(2018, 1, 1, 0, 15), + 'int': 1, + 'float': 1.0, + 'none': None, # should be dropped + 'numpy_int': np.uint8(1), + 'numpy_float': np.float32(1), + 'numpy_bool': True, + 'numpy_void': np.void(0), + 'numpy_bytes': np.bytes_('test'), + 'numpy_string': np.string_('test'), + 'list': [1, 2, np.float64(3)], + 'nested_list': ["1", ["2", [3]]], + 'bool': True, + 'array': np.array([1, 2, 3], dtype='uint8'), + 'array_bool': np.array([True, False, True]), + 'array_2d': np.array([[1, 2], [3, 4]]), + 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), + 'dict': {'a': 1, 'b': 2}, + 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, + 'raw_metadata': OrderedDict([ + ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), + ('flag', np.bool_(True)), + ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) + ])} + encoded = {'name': 'IR_108', + 'start_time': '2018-01-01 00:00:00', + 'end_time': '2018-01-01 00:15:00', + 'int': 1, + 'float': 1.0, + 'numpy_int': np.uint8(1), + 'numpy_float': np.float32(1), + 'numpy_bool': 'true', + 'numpy_void': '[]', + 'numpy_bytes': 'test', + 'numpy_string': 'test', + 'list': [1, 2, np.float64(3)], + 'nested_list': '["1", ["2", [3]]]', + 'bool': 'true', + 'array': np.array([1, 2, 3], dtype='uint8'), + 'array_bool': ['true', 'false', 'true'], + 'array_2d': '[[1, 2], [3, 4]]', + 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', + 'dict': '{"a": 1, "b": 2}', + 'nested_dict': '{"l1": {"l2": {"l3": [1, 2, 3]}}}', + 'raw_metadata': '{"recarray": [[0, 0], [0, 0], [0, 0]], ' + '"flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}'} + encoded_flat = {'name': 'IR_108', + 'start_time': '2018-01-01 00:00:00', + 'end_time': '2018-01-01 00:15:00', + 'int': 1, + 'float': 1.0, + 'numpy_int': np.uint8(1), + 'numpy_float': np.float32(1), + 'numpy_bool': 'true', + 'numpy_void': '[]', + 'numpy_bytes': 'test', + 'numpy_string': 'test', + 'list': [1, 2, np.float64(3)], + 'nested_list': '["1", ["2", [3]]]', + 'bool': 'true', + 'array': np.array([1, 2, 3], dtype='uint8'), + 'array_bool': ['true', 'false', 'true'], + 'array_2d': '[[1, 2], [3, 4]]', + 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', + 'dict_a': 1, + 'dict_b': 2, + 'nested_dict_l1_l2_l3': np.array([1, 2, 3], dtype='uint8'), + 'raw_metadata_recarray': '[[0, 0], [0, 0], [0, 0]]', + 'raw_metadata_flag': 'true', + 'raw_metadata_dict_a': 1, + 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} + return attrs, encoded, encoded_flat + + def assertDictWithArraysEqual(self, d1, d2): + """Check that dicts containing arrays are equal.""" + # TODO: also used by cf/test_attrs.py + assert set(d1.keys()) == set(d2.keys()) + for key, val1 in d1.items(): + val2 = d2[key] + if isinstance(val1, np.ndarray): + np.testing.assert_array_equal(val1, val2) + assert val1.dtype == val2.dtype + else: + assert val1 == val2 + if isinstance(val1, (np.floating, np.integer, np.bool_)): + assert isinstance(val2, np.generic) + assert val1.dtype == val2.dtype + + def test_make_cf_dataarray(self): + """Test the conversion of a DataArray to a CF-compatible DataArray.""" + from satpy.writers.cf.dataarray import make_cf_dataarray + + # Create set of test attributes + attrs, attrs_expected, attrs_expected_flat = self.get_test_attrs() + attrs['area'] = 'some_area' + 
attrs['prerequisites'] = [make_dsq(name='hej')] + attrs['_satpy_id_name'] = 'myname' + + # Adjust expected attributes + expected_prereq = ("DataQuery(name='hej')") + update = {'prerequisites': [expected_prereq], 'long_name': attrs['name']} + + attrs_expected.update(update) + attrs_expected_flat.update(update) + + attrs_expected.pop('name') + attrs_expected_flat.pop('name') + + # Create test data array + arr = xr.DataArray(np.array([[1, 2], [3, 4]]), attrs=attrs, dims=('y', 'x'), + coords={'y': [0, 1], 'x': [1, 2], 'acq_time': ('y', [3, 4])}) + + # Test conversion to something cf-compliant + res = make_cf_dataarray(arr) + np.testing.assert_array_equal(res['x'], arr['x']) + np.testing.assert_array_equal(res['y'], arr['y']) + np.testing.assert_array_equal(res['acq_time'], arr['acq_time']) + assert res['x'].attrs == {'units': 'm', 'standard_name': 'projection_x_coordinate'} + assert res['y'].attrs == {'units': 'm', 'standard_name': 'projection_y_coordinate'} + self.assertDictWithArraysEqual(res.attrs, attrs_expected) + + # Test attribute kwargs + res_flat = make_cf_dataarray(arr, flatten_attrs=True, exclude_attrs=['int']) + attrs_expected_flat.pop('int') + self.assertDictWithArraysEqual(res_flat.attrs, attrs_expected_flat) + + def test_make_cf_dataarray_one_dimensional_array(self): + """Test the conversion of an 1d DataArray to a CF-compatible DataArray.""" + from satpy.writers.cf.dataarray import make_cf_dataarray + + arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=('y',), + coords={'y': [0, 1, 2, 3], 'acq_time': ('y', [0, 1, 2, 3])}) + _ = make_cf_dataarray(arr) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 1d9e9c7650..96cc09069a 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -21,7 +21,6 @@ import os import tempfile import warnings -from collections import OrderedDict from datetime import datetime import numpy as np @@ -90,7 +89,7 @@ def test_lonlat_storage(tmp_path): def test_make_cf_dataarray_lonlat(): """Test correct CF encoding for area with lon/lat units.""" from satpy.resample import add_crs_xy_coords - from satpy.writers.cf_writer import make_cf_dataarray + from satpy.writers.cf.dataarray import make_cf_dataarray area = create_area_def("mavas", 4326, shape=(5, 5), center=(0, 0), resolution=(1, 1)) @@ -131,29 +130,6 @@ def test_is_projected(caplog): assert "Failed to tell if data are projected." 
in caplog.text -def test_preprocess_dataarray_name(): - """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix include orig name.""" - from satpy import Scene - from satpy.writers.cf_writer import _preprocess_dataarray_name - - scn = Scene() - scn['1'] = xr.DataArray([1, 2, 3]) - dataarray = scn['1'] - # If numeric_name_prefix is a string, test add the original_name attributes - out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="TEST", include_orig_name=True) - assert out_da.attrs['original_name'] == '1' - - # If numeric_name_prefix is empty string, False or None, test do not add original_name attributes - out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="", include_orig_name=True) - assert "original_name" not in out_da.attrs - - out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix=False, include_orig_name=True) - assert "original_name" not in out_da.attrs - - out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix=None, include_orig_name=True) - assert "original_name" not in out_da.attrs - - def test_empty_collect_cf_datasets(): """Test that if no DataArrays, collect_cf_datasets raise error.""" from satpy.writers.cf_writer import collect_cf_datasets @@ -495,150 +471,6 @@ def test_header_attrs(self): assert f.attrs['bool_'] == 'true' assert 'none' not in f.attrs.keys() - def get_test_attrs(self): - """Create some dataset attributes for testing purpose. - - Returns: - Attributes, encoded attributes, encoded and flattened attributes - - """ - # TODO: also used by cf/test_attrs.py - attrs = {'name': 'IR_108', - 'start_time': datetime(2018, 1, 1, 0), - 'end_time': datetime(2018, 1, 1, 0, 15), - 'int': 1, - 'float': 1.0, - 'none': None, # should be dropped - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': True, - 'numpy_void': np.void(0), - 'numpy_bytes': np.bytes_('test'), - 'numpy_string': np.string_('test'), - 'list': [1, 2, np.float64(3)], - 'nested_list': ["1", ["2", [3]]], - 'bool': True, - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': np.array([True, False, True]), - 'array_2d': np.array([[1, 2], [3, 4]]), - 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), - 'dict': {'a': 1, 'b': 2}, - 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, - 'raw_metadata': OrderedDict([ - ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), - ('flag', np.bool_(True)), - ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) - ])} - encoded = {'name': 'IR_108', - 'start_time': '2018-01-01 00:00:00', - 'end_time': '2018-01-01 00:15:00', - 'int': 1, - 'float': 1.0, - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': 'true', - 'numpy_void': '[]', - 'numpy_bytes': 'test', - 'numpy_string': 'test', - 'list': [1, 2, np.float64(3)], - 'nested_list': '["1", ["2", [3]]]', - 'bool': 'true', - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': ['true', 'false', 'true'], - 'array_2d': '[[1, 2], [3, 4]]', - 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', - 'dict': '{"a": 1, "b": 2}', - 'nested_dict': '{"l1": {"l2": {"l3": [1, 2, 3]}}}', - 'raw_metadata': '{"recarray": [[0, 0], [0, 0], [0, 0]], ' - '"flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}'} - encoded_flat = {'name': 'IR_108', - 'start_time': '2018-01-01 00:00:00', - 'end_time': '2018-01-01 00:15:00', - 'int': 1, - 'float': 1.0, - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': 'true', - 'numpy_void': 
'[]', - 'numpy_bytes': 'test', - 'numpy_string': 'test', - 'list': [1, 2, np.float64(3)], - 'nested_list': '["1", ["2", [3]]]', - 'bool': 'true', - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': ['true', 'false', 'true'], - 'array_2d': '[[1, 2], [3, 4]]', - 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', - 'dict_a': 1, - 'dict_b': 2, - 'nested_dict_l1_l2_l3': np.array([1, 2, 3], dtype='uint8'), - 'raw_metadata_recarray': '[[0, 0], [0, 0], [0, 0]]', - 'raw_metadata_flag': 'true', - 'raw_metadata_dict_a': 1, - 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} - return attrs, encoded, encoded_flat - - def assertDictWithArraysEqual(self, d1, d2): - """Check that dicts containing arrays are equal.""" - # TODO: also used by cf/test_attrs.py - assert set(d1.keys()) == set(d2.keys()) - for key, val1 in d1.items(): - val2 = d2[key] - if isinstance(val1, np.ndarray): - np.testing.assert_array_equal(val1, val2) - assert val1.dtype == val2.dtype - else: - assert val1 == val2 - if isinstance(val1, (np.floating, np.integer, np.bool_)): - assert isinstance(val2, np.generic) - assert val1.dtype == val2.dtype - - def test_make_cf_dataarray(self): - """Test the conversion of a DataArray to a CF-compatible DataArray.""" - from satpy.writers.cf_writer import make_cf_dataarray - - # Create set of test attributes - attrs, attrs_expected, attrs_expected_flat = self.get_test_attrs() - attrs['area'] = 'some_area' - attrs['prerequisites'] = [make_dsq(name='hej')] - attrs['_satpy_id_name'] = 'myname' - - # Adjust expected attributes - expected_prereq = ("DataQuery(name='hej')") - update = {'prerequisites': [expected_prereq], 'long_name': attrs['name']} - - attrs_expected.update(update) - attrs_expected_flat.update(update) - - attrs_expected.pop('name') - attrs_expected_flat.pop('name') - - # Create test data array - arr = xr.DataArray(np.array([[1, 2], [3, 4]]), attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [1, 2], 'acq_time': ('y', [3, 4])}) - - # Test conversion to something cf-compliant - res = make_cf_dataarray(arr) - np.testing.assert_array_equal(res['x'], arr['x']) - np.testing.assert_array_equal(res['y'], arr['y']) - np.testing.assert_array_equal(res['acq_time'], arr['acq_time']) - assert res['x'].attrs == {'units': 'm', 'standard_name': 'projection_x_coordinate'} - assert res['y'].attrs == {'units': 'm', 'standard_name': 'projection_y_coordinate'} - self.assertDictWithArraysEqual(res.attrs, attrs_expected) - - # Test attribute kwargs - res_flat = make_cf_dataarray(arr, flatten_attrs=True, exclude_attrs=['int']) - attrs_expected_flat.pop('int') - self.assertDictWithArraysEqual(res_flat.attrs, attrs_expected_flat) - - def test_make_cf_dataarray_one_dimensional_array(self): - """Test the conversion of an 1d DataArray to a CF-compatible DataArray.""" - from satpy.writers.cf_writer import make_cf_dataarray - - arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=('y',), - coords={'y': [0, 1, 2, 3], 'acq_time': ('y', [0, 1, 2, 3])}) - _ = make_cf_dataarray(arr) - def test_collect_cf_dataarrays(self): """Test collecting CF datasets from a DataArray objects.""" from satpy.writers.cf_writer import _collect_cf_dataset @@ -808,7 +640,7 @@ def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): assert ds2['var1']['longitude'].attrs['name'] == 'longitude' -class TestNETCDFEncodingKwargs: +class TestNetcdfEncodingKwargs: """Test netCDF compression encodings.""" @pytest.fixture @@ -900,7 +732,7 @@ def test_no_warning_if_backends_match(self, scene, filename, 
monkeypatch): warnings.simplefilter("error") -class TestEncodingAttribute(TestNETCDFEncodingKwargs): +class TestEncodingAttribute(TestNetcdfEncodingKwargs): """Test CF writer with 'encoding' dataset attribute.""" @pytest.fixture diff --git a/satpy/writers/cf/dataarray.py b/satpy/writers/cf/dataarray.py new file mode 100644 index 0000000000..9ca90ae52f --- /dev/null +++ b/satpy/writers/cf/dataarray.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Utility to generate a CF-compliant DataArray.""" +import warnings + +from satpy.writers.cf.attrs import preprocess_datarray_attrs +from satpy.writers.cf.coords_attrs import add_xy_coords_attrs +from satpy.writers.cf.time import EPOCH, _process_time_coord + + +def _handle_dataarray_name(original_name, numeric_name_prefix): + if original_name[0].isdigit(): + if numeric_name_prefix: + new_name = numeric_name_prefix + original_name + else: + warnings.warn( + f'Invalid NetCDF dataset name: {original_name} starts with a digit.', + stacklevel=5 + ) + new_name = original_name # occurs when numeric_name_prefix = '', None or False + else: + new_name = original_name + return original_name, new_name + + +def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name): + """Change the DataArray name by prepending numeric_name_prefix if the name is a digit.""" + original_name = None + dataarray = dataarray.copy() + if 'name' in dataarray.attrs: + original_name = dataarray.attrs.pop('name') + original_name, new_name = _handle_dataarray_name(original_name, numeric_name_prefix) + dataarray = dataarray.rename(new_name) + + if include_orig_name and numeric_name_prefix and original_name and original_name != new_name: + dataarray.attrs['original_name'] = original_name + + return dataarray + + +def make_cf_dataarray(dataarray, + epoch=EPOCH, + flatten_attrs=False, + exclude_attrs=None, + include_orig_name=True, + numeric_name_prefix='CHANNEL_'): + """Make the xr.DataArray CF-compliant. + + Parameters + ---------- + dataarray : xr.DataArray + The data array to be made CF-compliant. + epoch : str, optional + Reference time for encoding of time coordinates. + flatten_attrs : bool, optional + If True, flatten dict-type attributes. + The default is False. + exclude_attrs : list, optional + List of dataset attributes to be excluded. + The default is None. + include_orig_name : bool, optional + Include the original dataset name in the netcdf variable attributes. + The default is True. + numeric_name_prefix : TYPE, optional + Prepend dataset name with this if starting with a digit. + The default is ``"CHANNEL_"``. + + Returns + ------- + new_data : xr.DataArray + CF-compliant xr.DataArray. 
+ + """ + dataarray = _preprocess_dataarray_name(dataarray=dataarray, + numeric_name_prefix=numeric_name_prefix, + include_orig_name=include_orig_name) + dataarray = preprocess_datarray_attrs(dataarray=dataarray, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs) + dataarray = add_xy_coords_attrs(dataarray) + dataarray = _process_time_coord(dataarray, epoch=epoch) + return dataarray diff --git a/satpy/writers/cf/time.py b/satpy/writers/cf/time.py index 6308f42364..821f7b47b0 100644 --- a/satpy/writers/cf/time.py +++ b/satpy/writers/cf/time.py @@ -19,6 +19,8 @@ import numpy as np import xarray as xr +EPOCH = u"seconds since 1970-01-01 00:00:00" + def add_time_bounds_dimension(ds, time="time"): """Add time bound dimension to xr.Dataset.""" diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 63f57f2e63..d7503860b4 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -164,12 +164,10 @@ from packaging.version import Version from satpy.writers import Writer -from satpy.writers.cf.coords_attrs import add_xy_coords_attrs +from satpy.writers.cf.time import EPOCH logger = logging.getLogger(__name__) -EPOCH = u"seconds since 1970-01-01 00:00:00" - # Check availability of either netCDF4 or h5netcdf package try: import netCDF4 @@ -225,101 +223,6 @@ def get_extra_ds(dataarray, keys=None): # ### CF-conversion -def _handle_dataarray_name(original_name, numeric_name_prefix): - if original_name[0].isdigit(): - if numeric_name_prefix: - new_name = numeric_name_prefix + original_name - else: - warnings.warn( - f'Invalid NetCDF dataset name: {original_name} starts with a digit.', - stacklevel=5 - ) - new_name = original_name # occurs when numeric_name_prefix = '', None or False - else: - new_name = original_name - return original_name, new_name - - -def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name): - """Change the DataArray name by prepending numeric_name_prefix if the name is a digit.""" - original_name = None - dataarray = dataarray.copy() - if 'name' in dataarray.attrs: - original_name = dataarray.attrs.pop('name') - original_name, new_name = _handle_dataarray_name(original_name, numeric_name_prefix) - dataarray = dataarray.rename(new_name) - - if include_orig_name and numeric_name_prefix and original_name and original_name != new_name: - dataarray.attrs['original_name'] = original_name - - return dataarray - - -def _get_groups(groups, list_datarrays): - """Return a dictionary with the list of xr.DataArray associated to each group. - - If no groups (groups=None), return all DataArray attached to a single None key. - Else, collect the DataArrays associated to each group. - """ - if groups is None: - grouped_dataarrays = {None: list_datarrays} - else: - grouped_dataarrays = defaultdict(list) - for datarray in list_datarrays: - for group_name, group_members in groups.items(): - if datarray.attrs['name'] in group_members: - grouped_dataarrays[group_name].append(datarray) - break - return grouped_dataarrays - - -def make_cf_dataarray(dataarray, - epoch=EPOCH, - flatten_attrs=False, - exclude_attrs=None, - include_orig_name=True, - numeric_name_prefix='CHANNEL_'): - """Make the xr.DataArray CF-compliant. - - Parameters - ---------- - dataarray : xr.DataArray - The data array to be made CF-compliant. - epoch : str, optional - Reference time for encoding of time coordinates. - flatten_attrs : bool, optional - If True, flatten dict-type attributes. - The default is False. 
- exclude_attrs : list, optional - List of dataset attributes to be excluded. - The default is None. - include_orig_name : bool, optional - Include the original dataset name in the netcdf variable attributes. - The default is True. - numeric_name_prefix : TYPE, optional - Prepend dataset name with this if starting with a digit. - The default is ``"CHANNEL_"``. - - Returns - ------- - new_data : xr.DataArray - CF-compliant xr.DataArray. - - """ - from satpy.writers.cf.attrs import preprocess_datarray_attrs - from satpy.writers.cf.time import _process_time_coord - - dataarray = _preprocess_dataarray_name(dataarray=dataarray, - numeric_name_prefix=numeric_name_prefix, - include_orig_name=include_orig_name) - dataarray = preprocess_datarray_attrs(dataarray=dataarray, - flatten_attrs=flatten_attrs, - exclude_attrs=exclude_attrs) - dataarray = add_xy_coords_attrs(dataarray) - dataarray = _process_time_coord(dataarray, epoch=epoch) - return dataarray - - def _collect_cf_dataset(list_dataarrays, epoch=EPOCH, flatten_attrs=False, @@ -365,6 +268,7 @@ def _collect_cf_dataset(list_dataarrays, link_coords, make_alt_coords_unique, ) + from satpy.writers.cf.dataarray import make_cf_dataarray # Create dictionary of input datarrays # --> Since keys=None, it doesn't never retrieve ancillary variables !!! @@ -534,6 +438,24 @@ def collect_cf_datasets(list_dataarrays, return grouped_datasets, header_attrs +def _get_groups(groups, list_datarrays): + """Return a dictionary with the list of xr.DataArray associated to each group. + + If no groups (groups=None), return all DataArray attached to a single None key. + Else, collect the DataArrays associated to each group. + """ + if groups is None: + grouped_dataarrays = {None: list_datarrays} + else: + grouped_dataarrays = defaultdict(list) + for datarray in list_datarrays: + for group_name, group_members in groups.items(): + if datarray.attrs['name'] in group_members: + grouped_dataarrays[group_name].append(datarray) + break + return grouped_dataarrays + + def _sanitize_writer_kwargs(writer_kwargs): """Remove satpy-specific kwargs.""" writer_kwargs = copy.deepcopy(writer_kwargs) @@ -575,6 +497,7 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, numeric_name_prefix (str): Prepend dataset name with this if starting with a digit """ + from satpy.writers.cf.dataarray import make_cf_dataarray warnings.warn('CFWriter.da2cf is deprecated.' 
'Use satpy.writers.cf_writer.make_cf_dataarray instead.', DeprecationWarning, stacklevel=3) From 3953319ca3c8f94886e67030d589a35a68491fcc Mon Sep 17 00:00:00 2001 From: ghiggi Date: Wed, 28 Jun 2023 00:19:13 +0200 Subject: [PATCH 078/702] Refactor CF-compliant Datasets creation --- .../writer_tests/cf_tests/test_dataaarray.py | 19 + .../writer_tests/cf_tests/test_datasets.py | 129 +++++++ satpy/tests/writer_tests/test_cf.py | 86 ----- satpy/writers/cf/datasets.py | 272 ++++++++++++++ satpy/writers/cf_writer.py | 340 +++--------------- 5 files changed, 466 insertions(+), 380 deletions(-) create mode 100644 satpy/tests/writer_tests/cf_tests/test_datasets.py create mode 100644 satpy/writers/cf/datasets.py diff --git a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py index 20c893d0a6..13ed11474e 100644 --- a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py +++ b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py @@ -49,6 +49,25 @@ def test_preprocess_dataarray_name(): assert "original_name" not in out_da.attrs +def test_make_cf_dataarray_lonlat(): + """Test correct CF encoding for area with lon/lat units.""" + from pyresample import create_area_def + + from satpy.resample import add_crs_xy_coords + from satpy.writers.cf.dataarray import make_cf_dataarray + + area = create_area_def("mavas", 4326, shape=(5, 5), + center=(0, 0), resolution=(1, 1)) + da = xr.DataArray( + np.arange(25).reshape(5, 5), + dims=("y", "x"), + attrs={"area": area}) + da = add_crs_xy_coords(da, area) + new_da = make_cf_dataarray(da) + assert new_da["x"].attrs["units"] == "degrees_east" + assert new_da["y"].attrs["units"] == "degrees_north" + + class TestCFWriter: """Test creation of CF DataArray.""" diff --git a/satpy/tests/writer_tests/cf_tests/test_datasets.py b/satpy/tests/writer_tests/cf_tests/test_datasets.py new file mode 100644 index 0000000000..d6784cd78f --- /dev/null +++ b/satpy/tests/writer_tests/cf_tests/test_datasets.py @@ -0,0 +1,129 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
+"""Tests CF-compliant DataArray creation.""" +import datetime + +import pytest +import xarray as xr +from pyresample import AreaDefinition + + +def test_empty_collect_cf_datasets(): + """Test that if no DataArrays, collect_cf_datasets raise error.""" + from satpy.writers.cf.datasets import collect_cf_datasets + + with pytest.raises(RuntimeError): + collect_cf_datasets(list_dataarrays=[]) + + +class TestCollectCfDatasets: + """Test case for collect_cf_dataset.""" + + def test_collect_cf_dataarrays(self): + """Test collecting CF datasets from a DataArray objects.""" + from satpy.writers.cf.datasets import _collect_cf_dataset + + geos = AreaDefinition( + area_id='geos', + description='geos', + proj_id='geos', + projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, + width=2, height=2, + area_extent=[-1, -1, 1, 1]) + + # Define test datasets + data = [[1, 2], [3, 4]] + y = [1, 2] + x = [1, 2] + time = [1, 2] + tstart = datetime.datetime(2019, 4, 1, 12, 0) + tend = datetime.datetime(2019, 4, 1, 12, 15) + list_dataarrays = [xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, + attrs={'name': 'var1', 'start_time': tstart, 'end_time': tend, 'area': geos}), + xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, + attrs={'name': 'var2', 'long_name': 'variable 2'})] + + # Collect datasets + ds = _collect_cf_dataset(list_dataarrays, include_lonlats=True) + + # Test results + assert len(ds.keys()) == 3 + assert set(ds.keys()) == {'var1', 'var2', 'geos'} + + da_var1 = ds['var1'] + da_var2 = ds['var2'] + assert da_var1.name == 'var1' + assert da_var1.attrs['grid_mapping'] == 'geos' + assert da_var1.attrs['long_name'] == 'var1' + # variable 2 + assert 'grid_mapping' not in da_var2.attrs + assert da_var2.attrs['long_name'] == 'variable 2' + + def test_collect_cf_dataarrays_with_latitude_named_lat(self): + """Test collecting CF datasets with latitude named lat.""" + from satpy.writers.cf.datasets import _collect_cf_dataset + + data = [[75, 2], [3, 4]] + y = [1, 2] + x = [1, 2] + geos = AreaDefinition( + area_id='geos', + description='geos', + proj_id='geos', + projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, + width=2, height=2, + area_extent=[-1, -1, 1, 1]) + datasets = { + 'var1': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'var2': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'lat': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'lon': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x})} + datasets['lat'].attrs['standard_name'] = 'latitude' + datasets['var1'].attrs['standard_name'] = 'dummy' + datasets['var2'].attrs['standard_name'] = 'dummy' + datasets['var2'].attrs['area'] = geos + datasets['var1'].attrs['area'] = geos + datasets['lat'].attrs['name'] = 'lat' + datasets['var1'].attrs['name'] = 'var1' + datasets['var2'].attrs['name'] = 'var2' + datasets['lon'].attrs['name'] = 'lon' + + datasets_list = [datasets[key] for key in datasets.keys()] + datasets_list_no_latlon = [datasets[key] for key in ['var1', 'var2']] + + # Collect datasets + ds = _collect_cf_dataset(datasets_list, include_lonlats=True) + ds2 = _collect_cf_dataset(datasets_list_no_latlon, include_lonlats=True) + + # Test results + assert len(ds.keys()) == 5 + assert set(ds.keys()) == {'var1', 'var2', 'lon', 'lat', 'geos'} + with pytest.raises(KeyError): + ds['var1'].attrs["latitude"] + with pytest.raises(KeyError): + 
ds['var1'].attrs["longitude"] + assert ds2['var1']['latitude'].attrs['name'] == 'latitude' + assert ds2['var1']['longitude'].attrs['name'] == 'longitude' diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 96cc09069a..628a32fed3 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -86,23 +86,6 @@ def test_lonlat_storage(tmp_path): np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563) -def test_make_cf_dataarray_lonlat(): - """Test correct CF encoding for area with lon/lat units.""" - from satpy.resample import add_crs_xy_coords - from satpy.writers.cf.dataarray import make_cf_dataarray - - area = create_area_def("mavas", 4326, shape=(5, 5), - center=(0, 0), resolution=(1, 1)) - da = xr.DataArray( - np.arange(25).reshape(5, 5), - dims=("y", "x"), - attrs={"area": area}) - da = add_crs_xy_coords(da, area) - new_da = make_cf_dataarray(da) - assert new_da["x"].attrs["units"] == "degrees_east" - assert new_da["y"].attrs["units"] == "degrees_north" - - def test_is_projected(caplog): """Tests for private _is_projected function.""" from satpy.writers.cf.crs import _is_projected @@ -130,14 +113,6 @@ def test_is_projected(caplog): assert "Failed to tell if data are projected." in caplog.text -def test_empty_collect_cf_datasets(): - """Test that if no DataArrays, collect_cf_datasets raise error.""" - from satpy.writers.cf_writer import collect_cf_datasets - - with pytest.raises(RuntimeError): - collect_cf_datasets(list_dataarrays=[]) - - class TestCFWriter: """Test case for CF writer.""" @@ -471,46 +446,6 @@ def test_header_attrs(self): assert f.attrs['bool_'] == 'true' assert 'none' not in f.attrs.keys() - def test_collect_cf_dataarrays(self): - """Test collecting CF datasets from a DataArray objects.""" - from satpy.writers.cf_writer import _collect_cf_dataset - - geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, - width=2, height=2, - area_extent=[-1, -1, 1, 1]) - - # Define test datasets - data = [[1, 2], [3, 4]] - y = [1, 2] - x = [1, 2] - time = [1, 2] - tstart = datetime(2019, 4, 1, 12, 0) - tend = datetime(2019, 4, 1, 12, 15) - list_dataarrays = [xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, - attrs={'name': 'var1', 'start_time': tstart, 'end_time': tend, 'area': geos}), - xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, - attrs={'name': 'var2', 'long_name': 'variable 2'})] - - # Collect datasets - ds = _collect_cf_dataset(list_dataarrays, include_lonlats=True) - - # Test results - assert len(ds.keys()) == 3 - assert set(ds.keys()) == {'var1', 'var2', 'geos'} - - da_var1 = ds['var1'] - da_var2 = ds['var2'] - assert da_var1.name == 'var1' - assert da_var1.attrs['grid_mapping'] == 'geos' - assert da_var1.attrs['long_name'] == 'var1' - # variable 2 - assert 'grid_mapping' not in da_var2.attrs - assert da_var2.attrs['long_name'] == 'variable 2' - def test_load_module_with_old_pyproj(self): """Test that cf_writer can still be loaded with pyproj 1.9.6.""" import importlib @@ -618,27 +553,6 @@ def test_has_projection_coords(self, datasets): datasets['lat'].attrs['standard_name'] = 'dummy' assert not has_projection_coords(datasets) - def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): - """Test collecting CF datasets with latitude named lat.""" - from 
satpy.writers.cf_writer import _collect_cf_dataset - - datasets_list = [datasets[key] for key in datasets.keys()] - datasets_list_no_latlon = [datasets[key] for key in ['var1', 'var2']] - - # Collect datasets - ds = _collect_cf_dataset(datasets_list, include_lonlats=True) - ds2 = _collect_cf_dataset(datasets_list_no_latlon, include_lonlats=True) - - # Test results - assert len(ds.keys()) == 5 - assert set(ds.keys()) == {'var1', 'var2', 'lon', 'lat', 'geos'} - with pytest.raises(KeyError): - ds['var1'].attrs["latitude"] - with pytest.raises(KeyError): - ds['var1'].attrs["longitude"] - assert ds2['var1']['latitude'].attrs['name'] == 'latitude' - assert ds2['var1']['longitude'].attrs['name'] == 'longitude' - class TestNetcdfEncodingKwargs: """Test netCDF compression encodings.""" diff --git a/satpy/writers/cf/datasets.py b/satpy/writers/cf/datasets.py new file mode 100644 index 0000000000..09726c0e32 --- /dev/null +++ b/satpy/writers/cf/datasets.py @@ -0,0 +1,272 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Utility to generate a CF-compliant Datasets.""" +import warnings +from collections import defaultdict + +import xarray as xr + +from satpy.writers.cf.time import EPOCH +from satpy.writers.cf_writer import CF_DTYPES, CF_VERSION + + +def get_extra_ds(dataarray, keys=None): + """Get the ancillary_variables DataArrays associated to a dataset.""" + ds_collection = {} + # Retrieve ancillary variable datarrays + for ancillary_dataarray in dataarray.attrs.get('ancillary_variables', []): + ancillary_variable = ancillary_dataarray.name + if keys and ancillary_variable not in keys: + keys.append(ancillary_variable) + ds_collection.update(get_extra_ds(ancillary_dataarray, keys=keys)) + # Add input dataarray + ds_collection[dataarray.attrs['name']] = dataarray + return ds_collection + + +def _get_groups(groups, list_datarrays): + """Return a dictionary with the list of xr.DataArray associated to each group. + + If no groups (groups=None), return all DataArray attached to a single None key. + Else, collect the DataArrays associated to each group. + """ + if groups is None: + grouped_dataarrays = {None: list_datarrays} + else: + grouped_dataarrays = defaultdict(list) + for datarray in list_datarrays: + for group_name, group_members in groups.items(): + if datarray.attrs['name'] in group_members: + grouped_dataarrays[group_name].append(datarray) + break + return grouped_dataarrays + + +def _collect_cf_dataset(list_dataarrays, + epoch=EPOCH, + flatten_attrs=False, + exclude_attrs=None, + include_lonlats=True, + pretty=False, + include_orig_name=True, + numeric_name_prefix='CHANNEL_'): + """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Dataset. + + Parameters + ---------- + list_dataarrays : list + List of DataArrays to make CF compliant and merge into a xr.Dataset. 
+ epoch : str + Reference time for encoding the time coordinates (if available). + Example format: "seconds since 1970-01-01 00:00:00". + If None, the default reference time is retrieved using `from satpy.cf_writer import EPOCH` + flatten_attrs : bool, optional + If True, flatten dict-type attributes. + exclude_attrs : list, optional + List of xr.DataArray attribute names to be excluded. + include_lonlats : bool, optional + If True, it includes 'latitude' and 'longitude' coordinates also for satpy scene defined on an AreaDefinition. + If the 'area' attribute is a SwathDefinition, it always include latitude and longitude coordinates. + pretty : bool, optional + Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. + include_orig_name : bool, optional + Include the original dataset name as a variable attribute in the xr.Dataset. + numeric_name_prefix : str, optional + Prefix to add the each variable with name starting with a digit. + Use '' or None to leave this out. + + Returns + ------- + ds : xr.Dataset + A partially CF-compliant xr.Dataset + """ + from satpy.writers.cf.area import ( + area2cf, + assert_xy_unique, + has_projection_coords, + link_coords, + make_alt_coords_unique, + ) + from satpy.writers.cf.dataarray import make_cf_dataarray + + # Create dictionary of input datarrays + # --> Since keys=None, it doesn't never retrieve ancillary variables !!! + ds_collection = {} + for dataarray in list_dataarrays: + ds_collection.update(get_extra_ds(dataarray)) + + # Check if one DataArray in the collection has 'longitude' or 'latitude' + got_lonlats = has_projection_coords(ds_collection) + + # Sort dictionary by keys name + ds_collection = dict(sorted(ds_collection.items())) + + dict_dataarrays = {} + for dataarray in ds_collection.values(): + dataarray_type = dataarray.dtype + if dataarray_type not in CF_DTYPES: + warnings.warn( + f'dtype {dataarray_type} not compatible with {CF_VERSION}.', + stacklevel=3 + ) + # Deep copy the datarray since adding/modifying attributes and coordinates + dataarray = dataarray.copy(deep=True) + + # Add CF-compliant area information from the pyresample area + # - If include_lonlats=True, add latitude and longitude coordinates + # - Add grid_mapping attribute to the DataArray + # - Return the CRS DataArray as first list element + # - Return the CF-compliant input DataArray as second list element + try: + list_new_dataarrays = area2cf(dataarray, + include_lonlats=include_lonlats, + got_lonlats=got_lonlats) + except KeyError: + list_new_dataarrays = [dataarray] + + # Ensure each DataArray is CF-compliant + # --> NOTE: Here the CRS DataArray is repeatedly overwrited + # --> NOTE: If the input list_dataarrays have different pyresample areas with the same name + # area information can be lost here !!! + for new_dataarray in list_new_dataarrays: + new_dataarray = make_cf_dataarray(new_dataarray, + epoch=epoch, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix) + dict_dataarrays[new_dataarray.name] = new_dataarray + + # Check all DataArray have same size + assert_xy_unique(dict_dataarrays) + + # Deal with the 'coordinates' attributes indicating lat/lon coords + # NOTE: this currently is dropped by default !!! 
+ link_coords(dict_dataarrays) + + # Ensure non-dimensional coordinates to be unique across DataArrays + # --> If not unique, prepend the DataArray name to the coordinate + # --> If unique, does not prepend the DataArray name only if pretty=True + # --> 'longitude' and 'latitude' coordinates are not prepended + dict_dataarrays = make_alt_coords_unique(dict_dataarrays, pretty=pretty) + + # Create a xr.Dataset + ds = xr.Dataset(dict_dataarrays) + return ds + + +def collect_cf_datasets(list_dataarrays, + header_attrs=None, + exclude_attrs=None, + flatten_attrs=False, + pretty=True, + include_lonlats=True, + epoch=EPOCH, + include_orig_name=True, + numeric_name_prefix='CHANNEL_', + groups=None): + """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Datasets. + + If the xr.DataArrays does not share the same dimensions, it creates a collection + of xr.Datasets sharing the same dimensions. + + Parameters + ---------- + list_dataarrays (list): + List of DataArrays to make CF compliant and merge into groups of xr.Datasets. + header_attrs: (dict): + Global attributes of the output xr.Dataset. + epoch (str): + Reference time for encoding the time coordinates (if available). + Example format: "seconds since 1970-01-01 00:00:00". + If None, the default reference time is retrieved using `from satpy.cf_writer import EPOCH` + flatten_attrs (bool): + If True, flatten dict-type attributes. + exclude_attrs (list): + List of xr.DataArray attribute names to be excluded. + include_lonlats (bool): + If True, it includes 'latitude' and 'longitude' coordinates also for satpy scene defined on an AreaDefinition. + If the 'area' attribute is a SwathDefinition, it always include latitude and longitude coordinates. + pretty (bool): + Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. + include_orig_name (bool). + Include the original dataset name as a variable attribute in the xr.Dataset. + numeric_name_prefix (str): + Prefix to add the each variable with name starting with a digit. + Use '' or None to leave this out. + groups (dict): + Group datasets according to the given assignment: + + `{'': ['dataset_name1', 'dataset_name2', ...]}` + + It is used to create grouped netCDFs using the CF_Writer. + If None (the default), no groups will be created. + + Returns + ------- + grouped_datasets : dict + A dictionary of CF-compliant xr.Dataset: {group_name: xr.Dataset} + header_attrs : dict + Global attributes to be attached to the xr.Dataset / netCDF4. + """ + from satpy.writers.cf.attrs import preprocess_header_attrs + from satpy.writers.cf.time import add_time_bounds_dimension + + if not list_dataarrays: + raise RuntimeError("None of the requested datasets have been " + "generated or could not be loaded. Requested " + "composite inputs may need to have matching " + "dimensions (eg. through resampling).") + + header_attrs = preprocess_header_attrs(header_attrs=header_attrs, + flatten_attrs=flatten_attrs) + + # Retrieve groups + # - If groups is None: {None: list_dataarrays} + # - if groups not None: {group_name: [xr.DataArray, xr.DataArray ,..], ...} + # Note: if all dataset names are wrong, behave like groups = None ! + grouped_dataarrays = _get_groups(groups, list_dataarrays) + is_grouped = len(grouped_dataarrays) >= 2 + + # If not grouped, add CF conventions. + # - If 'Conventions' key already present, do not overwrite ! 
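The groups argument is resolved by the _get_groups helper above; only the 'name' attribute of each DataArray matters for the assignment. A small sketch, with invented dataset names and group labels, assuming the new satpy.writers.cf.datasets module:

import numpy as np
import xarray as xr

from satpy.writers.cf.datasets import _get_groups

vis = xr.DataArray(np.zeros((2, 2)), dims=("y", "x"), attrs={"name": "vis_006"})
ir = xr.DataArray(np.zeros((2, 2)), dims=("y", "x"), attrs={"name": "ir_108"})

# No grouping requested: everything hangs off a single None key.
ungrouped = _get_groups(None, [vis, ir])
assert list(ungrouped) == [None]

# Explicit grouping by dataset name.
grouped = _get_groups({"visible": ["vis_006"], "infrared": ["ir_108"]}, [vis, ir])
assert grouped["visible"][0] is vis
assert grouped["infrared"][0] is ir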
+ if "Conventions" not in header_attrs and not is_grouped: + header_attrs['Conventions'] = CF_VERSION + + # Create dictionary of group xr.Datasets + # --> If no groups (groups=None) --> group_name=None + grouped_datasets = {} + for group_name, group_dataarrays in grouped_dataarrays.items(): + ds = _collect_cf_dataset( + list_dataarrays=group_dataarrays, + epoch=epoch, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs, + include_lonlats=include_lonlats, + pretty=pretty, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix) + + if not is_grouped: + ds.attrs = header_attrs + + if 'time' in ds: + ds = add_time_bounds_dimension(ds, time="time") + + grouped_datasets[group_name] = ds + return grouped_datasets, header_attrs diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index d7503860b4..85c6fe999b 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -157,7 +157,6 @@ import copy import logging import warnings -from collections import defaultdict import numpy as np import xarray as xr @@ -183,6 +182,10 @@ if netCDF4 is None and h5netcdf is None: raise ImportError('Ensure that the netCDF4 or h5netcdf package is installed.') + +CF_VERSION = 'CF-1.7' + + # Numpy datatypes compatible with all netCDF4 backends. ``np.unicode_`` is # excluded because h5py (and thus h5netcdf) has problems with unicode, see # https://github.com/h5py/h5py/issues/624.""" @@ -202,259 +205,6 @@ np.dtype('float64'), np.string_] -CF_VERSION = 'CF-1.7' - - -def get_extra_ds(dataarray, keys=None): - """Get the ancillary_variables DataArrays associated to a dataset.""" - ds_collection = {} - # Retrieve ancillary variable datarrays - for ancillary_dataarray in dataarray.attrs.get('ancillary_variables', []): - ancillary_variable = ancillary_dataarray.name - if keys and ancillary_variable not in keys: - keys.append(ancillary_variable) - ds_collection.update(get_extra_ds(ancillary_dataarray, keys=keys)) - # Add input dataarray - ds_collection[dataarray.attrs['name']] = dataarray - return ds_collection - - -# ###--------------------------------------------------------------------------. -# ### CF-conversion - - -def _collect_cf_dataset(list_dataarrays, - epoch=EPOCH, - flatten_attrs=False, - exclude_attrs=None, - include_lonlats=True, - pretty=False, - include_orig_name=True, - numeric_name_prefix='CHANNEL_'): - """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Dataset. - - Parameters - ---------- - list_dataarrays : list - List of DataArrays to make CF compliant and merge into a xr.Dataset. - epoch : str - Reference time for encoding the time coordinates (if available). - Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using `from satpy.cf_writer import EPOCH` - flatten_attrs : bool, optional - If True, flatten dict-type attributes. - exclude_attrs : list, optional - List of xr.DataArray attribute names to be excluded. - include_lonlats : bool, optional - If True, it includes 'latitude' and 'longitude' coordinates also for satpy scene defined on an AreaDefinition. - If the 'area' attribute is a SwathDefinition, it always include latitude and longitude coordinates. - pretty : bool, optional - Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. - include_orig_name : bool, optional - Include the original dataset name as a variable attribute in the xr.Dataset. 
- numeric_name_prefix : str, optional - Prefix to add the each variable with name starting with a digit. - Use '' or None to leave this out. - - Returns - ------- - ds : xr.Dataset - A partially CF-compliant xr.Dataset - """ - from satpy.writers.cf.area import ( - area2cf, - assert_xy_unique, - has_projection_coords, - link_coords, - make_alt_coords_unique, - ) - from satpy.writers.cf.dataarray import make_cf_dataarray - - # Create dictionary of input datarrays - # --> Since keys=None, it doesn't never retrieve ancillary variables !!! - ds_collection = {} - for dataarray in list_dataarrays: - ds_collection.update(get_extra_ds(dataarray)) - - # Check if one DataArray in the collection has 'longitude' or 'latitude' - got_lonlats = has_projection_coords(ds_collection) - - # Sort dictionary by keys name - ds_collection = dict(sorted(ds_collection.items())) - - dict_dataarrays = {} - for dataarray in ds_collection.values(): - dataarray_type = dataarray.dtype - if dataarray_type not in CF_DTYPES: - warnings.warn( - f'dtype {dataarray_type} not compatible with {CF_VERSION}.', - stacklevel=3 - ) - # Deep copy the datarray since adding/modifying attributes and coordinates - dataarray = dataarray.copy(deep=True) - - # Add CF-compliant area information from the pyresample area - # - If include_lonlats=True, add latitude and longitude coordinates - # - Add grid_mapping attribute to the DataArray - # - Return the CRS DataArray as first list element - # - Return the CF-compliant input DataArray as second list element - try: - list_new_dataarrays = area2cf(dataarray, - include_lonlats=include_lonlats, - got_lonlats=got_lonlats) - except KeyError: - list_new_dataarrays = [dataarray] - - # Ensure each DataArray is CF-compliant - # --> NOTE: Here the CRS DataArray is repeatedly overwrited - # --> NOTE: If the input list_dataarrays have different pyresample areas with the same name - # area information can be lost here !!! - for new_dataarray in list_new_dataarrays: - new_dataarray = make_cf_dataarray(new_dataarray, - epoch=epoch, - flatten_attrs=flatten_attrs, - exclude_attrs=exclude_attrs, - include_orig_name=include_orig_name, - numeric_name_prefix=numeric_name_prefix) - dict_dataarrays[new_dataarray.name] = new_dataarray - - # Check all DataArray have same size - assert_xy_unique(dict_dataarrays) - - # Deal with the 'coordinates' attributes indicating lat/lon coords - # NOTE: this currently is dropped by default !!! - link_coords(dict_dataarrays) - - # Ensure non-dimensional coordinates to be unique across DataArrays - # --> If not unique, prepend the DataArray name to the coordinate - # --> If unique, does not prepend the DataArray name only if pretty=True - # --> 'longitude' and 'latitude' coordinates are not prepended - dict_dataarrays = make_alt_coords_unique(dict_dataarrays, pretty=pretty) - - # Create a xr.Dataset - ds = xr.Dataset(dict_dataarrays) - return ds - - -def collect_cf_datasets(list_dataarrays, - header_attrs=None, - exclude_attrs=None, - flatten_attrs=False, - pretty=True, - include_lonlats=True, - epoch=EPOCH, - include_orig_name=True, - numeric_name_prefix='CHANNEL_', - groups=None): - """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Datasets. - - If the xr.DataArrays does not share the same dimensions, it creates a collection - of xr.Datasets sharing the same dimensions. - - Parameters - ---------- - list_dataarrays (list): - List of DataArrays to make CF compliant and merge into groups of xr.Datasets. 
- header_attrs: (dict): - Global attributes of the output xr.Dataset. - epoch (str): - Reference time for encoding the time coordinates (if available). - Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using `from satpy.cf_writer import EPOCH` - flatten_attrs (bool): - If True, flatten dict-type attributes. - exclude_attrs (list): - List of xr.DataArray attribute names to be excluded. - include_lonlats (bool): - If True, it includes 'latitude' and 'longitude' coordinates also for satpy scene defined on an AreaDefinition. - If the 'area' attribute is a SwathDefinition, it always include latitude and longitude coordinates. - pretty (bool): - Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. - include_orig_name (bool). - Include the original dataset name as a variable attribute in the xr.Dataset. - numeric_name_prefix (str): - Prefix to add the each variable with name starting with a digit. - Use '' or None to leave this out. - groups (dict): - Group datasets according to the given assignment: - - `{'': ['dataset_name1', 'dataset_name2', ...]}` - - It is used to create grouped netCDFs using the CF_Writer. - If None (the default), no groups will be created. - - Returns - ------- - grouped_datasets : dict - A dictionary of CF-compliant xr.Dataset: {group_name: xr.Dataset} - header_attrs : dict - Global attributes to be attached to the xr.Dataset / netCDF4. - """ - from satpy.writers.cf.attrs import preprocess_header_attrs - from satpy.writers.cf.time import add_time_bounds_dimension - - if not list_dataarrays: - raise RuntimeError("None of the requested datasets have been " - "generated or could not be loaded. Requested " - "composite inputs may need to have matching " - "dimensions (eg. through resampling).") - - header_attrs = preprocess_header_attrs(header_attrs=header_attrs, - flatten_attrs=flatten_attrs) - - # Retrieve groups - # - If groups is None: {None: list_dataarrays} - # - if groups not None: {group_name: [xr.DataArray, xr.DataArray ,..], ...} - # Note: if all dataset names are wrong, behave like groups = None ! - grouped_dataarrays = _get_groups(groups, list_dataarrays) - is_grouped = len(grouped_dataarrays) >= 2 - - # If not grouped, add CF conventions. - # - If 'Conventions' key already present, do not overwrite ! - if "Conventions" not in header_attrs and not is_grouped: - header_attrs['Conventions'] = CF_VERSION - - # Create dictionary of group xr.Datasets - # --> If no groups (groups=None) --> group_name=None - grouped_datasets = {} - for group_name, group_dataarrays in grouped_dataarrays.items(): - ds = _collect_cf_dataset( - list_dataarrays=group_dataarrays, - epoch=epoch, - flatten_attrs=flatten_attrs, - exclude_attrs=exclude_attrs, - include_lonlats=include_lonlats, - pretty=pretty, - include_orig_name=include_orig_name, - numeric_name_prefix=numeric_name_prefix) - - if not is_grouped: - ds.attrs = header_attrs - - if 'time' in ds: - ds = add_time_bounds_dimension(ds, time="time") - - grouped_datasets[group_name] = ds - return grouped_datasets, header_attrs - - -def _get_groups(groups, list_datarrays): - """Return a dictionary with the list of xr.DataArray associated to each group. - - If no groups (groups=None), return all DataArray attached to a single None key. - Else, collect the DataArrays associated to each group. 
- """ - if groups is None: - grouped_dataarrays = {None: list_datarrays} - else: - grouped_dataarrays = defaultdict(list) - for datarray in list_datarrays: - for group_name, group_members in groups.items(): - if datarray.attrs['name'] in group_members: - grouped_dataarrays[group_name].append(datarray) - break - return grouped_dataarrays - def _sanitize_writer_kwargs(writer_kwargs): """Remove satpy-specific kwargs.""" @@ -478,46 +228,6 @@ def _initialize_root_netcdf(filename, engine, header_attrs, to_netcdf_kwargs): class CFWriter(Writer): """Writer producing NetCDF/CF compatible datasets.""" - @staticmethod - def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, - include_orig_name=True, numeric_name_prefix='CHANNEL_'): - """Convert the dataarray to something cf-compatible. - - Args: - dataarray (xr.DataArray): - The data array to be converted - epoch (str): - Reference time for encoding of time coordinates - flatten_attrs (bool): - If True, flatten dict-type attributes - exclude_attrs (list): - List of dataset attributes to be excluded - include_orig_name (bool): - Include the original dataset name in the netcdf variable attributes - numeric_name_prefix (str): - Prepend dataset name with this if starting with a digit - """ - from satpy.writers.cf.dataarray import make_cf_dataarray - warnings.warn('CFWriter.da2cf is deprecated.' - 'Use satpy.writers.cf_writer.make_cf_dataarray instead.', - DeprecationWarning, stacklevel=3) - return make_cf_dataarray(dataarray=dataarray, - epoch=epoch, - flatten_attrs=flatten_attrs, - exclude_attrs=exclude_attrs, - include_orig_name=include_orig_name, - numeric_name_prefix=numeric_name_prefix) - - @staticmethod - def update_encoding(dataset, to_netcdf_kwargs): - """Update encoding info (deprecated).""" - from satpy.writers.cf.encoding import update_encoding - - warnings.warn('CFWriter.update_encoding is deprecated. ' - 'Use satpy.writers.cf.encoding.update_encoding instead.', - DeprecationWarning, stacklevel=3) - return update_encoding(dataset, to_netcdf_kwargs) - def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs): """Save the *dataset* to a given *filename*.""" return self.save_datasets([dataset], filename, **kwargs) @@ -560,6 +270,7 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. """ + from satpy.writers.cf.datasets import collect_cf_datasets from satpy.writers.cf.encoding import update_encoding logger.info('Saving datasets to NetCDF4/CF.') @@ -614,6 +325,47 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, written.append(res) return written + @staticmethod + def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, + include_orig_name=True, numeric_name_prefix='CHANNEL_'): + """Convert the dataarray to something cf-compatible. + + Args: + dataarray (xr.DataArray): + The data array to be converted + epoch (str): + Reference time for encoding of time coordinates + flatten_attrs (bool): + If True, flatten dict-type attributes + exclude_attrs (list): + List of dataset attributes to be excluded + include_orig_name (bool): + Include the original dataset name in the netcdf variable attributes + numeric_name_prefix (str): + Prepend dataset name with this if starting with a digit + """ + from satpy.writers.cf.dataarray import make_cf_dataarray + warnings.warn('CFWriter.da2cf is deprecated.' 
+ 'Use satpy.writers.cf_writer.make_cf_dataarray instead.', + DeprecationWarning, stacklevel=3) + return make_cf_dataarray(dataarray=dataarray, + epoch=epoch, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix) + + @staticmethod + def update_encoding(dataset, to_netcdf_kwargs): + """Update encoding info (deprecated).""" + from satpy.writers.cf.encoding import update_encoding + + warnings.warn('CFWriter.update_encoding is deprecated. ' + 'Use satpy.writers.cf.encoding.update_encoding instead.', + DeprecationWarning, stacklevel=3) + return update_encoding(dataset, to_netcdf_kwargs) + + # --------------------------------------------------------------------------. # NetCDF version From 064558d25f460b2ba8c3a59082e0c9dee85d568a Mon Sep 17 00:00:00 2001 From: ghiggi Date: Wed, 28 Jun 2023 00:20:53 +0200 Subject: [PATCH 079/702] Fix changed imports --- satpy/_scene_converters.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py index 25fe728b9f..ba4432a58f 100644 --- a/satpy/_scene_converters.py +++ b/satpy/_scene_converters.py @@ -90,7 +90,8 @@ def to_xarray(scn, A CF-compliant xr.Dataset """ - from satpy.writers.cf_writer import EPOCH, collect_cf_datasets + from satpy.writers.cf.datasets import collect_cf_datasets + from satpy.writers.cf.time import EPOCH if epoch is None: epoch = EPOCH From 5762950332a450f4f3d36ff9af01bae26271e6e4 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Wed, 28 Jun 2023 00:44:41 +0200 Subject: [PATCH 080/702] Fix all writers tests --- .../tests/writer_tests/cf_tests/test_area.py | 87 ++++++++++++++ .../writer_tests/cf_tests/test_datasets.py | 23 +++- satpy/tests/writer_tests/test_cf.py | 108 ------------------ 3 files changed, 109 insertions(+), 109 deletions(-) diff --git a/satpy/tests/writer_tests/cf_tests/test_area.py b/satpy/tests/writer_tests/cf_tests/test_area.py index e293ff39a6..e3454b3170 100644 --- a/satpy/tests/writer_tests/cf_tests/test_area.py +++ b/satpy/tests/writer_tests/cf_tests/test_area.py @@ -16,12 +16,21 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
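For callers, the practical consequence of this refactor is the new import locations used in the _scene_converters.py fix above. A minimal sketch of invoking the relocated collect_cf_datasets on a bare DataArray; the name "my_channel" is invented, and a real Satpy dataset would normally also carry 'area' and time metadata:

import numpy as np
import xarray as xr

from satpy.writers.cf.datasets import collect_cf_datasets
from satpy.writers.cf.time import EPOCH

data_arr = xr.DataArray(np.zeros((2, 2), dtype=np.float32), dims=("y", "x"),
                        attrs={"name": "my_channel"})

grouped_datasets, header_attrs = collect_cf_datasets([data_arr], epoch=EPOCH, groups=None)
# With groups=None everything lands in a single xr.Dataset under the None key,
# and the CF 'Conventions' attribute is added because the result is not grouped.
cf_ds = grouped_datasets[None]
assert "Conventions" in cf_ds.attrs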
"""Tests for the CF Area.""" +import logging + import dask.array as da import numpy as np import pytest import xarray as xr from pyresample import AreaDefinition, SwathDefinition +logger = logging.getLogger(__name__) + + +# NOTE: +# The following fixtures are not defined in this file, but are used and injected by Pytest: +# - caplog + class TestCFArea: """Test case for CF Area.""" @@ -399,3 +408,81 @@ def test_add_lonlat_coords(self): np.testing.assert_array_equal(lon.data, lons_ref) assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() + + def test_is_projected(self, caplog): + """Tests for private _is_projected function.""" + from satpy.writers.cf.crs import _is_projected + + # test case with units but no area + da = xr.DataArray( + np.arange(25).reshape(5, 5), + dims=("y", "x"), + coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "m"}), + "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "m"})}) + assert _is_projected(da) + + da = xr.DataArray( + np.arange(25).reshape(5, 5), + dims=("y", "x"), + coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "degrees_east"}), + "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "degrees_north"})}) + assert not _is_projected(da) + + da = xr.DataArray( + np.arange(25).reshape(5, 5), + dims=("y", "x")) + with caplog.at_level(logging.WARNING): + assert _is_projected(da) + assert "Failed to tell if data are projected." in caplog.text + + @pytest.fixture + def datasets(self): + """Create test dataset.""" + data = [[75, 2], [3, 4]] + y = [1, 2] + x = [1, 2] + geos = AreaDefinition( + area_id='geos', + description='geos', + proj_id='geos', + projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, + width=2, height=2, + area_extent=[-1, -1, 1, 1]) + datasets = { + 'var1': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'var2': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'lat': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'lon': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x})} + datasets['lat'].attrs['standard_name'] = 'latitude' + datasets['var1'].attrs['standard_name'] = 'dummy' + datasets['var2'].attrs['standard_name'] = 'dummy' + datasets['var2'].attrs['area'] = geos + datasets['var1'].attrs['area'] = geos + datasets['lat'].attrs['name'] = 'lat' + datasets['var1'].attrs['name'] = 'var1' + datasets['var2'].attrs['name'] = 'var2' + datasets['lon'].attrs['name'] = 'lon' + return datasets + + def test_is_lon_or_lat_dataarray(self, datasets): + """Test the is_lon_or_lat_dataarray function.""" + from satpy.writers.cf.area import is_lon_or_lat_dataarray + + assert is_lon_or_lat_dataarray(datasets['lat']) + assert not is_lon_or_lat_dataarray(datasets['var1']) + + def test_has_projection_coords(self, datasets): + """Test the has_projection_coords function.""" + from satpy.writers.cf.area import has_projection_coords + + assert has_projection_coords(datasets) + datasets['lat'].attrs['standard_name'] = 'dummy' + assert not has_projection_coords(datasets) diff --git a/satpy/tests/writer_tests/cf_tests/test_datasets.py b/satpy/tests/writer_tests/cf_tests/test_datasets.py index d6784cd78f..b094feecbc 100644 --- a/satpy/tests/writer_tests/cf_tests/test_datasets.py +++ b/satpy/tests/writer_tests/cf_tests/test_datasets.py @@ 
-18,9 +18,10 @@ """Tests CF-compliant DataArray creation.""" import datetime +import numpy as np import pytest import xarray as xr -from pyresample import AreaDefinition +from pyresample import AreaDefinition, create_area_def def test_empty_collect_cf_datasets(): @@ -127,3 +128,23 @@ def test_collect_cf_dataarrays_with_latitude_named_lat(self): ds['var1'].attrs["longitude"] assert ds2['var1']['latitude'].attrs['name'] == 'latitude' assert ds2['var1']['longitude'].attrs['name'] == 'longitude' + + def test_geographic_area_coords_attrs(self): + """Test correct storage for area with lon/lat units.""" + from satpy.tests.utils import make_fake_scene + from satpy.writers.cf.datasets import _collect_cf_dataset + + scn = make_fake_scene( + {"ketolysis": np.arange(25).reshape(5, 5)}, + daskify=True, + area=create_area_def("mavas", 4326, shape=(5, 5), + center=(0, 0), resolution=(1, 1))) + + ds = _collect_cf_dataset([scn["ketolysis"]], include_lonlats=False) + assert ds["ketolysis"].attrs["grid_mapping"] == "mavas" + assert ds["mavas"].attrs["grid_mapping_name"] == "latitude_longitude" + assert ds["x"].attrs["units"] == "degrees_east" + assert ds["y"].attrs["units"] == "degrees_north" + assert ds["mavas"].attrs["longitude_of_prime_meridian"] == 0.0 + np.testing.assert_allclose(ds["mavas"].attrs["semi_major_axis"], 6378137.0) + np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 628a32fed3..046e689002 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -17,18 +17,15 @@ # satpy. If not, see . """Tests for the CF writer.""" -import logging import os import tempfile import warnings from datetime import datetime import numpy as np -import pyresample.geometry import pytest import xarray as xr from packaging.version import Version -from pyresample import create_area_def from satpy import Scene from satpy.tests.utils import make_dsq @@ -42,7 +39,6 @@ # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path -# - caplog # - request @@ -65,54 +61,6 @@ def __exit__(self, *args): os.remove(self.filename) -def test_lonlat_storage(tmp_path): - """Test correct storage for area with lon/lat units.""" - from ..utils import make_fake_scene - scn = make_fake_scene( - {"ketolysis": np.arange(25).reshape(5, 5)}, - daskify=True, - area=create_area_def("mavas", 4326, shape=(5, 5), - center=(0, 0), resolution=(1, 1))) - - filename = os.fspath(tmp_path / "test.nc") - scn.save_datasets(filename=filename, writer="cf", include_lonlats=False) - with xr.open_dataset(filename) as ds: - assert ds["ketolysis"].attrs["grid_mapping"] == "mavas" - assert ds["mavas"].attrs["grid_mapping_name"] == "latitude_longitude" - assert ds["x"].attrs["units"] == "degrees_east" - assert ds["y"].attrs["units"] == "degrees_north" - assert ds["mavas"].attrs["longitude_of_prime_meridian"] == 0.0 - np.testing.assert_allclose(ds["mavas"].attrs["semi_major_axis"], 6378137.0) - np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563) - - -def test_is_projected(caplog): - """Tests for private _is_projected function.""" - from satpy.writers.cf.crs import _is_projected - - # test case with units but no area - da = xr.DataArray( - np.arange(25).reshape(5, 5), - dims=("y", "x"), - coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "m"}), - "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": 
"m"})}) - assert _is_projected(da) - - da = xr.DataArray( - np.arange(25).reshape(5, 5), - dims=("y", "x"), - coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "degrees_east"}), - "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "degrees_north"})}) - assert not _is_projected(da) - - da = xr.DataArray( - np.arange(25).reshape(5, 5), - dims=("y", "x")) - with caplog.at_level(logging.WARNING): - assert _is_projected(da) - assert "Failed to tell if data are projected." in caplog.text - - class TestCFWriter: """Test case for CF writer.""" @@ -498,62 +446,6 @@ def test_global_attr_history_and_Conventions(self): assert 'Created by pytroll/satpy on' in f.attrs['history'] -class TestCFWriterData: - """Test case for CF writer where data arrays are needed.""" - - @pytest.fixture - def datasets(self): - """Create test dataset.""" - data = [[75, 2], [3, 4]] - y = [1, 2] - x = [1, 2] - geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, - width=2, height=2, - area_extent=[-1, -1, 1, 1]) - datasets = { - 'var1': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'var2': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'lat': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'lon': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x})} - datasets['lat'].attrs['standard_name'] = 'latitude' - datasets['var1'].attrs['standard_name'] = 'dummy' - datasets['var2'].attrs['standard_name'] = 'dummy' - datasets['var2'].attrs['area'] = geos - datasets['var1'].attrs['area'] = geos - datasets['lat'].attrs['name'] = 'lat' - datasets['var1'].attrs['name'] = 'var1' - datasets['var2'].attrs['name'] = 'var2' - datasets['lon'].attrs['name'] = 'lon' - return datasets - - def test_is_lon_or_lat_dataarray(self, datasets): - """Test the is_lon_or_lat_dataarray function.""" - from satpy.writers.cf.area import is_lon_or_lat_dataarray - - assert is_lon_or_lat_dataarray(datasets['lat']) - assert not is_lon_or_lat_dataarray(datasets['var1']) - - def test_has_projection_coords(self, datasets): - """Test the has_projection_coords function.""" - from satpy.writers.cf.area import has_projection_coords - - assert has_projection_coords(datasets) - datasets['lat'].attrs['standard_name'] = 'dummy' - assert not has_projection_coords(datasets) - - class TestNetcdfEncodingKwargs: """Test netCDF compression encodings.""" From dabaa44223ed4f6acd634b9ab518ec149ca2fbfc Mon Sep 17 00:00:00 2001 From: ghiggi Date: Wed, 28 Jun 2023 00:49:10 +0200 Subject: [PATCH 081/702] Add logging on top-file --- satpy/tests/writer_tests/cf_tests/test_area.py | 3 --- satpy/writers/cf/attrs.py | 4 +++- satpy/writers/cf/dataarray.py | 3 +++ satpy/writers/cf/datasets.py | 3 +++ satpy/writers/cf/encoding.py | 3 +++ satpy/writers/cf/time.py | 5 +++++ 6 files changed, 17 insertions(+), 4 deletions(-) diff --git a/satpy/tests/writer_tests/cf_tests/test_area.py b/satpy/tests/writer_tests/cf_tests/test_area.py index e3454b3170..1dd82ddd9d 100644 --- a/satpy/tests/writer_tests/cf_tests/test_area.py +++ b/satpy/tests/writer_tests/cf_tests/test_area.py @@ -24,9 +24,6 @@ import xarray as xr from pyresample import AreaDefinition, SwathDefinition -logger = logging.getLogger(__name__) - - # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - caplog diff --git a/satpy/writers/cf/attrs.py 
b/satpy/writers/cf/attrs.py index aac0f5f289..153d645594 100644 --- a/satpy/writers/cf/attrs.py +++ b/satpy/writers/cf/attrs.py @@ -16,15 +16,17 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """CF processing of attributes.""" - import datetime import json +import logging from collections import OrderedDict import numpy as np from satpy.writers.utils import flatten_dict +logger = logging.getLogger(__name__) + class AttributeEncoder(json.JSONEncoder): """JSON encoder for dataset attributes.""" diff --git a/satpy/writers/cf/dataarray.py b/satpy/writers/cf/dataarray.py index 9ca90ae52f..fd9b20be5e 100644 --- a/satpy/writers/cf/dataarray.py +++ b/satpy/writers/cf/dataarray.py @@ -16,12 +16,15 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Utility to generate a CF-compliant DataArray.""" +import logging import warnings from satpy.writers.cf.attrs import preprocess_datarray_attrs from satpy.writers.cf.coords_attrs import add_xy_coords_attrs from satpy.writers.cf.time import EPOCH, _process_time_coord +logger = logging.getLogger(__name__) + def _handle_dataarray_name(original_name, numeric_name_prefix): if original_name[0].isdigit(): diff --git a/satpy/writers/cf/datasets.py b/satpy/writers/cf/datasets.py index 09726c0e32..4baabbc894 100644 --- a/satpy/writers/cf/datasets.py +++ b/satpy/writers/cf/datasets.py @@ -16,6 +16,7 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Utility to generate a CF-compliant Datasets.""" +import logging import warnings from collections import defaultdict @@ -24,6 +25,8 @@ from satpy.writers.cf.time import EPOCH from satpy.writers.cf_writer import CF_DTYPES, CF_VERSION +logger = logging.getLogger(__name__) + def get_extra_ds(dataarray, keys=None): """Get the ancillary_variables DataArrays associated to a dataset.""" diff --git a/satpy/writers/cf/encoding.py b/satpy/writers/cf/encoding.py index c8ea0f25f4..55a48f70fd 100644 --- a/satpy/writers/cf/encoding.py +++ b/satpy/writers/cf/encoding.py @@ -16,11 +16,14 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """CF encoding.""" +import logging import numpy as np import xarray as xr from xarray.coding.times import CFDatetimeCoder +logger = logging.getLogger(__name__) + def _set_default_chunks(encoding, dataset): """Update encoding to preserve current dask chunks. diff --git a/satpy/writers/cf/time.py b/satpy/writers/cf/time.py index 821f7b47b0..05b90c4641 100644 --- a/satpy/writers/cf/time.py +++ b/satpy/writers/cf/time.py @@ -16,9 +16,14 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
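The same few lines are added to each of these modules; the idiom is a module-level logger named after the module, so log output can be filtered per sub-module. A sketch on a made-up helper (convert is invented for illustration):

import logging

logger = logging.getLogger(__name__)


def convert(dataarray):
    """Hypothetical helper that reports through the module-level logger."""
    logger.debug("Converting %s", dataarray.attrs.get("name", "unknown"))
    return dataarray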
"""CF processing of time dimension and coordinates.""" +import logging + import numpy as np import xarray as xr +logger = logging.getLogger(__name__) + + EPOCH = u"seconds since 1970-01-01 00:00:00" From ea12d9b348cac72cf86c3c3df6abe140fce5d42e Mon Sep 17 00:00:00 2001 From: Gionata Ghiggi Date: Wed, 28 Jun 2023 11:54:21 +0200 Subject: [PATCH 082/702] Update satpy/tests/writer_tests/cf_tests/test_dataaarray.py --- satpy/tests/writer_tests/cf_tests/test_dataaarray.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py index 13ed11474e..896de5c55b 100644 --- a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py +++ b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py @@ -68,7 +68,7 @@ def test_make_cf_dataarray_lonlat(): assert new_da["y"].attrs["units"] == "degrees_north" -class TestCFWriter: +class TestCfDataArray: """Test creation of CF DataArray.""" def get_test_attrs(self): From bc7001f75573a2a5815f8e6195085791113e5d7c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 18 Jul 2023 13:01:13 -0500 Subject: [PATCH 083/702] Rename viirs_l2_jrr reader to viirs_edr --- satpy/etc/readers/{viirs_l2_jrr.yaml => viirs_edr.yaml} | 4 ++-- satpy/readers/{viirs_l2_jrr.py => viirs_edr.py} | 7 ++++--- .../{test_viirs_l2_jrr.py => test_viirs_edr.py} | 2 +- 3 files changed, 7 insertions(+), 6 deletions(-) rename satpy/etc/readers/{viirs_l2_jrr.yaml => viirs_edr.yaml} (99%) rename satpy/readers/{viirs_l2_jrr.py => viirs_edr.py} (95%) rename satpy/tests/reader_tests/{test_viirs_l2_jrr.py => test_viirs_edr.py} (98%) diff --git a/satpy/etc/readers/viirs_l2_jrr.yaml b/satpy/etc/readers/viirs_edr.yaml similarity index 99% rename from satpy/etc/readers/viirs_l2_jrr.yaml rename to satpy/etc/readers/viirs_edr.yaml index f337909134..3f268b86b5 100644 --- a/satpy/etc/readers/viirs_l2_jrr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -1,6 +1,6 @@ reader: - description: VIIRS NOAA Enterprise L2 product reader - name: viirs_l2_jrr + description: VIIRS NOAA Enterprise EDR product reader + name: viirs_edr reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs] group_keys: ['platform_shortname'] diff --git a/satpy/readers/viirs_l2_jrr.py b/satpy/readers/viirs_edr.py similarity index 95% rename from satpy/readers/viirs_l2_jrr.py rename to satpy/readers/viirs_edr.py index 8d07b3a7c5..80a4c347c3 100644 --- a/satpy/readers/viirs_l2_jrr.py +++ b/satpy/readers/viirs_edr.py @@ -46,8 +46,8 @@ import xarray as xr -from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler +from satpy.utils import get_legacy_chunk_size LOG = logging.getLogger(__name__) @@ -59,11 +59,12 @@ def __init__(self, filename, filename_info, filetype_info): """Initialize the geo filehandler.""" super(VIIRSJRRFileHandler, self).__init__(filename, filename_info, filetype_info) + chunk_size = get_legacy_chunk_size() self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, - chunks={'Columns': CHUNK_SIZE, - 'Rows': CHUNK_SIZE}) + chunks={'Columns': chunk_size, + 'Rows': chunk_size}) if 'columns' in self.nc.dims: self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) elif 'Along_Track_375m' in self.nc.dims: diff --git a/satpy/tests/reader_tests/test_viirs_l2_jrr.py b/satpy/tests/reader_tests/test_viirs_edr.py similarity index 98% rename from satpy/tests/reader_tests/test_viirs_l2_jrr.py rename to satpy/tests/reader_tests/test_viirs_edr.py index 
a462ec1416..def2af7ad2 100644 --- a/satpy/tests/reader_tests/test_viirs_l2_jrr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -27,7 +27,7 @@ import xarray as xr -from satpy.readers.viirs_l2_jrr import VIIRSJRRFileHandler +from satpy.readers.viirs_edr import VIIRSJRRFileHandler class TestVIIRSJRRReader(unittest.TestCase): From 9ff96918c47647f81028b1135fe7b986089d73d4 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 18 Jul 2023 13:29:58 -0500 Subject: [PATCH 084/702] Convert VIIRS EDR tests to pytest --- satpy/tests/reader_tests/test_viirs_edr.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index def2af7ad2..cbd036639f 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -20,17 +20,17 @@ Note: This is adapted from the test_slstr_l2.py code. """ -import unittest from datetime import datetime from unittest import mock from unittest.mock import MagicMock +import pytest import xarray as xr from satpy.readers.viirs_edr import VIIRSJRRFileHandler -class TestVIIRSJRRReader(unittest.TestCase): +class TestVIIRSJRRReader: """Test the VIIRS JRR L2 reader.""" @mock.patch('xarray.open_dataset') @@ -62,7 +62,7 @@ def test_get_dataset(self, mocked_dataset): test.get_dataset('latitude', {'file_key': 'Latitude'}) test.get_dataset('smoke_concentration', {'file_key': 'smoke_concentration'}) test.get_dataset('fire_mask', {'file_key': 'fire_mask'}) - with self.assertRaises(KeyError): + with pytest.raises(KeyError): test.get_dataset('erroneous dataset', {'file_key': 'erroneous dataset'}) mocked_dataset.assert_called() mocked_dataset.reset_mock() @@ -78,8 +78,8 @@ def test_get_startend_times(self, mocked_dataset): tmp.rename.return_value = tmp xr.open_dataset.return_value = tmp hdl = VIIRSJRRFileHandler('somedir/somefile.nc', filename_info, None) - self.assertEqual(hdl.start_time, datetime(2021, 4, 3, 12, 0, 10)) - self.assertEqual(hdl.end_time, datetime(2021, 4, 3, 12, 4, 28)) + assert hdl.start_time == datetime(2021, 4, 3, 12, 0, 10) + assert hdl.end_time == datetime(2021, 4, 3, 12, 4, 28) @mock.patch('xarray.open_dataset') def test_get_platformname(self, mocked_dataset): @@ -88,8 +88,8 @@ def test_get_platformname(self, mocked_dataset): tmp.rename.return_value = tmp xr.open_dataset.return_value = tmp hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'npp'}, None) - self.assertEqual(hdl.platform_name, 'Suomi-NPP') + assert hdl.platform_name == 'Suomi-NPP' hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'JPSS-1'}, None) - self.assertEqual(hdl.platform_name, 'NOAA-20') + assert hdl.platform_name == 'NOAA-20' hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'J01'}, None) - self.assertEqual(hdl.platform_name, 'NOAA-20') + assert hdl.platform_name == 'NOAA-20' From 60deee1b605e2d00ab99903d6b48c52a52383bdb Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 18 Jul 2023 15:11:36 -0500 Subject: [PATCH 085/702] Add surface reflectance specific tests to viirs_edr --- satpy/etc/readers/viirs_edr.yaml | 6 +- satpy/tests/reader_tests/test_viirs_edr.py | 103 +++++++++++++++------ 2 files changed, 79 insertions(+), 30 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 3f268b86b5..bf8a949de8 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -9,17 +9,17 @@ reader: file_types: 
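The unittest-to-pytest conversion in PATCH 084 above follows one mechanical pattern: self.assertEqual becomes a plain assert, and self.assertRaises becomes the pytest.raises context manager. A sketch with an invented FakeHandler standing in for the real file handler:

import pytest


class FakeHandler:
    """Stand-in for VIIRSJRRFileHandler, for illustration only."""

    platform_name = "Suomi-NPP"

    def get_dataset(self, name, info):
        raise KeyError(name)


def test_platform_name():
    assert FakeHandler().platform_name == "Suomi-NPP"


def test_missing_dataset():
    with pytest.raises(KeyError):
        FakeHandler().get_dataset("erroneous dataset", {"file_key": "erroneous dataset"})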
jrr_cloudmask: - file_reader: !!python/name:satpy.readers.viirs_l2_jrr.VIIRSJRRFileHandler + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler variable_prefix: "" file_patterns: - 'JRR-CloudMask_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_aerosol_product: - file_reader: !!python/name:satpy.readers.viirs_l2_jrr.VIIRSJRRFileHandler + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler variable_prefix: "" file_patterns: - 'JRR-ADP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_surfref_product: - file_reader: !!python/name:satpy.readers.viirs_l2_jrr.VIIRSJRRFileHandler + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler variable_prefix: "" file_patterns: - 'SurfRefl_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index cbd036639f..c84fadf500 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -19,30 +19,89 @@ Note: This is adapted from the test_slstr_l2.py code. """ +from __future__ import annotations from datetime import datetime +from pathlib import Path from unittest import mock from unittest.mock import MagicMock +import numpy as np import pytest import xarray as xr +from pyresample import SwathDefinition from satpy.readers.viirs_edr import VIIRSJRRFileHandler +I_COLS = 64 # real-world 6400 +I_ROWS = 32 # one scan +M_COLS = 32 # real-world 3200 +M_ROWS = 16 # one scan +START_TIME = datetime(2023, 5, 30, 17, 55, 41, 0) +END_TIME = datetime(2023, 5, 30, 17, 57, 5, 0) + + +@pytest.fixture(scope="module") +def surface_reflectance_file(tmp_path_factory) -> Path: + """Generate fake surface reflectance EDR file.""" + tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") + fn = f"SurfRefl_v1r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" + file_path = tmp_path / fn + sr_vars = _create_surf_refl_variables() + ds = _create_fake_dataset(sr_vars) + ds.to_netcdf(file_path) + return file_path + + +def _create_fake_dataset(vars_dict: dict[str, xr.DataArray]) -> xr.Dataset: + ds = xr.Dataset( + vars_dict, + attrs={} + ) + return ds + + +def _create_surf_refl_variables() -> dict[str, xr.DataArray]: + dim_y_750 = "Along_Track_750m" + dim_x_750 = "Along_Scan_750m" + m_dims = (dim_y_750, dim_x_750) + dim_y_375 = "Along_Track_375m" + dim_x_375 = "Along_Scan_375m" + i_dims = (dim_y_375, dim_x_375) + + lon_attrs = {"standard_name": "longitude", "units": "degrees_east", "_FillValue": -999.9} + lat_attrs = {"standard_name": "latitude", "units": "degrees_north", "_FillValue": -999.9} + sr_attrs = {"units": "unitless", "_FillValue": -9999, "scale_factor": 0.0001, "add_offset": 0.0} + + i_data = np.zeros((I_ROWS, I_COLS), dtype=np.float32) + m_data = np.zeros((M_ROWS, M_COLS), dtype=np.float32) + data_arrs = { + "Longitude_at_375m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lon_attrs), + "Latitude_at_375m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lat_attrs), + "Longitude_at_750m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lon_attrs), + "Latitude_at_750m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lat_attrs), + "375m Surface Reflectance Band I1": xr.DataArray(i_data, dims=i_dims, attrs=sr_attrs), + "750m Surface Reflectance Band M1": 
xr.DataArray(m_data, dims=m_dims, attrs=sr_attrs), + } + for data_arr in data_arrs.values(): + if "scale_factor" not in data_arr.attrs: + continue + data_arr.encoding["dtype"] = np.int16 + return data_arrs + class TestVIIRSJRRReader: """Test the VIIRS JRR L2 reader.""" - @mock.patch('xarray.open_dataset') - def test_instantiate(self, mocked_dataset): - """Test initialization of file handlers.""" - filename_info = {'platform_shortname': 'npp'} - tmp = MagicMock(start_time='20191120T125002Z', stop_time='20191120T125002Z') - tmp.rename.return_value = tmp - xr.open_dataset.return_value = tmp - VIIRSJRRFileHandler('somedir/somefile.nc', filename_info, None) - mocked_dataset.assert_called() - mocked_dataset.reset_mock() + def test_get_dataset_surf_refl(self, surface_reflectance_file): + """Test retrieval of datasets.""" + from satpy import Scene + scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_file]) + assert scn.start_time == START_TIME + assert scn.end_time == END_TIME + scn.load(["surf_refl_I01", "surf_refl_M01"]) + _check_surf_refl_data_arr(scn["surf_refl_I01"]) + _check_surf_refl_data_arr(scn["surf_refl_M01"]) @mock.patch('xarray.open_dataset') def test_get_dataset(self, mocked_dataset): @@ -55,8 +114,6 @@ def test_get_dataset(self, mocked_dataset): 'Latitude': xr.Dataset(), 'smoke_concentration': xr.Dataset(), 'fire_mask': xr.Dataset(), - 'surf_refl_I01': xr.Dataset(), - 'surf_refl_M05': xr.Dataset(), } test.get_dataset('longitude', {'file_key': 'Longitude'}) test.get_dataset('latitude', {'file_key': 'Latitude'}) @@ -65,21 +122,6 @@ def test_get_dataset(self, mocked_dataset): with pytest.raises(KeyError): test.get_dataset('erroneous dataset', {'file_key': 'erroneous dataset'}) mocked_dataset.assert_called() - mocked_dataset.reset_mock() - test.get_dataset('surf_refl_I01', {'file_key': 'surf_refl_I01'}) - - @mock.patch('xarray.open_dataset') - def test_get_startend_times(self, mocked_dataset): - """Test finding start and end times of granules.""" - filename_info = {'platform_shortname': 'npp', - 'start_time': datetime(2021, 4, 3, 12, 0, 10), - 'end_time': datetime(2021, 4, 3, 12, 4, 28)} - tmp = MagicMock() - tmp.rename.return_value = tmp - xr.open_dataset.return_value = tmp - hdl = VIIRSJRRFileHandler('somedir/somefile.nc', filename_info, None) - assert hdl.start_time == datetime(2021, 4, 3, 12, 0, 10) - assert hdl.end_time == datetime(2021, 4, 3, 12, 4, 28) @mock.patch('xarray.open_dataset') def test_get_platformname(self, mocked_dataset): @@ -93,3 +135,10 @@ def test_get_platformname(self, mocked_dataset): assert hdl.platform_name == 'NOAA-20' hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'J01'}, None) assert hdl.platform_name == 'NOAA-20' + + +def _check_surf_refl_data_arr(data_arr: xr.DataArray) -> None: + assert data_arr.dims == ("y", "x") + assert isinstance(data_arr.attrs["area"], SwathDefinition) + assert np.issubdtype(data_arr.data.dtype, np.float32) + # TODO: More checks From dc1cf11020a9725dcdfe0aa785e5571628794628 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 18 Jul 2023 20:07:19 -0500 Subject: [PATCH 086/702] Comment out new VIIRS EDR test to debug Windows CI hanging --- satpy/readers/viirs_edr.py | 2 ++ satpy/tests/reader_tests/test_viirs_edr.py | 22 ++++++++++++---------- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 80a4c347c3..a68961be97 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -84,6 +84,8 @@ def __init__(self, 
filename, filename_info, filetype_info): def get_dataset(self, dataset_id, info): """Get the dataset.""" ds = self.nc[info['file_key']] + if ds.attrs.get("units", None) == "unitless": + ds.attrs["units"] = "1" return ds diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index c84fadf500..95d7bc4563 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -93,15 +93,15 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]: class TestVIIRSJRRReader: """Test the VIIRS JRR L2 reader.""" - def test_get_dataset_surf_refl(self, surface_reflectance_file): - """Test retrieval of datasets.""" - from satpy import Scene - scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_file]) - assert scn.start_time == START_TIME - assert scn.end_time == END_TIME - scn.load(["surf_refl_I01", "surf_refl_M01"]) - _check_surf_refl_data_arr(scn["surf_refl_I01"]) - _check_surf_refl_data_arr(scn["surf_refl_M01"]) + # def test_get_dataset_surf_refl(self, surface_reflectance_file): + # """Test retrieval of datasets.""" + # from satpy import Scene + # scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_file]) + # assert scn.start_time == START_TIME + # assert scn.end_time == END_TIME + # scn.load(["surf_refl_I01", "surf_refl_M01"]) + # _check_surf_refl_data_arr(scn["surf_refl_I01"]) + # _check_surf_refl_data_arr(scn["surf_refl_M01"]) @mock.patch('xarray.open_dataset') def test_get_dataset(self, mocked_dataset): @@ -141,4 +141,6 @@ def _check_surf_refl_data_arr(data_arr: xr.DataArray) -> None: assert data_arr.dims == ("y", "x") assert isinstance(data_arr.attrs["area"], SwathDefinition) assert np.issubdtype(data_arr.data.dtype, np.float32) - # TODO: More checks + assert data_arr.attrs["units"] == "1" + exp_shape = (M_ROWS, M_COLS) if "M" in data_arr.attrs["name"] else (I_ROWS, I_COLS) + assert data_arr.shape == exp_shape From 74d45a45ba0d0147c3ba0571d203bff96fa977ee Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 18 Jul 2023 20:45:43 -0500 Subject: [PATCH 087/702] Comment out module scoped fixture to see if it stops Windows CI hanging --- satpy/tests/reader_tests/test_viirs_edr.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 95d7bc4563..14e7c50bfe 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -41,16 +41,16 @@ END_TIME = datetime(2023, 5, 30, 17, 57, 5, 0) -@pytest.fixture(scope="module") -def surface_reflectance_file(tmp_path_factory) -> Path: - """Generate fake surface reflectance EDR file.""" - tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") - fn = f"SurfRefl_v1r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" - file_path = tmp_path / fn - sr_vars = _create_surf_refl_variables() - ds = _create_fake_dataset(sr_vars) - ds.to_netcdf(file_path) - return file_path +# @pytest.fixture(scope="module") +# def surface_reflectance_file(tmp_path_factory) -> Path: +# """Generate fake surface reflectance EDR file.""" +# tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") +# fn = f"SurfRefl_v1r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" +# file_path = tmp_path / fn +# sr_vars = _create_surf_refl_variables() +# ds = _create_fake_dataset(sr_vars) +# ds.to_netcdf(file_path) +# return file_path def _create_fake_dataset(vars_dict: 
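The two lines added to VIIRSJRRFileHandler.get_dataset above normalise the product files' literal "unitless" to the dimensionless unit "1" preferred by CF. The standalone sketch below reproduces just that attribute fix on a hand-made DataArray:

import numpy as np
import xarray as xr

ds = xr.DataArray(np.zeros((2, 2), dtype=np.float32), dims=("y", "x"),
                  attrs={"units": "unitless"})

# Same check-and-replace as in the reader.
if ds.attrs.get("units", None) == "unitless":
    ds.attrs["units"] = "1"

assert ds.attrs["units"] == "1"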
dict[str, xr.DataArray]) -> xr.Dataset: From d0943807348be4d6815a964808899557fd325210 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 18 Jul 2023 20:48:19 -0500 Subject: [PATCH 088/702] Fix unused import due to commented out code --- satpy/tests/reader_tests/test_viirs_edr.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 14e7c50bfe..8f984a323d 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -22,7 +22,6 @@ from __future__ import annotations from datetime import datetime -from pathlib import Path from unittest import mock from unittest.mock import MagicMock From 2f0c6aa73d47fe04e1ab2e11dcbdf08a69043ab4 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 19 Jul 2023 18:42:41 +0800 Subject: [PATCH 089/702] Update fy4_base.py --- satpy/readers/fy4_base.py | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/satpy/readers/fy4_base.py b/satpy/readers/fy4_base.py index ceb8ee75bd..9b6b364420 100644 --- a/satpy/readers/fy4_base.py +++ b/satpy/readers/fy4_base.py @@ -212,8 +212,12 @@ def get_area_def(self, key): # https://www.cgms-info.org/documents/cgms-lrit-hrit-global-specification-(v2-8-of-30-oct-2013).pdf res = key['resolution'] pdict = {} - pdict['coff'] = self._COFF_list[RESOLUTION_LIST.index(res)] - pdict['loff'] = self._LOFF_list[RESOLUTION_LIST.index(res)] + + begin_cols = float(self.file_content['/attr/Begin Pixel Number']) + end_lines = float(self.file_content['/attr/End Line Number']) + pdict['coff'] = self._COFF_list[RESOLUTION_LIST.index(res)] - begin_cols + 1 + pdict['loff'] = -self._LOFF_list[RESOLUTION_LIST.index(res)] + end_lines + 1 + pdict['cfac'] = self._CFAC_list[RESOLUTION_LIST.index(res)] pdict['lfac'] = self._LFAC_list[RESOLUTION_LIST.index(res)] try: @@ -240,21 +244,12 @@ def get_area_def(self, key): pdict['a_name'] = f'{self.filename_info["observation_type"]}_{res}m' pdict['p_id'] = f'FY-4, {res}m' - pdict['nlines'] = pdict['nlines'] - 1 - pdict['ncols'] = pdict['ncols'] - 1 - - pdict['coff'] = pdict['coff'] - 0.5 - pdict['loff'] = pdict['loff'] + 1 - area_extent = get_area_extent(pdict) area_extent = (area_extent[0], area_extent[1], area_extent[2], area_extent[3]) - pdict['nlines'] = pdict['nlines'] + 1 - pdict['ncols'] = pdict['ncols'] + 1 - area = get_area_definition(pdict, area_extent) return area From 4b63854d5ded42e0d7d21ce11da99cf40eb1607e Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 19 Jul 2023 20:47:14 +0800 Subject: [PATCH 090/702] Update test_agri_l1.py --- satpy/tests/reader_tests/test_agri_l1.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/satpy/tests/reader_tests/test_agri_l1.py b/satpy/tests/reader_tests/test_agri_l1.py index 722ea05a71..77c8e5e268 100644 --- a/satpy/tests/reader_tests/test_agri_l1.py +++ b/satpy/tests/reader_tests/test_agri_l1.py @@ -40,16 +40,16 @@ RESOLUTION_LIST = [500, 1000, 2000, 4000] AREA_EXTENTS_BY_RESOLUTION = {'FY4A': { - 500: (-5495271.006002, -5496021.008869, -5493270.998357, -5495521.006957), - 1000: (-5494521.070252, -5496021.076004, -5490521.054912, -5495021.072169), - 2000: (-5493021.198696, -5496021.210274, -5485021.167823, -5494021.202556), - 4000: (-5490021.187119, -5496021.210274, -5474021.125371, -5492021.194837) + 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), + 1000: (-5496021.076004, 5494021.068334, -5491021.05683, 5496021.076004), + 2000: 
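The reworked offsets in fy4_base.get_area_def above shift the nominal full-disk column/line offsets by the file's 'Begin Pixel Number' and 'End Line Number' attributes, apparently so that files which do not start at the first pixel or line of the full disk are positioned correctly. A purely illustrative sketch with invented numbers (the real values come from the _COFF_list/_LOFF_list tables and the HDF attributes):

# Invented, illustration-only values.
nominal_coff = 10991.5   # assumed full-disk column offset for this resolution
nominal_loff = 10991.5   # assumed full-disk line offset for this resolution
begin_cols = 1.0         # would come from '/attr/Begin Pixel Number'
end_lines = 2748.0       # would come from '/attr/End Line Number'

# Same arithmetic as the new reader code.
coff = nominal_coff - begin_cols + 1
loff = -nominal_loff + end_lines + 1
print(coff, loff)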
(-5496021.210274, 5492021.194837, -5486021.171682, 5496021.210274), + 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) }, 'FY4B': { - 500: (-5495271.006002, -5496021.008869, -5493270.998357, -5495521.006957), - 1000: (-5494521.070252, -5496021.076004, -5490521.054912, -5495021.072169), - 2000: (-5493021.198696, -5496021.210274, -5485021.167823, -5494021.202556), - 4000: (-5490021.187119, -5496021.210274, -5474021.125371, -5492021.194837) + 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), + 1000: (-5496021.076004, 5494021.068334, -5491021.05683 , 5496021.076004), + 2000: (-5496021.210274, 5492021.194837, -5486021.171682, 5496021.210274), + 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) }} @@ -201,6 +201,8 @@ def get_test_content(self, filename, filename_info, filetype_info): '/attr/RegWidth': np.array(5.0), '/attr/Begin Line Number': np.array(0), '/attr/End Line Number': np.array(1), + '/attr/Begin Pixel Number': np.array(0), + '/attr/End Pixel Number': np.array(1), '/attr/Observing Beginning Date': '2019-06-03', '/attr/Observing Beginning Time': '00:30:01.807', '/attr/Observing Ending Date': '2019-06-03', '/attr/Observing Ending Time': '00:34:07.572', '/attr/Satellite Name': 'FY4A', '/attr/Sensor Identification Code': 'AGRI', '/attr/Sensor Name': 'AGRI', From 0d14a9e0f25de1231146ab033ad628e0c7ba321f Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 19 Jul 2023 20:49:31 +0800 Subject: [PATCH 091/702] Update test_agri_l1.py --- satpy/tests/reader_tests/test_agri_l1.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_agri_l1.py b/satpy/tests/reader_tests/test_agri_l1.py index 77c8e5e268..8bfc5057a7 100644 --- a/satpy/tests/reader_tests/test_agri_l1.py +++ b/satpy/tests/reader_tests/test_agri_l1.py @@ -47,7 +47,7 @@ }, 'FY4B': { 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), - 1000: (-5496021.076004, 5494021.068334, -5491021.05683 , 5496021.076004), + 1000: (-5496021.076004, 5494021.068334, -5491021.05683, 5496021.076004), 2000: (-5496021.210274, 5492021.194837, -5486021.171682, 5496021.210274), 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) }} From 125a0b38b4748fa5df4cb455c88aa513ef0e449e Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 19 Jul 2023 20:50:56 +0800 Subject: [PATCH 092/702] Update test_agri_l1.py --- satpy/tests/reader_tests/test_agri_l1.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/tests/reader_tests/test_agri_l1.py b/satpy/tests/reader_tests/test_agri_l1.py index 8bfc5057a7..44de75a7eb 100644 --- a/satpy/tests/reader_tests/test_agri_l1.py +++ b/satpy/tests/reader_tests/test_agri_l1.py @@ -40,16 +40,16 @@ RESOLUTION_LIST = [500, 1000, 2000, 4000] AREA_EXTENTS_BY_RESOLUTION = {'FY4A': { - 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), - 1000: (-5496021.076004, 5494021.068334, -5491021.05683, 5496021.076004), - 2000: (-5496021.210274, 5492021.194837, -5486021.171682, 5496021.210274), - 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) + 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), + 1000: (-5496021.076004, 5494021.068334, -5491021.05683, 5496021.076004), + 2000: (-5496021.210274, 5492021.194837, -5486021.171682, 5496021.210274), + 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) }, 'FY4B': { - 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), - 1000: 
(-5496021.076004, 5494021.068334, -5491021.05683, 5496021.076004), - 2000: (-5496021.210274, 5492021.194837, -5486021.171682, 5496021.210274), - 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) + 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), + 1000: (-5496021.076004, 5494021.068334, -5491021.05683, 5496021.076004), + 2000: (-5496021.210274, 5492021.194837, -5486021.171682, 5496021.210274), + 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) }} From 192dac3d0cfd39b83585a5e000fa47871c2e8a5a Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 19 Jul 2023 20:51:44 +0800 Subject: [PATCH 093/702] Update test_agri_l1.py --- satpy/tests/reader_tests/test_agri_l1.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_agri_l1.py b/satpy/tests/reader_tests/test_agri_l1.py index 44de75a7eb..12e30f2aac 100644 --- a/satpy/tests/reader_tests/test_agri_l1.py +++ b/satpy/tests/reader_tests/test_agri_l1.py @@ -43,7 +43,7 @@ 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), 1000: (-5496021.076004, 5494021.068334, -5491021.05683, 5496021.076004), 2000: (-5496021.210274, 5492021.194837, -5486021.171682, 5496021.210274), - 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) + 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) }, 'FY4B': { 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), From 17148bbffcfff9f837fabb9c8d5a15b5a4619f84 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 20 Jul 2023 12:16:16 -0500 Subject: [PATCH 094/702] Force libnetcdf to a non-hanging build (for now) --- continuous_integration/environment.yaml | 1 + satpy/tests/reader_tests/test_viirs_edr.py | 39 +++++++++++----------- 2 files changed, 21 insertions(+), 19 deletions(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 48976401a2..46096d7846 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -24,6 +24,7 @@ dependencies: - coverage - codecov - behave + - libnetcdf=4.9.2=nompi_h5902ca5_107 # [win] - netcdf4 - h5py - h5netcdf diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 8f984a323d..43647c4aab 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -22,6 +22,7 @@ from __future__ import annotations from datetime import datetime +from pathlib import Path from unittest import mock from unittest.mock import MagicMock @@ -40,16 +41,16 @@ END_TIME = datetime(2023, 5, 30, 17, 57, 5, 0) -# @pytest.fixture(scope="module") -# def surface_reflectance_file(tmp_path_factory) -> Path: -# """Generate fake surface reflectance EDR file.""" -# tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") -# fn = f"SurfRefl_v1r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" -# file_path = tmp_path / fn -# sr_vars = _create_surf_refl_variables() -# ds = _create_fake_dataset(sr_vars) -# ds.to_netcdf(file_path) -# return file_path +@pytest.fixture(scope="module") +def surface_reflectance_file(tmp_path_factory) -> Path: + """Generate fake surface reflectance EDR file.""" + tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") + fn = f"SurfRefl_v1r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" + file_path = tmp_path / fn + sr_vars = _create_surf_refl_variables() + ds = 
_create_fake_dataset(sr_vars) + ds.to_netcdf(file_path) + return file_path def _create_fake_dataset(vars_dict: dict[str, xr.DataArray]) -> xr.Dataset: @@ -92,15 +93,15 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]: class TestVIIRSJRRReader: """Test the VIIRS JRR L2 reader.""" - # def test_get_dataset_surf_refl(self, surface_reflectance_file): - # """Test retrieval of datasets.""" - # from satpy import Scene - # scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_file]) - # assert scn.start_time == START_TIME - # assert scn.end_time == END_TIME - # scn.load(["surf_refl_I01", "surf_refl_M01"]) - # _check_surf_refl_data_arr(scn["surf_refl_I01"]) - # _check_surf_refl_data_arr(scn["surf_refl_M01"]) + def test_get_dataset_surf_refl(self, surface_reflectance_file): + """Test retrieval of datasets.""" + from satpy import Scene + scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_file]) + assert scn.start_time == START_TIME + assert scn.end_time == END_TIME + scn.load(["surf_refl_I01", "surf_refl_M01"]) + _check_surf_refl_data_arr(scn["surf_refl_I01"]) + _check_surf_refl_data_arr(scn["surf_refl_M01"]) @mock.patch('xarray.open_dataset') def test_get_dataset(self, mocked_dataset): From c9377f768d20f4372050e1d2498c275044c6d801 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 20 Jul 2023 12:38:39 -0500 Subject: [PATCH 095/702] Fix yaml selector for libnetcdf build on windows --- continuous_integration/environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 46096d7846..0cf682e1bb 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -24,7 +24,7 @@ dependencies: - coverage - codecov - behave - - libnetcdf=4.9.2=nompi_h5902ca5_107 # [win] + - libnetcdf=4.9.2=nompi_h5902ca5_107 # [win] - netcdf4 - h5py - h5netcdf From 69645a392f47c6bb1ff97c0a4287d121d58955bf Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 20 Jul 2023 12:58:16 -0500 Subject: [PATCH 096/702] Try environment hack one more time --- .github/workflows/ci.yaml | 3 +++ continuous_integration/environment.yaml | 1 - 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index faa0aea2cc..1c18e4a5cc 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -82,6 +82,9 @@ jobs: - name: Update environment run: mamba env update -n test-environment -f continuous_integration/environment.yaml if: steps.cache.outputs.cache-hit != 'true' + - name: Update environment - libnetcdf + run: mamba install -y -n test-environment libnetcdf=4.9.2=nompi_h5902ca5_107 + if: runner.os == 'Windows' - name: Install unstable dependencies if: matrix.experimental == true diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 0cf682e1bb..48976401a2 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -24,7 +24,6 @@ dependencies: - coverage - codecov - behave - - libnetcdf=4.9.2=nompi_h5902ca5_107 # [win] - netcdf4 - h5py - h5netcdf From fade8513d193168c61ff36aa2ccce634e20f2242 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 20 Jul 2023 14:06:28 -0500 Subject: [PATCH 097/702] Switch VIIRS EDR to modern chunk sizing --- satpy/readers/viirs_edr.py | 14 ++++++++++---- satpy/tests/reader_tests/test_viirs_edr.py | 10 ++++++++-- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git 
a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index a68961be97..f7817833fc 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -47,7 +47,7 @@ import xarray as xr from satpy.readers.file_handlers import BaseFileHandler -from satpy.utils import get_legacy_chunk_size +from satpy.utils import get_chunk_size_limit LOG = logging.getLogger(__name__) @@ -59,12 +59,18 @@ def __init__(self, filename, filename_info, filetype_info): """Initialize the geo filehandler.""" super(VIIRSJRRFileHandler, self).__init__(filename, filename_info, filetype_info) - chunk_size = get_legacy_chunk_size() + chunk_size = get_chunk_size_limit() // 4 # 32-bit floats self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, - chunks={'Columns': chunk_size, - 'Rows': chunk_size}) + chunks={ + 'Columns': chunk_size, + 'Rows': chunk_size, + 'Along_Scan_375m': chunk_size, + 'Along_Track_375m': chunk_size, + 'Along_Scan_750m': chunk_size, + 'Along_Track_750m': chunk_size, + }) if 'columns' in self.nc.dims: self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) elif 'Along_Track_375m' in self.nc.dims: diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 43647c4aab..f049a5e288 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -26,6 +26,8 @@ from unittest import mock from unittest.mock import MagicMock +import dask +import dask.array as da import numpy as np import pytest import xarray as xr @@ -96,10 +98,11 @@ class TestVIIRSJRRReader: def test_get_dataset_surf_refl(self, surface_reflectance_file): """Test retrieval of datasets.""" from satpy import Scene - scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_file]) + with dask.config.set({"array.chunk-size": "16B"}): + scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_file]) + scn.load(["surf_refl_I01", "surf_refl_M01"]) assert scn.start_time == START_TIME assert scn.end_time == END_TIME - scn.load(["surf_refl_I01", "surf_refl_M01"]) _check_surf_refl_data_arr(scn["surf_refl_I01"]) _check_surf_refl_data_arr(scn["surf_refl_M01"]) @@ -140,7 +143,10 @@ def test_get_platformname(self, mocked_dataset): def _check_surf_refl_data_arr(data_arr: xr.DataArray) -> None: assert data_arr.dims == ("y", "x") assert isinstance(data_arr.attrs["area"], SwathDefinition) + assert isinstance(data_arr.data, da.Array) assert np.issubdtype(data_arr.data.dtype, np.float32) + assert all(c == 4 for c in data_arr.chunks[0]) + assert all(c == 4 for c in data_arr.chunks[1]) assert data_arr.attrs["units"] == "1" exp_shape = (M_ROWS, M_COLS) if "M" in data_arr.attrs["name"] else (I_ROWS, I_COLS) assert data_arr.shape == exp_shape From af68e101d4701f5b2fbeab2d67787cc93d0de8bf Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 20 Jul 2023 14:20:42 -0500 Subject: [PATCH 098/702] Update VIIRS EDR chunking to be scan-based --- satpy/readers/viirs_edr.py | 16 +++++++++------- satpy/tests/reader_tests/test_viirs_edr.py | 13 ++++++++----- 2 files changed, 17 insertions(+), 12 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index f7817833fc..8389ea8019 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -59,17 +59,19 @@ def __init__(self, filename, filename_info, filetype_info): """Initialize the geo filehandler.""" super(VIIRSJRRFileHandler, self).__init__(filename, filename_info, filetype_info) - chunk_size = get_chunk_size_limit() // 4 # 32-bit floats + # use entire 
scans as chunks + row_chunks_m = max(get_chunk_size_limit() // 4 // 3200, 1) # 32-bit floats + row_chunks_i = row_chunks_m * 2 self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, chunks={ - 'Columns': chunk_size, - 'Rows': chunk_size, - 'Along_Scan_375m': chunk_size, - 'Along_Track_375m': chunk_size, - 'Along_Scan_750m': chunk_size, - 'Along_Track_750m': chunk_size, + 'Columns': -1, + 'Rows': row_chunks_i, + 'Along_Scan_375m': -1, + 'Along_Track_375m': row_chunks_i, + 'Along_Scan_750m': -1, + 'Along_Track_750m': row_chunks_m, }) if 'columns' in self.nc.dims: self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index f049a5e288..6ab31c6ff6 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -98,7 +98,8 @@ class TestVIIRSJRRReader: def test_get_dataset_surf_refl(self, surface_reflectance_file): """Test retrieval of datasets.""" from satpy import Scene - with dask.config.set({"array.chunk-size": "16B"}): + bytes_in_m_row = 4 * 3200 + with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_file]) scn.load(["surf_refl_I01", "surf_refl_M01"]) assert scn.start_time == START_TIME @@ -145,8 +146,10 @@ def _check_surf_refl_data_arr(data_arr: xr.DataArray) -> None: assert isinstance(data_arr.attrs["area"], SwathDefinition) assert isinstance(data_arr.data, da.Array) assert np.issubdtype(data_arr.data.dtype, np.float32) - assert all(c == 4 for c in data_arr.chunks[0]) - assert all(c == 4 for c in data_arr.chunks[1]) - assert data_arr.attrs["units"] == "1" - exp_shape = (M_ROWS, M_COLS) if "M" in data_arr.attrs["name"] else (I_ROWS, I_COLS) + is_m_band = "I" not in data_arr.attrs["name"] + exp_shape = (M_ROWS, M_COLS) if is_m_band else (I_ROWS, I_COLS) assert data_arr.shape == exp_shape + exp_row_chunks = 4 if is_m_band else 8 + assert all(c == exp_row_chunks for c in data_arr.chunks[0]) + assert data_arr.chunks[1] == (exp_shape[1],) + assert data_arr.attrs["units"] == "1" From ab555c2de2de52390ece06f78179a6aa251230d3 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 20 Jul 2023 15:00:05 -0500 Subject: [PATCH 099/702] Remove mocking of platform name VIIRS EDR test --- satpy/readers/viirs_edr.py | 1 + satpy/tests/reader_tests/test_viirs_edr.py | 31 +++++++++++++--------- 2 files changed, 19 insertions(+), 13 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 8389ea8019..d02d38f53e 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -94,6 +94,7 @@ def get_dataset(self, dataset_id, info): ds = self.nc[info['file_key']] if ds.attrs.get("units", None) == "unitless": ds.attrs["units"] = "1" + ds.attrs["platform_name"] = self.platform_name return ds diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 6ab31c6ff6..d1da58970d 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -21,6 +21,7 @@ """ from __future__ import annotations +import shutil from datetime import datetime from pathlib import Path from unittest import mock @@ -107,6 +108,23 @@ def test_get_dataset_surf_refl(self, surface_reflectance_file): _check_surf_refl_data_arr(scn["surf_refl_I01"]) _check_surf_refl_data_arr(scn["surf_refl_M01"]) + @pytest.mark.parametrize( + ("filename_platform", "exp_shortname"), + [ + 
("npp", "Suomi-NPP"), + ("JPSS-1", "NOAA-20"), + ("J01", "NOAA-20") + ]) + def test_get_platformname(self, surface_reflectance_file, filename_platform, exp_shortname): + """Test finding start and end times of granules.""" + from satpy import Scene + new_name = str(surface_reflectance_file).replace("npp", filename_platform) + if new_name != str(surface_reflectance_file): + shutil.copy(surface_reflectance_file, new_name) + scn = Scene(reader="viirs_edr", filenames=[new_name]) + scn.load(["surf_refl_I01"]) + assert scn["surf_refl_I01"].attrs["platform_name"] == exp_shortname + @mock.patch('xarray.open_dataset') def test_get_dataset(self, mocked_dataset): """Test retrieval of datasets.""" @@ -127,19 +145,6 @@ def test_get_dataset(self, mocked_dataset): test.get_dataset('erroneous dataset', {'file_key': 'erroneous dataset'}) mocked_dataset.assert_called() - @mock.patch('xarray.open_dataset') - def test_get_platformname(self, mocked_dataset): - """Test finding start and end times of granules.""" - tmp = MagicMock() - tmp.rename.return_value = tmp - xr.open_dataset.return_value = tmp - hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'npp'}, None) - assert hdl.platform_name == 'Suomi-NPP' - hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'JPSS-1'}, None) - assert hdl.platform_name == 'NOAA-20' - hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'J01'}, None) - assert hdl.platform_name == 'NOAA-20' - def _check_surf_refl_data_arr(data_arr: xr.DataArray) -> None: assert data_arr.dims == ("y", "x") From f3ea32f393dfee7ad2302c9ff7a1d8e1362a18ff Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 20 Jul 2023 15:02:00 -0500 Subject: [PATCH 100/702] Remove all-in-one VIIRS EDR test It wasn't doing much and they'll be readded later as new files are supported --- satpy/tests/reader_tests/test_viirs_edr.py | 24 ---------------------- 1 file changed, 24 deletions(-) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index d1da58970d..a9a7d13a81 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -24,8 +24,6 @@ import shutil from datetime import datetime from pathlib import Path -from unittest import mock -from unittest.mock import MagicMock import dask import dask.array as da @@ -34,8 +32,6 @@ import xarray as xr from pyresample import SwathDefinition -from satpy.readers.viirs_edr import VIIRSJRRFileHandler - I_COLS = 64 # real-world 6400 I_ROWS = 32 # one scan M_COLS = 32 # real-world 3200 @@ -125,26 +121,6 @@ def test_get_platformname(self, surface_reflectance_file, filename_platform, exp scn.load(["surf_refl_I01"]) assert scn["surf_refl_I01"].attrs["platform_name"] == exp_shortname - @mock.patch('xarray.open_dataset') - def test_get_dataset(self, mocked_dataset): - """Test retrieval of datasets.""" - filename_info = {'platform_shortname': 'npp'} - tmp = MagicMock(start_time='20191120T125002Z', stop_time='20191120T125002Z') - xr.open_dataset.return_value = tmp - test = VIIRSJRRFileHandler('somedir/somefile.nc', filename_info, None) - test.nc = {'Longitude': xr.Dataset(), - 'Latitude': xr.Dataset(), - 'smoke_concentration': xr.Dataset(), - 'fire_mask': xr.Dataset(), - } - test.get_dataset('longitude', {'file_key': 'Longitude'}) - test.get_dataset('latitude', {'file_key': 'Latitude'}) - test.get_dataset('smoke_concentration', {'file_key': 'smoke_concentration'}) - test.get_dataset('fire_mask', {'file_key': 'fire_mask'}) - with 
pytest.raises(KeyError): - test.get_dataset('erroneous dataset', {'file_key': 'erroneous dataset'}) - mocked_dataset.assert_called() - def _check_surf_refl_data_arr(data_arr: xr.DataArray) -> None: assert data_arr.dims == ("y", "x") From a34c9c8079a2406c3232532cd6ac24acfb1e68d5 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 21 Jul 2023 08:58:42 -0500 Subject: [PATCH 101/702] Add NDVI/EVI optional datasets to viirs_edr reader --- satpy/etc/readers/viirs_edr.yaml | 15 ++++ satpy/readers/file_handlers.py | 4 +- satpy/readers/viirs_edr.py | 48 +++++++++-- satpy/tests/reader_tests/test_viirs_edr.py | 99 ++++++++++++++++++++++ 4 files changed, 159 insertions(+), 7 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index bf8a949de8..ebcea3cd3d 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -409,3 +409,18 @@ datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + # Swath-based vegetation indexes added to CSPP LEO output + NDVI: + name: NDVI + resolution: 375 + file_type: [jrr_surfref_product] + file_key: "NDVI" + coordinates: [longitude_375, latitude_375] + units: "1" + EVI: + name: EVI + resolution: 375 + file_type: [jrr_surfref_product] + file_key: "NDVI" + coordinates: [longitude_375, latitude_375] + units: "1" diff --git a/satpy/readers/file_handlers.py b/satpy/readers/file_handlers.py index cebab6e307..0c47553b0d 100644 --- a/satpy/readers/file_handlers.py +++ b/satpy/readers/file_handlers.py @@ -228,9 +228,9 @@ def available_datasets(self, configured_datasets=None): Args: configured_datasets (list): Series of (bool or None, dict) in the same way as is returned by this method (see below). The bool - is whether or not the dataset is available from at least one + is whether the dataset is available from at least one of the current file handlers. It can also be ``None`` if - no file handler knows before us knows how to handle it. + no file handler before us knows how to handle it. The dictionary is existing dataset metadata. The dictionaries are typically provided from a YAML configuration file and may be modified, updated, or used as a "template" for additional diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index d02d38f53e..3ddd685bab 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -91,12 +91,14 @@ def __init__(self, filename, filename_info, filetype_info): def get_dataset(self, dataset_id, info): """Get the dataset.""" - ds = self.nc[info['file_key']] - if ds.attrs.get("units", None) == "unitless": - ds.attrs["units"] = "1" - ds.attrs["platform_name"] = self.platform_name + data_arr = self.nc[info['file_key']] + if data_arr.attrs.get("units", None) == "unitless": + data_arr.attrs["units"] = "1" + if isinstance(data_arr.attrs.get('flag_meanings'), str): + data_arr.attrs['flag_meanings'] = [flag.strip() for flag in data_arr.attrs['flag_meanings'].split(' ')] + data_arr.attrs["platform_name"] = self.platform_name - return ds + return data_arr @property def start_time(self): @@ -118,3 +120,39 @@ def platform_name(self): 'JPSS-2': 'NOAA-21', 'J02': 'NOAA-21'} return platform_dict[platform_path.upper()] + + def available_datasets(self, configured_datasets=None): + """Get information of available datasets in this file. + + Args: + configured_datasets (list): Series of (bool or None, dict) in the + same way as is returned by this method (see below). 
The bool + is whether the dataset is available from at least one + of the current file handlers. It can also be ``None`` if + no file handler before us knows how to handle it. + The dictionary is existing dataset metadata. The dictionaries + are typically provided from a YAML configuration file and may + be modified, updated, or used as a "template" for additional + available datasets. This argument could be the result of a + previous file handler's implementation of this method. + + Returns: + Iterator of (bool or None, dict) pairs where dict is the + dataset's metadata. If the dataset is available in the current + file type then the boolean value should be ``True``, ``False`` + if we **know** about the dataset but it is unavailable, or + ``None`` if this file object is not responsible for it. + + """ + for is_avail, ds_info in (configured_datasets or []): + if is_avail is not None: + # some other file handler said it has this dataset + # we don't know any more information than the previous + # file handler so let's yield early + yield is_avail, ds_info + continue + if self.file_type_matches(ds_info['file_type']) is None: + # this is not the file type for this dataset + yield None, ds_info + file_key = ds_info.get("file_key", ds_info["name"]) + yield file_key in self.nc, ds_info diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index a9a7d13a81..de95e946ac 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -31,6 +31,7 @@ import pytest import xarray as xr from pyresample import SwathDefinition +from pytest_lazyfixture import lazy_fixture I_COLS = 64 # real-world 6400 I_ROWS = 32 # one scan @@ -38,15 +39,52 @@ M_ROWS = 16 # one scan START_TIME = datetime(2023, 5, 30, 17, 55, 41, 0) END_TIME = datetime(2023, 5, 30, 17, 57, 5, 0) +QF1_FLAG_MEANINGS = """ +\tBits are listed from the MSB (bit 7) to the LSB (bit 0): +\tBit Description +\t6-7 SUN GLINT; +\t 00 -- none +\t 01 -- geometry based +\t 10 -- wind speed based +\t 11 -- geometry & wind speed based +\t5 low sun mask; +\t 0 -- high +\t 1 -- low +\t4 day/night; +\t 0 -- day +\t 1 -- night +\t2-3 cloud detection & confidence; +\t 00 -- confident clear +\t 01 -- probably clear +\t 10 -- probably cloudy +\t 11 -- confident cloudy +\t0-1 cloud mask quality; +\t 00 -- poor +\t 01 -- low +\t 10 -- medium +\t 11 -- high +""" @pytest.fixture(scope="module") def surface_reflectance_file(tmp_path_factory) -> Path: """Generate fake surface reflectance EDR file.""" + return _create_surface_reflectance_file(tmp_path_factory, include_veg_indices=False) + + +@pytest.fixture(scope="module") +def surface_reflectance_with_veg_indices_file(tmp_path_factory) -> Path: + """Generate fake surface reflectance EDR file with vegetation indexes included.""" + return _create_surface_reflectance_file(tmp_path_factory, include_veg_indices=True) + + +def _create_surface_reflectance_file(tmp_path_factory, include_veg_indices: bool = False) -> Path: tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") fn = f"SurfRefl_v1r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" file_path = tmp_path / fn sr_vars = _create_surf_refl_variables() + if include_veg_indices: + sr_vars.update(_create_veg_index_variables()) ds = _create_fake_dataset(sr_vars) ds.to_netcdf(file_path) return file_path @@ -89,6 +127,32 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]: return data_arrs +def _create_veg_index_variables() -> dict[str, 
xr.DataArray]: + dim_y_750 = "Along_Track_750m" + dim_x_750 = "Along_Scan_750m" + m_dims = (dim_y_750, dim_x_750) + dim_y_375 = "Along_Track_375m" + dim_x_375 = "Along_Scan_375m" + i_dims = (dim_y_375, dim_x_375) + + i_data = np.zeros((I_ROWS, I_COLS), dtype=np.float32) + data_arrs = { + "NDVI": xr.DataArray(i_data, dims=i_dims, attrs={"units": "unitless"}), + "EVI": xr.DataArray(i_data, dims=i_dims, attrs={"units": "unitless"}), + } + data_arrs["NDVI"].encoding["dtype"] = np.float32 + data_arrs["EVI"].encoding["dtype"] = np.float32 + + # Quality Flags are from the Surface Reflectance data, but only used for VI products in the reader + qf_data = np.zeros((M_ROWS, M_COLS), dtype=np.uint8) + for qf_num in range(1, 8): + qf_name = f"QF{qf_num} Surface Reflectance" + data_arr = xr.DataArray(qf_data, dims=m_dims, attrs={"flag_meanings": QF1_FLAG_MEANINGS}) + data_arr.encoding["dtype"] = np.uint8 + data_arrs[qf_name] = data_arr + return data_arrs + + class TestVIIRSJRRReader: """Test the VIIRS JRR L2 reader.""" @@ -104,6 +168,34 @@ def test_get_dataset_surf_refl(self, surface_reflectance_file): _check_surf_refl_data_arr(scn["surf_refl_I01"]) _check_surf_refl_data_arr(scn["surf_refl_M01"]) + def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_indices_file): + """Test retrieval of vegetation indices from surface reflectance files.""" + from satpy import Scene + scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_with_veg_indices_file]) + scn.load(["NDVI", "EVI", "surf_refl_qf1"]) + _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"]) + # TODO: Check NDVI/EVI attributes/dims + # TODO: Check NDVI/EVI quality flag clearing + + @pytest.mark.parametrize( + ("data_file", "exp_available"), + [ + (lazy_fixture("surface_reflectance_file"), False), + (lazy_fixture("surface_reflectance_with_veg_indices_file"), True), + ] + ) + def test_availability_veg_idx(self, data_file, exp_available): + """Test that vegetation indexes aren't available when they aren't present.""" + from satpy import Scene + scn = Scene(reader="viirs_edr", filenames=[data_file]) + avail = scn.available_dataset_names() + if exp_available: + assert "NDVI" in avail + assert "EVI" in avail + else: + assert "NDVI" not in avail + assert "EVI" not in avail + @pytest.mark.parametrize( ("filename_platform", "exp_shortname"), [ @@ -134,3 +226,10 @@ def _check_surf_refl_data_arr(data_arr: xr.DataArray) -> None: assert all(c == exp_row_chunks for c in data_arr.chunks[0]) assert data_arr.chunks[1] == (exp_shape[1],) assert data_arr.attrs["units"] == "1" + + +def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray) -> None: + assert data_arr.dims == ("y", "x") + assert isinstance(data_arr.attrs["area"], SwathDefinition) + assert isinstance(data_arr.data, da.Array) + assert np.issubdtype(data_arr.data.dtype, np.uint8) From 976c1466b45a2c496dd71749c0cbcdb3d39eca40 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 21 Jul 2023 09:10:46 -0500 Subject: [PATCH 102/702] Cleanup VIIRS EDR testing --- satpy/readers/viirs_edr.py | 14 ++++++++---- satpy/tests/reader_tests/test_viirs_edr.py | 25 +++++++++++----------- 2 files changed, 23 insertions(+), 16 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 3ddd685bab..29a3d414d1 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -92,14 +92,20 @@ def __init__(self, filename, filename_info, filetype_info): def get_dataset(self, dataset_id, info): """Get the dataset.""" data_arr = self.nc[info['file_key']] - if 
data_arr.attrs.get("units", None) == "unitless": + units = data_arr.attrs.get("units", None) + if units is None or units == "unitless": data_arr.attrs["units"] = "1" - if isinstance(data_arr.attrs.get('flag_meanings'), str): - data_arr.attrs['flag_meanings'] = [flag.strip() for flag in data_arr.attrs['flag_meanings'].split(' ')] + self._decode_flag_meanings(data_arr) data_arr.attrs["platform_name"] = self.platform_name - return data_arr + @staticmethod + def _decode_flag_meanings(data_arr: xr.DataArray): + flag_meanings = data_arr.attrs.get("flag_meanings", None) + if isinstance(flag_meanings, str) and "\n" not in flag_meanings: + # only handle CF-standard flag meanings + data_arr.attrs['flag_meanings'] = [flag for flag in data_arr.attrs['flag_meanings'].split(' ')] + @property def start_time(self): """Get first date/time when observations were recorded.""" diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index de95e946ac..06bd49aba5 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -28,6 +28,7 @@ import dask import dask.array as da import numpy as np +import numpy.typing as npt import pytest import xarray as xr from pyresample import SwathDefinition @@ -171,10 +172,13 @@ def test_get_dataset_surf_refl(self, surface_reflectance_file): def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_indices_file): """Test retrieval of vegetation indices from surface reflectance files.""" from satpy import Scene - scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_with_veg_indices_file]) - scn.load(["NDVI", "EVI", "surf_refl_qf1"]) + bytes_in_m_row = 4 * 3200 + with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): + scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_with_veg_indices_file]) + scn.load(["NDVI", "EVI", "surf_refl_qf1"]) + _check_surf_refl_data_arr(scn["NDVI"]) + _check_surf_refl_data_arr(scn["EVI"]) _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"]) - # TODO: Check NDVI/EVI attributes/dims # TODO: Check NDVI/EVI quality flag clearing @pytest.mark.parametrize( @@ -214,22 +218,19 @@ def test_get_platformname(self, surface_reflectance_file, filename_platform, exp assert scn["surf_refl_I01"].attrs["platform_name"] == exp_shortname -def _check_surf_refl_data_arr(data_arr: xr.DataArray) -> None: +def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.float32) -> None: assert data_arr.dims == ("y", "x") assert isinstance(data_arr.attrs["area"], SwathDefinition) assert isinstance(data_arr.data, da.Array) - assert np.issubdtype(data_arr.data.dtype, np.float32) - is_m_band = "I" not in data_arr.attrs["name"] - exp_shape = (M_ROWS, M_COLS) if is_m_band else (I_ROWS, I_COLS) + assert np.issubdtype(data_arr.data.dtype, dtype) + is_mband_res = "I" not in data_arr.attrs["name"] # includes NDVI and EVI + exp_shape = (M_ROWS, M_COLS) if is_mband_res else (I_ROWS, I_COLS) assert data_arr.shape == exp_shape - exp_row_chunks = 4 if is_m_band else 8 + exp_row_chunks = 4 if is_mband_res else 8 assert all(c == exp_row_chunks for c in data_arr.chunks[0]) assert data_arr.chunks[1] == (exp_shape[1],) assert data_arr.attrs["units"] == "1" def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray) -> None: - assert data_arr.dims == ("y", "x") - assert isinstance(data_arr.attrs["area"], SwathDefinition) - assert isinstance(data_arr.data, da.Array) - assert np.issubdtype(data_arr.data.dtype, np.uint8) + 
_check_surf_refl_data_arr(data_arr, dtype=np.uint8) From 1db0e4855f2b82b2ea38456524fe346344f8f3f0 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 21 Jul 2023 09:47:35 -0500 Subject: [PATCH 103/702] Fix sensor in VIIRS EDR --- satpy/readers/viirs_edr.py | 1 + satpy/tests/reader_tests/test_viirs_edr.py | 2 ++ 2 files changed, 3 insertions(+) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 29a3d414d1..2d26b4dd35 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -97,6 +97,7 @@ def get_dataset(self, dataset_id, info): data_arr.attrs["units"] = "1" self._decode_flag_meanings(data_arr) data_arr.attrs["platform_name"] = self.platform_name + data_arr.attrs["sensor"] = self.sensor_name return data_arr @staticmethod diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 06bd49aba5..be1cf8dc6a 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -229,7 +229,9 @@ def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.floa exp_row_chunks = 4 if is_mband_res else 8 assert all(c == exp_row_chunks for c in data_arr.chunks[0]) assert data_arr.chunks[1] == (exp_shape[1],) + assert data_arr.attrs["units"] == "1" + assert data_arr.attrs["sensor"] == "viirs" def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray) -> None: From 7b183ca7147c970076a3fa7032002f878cbfdedf Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 21 Jul 2023 16:04:24 -0500 Subject: [PATCH 104/702] Fix fill value handling and add valid_range YAML handling for VI products --- satpy/etc/readers/viirs_edr.yaml | 11 +++------- satpy/readers/viirs_edr.py | 15 +++++++++++++ satpy/tests/reader_tests/test_viirs_edr.py | 25 ++++++++++++++-------- 3 files changed, 34 insertions(+), 17 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index ebcea3cd3d..23d07f4e07 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -360,7 +360,6 @@ datasets: file_key: "QF1 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' - _FillValue: -9999 surf_refl_qf2: name: surf_refl_qf2 resolution: 750 @@ -368,7 +367,6 @@ datasets: file_key: "QF2 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' - _FillValue: -9999 surf_refl_qf3: name: surf_refl_qf3 resolution: 750 @@ -376,7 +374,6 @@ datasets: file_key: "QF3 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' - _FillValue: -9999 surf_refl_qf4: name: surf_refl_qf4 resolution: 750 @@ -384,7 +381,6 @@ datasets: file_key: "QF4 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' - _FillValue: -9999 surf_refl_qf5: name: surf_refl_qf5 resolution: 750 @@ -392,7 +388,6 @@ datasets: file_key: "QF5 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' - _FillValue: -9999 surf_refl_qf6: name: surf_refl_qf6 resolution: 750 @@ -400,7 +395,6 @@ datasets: file_key: "QF6 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' - _FillValue: -9999 surf_refl_qf7: name: surf_refl_qf7 resolution: 750 @@ -408,7 +402,6 @@ datasets: file_key: "QF7 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' - _FillValue: -9999 # Swath-based vegetation indexes added to CSPP LEO output NDVI: name: NDVI @@ -417,10 +410,12 @@ datasets: file_key: "NDVI" coordinates: [longitude_375, latitude_375] units: "1" + valid_range: [-1.0, 1.0] EVI: name: 
EVI resolution: 375 file_type: [jrr_surfref_product] - file_key: "NDVI" + file_key: "EVI" coordinates: [longitude_375, latitude_375] units: "1" + valid_range: [-1.0, 1.0] diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 2d26b4dd35..dc58cd406e 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -44,6 +44,7 @@ import logging +import numpy as np import xarray as xr from satpy.readers.file_handlers import BaseFileHandler @@ -92,6 +93,7 @@ def __init__(self, filename, filename_info, filetype_info): def get_dataset(self, dataset_id, info): """Get the dataset.""" data_arr = self.nc[info['file_key']] + data_arr = self._mask_invalid(data_arr, info) units = data_arr.attrs.get("units", None) if units is None or units == "unitless": data_arr.attrs["units"] = "1" @@ -100,6 +102,19 @@ def get_dataset(self, dataset_id, info): data_arr.attrs["sensor"] = self.sensor_name return data_arr + def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: + fill_value = data_arr.encoding.get("_FillValue") + if fill_value is not None and not np.isnan(fill_value): + # xarray auto mask and scale handled this + return data_arr + yaml_fill = ds_info.get("_FillValue") + if yaml_fill is not None: + return data_arr.where(data_arr != yaml_fill) + valid_range = ds_info.get("valid_range", data_arr.attrs.get("valid_range")) + if valid_range is not None: + return data_arr.where((valid_range[0] <= data_arr) & (data_arr <= valid_range[1])) + return data_arr + @staticmethod def _decode_flag_meanings(data_arr: xr.DataArray): flag_meanings = data_arr.attrs.get("flag_meanings", None) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index be1cf8dc6a..69d03f6db9 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -136,10 +136,11 @@ def _create_veg_index_variables() -> dict[str, xr.DataArray]: dim_x_375 = "Along_Scan_375m" i_dims = (dim_y_375, dim_x_375) - i_data = np.zeros((I_ROWS, I_COLS), dtype=np.float32) + vi_data = np.zeros((I_ROWS, I_COLS), dtype=np.float32) + vi_data[0, :7] = [-2.0, -1.0, -0.5, 0.0, 0.5, 1.0, 1.5] data_arrs = { - "NDVI": xr.DataArray(i_data, dims=i_dims, attrs={"units": "unitless"}), - "EVI": xr.DataArray(i_data, dims=i_dims, attrs={"units": "unitless"}), + "NDVI": xr.DataArray(vi_data, dims=i_dims, attrs={"units": "unitless"}), + "EVI": xr.DataArray(vi_data, dims=i_dims, attrs={"units": "unitless"}), } data_arrs["NDVI"].encoding["dtype"] = np.float32 data_arrs["EVI"].encoding["dtype"] = np.float32 @@ -176,8 +177,8 @@ def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_i with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_with_veg_indices_file]) scn.load(["NDVI", "EVI", "surf_refl_qf1"]) - _check_surf_refl_data_arr(scn["NDVI"]) - _check_surf_refl_data_arr(scn["EVI"]) + _check_vi_data_arr(scn["NDVI"]) + _check_vi_data_arr(scn["EVI"]) _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"]) # TODO: Check NDVI/EVI quality flag clearing @@ -218,6 +219,16 @@ def test_get_platformname(self, surface_reflectance_file, filename_platform, exp assert scn["surf_refl_I01"].attrs["platform_name"] == exp_shortname +def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray) -> None: + _check_surf_refl_data_arr(data_arr, dtype=np.uint8) + + +def _check_vi_data_arr(data_arr: xr.DataArray) -> None: + _check_surf_refl_data_arr(data_arr) + data = 
data_arr.data.compute() + np.testing.assert_allclose(data[0, :7], [np.nan, -1.0, -0.5, 0.0, 0.5, 1.0, np.nan]) + + def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.float32) -> None: assert data_arr.dims == ("y", "x") assert isinstance(data_arr.attrs["area"], SwathDefinition) @@ -232,7 +243,3 @@ def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.floa assert data_arr.attrs["units"] == "1" assert data_arr.attrs["sensor"] == "viirs" - - -def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray) -> None: - _check_surf_refl_data_arr(data_arr, dtype=np.uint8) From 2864f98e2c37f3acc868c09008ac43d5688eafb7 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sat, 22 Jul 2023 14:15:57 -0500 Subject: [PATCH 105/702] Add basic vegetation quality masking --- satpy/readers/viirs_edr.py | 35 ++++++++++++++++++++++ satpy/tests/reader_tests/test_viirs_edr.py | 20 +++++++++++-- 2 files changed, 53 insertions(+), 2 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index dc58cd406e..df151a1fd9 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -94,6 +94,9 @@ def get_dataset(self, dataset_id, info): """Get the dataset.""" data_arr = self.nc[info['file_key']] data_arr = self._mask_invalid(data_arr, info) + if info["file_key"] in ("NDVI", "EVI"): + good_mask = self._get_veg_index_good_mask() + data_arr = data_arr.where(good_mask) units = data_arr.attrs.get("units", None) if units is None or units == "unitless": data_arr.attrs["units"] = "1" @@ -115,6 +118,38 @@ def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: return data_arr.where((valid_range[0] <= data_arr) & (data_arr <= valid_range[1])) return data_arr + def _get_veg_index_good_mask(self) -> xr.DataArray: + # each mask array should be TRUE when pixels are UNACCEPTABLE + qf1 = self.nc['QF1 Surface Reflectance'] + has_sun_glint = (qf1 & 0b11000000) > 0 + is_cloudy = (qf1 & 0b00001100) > 0 # mask everything but "confident clear" + cloud_quality = (qf1 & 0b00000011) < 0b10 + + qf2 = self.nc['QF2 Surface Reflectance'] + has_snow_or_ice = (qf2 & 0b00100000) > 0 + has_cloud_shadow = (qf2 & 0b00001000) > 0 + water_mask = (qf2 & 0b00000111) + has_water = (water_mask <= 0b010) | (water_mask == 0b101) # shallow water, deep ocean, arctic + + qf7 = self.nc['QF7 Surface Reflectance'] + has_aerosols = (qf7 & 0b00001100) > 0b1000 # high aerosol quantity + adjacent_to_cloud = (qf7 & 0b00000010) > 0 + + bad_mask = ( + has_sun_glint | + is_cloudy | + cloud_quality | + has_snow_or_ice | + has_cloud_shadow | + has_water | + has_aerosols | + adjacent_to_cloud + ) + # upscale from M-band resolution to I-band resolution + bad_mask_iband_dask = bad_mask.data.repeat(2, axis=1).repeat(2, axis=0) + good_mask_iband = xr.DataArray(~bad_mask_iband_dask, dims=qf1.dims) + return good_mask_iband + @staticmethod def _decode_flag_meanings(data_arr: xr.DataArray): flag_meanings = data_arr.attrs.get("flag_meanings", None) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 69d03f6db9..6d380cd017 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -146,9 +146,24 @@ def _create_veg_index_variables() -> dict[str, xr.DataArray]: data_arrs["EVI"].encoding["dtype"] = np.float32 # Quality Flags are from the Surface Reflectance data, but only used for VI products in the reader - qf_data = np.zeros((M_ROWS, M_COLS), dtype=np.uint8) for qf_num in range(1, 8): 
qf_name = f"QF{qf_num} Surface Reflectance" + qf_data = np.zeros((M_ROWS, M_COLS), dtype=np.uint8) + bad_qf_start = 4 # 0.5x the last test pixel set in "vi_data" above (I-band versus M-band index) + if qf_num == 1: + qf_data[:, :] |= 0b00000010 # medium cloud mask quality everywhere + qf_data[0, bad_qf_start] |= 0b11000000 # sun glint + qf_data[0, bad_qf_start + 1] |= 0b00001100 # cloudy + qf_data[0, bad_qf_start + 2] = 0b00000001 # low cloud mask quality + elif qf_num == 2: + qf_data[:, :] |= 0b00000011 # desert everywhere + qf_data[0, bad_qf_start + 3] |= 0b00100000 # snow or ice + qf_data[0, bad_qf_start + 4] |= 0b00001000 # cloud shadow + qf_data[0, bad_qf_start + 5] = 0b00000001 # deep ocean + elif qf_num == 7: + qf_data[0, bad_qf_start + 6] |= 0b00001100 # high aerosol + qf_data[0, bad_qf_start + 7] |= 0b00000010 # adjacent to cloud + data_arr = xr.DataArray(qf_data, dims=m_dims, attrs={"flag_meanings": QF1_FLAG_MEANINGS}) data_arr.encoding["dtype"] = np.uint8 data_arrs[qf_name] = data_arr @@ -180,7 +195,6 @@ def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_i _check_vi_data_arr(scn["NDVI"]) _check_vi_data_arr(scn["EVI"]) _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"]) - # TODO: Check NDVI/EVI quality flag clearing @pytest.mark.parametrize( ("data_file", "exp_available"), @@ -227,6 +241,8 @@ def _check_vi_data_arr(data_arr: xr.DataArray) -> None: _check_surf_refl_data_arr(data_arr) data = data_arr.data.compute() np.testing.assert_allclose(data[0, :7], [np.nan, -1.0, -0.5, 0.0, 0.5, 1.0, np.nan]) + np.testing.assert_allclose(data[0, 8:8 + 16], np.nan) + np.testing.assert_allclose(data[0, 8 + 16:], 0.0) def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.float32) -> None: From b9a5f4c321a33db1ad06f3c261680e03ad1891e7 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sat, 22 Jul 2023 14:29:12 -0500 Subject: [PATCH 106/702] Refactor viirs edr reader to surface reflectance is separate --- satpy/etc/readers/viirs_edr.yaml | 2 +- satpy/readers/viirs_edr.py | 78 ++++++++++++++++++-------------- 2 files changed, 44 insertions(+), 36 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 23d07f4e07..9e362c18af 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -19,7 +19,7 @@ file_types: file_patterns: - 'JRR-ADP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_surfref_product: - file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSSurfaceReflectanceWithVIHandler variable_prefix: "" file_patterns: - 'SurfRefl_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index df151a1fd9..9ad841754d 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -94,9 +94,6 @@ def get_dataset(self, dataset_id, info): """Get the dataset.""" data_arr = self.nc[info['file_key']] data_arr = self._mask_invalid(data_arr, info) - if info["file_key"] in ("NDVI", "EVI"): - good_mask = self._get_veg_index_good_mask() - data_arr = data_arr.where(good_mask) units = data_arr.attrs.get("units", None) if units is None or units == "unitless": data_arr.attrs["units"] = "1" @@ -118,38 +115,6 @@ def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: return data_arr.where((valid_range[0] <= 
data_arr) & (data_arr <= valid_range[1])) return data_arr - def _get_veg_index_good_mask(self) -> xr.DataArray: - # each mask array should be TRUE when pixels are UNACCEPTABLE - qf1 = self.nc['QF1 Surface Reflectance'] - has_sun_glint = (qf1 & 0b11000000) > 0 - is_cloudy = (qf1 & 0b00001100) > 0 # mask everything but "confident clear" - cloud_quality = (qf1 & 0b00000011) < 0b10 - - qf2 = self.nc['QF2 Surface Reflectance'] - has_snow_or_ice = (qf2 & 0b00100000) > 0 - has_cloud_shadow = (qf2 & 0b00001000) > 0 - water_mask = (qf2 & 0b00000111) - has_water = (water_mask <= 0b010) | (water_mask == 0b101) # shallow water, deep ocean, arctic - - qf7 = self.nc['QF7 Surface Reflectance'] - has_aerosols = (qf7 & 0b00001100) > 0b1000 # high aerosol quantity - adjacent_to_cloud = (qf7 & 0b00000010) > 0 - - bad_mask = ( - has_sun_glint | - is_cloudy | - cloud_quality | - has_snow_or_ice | - has_cloud_shadow | - has_water | - has_aerosols | - adjacent_to_cloud - ) - # upscale from M-band resolution to I-band resolution - bad_mask_iband_dask = bad_mask.data.repeat(2, axis=1).repeat(2, axis=0) - good_mask_iband = xr.DataArray(~bad_mask_iband_dask, dims=qf1.dims) - return good_mask_iband - @staticmethod def _decode_flag_meanings(data_arr: xr.DataArray): flag_meanings = data_arr.attrs.get("flag_meanings", None) @@ -213,3 +178,46 @@ def available_datasets(self, configured_datasets=None): yield None, ds_info file_key = ds_info.get("file_key", ds_info["name"]) yield file_key in self.nc, ds_info + + +class VIIRSSurfaceReflectanceWithVIHandler(VIIRSJRRFileHandler): + """File handler for surface reflectance files with optional vegetation indexes.""" + + def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: + new_data_arr = super()._mask_invalid(data_arr, ds_info) + if ds_info["file_key"] in ("NDVI", "EVI"): + good_mask = self._get_veg_index_good_mask() + new_data_arr = new_data_arr.where(good_mask) + return new_data_arr + + def _get_veg_index_good_mask(self) -> xr.DataArray: + # each mask array should be TRUE when pixels are UNACCEPTABLE + qf1 = self.nc['QF1 Surface Reflectance'] + has_sun_glint = (qf1 & 0b11000000) > 0 + is_cloudy = (qf1 & 0b00001100) > 0 # mask everything but "confident clear" + cloud_quality = (qf1 & 0b00000011) < 0b10 + + qf2 = self.nc['QF2 Surface Reflectance'] + has_snow_or_ice = (qf2 & 0b00100000) > 0 + has_cloud_shadow = (qf2 & 0b00001000) > 0 + water_mask = (qf2 & 0b00000111) + has_water = (water_mask <= 0b010) | (water_mask == 0b101) # shallow water, deep ocean, arctic + + qf7 = self.nc['QF7 Surface Reflectance'] + has_aerosols = (qf7 & 0b00001100) > 0b1000 # high aerosol quantity + adjacent_to_cloud = (qf7 & 0b00000010) > 0 + + bad_mask = ( + has_sun_glint | + is_cloudy | + cloud_quality | + has_snow_or_ice | + has_cloud_shadow | + has_water | + has_aerosols | + adjacent_to_cloud + ) + # upscale from M-band resolution to I-band resolution + bad_mask_iband_dask = bad_mask.data.repeat(2, axis=1).repeat(2, axis=0) + good_mask_iband = xr.DataArray(~bad_mask_iband_dask, dims=qf1.dims) + return good_mask_iband From 5b1573fc6f7cf794fb45c388362e9b86ef59d6b8 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sat, 22 Jul 2023 15:26:22 -0500 Subject: [PATCH 107/702] Add rows_per_scan to viirs_edr metadata --- satpy/readers/viirs_edr.py | 5 +++++ satpy/tests/reader_tests/test_viirs_edr.py | 5 +++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 9ad841754d..472e4c29ea 100644 --- 
a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -90,6 +90,10 @@ def __init__(self, filename, filename_info, filetype_info): self.algorithm_version = filename_info['platform_shortname'] self.sensor_name = 'viirs' + def rows_per_scans(self, data_arr: xr.DataArray) -> int: + """Get number of array rows per instrument scan based on data resolution.""" + return 32 if data_arr.shape[1] == 6400 else 16 + def get_dataset(self, dataset_id, info): """Get the dataset.""" data_arr = self.nc[info['file_key']] @@ -100,6 +104,7 @@ def get_dataset(self, dataset_id, info): self._decode_flag_meanings(data_arr) data_arr.attrs["platform_name"] = self.platform_name data_arr.attrs["sensor"] = self.sensor_name + data_arr.attrs["rows_per_scan"] = self.rows_per_scans(data_arr) return data_arr def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 6d380cd017..7f2659ec61 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -34,9 +34,9 @@ from pyresample import SwathDefinition from pytest_lazyfixture import lazy_fixture -I_COLS = 64 # real-world 6400 +I_COLS = 6400 I_ROWS = 32 # one scan -M_COLS = 32 # real-world 3200 +M_COLS = 3200 M_ROWS = 16 # one scan START_TIME = datetime(2023, 5, 30, 17, 55, 41, 0) END_TIME = datetime(2023, 5, 30, 17, 57, 5, 0) @@ -259,3 +259,4 @@ def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.floa assert data_arr.attrs["units"] == "1" assert data_arr.attrs["sensor"] == "viirs" + assert data_arr.attrs["rows_per_scan"] == 16 if is_mband_res else 32 From d7121bb1315360ac44c384435784785d32cb5530 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 23 Jul 2023 06:43:34 -0500 Subject: [PATCH 108/702] Add standard_names from YAML --- satpy/etc/readers/viirs_edr.yaml | 22 ++++++++++++++++++++ satpy/readers/viirs_edr.py | 5 ++++- satpy/tests/reader_tests/test_viirs_edr.py | 24 +++++++++++++++++++--- 3 files changed, 47 insertions(+), 4 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 9e362c18af..b93c2fdd3d 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -254,6 +254,7 @@ datasets: coordinates: [longitude_375, latitude_375] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_I02: name: surf_refl_I02 resolution: 375 @@ -263,6 +264,7 @@ datasets: coordinates: [longitude_375, latitude_375] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_I03: name: surf_refl_I03 resolution: 375 @@ -272,6 +274,7 @@ datasets: coordinates: [longitude_375, latitude_375] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_M01: name: surf_refl_M01 resolution: 750 @@ -281,6 +284,7 @@ datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_M02: name: surf_refl_M02 resolution: 750 @@ -290,6 +294,7 @@ datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_M03: name: surf_refl_M03 resolution: 750 @@ -299,6 +304,7 @@ datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_M04: name: surf_refl_M04 resolution: 750 @@ -308,6 +314,7 @@ 
datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_M05: name: surf_refl_M05 resolution: 750 @@ -317,6 +324,7 @@ datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_M06: name: surf_refl_M06 resolution: 750 @@ -326,6 +334,7 @@ datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_M07: name: surf_refl_M07 resolution: 750 @@ -335,6 +344,7 @@ datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_M08: name: surf_refl_M08 resolution: 750 @@ -344,6 +354,7 @@ datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_M10: name: surf_refl_M10 resolution: 750 @@ -353,6 +364,7 @@ datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_qf1: name: surf_refl_qf1 resolution: 750 @@ -360,6 +372,7 @@ datasets: file_key: "QF1 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' + standard_name: "quality_flag" surf_refl_qf2: name: surf_refl_qf2 resolution: 750 @@ -367,6 +380,7 @@ datasets: file_key: "QF2 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' + standard_name: "quality_flag" surf_refl_qf3: name: surf_refl_qf3 resolution: 750 @@ -374,6 +388,7 @@ datasets: file_key: "QF3 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' + standard_name: "quality_flag" surf_refl_qf4: name: surf_refl_qf4 resolution: 750 @@ -381,6 +396,7 @@ datasets: file_key: "QF4 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' + standard_name: "quality_flag" surf_refl_qf5: name: surf_refl_qf5 resolution: 750 @@ -388,6 +404,7 @@ datasets: file_key: "QF5 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' + standard_name: "quality_flag" surf_refl_qf6: name: surf_refl_qf6 resolution: 750 @@ -395,6 +412,7 @@ datasets: file_key: "QF6 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' + standard_name: "quality_flag" surf_refl_qf7: name: surf_refl_qf7 resolution: 750 @@ -402,6 +420,8 @@ datasets: file_key: "QF7 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' + standard_name: "quality_flag" + # Swath-based vegetation indexes added to CSPP LEO output NDVI: name: NDVI @@ -411,6 +431,7 @@ datasets: coordinates: [longitude_375, latitude_375] units: "1" valid_range: [-1.0, 1.0] + standard_name: "normalized_difference_vegetation_index" EVI: name: EVI resolution: 375 @@ -419,3 +440,4 @@ datasets: coordinates: [longitude_375, latitude_375] units: "1" valid_range: [-1.0, 1.0] + standard_name: "normalized_difference_vegetation_index" diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 472e4c29ea..ee9ba66953 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -98,9 +98,12 @@ def get_dataset(self, dataset_id, info): """Get the dataset.""" data_arr = self.nc[info['file_key']] data_arr = self._mask_invalid(data_arr, info) - units = data_arr.attrs.get("units", None) + units = info.get("units", data_arr.attrs.get("units", None)) if units is None or units == "unitless": data_arr.attrs["units"] = "1" + 
data_arr.attrs["units"] = units + if "standard_name" in info: + data_arr.attrs["standard_name"] = info["standard_name"] self._decode_flag_meanings(data_arr) data_arr.attrs["platform_name"] = self.platform_name data_arr.attrs["sensor"] = self.sensor_name diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 7f2659ec61..ff1b646737 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -234,11 +234,16 @@ def test_get_platformname(self, surface_reflectance_file, filename_platform, exp def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray) -> None: - _check_surf_refl_data_arr(data_arr, dtype=np.uint8) + _array_checks(data_arr, dtype=np.uint8) + _shared_metadata_checks(data_arr) + assert data_arr.attrs["standard_name"] == "quality_flag" def _check_vi_data_arr(data_arr: xr.DataArray) -> None: - _check_surf_refl_data_arr(data_arr) + _array_checks(data_arr) + _shared_metadata_checks(data_arr) + assert data_arr.attrs["standard_name"] == "normalized_difference_vegetation_index" + data = data_arr.data.compute() np.testing.assert_allclose(data[0, :7], [np.nan, -1.0, -0.5, 0.0, 0.5, 1.0, np.nan]) np.testing.assert_allclose(data[0, 8:8 + 16], np.nan) @@ -246,17 +251,30 @@ def _check_vi_data_arr(data_arr: xr.DataArray) -> None: def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.float32) -> None: + _array_checks(data_arr, dtype) + _shared_metadata_checks(data_arr) + assert data_arr.attrs["standard_name"] == "surface_bidirectional_reflectance" + + +def _array_checks(data_arr: xr.DataArray, dtype: npt.Dtype = np.float32) -> None: assert data_arr.dims == ("y", "x") assert isinstance(data_arr.attrs["area"], SwathDefinition) assert isinstance(data_arr.data, da.Array) assert np.issubdtype(data_arr.data.dtype, dtype) - is_mband_res = "I" not in data_arr.attrs["name"] # includes NDVI and EVI + is_mband_res = _is_mband_res(data_arr) exp_shape = (M_ROWS, M_COLS) if is_mband_res else (I_ROWS, I_COLS) assert data_arr.shape == exp_shape exp_row_chunks = 4 if is_mband_res else 8 assert all(c == exp_row_chunks for c in data_arr.chunks[0]) assert data_arr.chunks[1] == (exp_shape[1],) + +def _shared_metadata_checks(data_arr: xr.DataArray) -> None: + is_mband_res = _is_mband_res(data_arr) assert data_arr.attrs["units"] == "1" assert data_arr.attrs["sensor"] == "viirs" assert data_arr.attrs["rows_per_scan"] == 16 if is_mband_res else 32 + + +def _is_mband_res(data_arr: xr.DataArray) -> bool: + return "I" not in data_arr.attrs["name"] # includes NDVI and EVI From 891c4c70ae5f0f4267c7e3132835478a427d902f Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 23 Jul 2023 07:06:57 -0500 Subject: [PATCH 109/702] Change surface reflectances to percentage for consistency --- satpy/etc/enhancements/generic.yaml | 9 ++++++++ satpy/etc/readers/viirs_edr.yaml | 24 +++++++++++----------- satpy/readers/viirs_edr.py | 9 +++++--- satpy/tests/reader_tests/test_viirs_edr.py | 13 +++++++++--- 4 files changed, 37 insertions(+), 18 deletions(-) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 37b375f36c..967f47e2f1 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -15,6 +15,15 @@ enhancements: - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.5} + surface_reflectance_default: + standard_name: surface_bidirectional_reflectance + operations: + - name: linear_stretch + method: 
!!python/name:satpy.enhancements.stretch + kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.} + - name: gamma + method: !!python/name:satpy.enhancements.gamma + kwargs: {gamma: 1.5} true_color_default: standard_name: true_color operations: diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index b93c2fdd3d..fc6d49d2cf 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -252,7 +252,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "375m Surface Reflectance Band I1" coordinates: [longitude_375, latitude_375] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_I02: @@ -262,7 +262,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "375m Surface Reflectance Band I2" coordinates: [longitude_375, latitude_375] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_I03: @@ -272,7 +272,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "375m Surface Reflectance Band I3" coordinates: [longitude_375, latitude_375] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M01: @@ -282,7 +282,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "750m Surface Reflectance Band M1" coordinates: [longitude_750, latitude_750] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M02: @@ -292,7 +292,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "750m Surface Reflectance Band M2" coordinates: [longitude_750, latitude_750] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M03: @@ -302,7 +302,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "750m Surface Reflectance Band M3" coordinates: [longitude_750, latitude_750] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M04: @@ -312,7 +312,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "750m Surface Reflectance Band M4" coordinates: [longitude_750, latitude_750] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M05: @@ -322,7 +322,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "750m Surface Reflectance Band M5" coordinates: [longitude_750, latitude_750] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M06: @@ -332,7 +332,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "750m Surface Reflectance Band M6" coordinates: [longitude_750, latitude_750] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M07: @@ -342,7 +342,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "750m Surface Reflectance Band M7" coordinates: [longitude_750, latitude_750] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M08: @@ -352,7 +352,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "750m Surface Reflectance Band M8" coordinates: [longitude_750, latitude_750] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M10: @@ -362,7 +362,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "750m Surface Reflectance Band M10" coordinates: [longitude_750, latitude_750] - units: '1' + units: '%' _FillValue: -9999 standard_name: 
"surface_bidirectional_reflectance" surf_refl_qf1: diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index ee9ba66953..64edbd48c1 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -47,6 +47,7 @@ import numpy as np import xarray as xr +from satpy import DataID from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_chunk_size_limit @@ -94,13 +95,15 @@ def rows_per_scans(self, data_arr: xr.DataArray) -> int: """Get number of array rows per instrument scan based on data resolution.""" return 32 if data_arr.shape[1] == 6400 else 16 - def get_dataset(self, dataset_id, info): + def get_dataset(self, dataset_id: DataID, info: dict) -> xr.DataArray: """Get the dataset.""" data_arr = self.nc[info['file_key']] data_arr = self._mask_invalid(data_arr, info) - units = info.get("units", data_arr.attrs.get("units", None)) + units = info.get("units", data_arr.attrs.get("units")) if units is None or units == "unitless": - data_arr.attrs["units"] = "1" + units = "1" + if units == "%" and data_arr.attrs.get("units") in ("1", "unitless"): + data_arr *= 100.0 # turn into percentages data_arr.attrs["units"] = units if "standard_name" in info: data_arr.attrs["standard_name"] = info["standard_name"] diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index ff1b646737..6a40778505 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -111,8 +111,8 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]: lat_attrs = {"standard_name": "latitude", "units": "degrees_north", "_FillValue": -999.9} sr_attrs = {"units": "unitless", "_FillValue": -9999, "scale_factor": 0.0001, "add_offset": 0.0} - i_data = np.zeros((I_ROWS, I_COLS), dtype=np.float32) - m_data = np.zeros((M_ROWS, M_COLS), dtype=np.float32) + i_data = np.random.random_sample((I_ROWS, I_COLS)).astype(np.float32) + m_data = np.random.random_sample((M_ROWS, M_COLS)).astype(np.float32) data_arrs = { "Longitude_at_375m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lon_attrs), "Latitude_at_375m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lat_attrs), @@ -125,6 +125,8 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]: if "scale_factor" not in data_arr.attrs: continue data_arr.encoding["dtype"] = np.int16 + data_arr.encoding["scale_factor"] = data_arr.attrs.pop("scale_factor") + data_arr.encoding["add_offset"] = data_arr.attrs.pop("add_offset") return data_arrs @@ -236,12 +238,14 @@ def test_get_platformname(self, surface_reflectance_file, filename_platform, exp def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray) -> None: _array_checks(data_arr, dtype=np.uint8) _shared_metadata_checks(data_arr) + assert data_arr.attrs["units"] == "1" assert data_arr.attrs["standard_name"] == "quality_flag" def _check_vi_data_arr(data_arr: xr.DataArray) -> None: _array_checks(data_arr) _shared_metadata_checks(data_arr) + assert data_arr.attrs["units"] == "1" assert data_arr.attrs["standard_name"] == "normalized_difference_vegetation_index" data = data_arr.data.compute() @@ -252,7 +256,11 @@ def _check_vi_data_arr(data_arr: xr.DataArray) -> None: def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.float32) -> None: _array_checks(data_arr, dtype) + data = data_arr.data.compute() + assert data.max() > 1.0 # random 0-1 test data multiplied by 100 + _shared_metadata_checks(data_arr) + assert data_arr.attrs["units"] == "%" assert 
data_arr.attrs["standard_name"] == "surface_bidirectional_reflectance" @@ -271,7 +279,6 @@ def _array_checks(data_arr: xr.DataArray, dtype: npt.Dtype = np.float32) -> None def _shared_metadata_checks(data_arr: xr.DataArray) -> None: is_mband_res = _is_mband_res(data_arr) - assert data_arr.attrs["units"] == "1" assert data_arr.attrs["sensor"] == "viirs" assert data_arr.attrs["rows_per_scan"] == 16 if is_mband_res else 32 From e4ee636ece198e976fc7cff34d49f8ab4be3b6a5 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 23 Jul 2023 20:29:23 -0500 Subject: [PATCH 110/702] Fix true color surface name Addresses reviewer comment --- satpy/etc/composites/viirs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/composites/viirs.yaml b/satpy/etc/composites/viirs.yaml index 994a09c960..9c7269862b 100644 --- a/satpy/etc/composites/viirs.yaml +++ b/satpy/etc/composites/viirs.yaml @@ -322,7 +322,7 @@ composites: - name: surf_refl_M05 standard_name: natural_color - true_color_mband_nocorr: + true_color_mband_surf_nocorr: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: surf_refl_M05 From fee489f5b58c85a44dc4851ab498eb94f0908932 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 23 Jul 2023 20:31:09 -0500 Subject: [PATCH 111/702] Remove redundant night_overview for VIIRS Closes #1964 --- satpy/etc/composites/viirs.yaml | 8 -------- 1 file changed, 8 deletions(-) diff --git a/satpy/etc/composites/viirs.yaml b/satpy/etc/composites/viirs.yaml index 9c7269862b..541c4dff10 100644 --- a/satpy/etc/composites/viirs.yaml +++ b/satpy/etc/composites/viirs.yaml @@ -352,14 +352,6 @@ composites: modifiers: [sunz_corrected] standard_name: true_color - night_overview: - compositor: !!python/name:satpy.composites.GenericCompositor - prerequisites: - - DNB - - DNB - - M15 - standard_name: night_overview - overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: From ca587f9e77ed209ed29e81cc4a534be34c7ecbe4 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 23 Jul 2023 21:10:24 -0500 Subject: [PATCH 112/702] Rename surface reflectance based composites and add sharpened true color --- satpy/etc/composites/viirs.yaml | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/satpy/etc/composites/viirs.yaml b/satpy/etc/composites/viirs.yaml index 541c4dff10..6fcdad4e1e 100644 --- a/satpy/etc/composites/viirs.yaml +++ b/satpy/etc/composites/viirs.yaml @@ -306,7 +306,7 @@ composites: modifiers: [sunz_corrected_iband] standard_name: natural_color - natural_color_iband_surf_nocorr: + natural_color_surf: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: surf_refl_I03 @@ -314,7 +314,7 @@ composites: - name: surf_refl_I01 standard_name: natural_color - natural_color_mband_surf_nocorr: + natural_color_lowres_surf: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: surf_refl_M10 @@ -322,7 +322,7 @@ composites: - name: surf_refl_M05 standard_name: natural_color - true_color_mband_surf_nocorr: + true_color_lowres_surf: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: surf_refl_M05 @@ -330,6 +330,17 @@ composites: - name: surf_refl_M03 standard_name: true_color + true_color_surf: + compositor: !!python/name:satpy.composites.RatioSharpenedRGB + prerequisites: + - name: surf_refl_M05 + - name: surf_refl_M04 + - name: surf_refl_M03 + optional_prerequisites: + - name: surf_refl_I01 + standard_name: true_color + high_resolution_band: 
red + natural_color_sun_lowres: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: From a913e73cd161ff819a4856e167630cfb55989a6d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 24 Jul 2023 10:38:52 -0500 Subject: [PATCH 113/702] Remove inaccurate VIIRS EDR mask information --- satpy/etc/readers/viirs_edr.yaml | 60 ++------------------------------ 1 file changed, 2 insertions(+), 58 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index fc6d49d2cf..3ef014a88e 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -77,172 +77,116 @@ datasets: file_type: jrr_cloudmask file_key: "CloudMask" coordinates: [longitude, latitude] - units: '1' - flag_meanings: ['Clear', 'Probably Clear', 'Probably Cloudy', 'Cloudy'] - flag_values: [0, 1, 2, 3] - _FillValue: -128 cloud_mask_binary: name: cloud_mask_binary resolution: 750 file_type: [jrr_cloudmask] file_key: "CloudMaskBinary" coordinates: [longitude, latitude] - units: '1' - flag_meanings: ['Clear', 'Cloudy'] - flag_values: [0, 1] - _FillValue: -128 cloud_probability: name: cloud_probability resolution: 750 file_type: [jrr_cloudmask] file_key: "CloudProbability" coordinates: [longitude, latitude] - units: '1' - _FillValue: -999. dust_mask: name: dust_mask resolution: 750 file_type: [jrr_cloudmask] file_key: "Dust_Mask" coordinates: [longitude, latitude] - units: '1' - flag_meanings: ['Clear', 'Dusty'] - flag_values: [0, 1] - _FillValue: -128 fire_mask: name: fire_mask resolution: 750 file_type: [jrr_cloudmask] file_key: "Fire_Mask" coordinates: [longitude, latitude] - units: '1' - flag_meanings: ['No fire', 'Fire'] - flag_values: [0, 1] - _FillValue: -128 smoke_mask: name: smoke_mask resolution: 750 file_type: [jrr_cloudmask] file_key: "Smoke_Mask" coordinates: [longitude, latitude] - units: '1' - flag_meanings: ['Clear', 'Smoky'] - flag_values: [0, 1] - _FillValue: -128 - # Aerosol optical depth product datasets + # Aerosol detection product datasets ash_mask: name: ash_mask resolution: 750 file_type: [jrr_aerosol_product] file_key: "Ash" coordinates: [longitude, latitude] - units: '1' - flag_meanings: ['Clear', 'Ash'] - flag_values: [0, 1] - _FillValue: -128 cloud_mask_adp: name: cloud_mask_adp resolution: 750 file_type: [jrr_aerosol_product] file_key: "Cloud" coordinates: [longitude, latitude] - units: '1' - flag_meanings: ['Clear', 'Probably Clear', 'Probably Cloudy', 'Cloudy'] - flag_values: [0, 1, 2, 3] - _FillValue: -128 dust_smoke_discrimination_index: name: dust_smoke_discrimination_index resolution: 750 file_type: [jrr_aerosol_product] file_key: "DSDI" coordinates: [longitude, latitude] - units: '1' - _FillValue: -999 nuc: name: nuc resolution: 750 file_type: [jrr_aerosol_product] file_key: "NUC" coordinates: [longitude, latitude] - units: '1' - flag_meanings: ['No', 'Yes'] - flag_values: [0, 1] - _FillValue: -128 pqi1: name: pqi1 resolution: 750 file_type: [jrr_aerosol_product] file_key: "PQI1" coordinates: [longitude, latitude] - units: '1' - _FillValue: -128 pqi2: name: pqi2 resolution: 750 file_type: [jrr_aerosol_product] file_key: "PQI2" coordinates: [longitude, latitude] - units: '1' - _FillValue: -128 pqi3: name: pqi3 resolution: 750 file_type: [jrr_aerosol_product] file_key: "PQI3" coordinates: [longitude, latitude] - units: '1' - _FillValue: -128 pqi4: name: pqi4 resolution: 750 file_type: [jrr_aerosol_product] file_key: "PQI4" coordinates: [longitude, latitude] - units: '1' - _FillValue: -128 qcflag: name: qcflag resolution: 750 
file_type: [jrr_aerosol_product] file_key: "QC_Flag" coordinates: [longitude, latitude] - units: '1' - _FillValue: -128 saai: name: saai resolution: 750 file_type: [jrr_aerosol_product] file_key: "SAAI" coordinates: [longitude, latitude] - units: '1' - _FillValue: -999 smoke: name: smoke resolution: 750 file_type: [jrr_aerosol_product] file_key: "Smoke" coordinates: [longitude, latitude] - units: '1' - _FillValue: -999 smoke_concentration: name: smoke_concentration resolution: 750 file_type: [jrr_aerosol_product] file_key: "SmokeCon" coordinates: [longitude, latitude] - units: 'ug/m^3' - _FillValue: -999 snow_ice: name: snow_ice resolution: 750 file_type: [jrr_aerosol_product] file_key: "SnowIce" coordinates: [longitude, latitude] - units: '1' - flag_meanings: ['No', 'Yes'] - flag_values: [0, 1] - _FillValue: -128 # Surface reflectance products surf_refl_I01: @@ -422,7 +366,7 @@ datasets: units: '1' standard_name: "quality_flag" - # Swath-based vegetation indexes added to CSPP LEO output + # Swath-based vegetation indexes added to CSPP LEO surface reflectance files NDVI: name: NDVI resolution: 375 From a3f1f4e152dd712f69a192435d901d234341c59d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 24 Jul 2023 12:24:23 -0500 Subject: [PATCH 114/702] Removal of unnecessary fill values in YAML --- satpy/etc/readers/viirs_edr.yaml | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 3ef014a88e..645c22a898 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -197,7 +197,6 @@ datasets: file_key: "375m Surface Reflectance Band I1" coordinates: [longitude_375, latitude_375] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_I02: name: surf_refl_I02 @@ -207,7 +206,6 @@ datasets: file_key: "375m Surface Reflectance Band I2" coordinates: [longitude_375, latitude_375] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_I03: name: surf_refl_I03 @@ -217,7 +215,6 @@ datasets: file_key: "375m Surface Reflectance Band I3" coordinates: [longitude_375, latitude_375] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M01: name: surf_refl_M01 @@ -227,7 +224,6 @@ datasets: file_key: "750m Surface Reflectance Band M1" coordinates: [longitude_750, latitude_750] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M02: name: surf_refl_M02 @@ -237,7 +233,6 @@ datasets: file_key: "750m Surface Reflectance Band M2" coordinates: [longitude_750, latitude_750] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M03: name: surf_refl_M03 @@ -247,7 +242,6 @@ datasets: file_key: "750m Surface Reflectance Band M3" coordinates: [longitude_750, latitude_750] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M04: name: surf_refl_M04 @@ -257,7 +251,6 @@ datasets: file_key: "750m Surface Reflectance Band M4" coordinates: [longitude_750, latitude_750] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M05: name: surf_refl_M05 @@ -267,7 +260,6 @@ datasets: file_key: "750m Surface Reflectance Band M5" coordinates: [longitude_750, latitude_750] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M06: name: surf_refl_M06 @@ -277,7 +269,6 @@ datasets: file_key: "750m Surface Reflectance Band M6" 
coordinates: [longitude_750, latitude_750] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M07: name: surf_refl_M07 @@ -287,7 +278,6 @@ datasets: file_key: "750m Surface Reflectance Band M7" coordinates: [longitude_750, latitude_750] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M08: name: surf_refl_M08 @@ -297,7 +287,6 @@ datasets: file_key: "750m Surface Reflectance Band M8" coordinates: [longitude_750, latitude_750] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M10: name: surf_refl_M10 @@ -307,7 +296,6 @@ datasets: file_key: "750m Surface Reflectance Band M10" coordinates: [longitude_750, latitude_750] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_qf1: name: surf_refl_qf1 From c6f6cc616c1b3d74d4dcc6619d822677703eb752 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 24 Jul 2023 13:12:52 -0500 Subject: [PATCH 115/702] Add missing M11 surface reflectance product and Polar2Grid false_color_surf product --- satpy/etc/composites/viirs.yaml | 16 ++++++++++++++-- satpy/etc/readers/viirs_edr.yaml | 9 +++++++++ 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/satpy/etc/composites/viirs.yaml b/satpy/etc/composites/viirs.yaml index 6fcdad4e1e..bebf6c5833 100644 --- a/satpy/etc/composites/viirs.yaml +++ b/satpy/etc/composites/viirs.yaml @@ -231,10 +231,10 @@ composites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected] optional_prerequisites: - - name: I01 + - name: I02 modifiers: [sunz_corrected_iband, rayleigh_corrected_iband] standard_name: false_color - high_resolution_band: blue + high_resolution_band: green fire_temperature: # CIRA: Original VIIRS @@ -322,6 +322,18 @@ composites: - name: surf_refl_M05 standard_name: natural_color + false_color_surf: + compositor: !!python/name:satpy.composites.RatioSharpenedRGB + prerequisites: + - name: surf_refl_M11 + - name: surf_refl_M07 + - name: surf_refl_M05 + optional_prerequisites: + - name: surf_refl_I02 + standard_name: false_color + high_resolution_band: green + + true_color_lowres_surf: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 645c22a898..953c508660 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -297,6 +297,15 @@ datasets: coordinates: [longitude_750, latitude_750] units: '%' standard_name: "surface_bidirectional_reflectance" + surf_refl_M11: + name: surf_refl_M11 + resolution: 750 + wavelength: [2.225, 2.250, 2.275] + file_type: [jrr_surfref_product] + file_key: "750m Surface Reflectance Band M11" + coordinates: [longitude_750, latitude_750] + units: '%' + standard_name: "surface_bidirectional_reflectance" surf_refl_qf1: name: surf_refl_qf1 resolution: 750 From 25b4f51207fb055f7d37ac63be7adbecb7a0e753 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 24 Jul 2023 14:03:48 -0500 Subject: [PATCH 116/702] Add a few cloud height VIIRS EDR products --- satpy/etc/readers/viirs_edr.yaml | 29 ++++++++- satpy/readers/viirs_edr.py | 4 +- satpy/tests/reader_tests/test_viirs_edr.py | 69 +++++++++++++++++++--- 3 files changed, 90 insertions(+), 12 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 953c508660..8bcc094b3b 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -23,6 +23,11 @@ file_types: 
variable_prefix: "" file_patterns: - 'SurfRefl_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_cloudheight_product: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + variable_prefix: "" + file_patterns: + - 'JRR-CloudHeight_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' datasets: @@ -30,14 +35,14 @@ datasets: longitude: name: longitude standard_name: longitude - file_type: [jrr_cloudmask, jrr_aerosol_product] + file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product] file_key: "Longitude" units: 'degrees_east' resolution: 750 latitude: name: latitude standard_name: latitude - file_type: [jrr_cloudmask, jrr_aerosol_product] + file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product] file_key: "Latitude" units: 'degrees_north' resolution: 750 @@ -382,3 +387,23 @@ datasets: units: "1" valid_range: [-1.0, 1.0] standard_name: "normalized_difference_vegetation_index" + + # Cloud Height products + cloud_top_temp: + name: CldTopTemp + file_key: "CldTopTemp" + file_type: [jrr_cloudheight_product] + resolution: 750 + coordinates: [longitude, latitude] + cloud_top_height: + name: CldTopHght + file_key: "CldTopHght" + file_type: [jrr_cloudheight_product] + resolution: 750 + coordinates: [longitude, latitude] + cloud_top_pressure: + name: CldTopPres + file_key: "CldTopPres" + file_type: [jrr_cloudheight_product] + resolution: 750 + coordinates: [longitude, latitude] diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 64edbd48c1..6dececa9c1 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -69,13 +69,13 @@ def __init__(self, filename, filename_info, filetype_info): mask_and_scale=True, chunks={ 'Columns': -1, - 'Rows': row_chunks_i, + 'Rows': row_chunks_m, 'Along_Scan_375m': -1, 'Along_Track_375m': row_chunks_i, 'Along_Scan_750m': -1, 'Along_Track_750m': row_chunks_m, }) - if 'columns' in self.nc.dims: + if 'Columns' in self.nc.dims: self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) elif 'Along_Track_375m' in self.nc.dims: self.nc = self.nc.rename({'Along_Scan_375m': 'x', 'Along_Track_375m': 'y'}) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 6a40778505..459f058495 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -91,14 +91,6 @@ def _create_surface_reflectance_file(tmp_path_factory, include_veg_indices: bool return file_path -def _create_fake_dataset(vars_dict: dict[str, xr.DataArray]) -> xr.Dataset: - ds = xr.Dataset( - vars_dict, - attrs={} - ) - return ds - - def _create_surf_refl_variables() -> dict[str, xr.DataArray]: dim_y_750 = "Along_Track_750m" dim_x_750 = "Along_Scan_750m" @@ -172,6 +164,51 @@ def _create_veg_index_variables() -> dict[str, xr.DataArray]: return data_arrs +@pytest.fixture(scope="module") +def cloud_height_file(tmp_path_factory) -> Path: + """Generate fake CloudHeight VIIRS EDR file.""" + tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") + fn = f"JRR-CloudHeight_v3r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202307231023395.nc" + file_path = tmp_path / fn + ch_vars = _create_cloud_height_variables() + ds = _create_fake_dataset(ch_vars) + ds.to_netcdf(file_path) + return file_path + + +def _create_cloud_height_variables() -> dict[str, xr.DataArray]: + dims = ("Rows", "Columns") + + lon_attrs = 
{"standard_name": "longitude", "units": "degrees_east", "_FillValue": -999.9} + lat_attrs = {"standard_name": "latitude", "units": "degrees_north", "_FillValue": -999.9} + cont_attrs = {"units": "Kelvin", "_FillValue": -9999, "scale_factor": 0.0001, "add_offset": 0.0} + + m_data = np.random.random_sample((M_ROWS, M_COLS)).astype(np.float32) + data_arrs = { + "Longitude": xr.DataArray(m_data, dims=dims, attrs=lon_attrs), + "Latitude": xr.DataArray(m_data, dims=dims, attrs=lat_attrs), + } + for var_name in ("CldTopTemp", "CldTopHght", "CldTopPres"): + data_arrs[var_name] = xr.DataArray(m_data, dims=dims, attrs=cont_attrs) + for data_arr in data_arrs.values(): + if "_FillValue" in data_arr.attrs: + data_arr.encoding["_FillValue"] = data_arr.attrs.pop("_FillValue") + if "scale_factor" not in data_arr.attrs: + continue + data_arr.encoding["dtype"] = np.int16 + data_arr.encoding["scale_factor"] = data_arr.attrs.pop("scale_factor") + data_arr.encoding["add_offset"] = data_arr.attrs.pop("add_offset") + return data_arrs + + +def _create_fake_dataset(vars_dict: dict[str, xr.DataArray]) -> xr.Dataset: + ds = xr.Dataset( + vars_dict, + attrs={} + ) + return ds + + class TestVIIRSJRRReader: """Test the VIIRS JRR L2 reader.""" @@ -198,6 +235,17 @@ def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_i _check_vi_data_arr(scn["EVI"]) _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"]) + def test_get_dataset_cloud_height(self, cloud_height_file): + """Test datasets from cloud height files.""" + from satpy import Scene + bytes_in_m_row = 4 * 3200 + with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): + scn = Scene(reader="viirs_edr", filenames=[cloud_height_file]) + scn.load(["CldTopTemp", "CldTopHght", "CldTopPres"]) + _check_cloud_height_data_arr(scn["CldTopTemp"]) + _check_cloud_height_data_arr(scn["CldTopHght"]) + _check_cloud_height_data_arr(scn["CldTopPres"]) + @pytest.mark.parametrize( ("data_file", "exp_available"), [ @@ -264,6 +312,11 @@ def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.floa assert data_arr.attrs["standard_name"] == "surface_bidirectional_reflectance" +def _check_cloud_height_data_arr(data_arr: xr.DataArray) -> None: + _array_checks(data_arr) + _shared_metadata_checks(data_arr) + + def _array_checks(data_arr: xr.DataArray, dtype: npt.Dtype = np.float32) -> None: assert data_arr.dims == ("y", "x") assert isinstance(data_arr.attrs["area"], SwathDefinition) From c0b6e83c7316b1d9e78d96d874012148c8f667cf Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 24 Jul 2023 15:08:18 -0500 Subject: [PATCH 117/702] Add VIIRs EDR AOD product --- satpy/etc/readers/viirs_edr.yaml | 17 ++++++- satpy/tests/reader_tests/test_viirs_edr.py | 59 ++++++++++++++-------- 2 files changed, 54 insertions(+), 22 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 8bcc094b3b..0004751f5d 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -28,6 +28,11 @@ file_types: variable_prefix: "" file_patterns: - 'JRR-CloudHeight_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_aod_product: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + variable_prefix: "" + file_patterns: + - 'JRR-AOD_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' datasets: @@ -35,14 +40,14 @@ datasets: longitude: name: longitude standard_name: longitude 
- file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product] + file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product, jrr_aod_product] file_key: "Longitude" units: 'degrees_east' resolution: 750 latitude: name: latitude standard_name: latitude - file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product] + file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product, jrr_aod_product] file_key: "Latitude" units: 'degrees_north' resolution: 750 @@ -407,3 +412,11 @@ datasets: file_type: [jrr_cloudheight_product] resolution: 750 coordinates: [longitude, latitude] + + # Aerosol Optical Depth products + aod550: + name: AOD550 + file_key: AOD550 + file_type: [jrr_aod_product] + resolution: 750 + coordinates: [longitude, latitude] diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 459f058495..53d24e7d11 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -24,6 +24,7 @@ import shutil from datetime import datetime from pathlib import Path +from typing import Iterable import dask import dask.array as da @@ -80,15 +81,11 @@ def surface_reflectance_with_veg_indices_file(tmp_path_factory) -> Path: def _create_surface_reflectance_file(tmp_path_factory, include_veg_indices: bool = False) -> Path: - tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") fn = f"SurfRefl_v1r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" - file_path = tmp_path / fn sr_vars = _create_surf_refl_variables() if include_veg_indices: sr_vars.update(_create_veg_index_variables()) - ds = _create_fake_dataset(sr_vars) - ds.to_netcdf(file_path) - return file_path + return _create_fake_file(tmp_path_factory, fn, sr_vars) def _create_surf_refl_variables() -> dict[str, xr.DataArray]: @@ -167,16 +164,24 @@ def _create_veg_index_variables() -> dict[str, xr.DataArray]: @pytest.fixture(scope="module") def cloud_height_file(tmp_path_factory) -> Path: """Generate fake CloudHeight VIIRS EDR file.""" - tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") fn = f"JRR-CloudHeight_v3r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202307231023395.nc" - file_path = tmp_path / fn - ch_vars = _create_cloud_height_variables() - ds = _create_fake_dataset(ch_vars) - ds.to_netcdf(file_path) - return file_path + data_vars = _create_continuous_variables( + ("CldTopTemp", "CldTopHght", "CldTopPres") + ) + return _create_fake_file(tmp_path_factory, fn, data_vars) + + +@pytest.fixture(scope="module") +def aod_file(tmp_path_factory) -> Path: + """Generate fake AOD VIIRs EDR file.""" + fn = f"JRR-AOD_v3r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202307231023395.nc" + data_vars = _create_continuous_variables( + ("AOD550",) + ) + return _create_fake_file(tmp_path_factory, fn, data_vars) -def _create_cloud_height_variables() -> dict[str, xr.DataArray]: +def _create_continuous_variables(var_names: Iterable[str]) -> dict[str, xr.DataArray]: dims = ("Rows", "Columns") lon_attrs = {"standard_name": "longitude", "units": "degrees_east", "_FillValue": -999.9} @@ -188,7 +193,7 @@ def _create_cloud_height_variables() -> dict[str, xr.DataArray]: "Longitude": xr.DataArray(m_data, dims=dims, attrs=lon_attrs), "Latitude": xr.DataArray(m_data, dims=dims, attrs=lat_attrs), } - for var_name in ("CldTopTemp", "CldTopHght", "CldTopPres"): + for var_name in var_names: data_arrs[var_name] = xr.DataArray(m_data, dims=dims, attrs=cont_attrs) for data_arr in 
data_arrs.values(): if "_FillValue" in data_arr.attrs: @@ -201,6 +206,14 @@ def _create_cloud_height_variables() -> dict[str, xr.DataArray]: return data_arrs +def _create_fake_file(tmp_path_factory, filename: str, data_arrs: dict[str, xr.DataArray]) -> Path: + tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") + file_path = tmp_path / filename + ds = _create_fake_dataset(data_arrs) + ds.to_netcdf(file_path) + return file_path + + def _create_fake_dataset(vars_dict: dict[str, xr.DataArray]) -> xr.Dataset: ds = xr.Dataset( vars_dict, @@ -235,16 +248,22 @@ def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_i _check_vi_data_arr(scn["EVI"]) _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"]) - def test_get_dataset_cloud_height(self, cloud_height_file): + @pytest.mark.parametrize( + ("var_names", "data_file"), + [ + (("CldTopTemp", "CldTopHght", "CldTopPres"), lazy_fixture("cloud_height_file")), + (("AOD550",), lazy_fixture("aod_file")), + ] + ) + def test_get_dataset_generic(self, var_names, data_file): """Test datasets from cloud height files.""" from satpy import Scene bytes_in_m_row = 4 * 3200 with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): - scn = Scene(reader="viirs_edr", filenames=[cloud_height_file]) - scn.load(["CldTopTemp", "CldTopHght", "CldTopPres"]) - _check_cloud_height_data_arr(scn["CldTopTemp"]) - _check_cloud_height_data_arr(scn["CldTopHght"]) - _check_cloud_height_data_arr(scn["CldTopPres"]) + scn = Scene(reader="viirs_edr", filenames=[data_file]) + scn.load(var_names) + for var_name in var_names: + _check_continuous_data_arr(scn[var_name]) @pytest.mark.parametrize( ("data_file", "exp_available"), @@ -312,7 +331,7 @@ def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.floa assert data_arr.attrs["standard_name"] == "surface_bidirectional_reflectance" -def _check_cloud_height_data_arr(data_arr: xr.DataArray) -> None: +def _check_continuous_data_arr(data_arr: xr.DataArray) -> None: _array_checks(data_arr) _shared_metadata_checks(data_arr) From 31988b63ae75427fed7b3e96e05c33dcf10a276b Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 26 Jul 2023 06:55:37 -0500 Subject: [PATCH 118/702] Update viirs_edr module docstring --- satpy/readers/viirs_edr.py | 33 +++++++++++++++++++-------------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 6dececa9c1..58458b925c 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2022 Satpy developers +# Copyright (c) 2022-2023 Satpy developers # # This file is part of satpy. # @@ -15,30 +15,35 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . -"""VIIRS NOAA enterprise L2 product reader. +"""VIIRS NOAA enterprise EDR product reader. This module defines the :class:`VIIRSJRRFileHandler` file handler, to -be used for reading VIIRS Level 2 products generated by the NOAA enterprise -suite, which are downloadable via NOAA CLASS. -A wide variety of such products exist and, at present, only three are -supported here, showing example filenames: +be used for reading VIIRS EDR products generated by the NOAA enterprise +suite, which are downloadable via NOAA CLASS or on NOAA's AWS buckets. + +A wide variety of such products exist and, at present, only a subset are supported. 
+ - Cloud mask: JRR-CloudMask_v2r3_j01_s202112250807275_e202112250808520_c202112250837300.nc - - Aerosol properties: JRR-ADP_v2r3_j01_s202112250807275_e202112250808520_c202112250839550.nc + - Cloud products: JRR-CloudHeight_v2r3_j01_s202112250807275_e202112250808520_c202112250837300.nc + - Aerosol detection: JRR-ADP_v2r3_j01_s202112250807275_e202112250808520_c202112250839550.nc + - Aerosol optical depth: JRR-AOD_v2r3_j01_s202112250807275_e202112250808520_c202112250839550.nc - Surface reflectance: SurfRefl_v1r1_j01_s202112250807275_e202112250808520_c202112250845080.nc -All products use the same base reader `viirs_l2_jrr` and can be read through satpy with:: + +All products use the same base reader ``viirs_edr`` and can be read through satpy with:: import satpy import glob filenames = glob.glob('JRR-ADP*.nc') - scene = satpy.Scene(filenames, - reader='viirs_l2_jrr') + scene = satpy.Scene(filenames, reader='viirs_edr') scene.load(['smoke_concentration']) -NOTE: -Multiple products contain datasets with the same name! For example, both the cloud mask -and aerosol files contain a cloud mask, but these are not identical. -For clarity, the aerosol file cloudmask is named `cloud_mask_adp` in this reader. +.. note:: + + Multiple products contain datasets with the same name! For example, both the cloud mask + and aerosol detection files contain a cloud mask, but these are not identical. + For clarity, the aerosol file cloudmask is named `cloud_mask_adp` in this reader. + """ From 8957d02c18065dd6bfaefe79e32f0c790a93d484 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 27 Jul 2023 09:22:00 -0500 Subject: [PATCH 119/702] Add LST support --- satpy/etc/readers/viirs_edr.yaml | 21 +++++++---- satpy/readers/viirs_edr.py | 16 +++++++++ satpy/tests/reader_tests/test_viirs_edr.py | 41 ++++++++++++++++++---- 3 files changed, 65 insertions(+), 13 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 0004751f5d..0aa39b69db 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -10,29 +10,28 @@ reader: file_types: jrr_cloudmask: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler - variable_prefix: "" file_patterns: - 'JRR-CloudMask_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_aerosol_product: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler - variable_prefix: "" file_patterns: - 'JRR-ADP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_surfref_product: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSSurfaceReflectanceWithVIHandler - variable_prefix: "" file_patterns: - 'SurfRefl_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_cloudheight_product: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler - variable_prefix: "" file_patterns: - 'JRR-CloudHeight_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_aod_product: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler - variable_prefix: "" file_patterns: - 'JRR-AOD_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_lst_product: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSLSTHandler + file_patterns: + - 
'LST_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' datasets: @@ -40,14 +39,14 @@ datasets: longitude: name: longitude standard_name: longitude - file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product, jrr_aod_product] + file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product, jrr_aod_product, jrr_lst_product] file_key: "Longitude" units: 'degrees_east' resolution: 750 latitude: name: latitude standard_name: latitude - file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product, jrr_aod_product] + file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product, jrr_aod_product, jrr_lst_product] file_key: "Latitude" units: 'degrees_north' resolution: 750 @@ -420,3 +419,11 @@ datasets: file_type: [jrr_aod_product] resolution: 750 coordinates: [longitude, latitude] + + # Land Surface Temperature + vlst: + name: VLST + file_key: VLST + file_type: [jrr_lst_product] + resolution: 750 + coordinates: [longitude, latitude] diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 58458b925c..d90b0359f6 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -28,6 +28,7 @@ - Aerosol detection: JRR-ADP_v2r3_j01_s202112250807275_e202112250808520_c202112250839550.nc - Aerosol optical depth: JRR-AOD_v2r3_j01_s202112250807275_e202112250808520_c202112250839550.nc - Surface reflectance: SurfRefl_v1r1_j01_s202112250807275_e202112250808520_c202112250845080.nc + - Land Surface Temperature: LST_v2r0_npp_s202307241724558_e202307241726200_c202307241854058.nc All products use the same base reader ``viirs_edr`` and can be read through satpy with:: @@ -237,3 +238,18 @@ def _get_veg_index_good_mask(self) -> xr.DataArray: bad_mask_iband_dask = bad_mask.data.repeat(2, axis=1).repeat(2, axis=0) good_mask_iband = xr.DataArray(~bad_mask_iband_dask, dims=qf1.dims) return good_mask_iband + + +class VIIRSLSTHandler(VIIRSJRRFileHandler): + """File handler to handle LST file scale factor and offset weirdness.""" + + def __init__(self, *args, **kwargs): + """Initialize the file handler and unscale necessary variables.""" + super().__init__(*args, **kwargs) + + # Update variables with external scale factor and offset + lst_data_arr = self.nc["VLST"] + scale_factor = self.nc["LST_ScaleFact"] + add_offset = self.nc["LST_Offset"] + lst_data_arr.data = lst_data_arr.data * scale_factor.data + add_offset.data + self.nc["VLST"] = lst_data_arr diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 53d24e7d11..cc06e16647 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -33,6 +33,7 @@ import pytest import xarray as xr from pyresample import SwathDefinition +from pytest import TempPathFactory from pytest_lazyfixture import lazy_fixture I_COLS = 6400 @@ -69,18 +70,18 @@ @pytest.fixture(scope="module") -def surface_reflectance_file(tmp_path_factory) -> Path: +def surface_reflectance_file(tmp_path_factory: TempPathFactory) -> Path: """Generate fake surface reflectance EDR file.""" return _create_surface_reflectance_file(tmp_path_factory, include_veg_indices=False) @pytest.fixture(scope="module") -def surface_reflectance_with_veg_indices_file(tmp_path_factory) -> Path: +def surface_reflectance_with_veg_indices_file(tmp_path_factory: TempPathFactory) -> Path: """Generate fake surface reflectance EDR file with vegetation indexes included.""" return 
_create_surface_reflectance_file(tmp_path_factory, include_veg_indices=True) -def _create_surface_reflectance_file(tmp_path_factory, include_veg_indices: bool = False) -> Path: +def _create_surface_reflectance_file(tmp_path_factory: TempPathFactory, include_veg_indices: bool = False) -> Path: fn = f"SurfRefl_v1r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" sr_vars = _create_surf_refl_variables() if include_veg_indices: @@ -162,7 +163,7 @@ def _create_veg_index_variables() -> dict[str, xr.DataArray]: @pytest.fixture(scope="module") -def cloud_height_file(tmp_path_factory) -> Path: +def cloud_height_file(tmp_path_factory: TempPathFactory) -> Path: """Generate fake CloudHeight VIIRS EDR file.""" fn = f"JRR-CloudHeight_v3r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202307231023395.nc" data_vars = _create_continuous_variables( @@ -172,7 +173,7 @@ def cloud_height_file(tmp_path_factory) -> Path: @pytest.fixture(scope="module") -def aod_file(tmp_path_factory) -> Path: +def aod_file(tmp_path_factory: TempPathFactory) -> Path: """Generate fake AOD VIIRs EDR file.""" fn = f"JRR-AOD_v3r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202307231023395.nc" data_vars = _create_continuous_variables( @@ -181,6 +182,27 @@ def aod_file(tmp_path_factory) -> Path: return _create_fake_file(tmp_path_factory, fn, data_vars) +@pytest.fixture(scope="module") +def lst_file(tmp_path_factory: TempPathFactory) -> Path: + """Generate fake VLST EDR file.""" + fn = f"LST_v2r0_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202307241854058.nc" + data_vars = _create_lst_variables() + return _create_fake_file(tmp_path_factory, fn, data_vars) + + +def _create_lst_variables() -> dict[str, xr.DataArray]: + data_vars = _create_continuous_variables(("VLST",)) + + # VLST scale factors + data_vars["VLST"].data = (data_vars["VLST"].data / 0.0001).astype(np.int16) + data_vars["VLST"].encoding.pop("scale_factor") + data_vars["VLST"].encoding.pop("add_offset") + data_vars["LST_ScaleFact"] = xr.DataArray(np.float32(0.0001)) + data_vars["LST_Offset"] = xr.DataArray(np.float32(0.0)) + + return data_vars + + def _create_continuous_variables(var_names: Iterable[str]) -> dict[str, xr.DataArray]: dims = ("Rows", "Columns") @@ -206,7 +228,7 @@ def _create_continuous_variables(var_names: Iterable[str]) -> dict[str, xr.DataA return data_arrs -def _create_fake_file(tmp_path_factory, filename: str, data_arrs: dict[str, xr.DataArray]) -> Path: +def _create_fake_file(tmp_path_factory: TempPathFactory, filename: str, data_arrs: dict[str, xr.DataArray]) -> Path: tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") file_path = tmp_path / filename ds = _create_fake_dataset(data_arrs) @@ -253,6 +275,7 @@ def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_i [ (("CldTopTemp", "CldTopHght", "CldTopPres"), lazy_fixture("cloud_height_file")), (("AOD550",), lazy_fixture("aod_file")), + (("VLST",), lazy_fixture("lst_file")), ] ) def test_get_dataset_generic(self, var_names, data_file): @@ -333,6 +356,12 @@ def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.floa def _check_continuous_data_arr(data_arr: xr.DataArray) -> None: _array_checks(data_arr) + + # random sample should be between 0 and 1 only if factor/offset applied + data = data_arr.data.compute() + assert not (data < 0).any() + assert not (data > 1).any() + _shared_metadata_checks(data_arr) From 06cee8d8b77ae0135212540af5c6fdc21cf85716 Mon Sep 17 00:00:00 2001 From: youva Aoun 
Date: Fri, 28 Jul 2023 15:22:26 +0000
Subject: [PATCH 120/702] Fix mismatch in shape for lat/lon when reading amv bufr

---
 satpy/etc/readers/seviri_l2_bufr.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/satpy/etc/readers/seviri_l2_bufr.yaml b/satpy/etc/readers/seviri_l2_bufr.yaml
index 1c11927707..e0c51c4370 100644
--- a/satpy/etc/readers/seviri_l2_bufr.yaml
+++ b/satpy/etc/readers/seviri_l2_bufr.yaml
@@ -62,7 +62,7 @@ datasets:
   latitude:
     name: latitude
-    key: 'latitude'
+    key: '#1#latitude'
     long_name: Latitude
     standard_name: latitude
     resolution: [48006.450653072,9001.209497451,72009.675979608]
@@ -72,7 +72,7 @@ datasets:
   longitude:
     name: longitude
-    key: 'longitude'
+    key: '#1#longitude'
     resolution: [48006.450653072,9001.209497451,72009.675979608]
     file_type: [seviri_l2_bufr_asr,seviri_l2_bufr_cla,seviri_l2_bufr_csr,seviri_l2_bufr_gii,seviri_l2_bufr_thu,seviri_l2_bufr_toz,seviri_l2_bufr_amv]
     long_name: Longitude

From e8f4629905639202bfdc4b815316236aa6dcbe11 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Tue, 1 Aug 2023 06:42:31 +0000
Subject: [PATCH 121/702] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- [github.com/PyCQA/flake8: 6.0.0 → 6.1.0](https://github.com/PyCQA/flake8/compare/6.0.0...6.1.0)
---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 995f3035c4..d100d73ca7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -2,7 +2,7 @@ exclude: '^$'
 fail_fast: false
 repos:
   - repo: https://github.com/PyCQA/flake8
-    rev: 6.0.0
+    rev: 6.1.0
     hooks:
       - id: flake8
         additional_dependencies: [flake8-docstrings, flake8-debugger, flake8-bugbear, mccabe]

From 5ddc035b2f610e16b84156f95aa8e926a9dfb094 Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Tue, 1 Aug 2023 06:54:36 -0500
Subject: [PATCH 122/702] Fix flake8 whitespace issues

---
 satpy/tests/scene_tests/test_load.py         | 2 +-
 satpy/tests/test_writers.py                  | 4 ++--
 satpy/tests/writer_tests/test_awips_tiled.py | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/satpy/tests/scene_tests/test_load.py b/satpy/tests/scene_tests/test_load.py
index 2c44995076..6eefbc0080 100644
--- a/satpy/tests/scene_tests/test_load.py
+++ b/satpy/tests/scene_tests/test_load.py
@@ -501,7 +501,7 @@ def test_load_dataset_after_composite(self):
         from satpy.tests.utils import FakeCompositor
         load_mock = spy_decorator(FileYAMLReader.load)
         comp_mock = spy_decorator(FakeCompositor.__call__)
-        with mock.patch.object(FileYAMLReader, 'load', load_mock),\
+        with mock.patch.object(FileYAMLReader, 'load', load_mock), \
                 mock.patch.object(FakeCompositor, '__call__', comp_mock):
             lmock = load_mock.mock
             scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py
index 032481e830..986687b0d6 100644
--- a/satpy/tests/test_writers.py
+++ b/satpy/tests/test_writers.py
@@ -779,8 +779,8 @@ def setUp(self):
                 'extend': False, 'width': 1670, 'height': 110,
                 'tick_marks': 5, 'minor_tick_marks': 1,
-                'cursor': [0, 0], 'bg':'white',
-                'title':'TEST TITLE OF SCALE',
+                'cursor': [0, 0], 'bg': 'white',
+                'title': 'TEST TITLE OF SCALE',
                 'fontsize': 110, 'align': 'cc'
                 }}
 ]
diff --git a/satpy/tests/writer_tests/test_awips_tiled.py b/satpy/tests/writer_tests/test_awips_tiled.py
index ac75ed4069..a47552a708 100644
--- a/satpy/tests/writer_tests/test_awips_tiled.py
+++ b/satpy/tests/writer_tests/test_awips_tiled.py
@@ -236,7 +236,7 @@ def test_basic_numbered_tiles(self, tile_count, tile_size, tmp_path):
         )
         should_error = tile_count is None and tile_size is None
         if should_error:
-            with dask.config.set(scheduler=CustomScheduler(0)),\
+            with dask.config.set(scheduler=CustomScheduler(0)), \
                     pytest.raises(ValueError, match=r'Either.*tile_count.*'):
                 w.save_datasets([input_data_arr], **save_kwargs)
         else:

From 15f5e86c31862919630b1b5ac43b1fa8262e3f1c Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Wed, 2 Aug 2023 08:22:15 +0000
Subject: [PATCH 123/702] Update test with fix for the key for the mockup bufr
 for lat/lon

---
 satpy/tests/reader_tests/test_seviri_l2_bufr.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/satpy/tests/reader_tests/test_seviri_l2_bufr.py b/satpy/tests/reader_tests/test_seviri_l2_bufr.py
index 89f973fd45..3578645e5b 100644
--- a/satpy/tests/reader_tests/test_seviri_l2_bufr.py
+++ b/satpy/tests/reader_tests/test_seviri_l2_bufr.py
@@ -51,13 +51,13 @@

 DATASET_INFO_LAT = {
     'name': 'latitude',
-    'key': 'latitude',
+    'key': '#1#latitude',
     'fill_value': -1.e+100
 }

 DATASET_INFO_LON = {
     'name': 'longitude',
-    'key': 'longitude',
+    'key': '#1#longitude',
     'fill_value': -1.e+100
 }

@@ -128,10 +128,10 @@ def __init__(self, filename, with_adef=False, rect_lon='default'):
         # write the bufr test data twice as we want to read in and then concatenate the data in the reader
         # 55 id corresponds to METEOSAT 8`
         ec.codes_set(self.buf1, 'satelliteIdentifier', 56)
-        ec.codes_set_array(self.buf1, 'latitude', LAT)
-        ec.codes_set_array(self.buf1, 'latitude', LAT)
-        ec.codes_set_array(self.buf1, 'longitude', LON)
-        ec.codes_set_array(self.buf1, 'longitude', LON)
+        ec.codes_set_array(self.buf1, '#1#latitude', LAT)
+        ec.codes_set_array(self.buf1, '#1#latitude', LAT)
+        ec.codes_set_array(self.buf1, '#1#longitude', LON)
+        ec.codes_set_array(self.buf1, '#1#longitude', LON)
         ec.codes_set_array(self.buf1, '#1#brightnessTemperature', DATA)
         ec.codes_set_array(self.buf1, '#1#brightnessTemperature', DATA)

From db09792900673e45c5b5fa9f11cef12f8a757a90 Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Wed, 9 Aug 2023 10:17:33 -0500
Subject: [PATCH 124/702] Restructure LST scaling to be more flexible

---
 satpy/readers/viirs_edr.py | 24 +++++++++++++++++++-----
 1 file changed, 19 insertions(+), 5 deletions(-)

diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py
index d90b0359f6..1c7ba034ef 100644
--- a/satpy/readers/viirs_edr.py
+++ b/satpy/readers/viirs_edr.py
@@ -243,13 +243,27 @@ def _get_veg_index_good_mask(self) -> xr.DataArray:
 class VIIRSLSTHandler(VIIRSJRRFileHandler):
     """File handler to handle LST file scale factor and offset weirdness."""

+    _manual_scalings = {
+        "VLST": ("LST_ScaleFact", "LST_Offset"),
+        "emis_m15": ("LSE_ScaleFact", "LSE_Offset"),
+        "emis_m16": ("LSE_ScaleFact", "LSE_Offset"),
+        "emis_bbe": ("LSE_ScaleFact", "LSE_Offset"),
+        "Satellite_Azimuth_Angle": ("AZI_ScaleFact", "AZI_Offset"),
+    }
+
     def __init__(self, *args, **kwargs):
         """Initialize the file handler and unscale necessary variables."""
         super().__init__(*args, **kwargs)

         # Update variables with external scale factor and offset
-        lst_data_arr = self.nc["VLST"]
-        scale_factor = self.nc["LST_ScaleFact"]
-        add_offset = self.nc["LST_Offset"]
-        lst_data_arr.data = lst_data_arr.data * scale_factor.data + add_offset.data
-        self.nc["VLST"] = lst_data_arr
+        self._scale_data()
+
+    def _scale_data(self):
+        for var_name in list(self.nc.variables.keys()):
+            if var_name not in self._manual_scalings:
+                continue
+            data_arr = self.nc[var_name]
+            scale_factor = self.nc[self._manual_scalings[var_name][0]]
+            add_offset = self.nc[self._manual_scalings[var_name][1]]
+            data_arr.data = data_arr.data * scale_factor.data + add_offset.data
+            self.nc[var_name] = data_arr

From 4e56372d4605ec486b240adc19b54dd2349e22fe Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Wed, 9 Aug 2023 13:03:37 -0500
Subject: [PATCH 125/702] Switch "viirs_edr" reader to dynamic variable loading

---
 satpy/etc/readers/viirs_edr.yaml           | 240 ++++-----------------
 satpy/readers/viirs_edr.py                 |  44 +++-
 satpy/tests/reader_tests/test_viirs_edr.py |   5 +-
 3 files changed, 84 insertions(+), 205 deletions(-)

diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml
index 0aa39b69db..4f33bcc184 100644
--- a/satpy/etc/readers/viirs_edr.yaml
+++ b/satpy/etc/readers/viirs_edr.yaml
@@ -12,197 +12,71 @@ file_types:
     file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler
     file_patterns:
       - 'JRR-CloudMask_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
-  jrr_aerosol_product:
+  jrr_aerosol:
     file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler
     file_patterns:
       - 'JRR-ADP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
-  jrr_surfref_product:
+  jrr_surfref:
     file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSSurfaceReflectanceWithVIHandler
     file_patterns:
      - 'SurfRefl_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
-  jrr_cloudheight_product:
+  jrr_cloudheight:
     file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler
     file_patterns:
       - 'JRR-CloudHeight_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
-  jrr_aod_product:
+  jrr_aod:
     file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler
     file_patterns:
       - 'JRR-AOD_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
-  jrr_lst_product:
+  jrr_lst:
     file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSLSTHandler
     file_patterns:
       - 'LST_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'

 datasets:
-  # Geolocation datasets
-  longitude:
-    name: longitude
-    standard_name: longitude
-    file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product, jrr_aod_product, jrr_lst_product]
-    file_key: "Longitude"
-    units: 'degrees_east'
-    resolution: 750
-  latitude:
-    name: latitude
-    standard_name: latitude
-    file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product, jrr_aod_product, jrr_lst_product]
-    file_key: "Latitude"
-    units: 'degrees_north'
-    resolution: 750
+  # NOTE: All non-surface reflectance file variables are dynamically loaded
+  #   from the variable names inside the file. All 2D variables are
+  #   supported and use the exact name of the variable in the NetCDF file.
+  #   Files mentioned above in "file_types" are supported.
+  #   To see a full list of loadable variables, create a Scene object with
+  #   data files and run ``scn.available_dataset_names()``.
"SnowIce" - coordinates: [longitude, latitude] - - # Surface reflectance products surf_refl_I01: name: surf_refl_I01 resolution: 375 wavelength: [0.600, 0.640, 0.680] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "375m Surface Reflectance Band I1" coordinates: [longitude_375, latitude_375] units: '%' @@ -211,7 +85,7 @@ datasets: name: surf_refl_I02 resolution: 375 wavelength: [0.845, 0.865, 0.884] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "375m Surface Reflectance Band I2" coordinates: [longitude_375, latitude_375] units: '%' @@ -220,7 +94,7 @@ datasets: name: surf_refl_I03 resolution: 375 wavelength: [1.580, 1.610, 1.640] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "375m Surface Reflectance Band I3" coordinates: [longitude_375, latitude_375] units: '%' @@ -229,7 +103,7 @@ datasets: name: surf_refl_M01 resolution: 750 wavelength: [0.402, 0.412, 0.422] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M1" coordinates: [longitude_750, latitude_750] units: '%' @@ -238,7 +112,7 @@ datasets: name: surf_refl_M02 resolution: 750 wavelength: [0.436, 0.445, 0.454] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M2" coordinates: [longitude_750, latitude_750] units: '%' @@ -247,7 +121,7 @@ datasets: name: surf_refl_M03 resolution: 750 wavelength: [0.478, 0.488, 0.498] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M3" coordinates: [longitude_750, latitude_750] units: '%' @@ -256,7 +130,7 @@ datasets: name: surf_refl_M04 resolution: 750 wavelength: [0.545, 0.555, 0.565] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M4" coordinates: [longitude_750, latitude_750] units: '%' @@ -265,7 +139,7 @@ datasets: name: surf_refl_M05 resolution: 750 wavelength: [0.662, 0.672, 0.682] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M5" coordinates: [longitude_750, latitude_750] units: '%' @@ -274,7 +148,7 @@ datasets: name: surf_refl_M06 resolution: 750 wavelength: [0.739, 0.746, 0.754] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M6" coordinates: [longitude_750, latitude_750] units: '%' @@ -283,7 +157,7 @@ datasets: name: surf_refl_M07 resolution: 750 wavelength: [0.846, 0.865, 0.885] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M7" coordinates: [longitude_750, latitude_750] units: '%' @@ -292,7 +166,7 @@ datasets: name: surf_refl_M08 resolution: 750 wavelength: [1.230, 1.240, 1.250] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M8" coordinates: [longitude_750, latitude_750] units: '%' @@ -301,7 +175,7 @@ datasets: name: surf_refl_M10 resolution: 750 wavelength: [1.580, 1.610, 1.640] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M10" coordinates: [longitude_750, latitude_750] units: '%' @@ -310,7 +184,7 @@ datasets: name: surf_refl_M11 resolution: 750 wavelength: [2.225, 2.250, 2.275] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M11" coordinates: [longitude_750, latitude_750] units: '%' @@ -318,7 +192,7 @@ datasets: surf_refl_qf1: name: surf_refl_qf1 resolution: 750 - file_type: 
[jrr_surfref_product] + file_type: [jrr_surfref] file_key: "QF1 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' @@ -326,7 +200,7 @@ datasets: surf_refl_qf2: name: surf_refl_qf2 resolution: 750 - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "QF2 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' @@ -334,7 +208,7 @@ datasets: surf_refl_qf3: name: surf_refl_qf3 resolution: 750 - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "QF3 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' @@ -342,7 +216,7 @@ datasets: surf_refl_qf4: name: surf_refl_qf4 resolution: 750 - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "QF4 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' @@ -350,7 +224,7 @@ datasets: surf_refl_qf5: name: surf_refl_qf5 resolution: 750 - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "QF5 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' @@ -358,7 +232,7 @@ datasets: surf_refl_qf6: name: surf_refl_qf6 resolution: 750 - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "QF6 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' @@ -366,7 +240,7 @@ datasets: surf_refl_qf7: name: surf_refl_qf7 resolution: 750 - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "QF7 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' @@ -376,7 +250,7 @@ datasets: NDVI: name: NDVI resolution: 375 - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "NDVI" coordinates: [longitude_375, latitude_375] units: "1" @@ -385,45 +259,9 @@ datasets: EVI: name: EVI resolution: 375 - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "EVI" coordinates: [longitude_375, latitude_375] units: "1" valid_range: [-1.0, 1.0] standard_name: "normalized_difference_vegetation_index" - - # Cloud Height products - cloud_top_temp: - name: CldTopTemp - file_key: "CldTopTemp" - file_type: [jrr_cloudheight_product] - resolution: 750 - coordinates: [longitude, latitude] - cloud_top_height: - name: CldTopHght - file_key: "CldTopHght" - file_type: [jrr_cloudheight_product] - resolution: 750 - coordinates: [longitude, latitude] - cloud_top_pressure: - name: CldTopPres - file_key: "CldTopPres" - file_type: [jrr_cloudheight_product] - resolution: 750 - coordinates: [longitude, latitude] - - # Aerosol Optical Depth products - aod550: - name: AOD550 - file_key: AOD550 - file_type: [jrr_aod_product] - resolution: 750 - coordinates: [longitude, latitude] - - # Land Surface Temperature - vlst: - name: VLST - file_key: VLST - file_type: [jrr_lst_product] - resolution: 750 - coordinates: [longitude, latitude] diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 1c7ba034ef..d1ef69db96 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -58,6 +58,7 @@ from satpy.utils import get_chunk_size_limit LOG = logging.getLogger(__name__) +M_COLS = 3200 class VIIRSJRRFileHandler(BaseFileHandler): @@ -68,7 +69,7 @@ def __init__(self, filename, filename_info, filetype_info): super(VIIRSJRRFileHandler, self).__init__(filename, filename_info, filetype_info) # use entire scans as chunks - row_chunks_m = max(get_chunk_size_limit() // 4 // 3200, 1) # 32-bit floats + row_chunks_m = max(get_chunk_size_limit() // 4 // M_COLS, 1) # 32-bit floats row_chunks_i = row_chunks_m * 2 self.nc = 
xr.open_dataset(self.filename, decode_cf=True, @@ -99,7 +100,7 @@ def __init__(self, filename, filename_info, filetype_info): def rows_per_scans(self, data_arr: xr.DataArray) -> int: """Get number of array rows per instrument scan based on data resolution.""" - return 32 if data_arr.shape[1] == 6400 else 16 + return 16 if data_arr.shape[1] == M_COLS else 32 def get_dataset(self, dataset_id: DataID, info: dict) -> xr.DataArray: """Get the dataset.""" @@ -183,6 +184,9 @@ def available_datasets(self, configured_datasets=None): ``None`` if this file object is not responsible for it. """ + # keep track of what variables the YAML has configured, so we don't + # duplicate entries for them in the dynamic portion + handled_var_names = set() for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: # some other file handler said it has this dataset @@ -194,8 +198,44 @@ def available_datasets(self, configured_datasets=None): # this is not the file type for this dataset yield None, ds_info file_key = ds_info.get("file_key", ds_info["name"]) + handled_var_names.add(file_key) yield file_key in self.nc, ds_info + ftype = self.filetype_info["file_type"] + m_lon_name = f"longitude_{ftype}" + m_lat_name = f"latitude_{ftype}" + m_coords = (m_lon_name, m_lat_name) + i_lon_name = f"longitude_i_{ftype}" + i_lat_name = f"latitude_i_{ftype}" + i_coords = (i_lon_name, i_lat_name) + for var_name, data_arr in self.nc.items(): + is_lon = "longitude" in var_name.lower() + is_lat = "latitude" in var_name.lower() + if var_name in handled_var_names and not (is_lon or is_lat): + # skip variables that YAML had configured, but allow lon/lats + # to be reprocessed due to our dynamic coordinate naming + continue + if data_arr.ndim != 2: + # only 2D arrays supported at this time + continue + res = 750 if data_arr.shape[1] == M_COLS else 375 + ds_info = { + "file_key": var_name, + "file_type": ftype, + "name": var_name, + "resolution": res, + "coordinates": m_coords if res == 750 else i_coords, + } + if is_lon: + ds_info["standard_name"] = "longitude" + ds_info["units"] = "degrees_east" + ds_info["name"] = m_lon_name if res == 750 else i_lon_name + elif is_lat: + ds_info["standard_name"] = "latitude" + ds_info["units"] = "degrees_north" + ds_info["name"] = m_lat_name if res == 750 else i_lat_name + yield True, ds_info + class VIIRSSurfaceReflectanceWithVIHandler(VIIRSJRRFileHandler): """File handler for surface reflectance files with optional vegetation indexes.""" diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index cc06e16647..09cc2769b6 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -106,8 +106,8 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]: data_arrs = { "Longitude_at_375m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lon_attrs), "Latitude_at_375m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lat_attrs), - "Longitude_at_750m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lon_attrs), - "Latitude_at_750m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lat_attrs), + "Longitude_at_750m_resolution": xr.DataArray(m_data, dims=m_dims, attrs=lon_attrs), + "Latitude_at_750m_resolution": xr.DataArray(m_data, dims=m_dims, attrs=lat_attrs), "375m Surface Reflectance Band I1": xr.DataArray(i_data, dims=i_dims, attrs=sr_attrs), "750m Surface Reflectance Band M1": xr.DataArray(m_data, dims=m_dims, attrs=sr_attrs), } @@ -368,6 +368,7 @@ def 
_check_continuous_data_arr(data_arr: xr.DataArray) -> None: def _array_checks(data_arr: xr.DataArray, dtype: npt.Dtype = np.float32) -> None: assert data_arr.dims == ("y", "x") assert isinstance(data_arr.attrs["area"], SwathDefinition) + assert data_arr.attrs["area"].shape == data_arr.shape assert isinstance(data_arr.data, da.Array) assert np.issubdtype(data_arr.data.dtype, dtype) is_mband_res = _is_mband_res(data_arr) From 65511ecf98a89346ab161c749e135ddebfcd38fe Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 11 Aug 2023 14:02:29 -0500 Subject: [PATCH 126/702] Deprecate "viirs_l2_cloud_mask_nc" reader --- satpy/readers/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 7efab8d904..2b1bbc37ba 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -37,7 +37,7 @@ # Old Name -> New Name -PENDING_OLD_READER_NAMES = {'fci_l1c_fdhsi': 'fci_l1c_nc'} +PENDING_OLD_READER_NAMES = {'fci_l1c_fdhsi': 'fci_l1c_nc', 'viirs_l2_cloud_mask_nc': 'viirs_edr'} OLD_READER_NAMES: dict[str, str] = {} From fbd2802c0747adde8a7582894d3c18f495bfc7ce Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 11 Aug 2023 14:15:55 -0500 Subject: [PATCH 127/702] Add flag to control QF filtering of vegetation indexes --- satpy/readers/viirs_edr.py | 18 +++++++++++++++++- satpy/tests/reader_tests/test_viirs_edr.py | 22 ++++++++++++++-------- 2 files changed, 31 insertions(+), 9 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index d1ef69db96..9b73d272ce 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -45,6 +45,17 @@ and aerosol detection files contain a cloud mask, but these are not identical. For clarity, the aerosol file cloudmask is named `cloud_mask_adp` in this reader. +Vegetation Indexes +^^^^^^^^^^^^^^^^^^ + +The NDVI and EVI products can be loaded from CSPP-produced Surface Reflectance +files. By default, these products are filtered based on the Surface Reflectance +Quality Flags. This is used to remove/mask pixels in certain cloud or water +regions. This behavior can be disabled by providing the reader keyword argument +``filter_veg`` and setting it to ``False``. 
For example:: + + scene = satpy.Scene(filenames, reader='viirs_edr', reader_kwargs={"filter_veg": False}) + """ @@ -240,9 +251,14 @@ def available_datasets(self, configured_datasets=None): class VIIRSSurfaceReflectanceWithVIHandler(VIIRSJRRFileHandler): """File handler for surface reflectance files with optional vegetation indexes.""" + def __init__(self, *args, filter_veg: bool = True, **kwargs) -> None: + """Initialize file handler and keep track of vegetation index filtering.""" + super().__init__(*args, **kwargs) + self._filter_veg = filter_veg + def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: new_data_arr = super()._mask_invalid(data_arr, ds_info) - if ds_info["file_key"] in ("NDVI", "EVI"): + if ds_info["file_key"] in ("NDVI", "EVI") and self._filter_veg: good_mask = self._get_veg_index_good_mask() new_data_arr = new_data_arr.where(good_mask) return new_data_arr diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 09cc2769b6..c9caf3ab85 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -259,15 +259,17 @@ def test_get_dataset_surf_refl(self, surface_reflectance_file): _check_surf_refl_data_arr(scn["surf_refl_I01"]) _check_surf_refl_data_arr(scn["surf_refl_M01"]) - def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_indices_file): + @pytest.mark.parametrize("filter_veg", [False, True]) + def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_indices_file, filter_veg): """Test retrieval of vegetation indices from surface reflectance files.""" from satpy import Scene bytes_in_m_row = 4 * 3200 with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): - scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_with_veg_indices_file]) + scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_with_veg_indices_file], + reader_kwargs={"filter_veg": filter_veg}) scn.load(["NDVI", "EVI", "surf_refl_qf1"]) - _check_vi_data_arr(scn["NDVI"]) - _check_vi_data_arr(scn["EVI"]) + _check_vi_data_arr(scn["NDVI"], filter_veg) + _check_vi_data_arr(scn["EVI"], filter_veg) _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"]) @pytest.mark.parametrize( @@ -332,16 +334,20 @@ def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray) -> None: assert data_arr.attrs["standard_name"] == "quality_flag" -def _check_vi_data_arr(data_arr: xr.DataArray) -> None: +def _check_vi_data_arr(data_arr: xr.DataArray, is_filtered: bool) -> None: _array_checks(data_arr) _shared_metadata_checks(data_arr) assert data_arr.attrs["units"] == "1" assert data_arr.attrs["standard_name"] == "normalized_difference_vegetation_index" data = data_arr.data.compute() - np.testing.assert_allclose(data[0, :7], [np.nan, -1.0, -0.5, 0.0, 0.5, 1.0, np.nan]) - np.testing.assert_allclose(data[0, 8:8 + 16], np.nan) - np.testing.assert_allclose(data[0, 8 + 16:], 0.0) + if is_filtered: + np.testing.assert_allclose(data[0, :7], [np.nan, -1.0, -0.5, 0.0, 0.5, 1.0, np.nan]) + np.testing.assert_allclose(data[0, 8:8 + 16], np.nan) + np.testing.assert_allclose(data[0, 8 + 16:], 0.0) + else: + np.testing.assert_allclose(data[0, :7], [np.nan, -1.0, -0.5, 0.0, 0.5, 1.0, np.nan]) + np.testing.assert_allclose(data[0, 8:], 0.0) def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.float32) -> None: From e01b8a597d5b28569c969cb35ae0a151720c8c97 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 14 Aug 2023 10:32:41 +0000 Subject: [PATCH 128/702] Bump pypa/gh-action-pypi-publish from 1.8.8 to 1.8.10 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.8 to 1.8.10. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.8...v1.8.10) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 68463b2b03..73db646c62 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -19,7 +19,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.8.8 + uses: pypa/gh-action-pypi-publish@v1.8.10 with: user: __token__ password: ${{ secrets.pypi_password }} From d6ec23e9a3f9b3ccbeb85e40beffcc96073cd9e4 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 14 Aug 2023 11:17:03 -0500 Subject: [PATCH 129/702] Remove unstable numpy in CI to test hanging --- .github/workflows/ci.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index faa0aea2cc..078d1f9df0 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -94,7 +94,6 @@ jobs: --trusted-host pypi.anaconda.org \ --no-deps --pre --upgrade \ matplotlib \ - numpy \ pandas \ scipy; \ python -m pip install \ From b7efb9dda139c385de7f493ff8f10514ae35afbc Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 14 Aug 2023 11:44:17 -0500 Subject: [PATCH 130/702] Limit unstable environment to numpy <2 --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 078d1f9df0..b4d33026db 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -94,6 +94,7 @@ jobs: --trusted-host pypi.anaconda.org \ --no-deps --pre --upgrade \ matplotlib \ + "numpy<2" \ pandas \ scipy; \ python -m pip install \ From 036fa9fd005202f0d9a2ede365d358fd70d5ec16 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 14 Aug 2023 13:56:11 -0500 Subject: [PATCH 131/702] Install numpy 1.26b1 from PyPI in unstable CI --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index b4d33026db..0ebbe68f0c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -89,12 +89,12 @@ jobs: # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels # may break the conda-forge libraries trying to use newer glibc versions run: | + python -m pip install --pre --upgrade --no-deps numpy; \ python -m pip install \ --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \ --trusted-host pypi.anaconda.org \ --no-deps --pre --upgrade \ matplotlib \ - "numpy<2" \ pandas \ scipy; \ python -m pip install \ From 984c092501e4130921c48aa9ca62f682461330f1 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 15 Aug 2023 05:40:10 +0000 Subject: [PATCH 132/702] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.4.1 → v1.5.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.4.1...v1.5.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d100d73ca7..1b7889838a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,7 +20,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.4.1' # Use the sha / tag you want to point at + rev: 'v1.5.0' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: From f4b90e9da64be3713b3d9abfaa3e57b97be6eb6c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 15 Aug 2023 09:22:00 -0500 Subject: [PATCH 133/702] Slim down CI for easier debugging --- .github/workflows/ci.yaml | 66 +++------------------------------------ 1 file changed, 4 insertions(+), 62 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0ebbe68f0c..477930cba6 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -11,36 +11,15 @@ env: CACHE_NUMBER: 1 jobs: - lint: - name: lint and style checks - runs-on: ubuntu-latest - steps: - - name: Checkout source - uses: actions/checkout@v3 - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.9 - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install flake8 flake8-docstrings flake8-debugger flake8-bugbear pytest - - name: Install Satpy - run: | - pip install -e . - - name: Run linting - run: | - flake8 satpy/ test: runs-on: ${{ matrix.os }} continue-on-error: ${{ matrix.experimental }} - needs: [lint] strategy: fail-fast: true matrix: - os: ["windows-latest", "ubuntu-latest", "macos-latest"] - python-version: ["3.9", "3.10", "3.11"] + os: ["ubuntu-latest"] + python-version: ["3.11"] experimental: [false] include: - python-version: "3.11" @@ -89,12 +68,12 @@ jobs: # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels # may break the conda-forge libraries trying to use newer glibc versions run: | - python -m pip install --pre --upgrade --no-deps numpy; \ python -m pip install \ --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \ --trusted-host pypi.anaconda.org \ --no-deps --pre --upgrade \ matplotlib \ + numpy \ pandas \ scipy; \ python -m pip install \ @@ -119,41 +98,4 @@ jobs: shell: bash -l {0} run: | export LD_PRELOAD=${{ env.LD_PRELOAD }}; - pytest --cov=satpy satpy/tests --cov-report=xml --cov-report= - - - name: Upload unittest coverage to Codecov - uses: codecov/codecov-action@v3 - with: - flags: unittests - file: ./coverage.xml - env_vars: OS,PYTHON_VERSION,UNSTABLE - - - name: Coveralls Parallel - uses: AndreMiras/coveralls-python-action@develop - with: - flag-name: run-${{ matrix.test_number }} - parallel: true - if: runner.os == 'Linux' - - - name: Run behaviour tests - shell: bash -l {0} - run: | - export LD_PRELOAD=${{ env.LD_PRELOAD }}; - coverage run --source=satpy -m behave satpy/tests/features --tags=-download - coverage xml - - - name: Upload behaviour test coverage to Codecov - uses: codecov/codecov-action@v3 - with: - flags: behaviourtests - file: ./coverage.xml - env_vars: OS,PYTHON_VERSION,UNSTABLE - - coveralls: - needs: [test] - runs-on: ubuntu-latest - steps: - - name: Coveralls Finished - uses: AndreMiras/coveralls-python-action@develop - with: - parallel-finished: true + pytest satpy/tests From 
5705f4db4e1e67eb8a37028442da63a7e7641d9f Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 15 Aug 2023 10:10:16 -0500 Subject: [PATCH 134/702] Try just viirs compact tests --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 477930cba6..03f0a88016 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -98,4 +98,4 @@ jobs: shell: bash -l {0} run: | export LD_PRELOAD=${{ env.LD_PRELOAD }}; - pytest satpy/tests + pytest satpy/tests/reader_tests/test_viirs_compact.py From 326b4df7e440cc5188832db9a29de47bc72a5264 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 15 Aug 2023 10:18:58 -0500 Subject: [PATCH 135/702] Remove bokeh upper limit from CI environment --- continuous_integration/environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 48976401a2..e1b52b384e 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -55,7 +55,7 @@ dependencies: - xarray-datatree - pint-xarray - ephem - - bokeh<3 + - bokeh - pip: - trollsift - trollimage>=1.20 From 91310cc8e57f106d8b92cefb119b615891c0561c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 15 Aug 2023 10:46:28 -0500 Subject: [PATCH 136/702] Revert changes to ci.yaml --- .github/workflows/ci.yaml | 64 +++++++++++++++++++++++++++++++++++++-- 1 file changed, 61 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 03f0a88016..faa0aea2cc 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -11,15 +11,36 @@ env: CACHE_NUMBER: 1 jobs: + lint: + name: lint and style checks + runs-on: ubuntu-latest + steps: + - name: Checkout source + uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: 3.9 + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install flake8 flake8-docstrings flake8-debugger flake8-bugbear pytest + - name: Install Satpy + run: | + pip install -e . 
+ - name: Run linting + run: | + flake8 satpy/ test: runs-on: ${{ matrix.os }} continue-on-error: ${{ matrix.experimental }} + needs: [lint] strategy: fail-fast: true matrix: - os: ["ubuntu-latest"] - python-version: ["3.11"] + os: ["windows-latest", "ubuntu-latest", "macos-latest"] + python-version: ["3.9", "3.10", "3.11"] experimental: [false] include: - python-version: "3.11" @@ -98,4 +119,41 @@ jobs: shell: bash -l {0} run: | export LD_PRELOAD=${{ env.LD_PRELOAD }}; - pytest satpy/tests/reader_tests/test_viirs_compact.py + pytest --cov=satpy satpy/tests --cov-report=xml --cov-report= + + - name: Upload unittest coverage to Codecov + uses: codecov/codecov-action@v3 + with: + flags: unittests + file: ./coverage.xml + env_vars: OS,PYTHON_VERSION,UNSTABLE + + - name: Coveralls Parallel + uses: AndreMiras/coveralls-python-action@develop + with: + flag-name: run-${{ matrix.test_number }} + parallel: true + if: runner.os == 'Linux' + + - name: Run behaviour tests + shell: bash -l {0} + run: | + export LD_PRELOAD=${{ env.LD_PRELOAD }}; + coverage run --source=satpy -m behave satpy/tests/features --tags=-download + coverage xml + + - name: Upload behaviour test coverage to Codecov + uses: codecov/codecov-action@v3 + with: + flags: behaviourtests + file: ./coverage.xml + env_vars: OS,PYTHON_VERSION,UNSTABLE + + coveralls: + needs: [test] + runs-on: ubuntu-latest + steps: + - name: Coveralls Finished + uses: AndreMiras/coveralls-python-action@develop + with: + parallel-finished: true From 4d54512565ebd3ffaf9f4c5e35252295ac29ac42 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 15 Aug 2023 10:56:21 -0500 Subject: [PATCH 137/702] Reset CI cache number to force updating packages --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index faa0aea2cc..b2b53c691b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -8,7 +8,7 @@ concurrency: on: [push, pull_request] env: - CACHE_NUMBER: 1 + CACHE_NUMBER: 0 jobs: lint: From d5b1caca16a9442f6ddb3b40bd1f83041530f086 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 15 Aug 2023 20:35:25 -0500 Subject: [PATCH 138/702] Refactor viirs_edr available_datasets --- satpy/readers/viirs_edr.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 9b73d272ce..f95f6a901c 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -57,9 +57,10 @@ scene = satpy.Scene(filenames, reader='viirs_edr', reader_kwargs={"filter_veg": False}) """ - +from __future__ import annotations import logging +from typing import Iterable import numpy as np import xarray as xr @@ -212,6 +213,9 @@ def available_datasets(self, configured_datasets=None): handled_var_names.add(file_key) yield file_key in self.nc, ds_info + yield from self._dynamic_variables_from_file(handled_var_names) + + def _dynamic_variables_from_file(self, handled_var_names: set) -> Iterable[tuple[bool, dict]]: ftype = self.filetype_info["file_type"] m_lon_name = f"longitude_{ftype}" m_lat_name = f"latitude_{ftype}" From aa3f904cc0795eb4a1af2da3d1794e7bca5037cd Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 16 Aug 2023 10:57:00 -0500 Subject: [PATCH 139/702] Fix coordinate variables not being dynamically included --- satpy/readers/viirs_edr.py | 3 ++- satpy/tests/reader_tests/test_viirs_edr.py | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/readers/viirs_edr.py 
b/satpy/readers/viirs_edr.py index f95f6a901c..8b3cf99d27 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -223,7 +223,8 @@ def _dynamic_variables_from_file(self, handled_var_names: set) -> Iterable[tuple i_lon_name = f"longitude_i_{ftype}" i_lat_name = f"latitude_i_{ftype}" i_coords = (i_lon_name, i_lat_name) - for var_name, data_arr in self.nc.items(): + for var_name in self.nc.variables.keys(): + data_arr = self.nc[var_name] is_lon = "longitude" in var_name.lower() is_lat = "latitude" in var_name.lower() if var_name in handled_var_names and not (is_lon or is_lat): diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index c9caf3ab85..f4f5799444 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -225,6 +225,7 @@ def _create_continuous_variables(var_names: Iterable[str]) -> dict[str, xr.DataA data_arr.encoding["dtype"] = np.int16 data_arr.encoding["scale_factor"] = data_arr.attrs.pop("scale_factor") data_arr.encoding["add_offset"] = data_arr.attrs.pop("add_offset") + data_arr.encoding["coordinates"] = "Longitude Latitude" return data_arrs From 4fbeeaa72cfbd1704ac422c662c07631d7defa1e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 16 Aug 2023 11:17:41 -0500 Subject: [PATCH 140/702] Fix YAML definitions being ignored when multiple files are provided --- satpy/readers/viirs_edr.py | 4 +- satpy/tests/reader_tests/test_viirs_edr.py | 63 ++++++++++++++++------ 2 files changed, 48 insertions(+), 19 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 8b3cf99d27..b007e710b3 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -200,6 +200,8 @@ def available_datasets(self, configured_datasets=None): # duplicate entries for them in the dynamic portion handled_var_names = set() for is_avail, ds_info in (configured_datasets or []): + file_key = ds_info.get("file_key", ds_info["name"]) + handled_var_names.add(file_key) if is_avail is not None: # some other file handler said it has this dataset # we don't know any more information than the previous @@ -209,8 +211,6 @@ def available_datasets(self, configured_datasets=None): if self.file_type_matches(ds_info['file_type']) is None: # this is not the file type for this dataset yield None, ds_info - file_key = ds_info.get("file_key", ds_info["name"]) - handled_var_names.add(file_key) yield file_key in self.nc, ds_info yield from self._dynamic_variables_from_file(handled_var_names) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index f4f5799444..5286ed3461 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -245,33 +245,57 @@ def _create_fake_dataset(vars_dict: dict[str, xr.DataArray]) -> xr.Dataset: return ds +def _copy_to_second_granule(first_granule_path: Path) -> Path: + # hack to make multiple time steps + second_fn = Path(str(first_granule_path).replace("0.nc", "1.nc")) + shutil.copy(first_granule_path, second_fn) + return second_fn + + class TestVIIRSJRRReader: """Test the VIIRS JRR L2 reader.""" - def test_get_dataset_surf_refl(self, surface_reflectance_file): + @pytest.mark.parametrize("multiple_files", [False, True]) + def test_get_dataset_surf_refl(self, surface_reflectance_file, multiple_files): """Test retrieval of datasets.""" from satpy import Scene + + files = [surface_reflectance_file] + if multiple_files: + 
files.append(_copy_to_second_granule(surface_reflectance_file)) + bytes_in_m_row = 4 * 3200 with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): - scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_file]) + scn = Scene(reader="viirs_edr", filenames=files) scn.load(["surf_refl_I01", "surf_refl_M01"]) assert scn.start_time == START_TIME assert scn.end_time == END_TIME - _check_surf_refl_data_arr(scn["surf_refl_I01"]) - _check_surf_refl_data_arr(scn["surf_refl_M01"]) + _check_surf_refl_data_arr(scn["surf_refl_I01"], multiple_files=multiple_files) + _check_surf_refl_data_arr(scn["surf_refl_M01"], multiple_files=multiple_files) @pytest.mark.parametrize("filter_veg", [False, True]) - def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_indices_file, filter_veg): + @pytest.mark.parametrize("multiple_files", [False, True]) + def test_get_dataset_surf_refl_with_veg_idx( + self, + surface_reflectance_with_veg_indices_file, + filter_veg, + multiple_files + ): """Test retrieval of vegetation indices from surface reflectance files.""" from satpy import Scene + + files = [surface_reflectance_with_veg_indices_file] + if multiple_files: + files.append(_copy_to_second_granule(surface_reflectance_with_veg_indices_file)) + bytes_in_m_row = 4 * 3200 with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): - scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_with_veg_indices_file], + scn = Scene(reader="viirs_edr", filenames=files, reader_kwargs={"filter_veg": filter_veg}) scn.load(["NDVI", "EVI", "surf_refl_qf1"]) - _check_vi_data_arr(scn["NDVI"], filter_veg) - _check_vi_data_arr(scn["EVI"], filter_veg) - _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"]) + _check_vi_data_arr(scn["NDVI"], filter_veg, multiple_files) + _check_vi_data_arr(scn["EVI"], filter_veg, multiple_files) + _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"], multiple_files) @pytest.mark.parametrize( ("var_names", "data_file"), @@ -328,15 +352,15 @@ def test_get_platformname(self, surface_reflectance_file, filename_platform, exp assert scn["surf_refl_I01"].attrs["platform_name"] == exp_shortname -def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray) -> None: - _array_checks(data_arr, dtype=np.uint8) +def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray, multiple_files: bool) -> None: + _array_checks(data_arr, dtype=np.uint8, multiple_files=multiple_files) _shared_metadata_checks(data_arr) assert data_arr.attrs["units"] == "1" assert data_arr.attrs["standard_name"] == "quality_flag" -def _check_vi_data_arr(data_arr: xr.DataArray, is_filtered: bool) -> None: - _array_checks(data_arr) +def _check_vi_data_arr(data_arr: xr.DataArray, is_filtered: bool, multiple_files: bool) -> None: + _array_checks(data_arr, multiple_files=multiple_files) _shared_metadata_checks(data_arr) assert data_arr.attrs["units"] == "1" assert data_arr.attrs["standard_name"] == "normalized_difference_vegetation_index" @@ -351,8 +375,12 @@ def _check_vi_data_arr(data_arr: xr.DataArray, is_filtered: bool) -> None: np.testing.assert_allclose(data[0, 8:], 0.0) -def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.float32) -> None: - _array_checks(data_arr, dtype) +def _check_surf_refl_data_arr( + data_arr: xr.DataArray, + dtype: npt.DType = np.float32, + multiple_files: bool = False +) -> None: + _array_checks(data_arr, dtype, multiple_files=multiple_files) data = data_arr.data.compute() assert data.max() > 1.0 # random 0-1 test data multiplied by 100 @@ -372,14 +400,15 
@@ def _check_continuous_data_arr(data_arr: xr.DataArray) -> None: _shared_metadata_checks(data_arr) -def _array_checks(data_arr: xr.DataArray, dtype: npt.Dtype = np.float32) -> None: +def _array_checks(data_arr: xr.DataArray, dtype: npt.Dtype = np.float32, multiple_files: bool = False) -> None: assert data_arr.dims == ("y", "x") assert isinstance(data_arr.attrs["area"], SwathDefinition) assert data_arr.attrs["area"].shape == data_arr.shape assert isinstance(data_arr.data, da.Array) assert np.issubdtype(data_arr.data.dtype, dtype) is_mband_res = _is_mband_res(data_arr) - exp_shape = (M_ROWS, M_COLS) if is_mband_res else (I_ROWS, I_COLS) + shape_multiplier = 1 + int(multiple_files) + exp_shape = (M_ROWS * shape_multiplier, M_COLS) if is_mband_res else (I_ROWS * shape_multiplier, I_COLS) assert data_arr.shape == exp_shape exp_row_chunks = 4 if is_mband_res else 8 assert all(c == exp_row_chunks for c in data_arr.chunks[0]) From 8d664a654bb219dd8a85ff5bc60b50f37c16dceb Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 16 Aug 2023 11:18:38 -0500 Subject: [PATCH 141/702] Remove old VIIRS L2 Cloud Mask reader --- satpy/etc/readers/viirs_l2_cloud_mask_nc.yaml | 50 -------- satpy/readers/viirs_l2.py | 120 ------------------ 2 files changed, 170 deletions(-) delete mode 100644 satpy/etc/readers/viirs_l2_cloud_mask_nc.yaml delete mode 100644 satpy/readers/viirs_l2.py diff --git a/satpy/etc/readers/viirs_l2_cloud_mask_nc.yaml b/satpy/etc/readers/viirs_l2_cloud_mask_nc.yaml deleted file mode 100644 index 0f2650bdc1..0000000000 --- a/satpy/etc/readers/viirs_l2_cloud_mask_nc.yaml +++ /dev/null @@ -1,50 +0,0 @@ -reader: - name: viirs_l2_cloud_mask_nc - short_name: VIIRS CSPP Cloud Mask - long_name: VIIRS CSPP Cloud Mask data in NetCDF4 format - description: VIIRS CSPP Cloud Mask reader - status: beta - supports_fsspec: false - reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader - sensors: [viirs] - -file_types: - cspp_cloud_mask_file: - file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSCloudMaskFileHandler - file_patterns: ['JRR-CloudMask_{delivery_package:4s}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc'] - # Example filenames - # JRR-CloudMask_v3r0_npp_s202212070726217_e202212070727459_c202212071917430.nc - -datasets: - longitude: - name: longitude - resolution: 750 - file_type: cspp_cloud_mask_file - file_key: Longitude - file_units: "degrees_east" - standard_name: longitude - coordinates: [longitude, latitude] - latitude: - name: latitude - resolution: 750 - file_type: cspp_cloud_mask_file - file_key: Latitude - file_units: "degrees_north" - standard_name: latitude - coordinates: [longitude, latitude] - cloud_mask: - name: cloud_mask - resolution: 750 - file_type: cspp_cloud_mask_file - file_key: CloudMask - file_units: "1" - standard_name: cloud_mask - coordinates: [longitude, latitude] - cloud_mask_binary: - name: cloud_mask_binary - resolution: 750 - file_type: cspp_cloud_mask_file - file_key: CloudMaskBinary - file_units: "1" - standard_name: cloud_mask_binary - coordinates: [longitude, latitude] diff --git a/satpy/readers/viirs_l2.py b/satpy/readers/viirs_l2.py deleted file mode 100644 index 90d272504a..0000000000 --- a/satpy/readers/viirs_l2.py +++ /dev/null @@ -1,120 +0,0 @@ -# Copyright (c) 2022-2023 Satpy developers -# -# This file is part of satpy. 
-# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see . -"""Interface to VIIRS L2 files.""" - -from datetime import datetime - -from satpy.readers.netcdf_utils import NetCDF4FileHandler - - -class VIIRSCloudMaskFileHandler(NetCDF4FileHandler): - """VIIRS L2 Cloud Mask reader.""" - - def __init__(self, filename, filename_info, filetype_info): - """Initialize the file handler.""" - super().__init__(filename, filename_info, filetype_info, cache_handle=True) - - def _parse_datetime(self, datestr): - """Parse datetime.""" - return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ") - - @property - def start_orbit_number(self): - """Get start orbit number.""" - return int(self['/attr/start_orbit_number']) - - @property - def end_orbit_number(self): - """Get end orbit number.""" - return int(self['/attr/end_orbit_number']) - - @property - def platform_name(self): - """Get platform name.""" - res = self.filename_info['platform_shortname'] - - return { - 'npp': 'Suomi-NPP', - 'j01': 'NOAA-20', - 'j02': 'NOAA-21', - }.get(res, res) - - @property - def sensor_name(self): - """Get sensor name.""" - return self['/attr/instrument_name'].lower() - - def get_shape(self, ds_id, ds_info): - """Get shape.""" - return self.get(ds_id['name'] + '/shape', 1) - - @property - def start_time(self): - """Get start time.""" - return self._parse_datetime(self['/attr/time_coverage_start']) - - @property - def end_time(self): - """Get end time.""" - return self._parse_datetime(self['/attr/time_coverage_end']) - - def get_metadata(self, dataset_id, ds_info): - """Get metadata.""" - var_path = ds_info['file_key'] - shape = self.get_shape(dataset_id, ds_info) - file_units = ds_info.get('file_units') - - attr = getattr(self[var_path], 'attrs', {}) - attr.update(ds_info) - attr.update(dataset_id.to_dict()) - attr.update({ - "shape": shape, - "units": ds_info.get("units", file_units), - "file_units": file_units, - "platform_name": self.platform_name, - "sensor": self.sensor_name, - "start_orbit": self.start_orbit_number, - "end_orbit": self.end_orbit_number, - }) - attr.update(dataset_id.to_dict()) - return attr - - def get_dataset(self, dataset_id, ds_info): - """Get dataset.""" - var_path = ds_info['file_key'] - metadata = self.get_metadata(dataset_id, ds_info) - - valid_min, valid_max = self._get_dataset_valid_range(var_path) - data = self[var_path] - data.attrs.update(metadata) - - if valid_min is not None and valid_max is not None: - data = data.where((data >= valid_min) & (data <= valid_max)) - - if isinstance(data.attrs.get('flag_meanings'), str): - data.attrs['flag_meanings'] = data.attrs['flag_meanings'].split(' ') - - # rename dimensions to correspond to satpy's 'y' and 'x' standard - if 'Rows' in data.dims: - data = data.rename({'Rows': 'y', 'Columns': 'x'}) - return data - - def _get_dataset_valid_range(self, var_path): - valid_range = self.get(var_path + '/attr/valid_range') - valid_min = valid_range[0] - valid_max = valid_range[1] - - return valid_min, valid_max From 
f7b60b457fdd166796e1aec28451bc59589e95f4 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 16 Aug 2023 11:20:09 -0500 Subject: [PATCH 142/702] Remove old VIIRS L2 reader tests --- satpy/tests/reader_tests/test_viirs_l2.py | 142 ---------------------- 1 file changed, 142 deletions(-) delete mode 100644 satpy/tests/reader_tests/test_viirs_l2.py diff --git a/satpy/tests/reader_tests/test_viirs_l2.py b/satpy/tests/reader_tests/test_viirs_l2.py deleted file mode 100644 index 5d6e8ffb4f..0000000000 --- a/satpy/tests/reader_tests/test_viirs_l2.py +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright (c) 2022 Satpy developers -# -# This file is part of satpy. -# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see . -"""Tests for the VIIRS CSPP L2 readers.""" - -import numpy as np -import pytest -import xarray as xr - -from satpy import Scene - -# NOTE: -# The following Pytest fixtures are not defined in this file, but are used and injected by Pytest: -# - tmp_path - -CLOUD_MASK_FILE = "JRR-CloudMask_v3r0_npp_s202212070905565_e202212070907207_c202212071932513.nc" -NUM_COLUMNS = 3200 -NUM_ROWS = 768 -DATASETS = ['Latitude', 'Longitude', 'CloudMask', 'CloudMaskBinary'] - - -@pytest.fixture -def cloud_mask_file(tmp_path): - """Create a temporary JRR CloudMask file as a fixture.""" - file_path = tmp_path / CLOUD_MASK_FILE - _write_cloud_mask_file(file_path) - yield file_path - - -def _write_cloud_mask_file(file_path): - dset = xr.Dataset() - dset.attrs = _get_global_attrs() - dset['Latitude'] = _get_lat_arr() - dset['Longitude'] = _get_lon_arr() - dset['CloudMask'] = _get_cloud_mask_arr() - dset['CloudMaskBinary'] = _get_cloud_mask_binary_arr() - dset.to_netcdf(file_path, 'w') - - -def _get_global_attrs(): - return { - 'time_coverage_start': '2022-12-07T09:05:56Z', - 'time_coverage_end': '2022-12-07T09:07:20Z', - 'start_orbit_number': np.array(57573), - 'end_orbit_number': np.array(57573), - 'instrument_name': 'VIIRS', - } - - -def _get_lat_arr(): - arr = np.zeros((NUM_ROWS, NUM_COLUMNS), dtype=np.float32) - attrs = { - 'long_name': 'Latitude', - 'units': 'degrees_north', - 'valid_range': np.array([-90, 90], dtype=np.float32), - '_FillValue': -999. - } - return xr.DataArray(arr, attrs=attrs, dims=('Rows', 'Columns')) - - -def _get_lon_arr(): - arr = np.zeros((NUM_ROWS, NUM_COLUMNS), dtype=np.float32) - attrs = { - 'long_name': 'Longitude', - 'units': 'degrees_east', - 'valid_range': np.array([-180, 180], dtype=np.float32), - '_FillValue': -999. 
- } - return xr.DataArray(arr, attrs=attrs, dims=('Rows', 'Columns')) - - -def _get_cloud_mask_arr(): - arr = np.random.randint(0, 4, (NUM_ROWS, NUM_COLUMNS), dtype=np.byte) - attrs = { - 'long_name': 'Cloud Mask', - '_FillValue': np.byte(-128), - 'valid_range': np.array([0, 3], dtype=np.byte), - 'units': '1', - 'flag_values': np.array([0, 1, 2, 3], dtype=np.byte), - 'flag_meanings': 'clear probably_clear probably_cloudy cloudy', - } - return xr.DataArray(arr, attrs=attrs, dims=('Rows', 'Columns')) - - -def _get_cloud_mask_binary_arr(): - arr = np.random.randint(0, 2, (NUM_ROWS, NUM_COLUMNS), dtype=np.byte) - attrs = { - 'long_name': 'Cloud Mask Binary', - '_FillValue': np.byte(-128), - 'valid_range': np.array([0, 1], dtype=np.byte), - 'units': '1', - } - return xr.DataArray(arr, attrs=attrs, dims=('Rows', 'Columns')) - - -def test_cloud_mask_read_latitude(cloud_mask_file): - """Test reading latitude dataset.""" - data = _read_viirs_l2_cloud_mask_nc_data(cloud_mask_file, 'latitude') - _assert_common(data) - - -def test_cloud_mask_read_longitude(cloud_mask_file): - """Test reading longitude dataset.""" - data = _read_viirs_l2_cloud_mask_nc_data(cloud_mask_file, 'longitude') - _assert_common(data) - - -def test_cloud_mask_read_cloud_mask(cloud_mask_file): - """Test reading cloud mask dataset.""" - data = _read_viirs_l2_cloud_mask_nc_data(cloud_mask_file, 'cloud_mask') - _assert_common(data) - np.testing.assert_equal(data.attrs['flag_values'], [0, 1, 2, 3]) - assert data.attrs['flag_meanings'] == ['clear', 'probably_clear', 'probably_cloudy', 'cloudy'] - - -def test_cloud_mas_read_binary_cloud_mask(cloud_mask_file): - """Test reading binary cloud mask dataset.""" - data = _read_viirs_l2_cloud_mask_nc_data(cloud_mask_file, 'cloud_mask_binary') - _assert_common(data) - - -def _read_viirs_l2_cloud_mask_nc_data(fname, dset_name): - scn = Scene(reader="viirs_l2_cloud_mask_nc", filenames=[fname]) - scn.load([dset_name]) - return scn[dset_name] - - -def _assert_common(data): - assert data.dims == ('y', 'x') - assert "units" in data.attrs From 9e813f62f3678ce59373778085ea545ca22304b3 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 16 Aug 2023 13:21:36 -0500 Subject: [PATCH 143/702] Change multiple files tests to use a pytest fixture --- satpy/tests/reader_tests/test_viirs_edr.py | 97 +++++++++++++++------- 1 file changed, 65 insertions(+), 32 deletions(-) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 5286ed3461..2acbd0d55a 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -22,7 +22,7 @@ from __future__ import annotations import shutil -from datetime import datetime +from datetime import datetime, timedelta from pathlib import Path from typing import Iterable @@ -72,17 +72,48 @@ @pytest.fixture(scope="module") def surface_reflectance_file(tmp_path_factory: TempPathFactory) -> Path: """Generate fake surface reflectance EDR file.""" - return _create_surface_reflectance_file(tmp_path_factory, include_veg_indices=False) + return _create_surface_reflectance_file(tmp_path_factory, START_TIME, include_veg_indices=False) + + +@pytest.fixture(scope="module") +def surface_reflectance_file2(tmp_path_factory: TempPathFactory) -> Path: + """Generate fake surface reflectance EDR file.""" + return _create_surface_reflectance_file(tmp_path_factory, START_TIME + timedelta(minutes=5), + include_veg_indices=False) + + +@pytest.fixture(scope="module") +def 
multiple_surface_reflectance_files(surface_reflectance_file, surface_reflectance_file2) -> list[Path]: + """Get two multiple surface reflectance files.""" + return [surface_reflectance_file, surface_reflectance_file2] @pytest.fixture(scope="module") def surface_reflectance_with_veg_indices_file(tmp_path_factory: TempPathFactory) -> Path: """Generate fake surface reflectance EDR file with vegetation indexes included.""" - return _create_surface_reflectance_file(tmp_path_factory, include_veg_indices=True) + return _create_surface_reflectance_file(tmp_path_factory, START_TIME, include_veg_indices=True) + + +@pytest.fixture(scope="module") +def surface_reflectance_with_veg_indices_file2(tmp_path_factory: TempPathFactory) -> Path: + """Generate fake surface reflectance EDR file with vegetation indexes included.""" + return _create_surface_reflectance_file(tmp_path_factory, START_TIME + timedelta(minutes=5), + include_veg_indices=True) -def _create_surface_reflectance_file(tmp_path_factory: TempPathFactory, include_veg_indices: bool = False) -> Path: - fn = f"SurfRefl_v1r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" +@pytest.fixture(scope="module") +def multiple_surface_reflectance_files_with_veg_indices(surface_reflectance_with_veg_indices_file, + surface_reflectance_with_veg_indices_file2) -> list[Path]: + """Get two multiple surface reflectance files with vegetation indexes included.""" + return [surface_reflectance_with_veg_indices_file, surface_reflectance_with_veg_indices_file2] + + +def _create_surface_reflectance_file( + tmp_path_factory: TempPathFactory, + start_time: datetime, + include_veg_indices: bool = False, +) -> Path: + fn = f"SurfRefl_v1r2_npp_s{start_time:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" sr_vars = _create_surf_refl_variables() if include_veg_indices: sr_vars.update(_create_veg_index_variables()) @@ -245,57 +276,59 @@ def _create_fake_dataset(vars_dict: dict[str, xr.DataArray]) -> xr.Dataset: return ds -def _copy_to_second_granule(first_granule_path: Path) -> Path: - # hack to make multiple time steps - second_fn = Path(str(first_granule_path).replace("0.nc", "1.nc")) - shutil.copy(first_granule_path, second_fn) - return second_fn - - class TestVIIRSJRRReader: """Test the VIIRS JRR L2 reader.""" - @pytest.mark.parametrize("multiple_files", [False, True]) - def test_get_dataset_surf_refl(self, surface_reflectance_file, multiple_files): + @pytest.mark.parametrize( + "data_files", + [ + lazy_fixture("surface_reflectance_file"), + lazy_fixture("multiple_surface_reflectance_files"), + ], + ) + def test_get_dataset_surf_refl(self, data_files): """Test retrieval of datasets.""" from satpy import Scene - files = [surface_reflectance_file] - if multiple_files: - files.append(_copy_to_second_granule(surface_reflectance_file)) - + if not isinstance(data_files, list): + data_files = [data_files] + is_multiple = len(data_files) > 1 bytes_in_m_row = 4 * 3200 with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): - scn = Scene(reader="viirs_edr", filenames=files) + scn = Scene(reader="viirs_edr", filenames=data_files) scn.load(["surf_refl_I01", "surf_refl_M01"]) assert scn.start_time == START_TIME assert scn.end_time == END_TIME - _check_surf_refl_data_arr(scn["surf_refl_I01"], multiple_files=multiple_files) - _check_surf_refl_data_arr(scn["surf_refl_M01"], multiple_files=multiple_files) + _check_surf_refl_data_arr(scn["surf_refl_I01"], multiple_files=is_multiple) + _check_surf_refl_data_arr(scn["surf_refl_M01"], 
multiple_files=is_multiple) @pytest.mark.parametrize("filter_veg", [False, True]) - @pytest.mark.parametrize("multiple_files", [False, True]) + @pytest.mark.parametrize( + "data_files", + [ + lazy_fixture("surface_reflectance_with_veg_indices_file2"), + lazy_fixture("multiple_surface_reflectance_files_with_veg_indices"), + ], + ) def test_get_dataset_surf_refl_with_veg_idx( self, - surface_reflectance_with_veg_indices_file, + data_files, filter_veg, - multiple_files ): """Test retrieval of vegetation indices from surface reflectance files.""" from satpy import Scene - files = [surface_reflectance_with_veg_indices_file] - if multiple_files: - files.append(_copy_to_second_granule(surface_reflectance_with_veg_indices_file)) - + if not isinstance(data_files, list): + data_files = [data_files] + is_multiple = len(data_files) > 1 bytes_in_m_row = 4 * 3200 with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): - scn = Scene(reader="viirs_edr", filenames=files, + scn = Scene(reader="viirs_edr", filenames=data_files, reader_kwargs={"filter_veg": filter_veg}) scn.load(["NDVI", "EVI", "surf_refl_qf1"]) - _check_vi_data_arr(scn["NDVI"], filter_veg, multiple_files) - _check_vi_data_arr(scn["EVI"], filter_veg, multiple_files) - _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"], multiple_files) + _check_vi_data_arr(scn["NDVI"], filter_veg, is_multiple) + _check_vi_data_arr(scn["EVI"], filter_veg, is_multiple) + _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"], is_multiple) @pytest.mark.parametrize( ("var_names", "data_file"), From e5be4e3887f67c865a83590d04c5582a81bad270 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 16 Aug 2023 15:15:41 -0500 Subject: [PATCH 144/702] Fix old colormap definition for VIIRS water detection --- satpy/etc/enhancements/viirs.yaml | 129 +++++++++++++++--------------- 1 file changed, 65 insertions(+), 64 deletions(-) diff --git a/satpy/etc/enhancements/viirs.yaml b/satpy/etc/enhancements/viirs.yaml index c740a2a6e6..8b3751167d 100644 --- a/satpy/etc/enhancements/viirs.yaml +++ b/satpy/etc/enhancements/viirs.yaml @@ -14,67 +14,68 @@ enhancements: - name: WaterDetection method: !!python/name:satpy.enhancements.viirs.water_detection kwargs: - palettes: {colors: - [[14, [0.0, 0.0, 0.0]], - [15, [0.0, 0.0, 0.39215686274509803]], - [16, [0.7686274509803922, 0.6352941176470588, 0.4470588235294118]], - [17, [0.7686274509803922, 0.6352941176470588, 0.4470588235294118]], - [18, [0.0, 0.0, 1.0]], - [20, [1.0, 1.0, 1.0]], - [27, [0.0, 1.0, 1.0]], - [30, [0.7843137254901961, 0.7843137254901961, 0.7843137254901961]], - [31, [0.39215686274509803, 0.39215686274509803, 0.39215686274509803]], - [88, [0.7058823529411765, 0.0, 0.9019607843137255]], - [100, [0.19607843137254902, 1.0, 0.39215686274509803]], - [120, [0.19607843137254902, 1.0, 0.39215686274509803]], - [121, [0.0, 1.0, 0.0]], - [130, [0.0, 1.0, 0.0]], - [131, [0.7843137254901961, 1.0, 0.0]], - [140, [0.7843137254901961, 1.0, 0.0]], - [141, [1.0, 1.0, 0.5882352941176471]], - [150, [1.0, 1.0, 0.5882352941176471]], - [151, [1.0, 1.0, 0.0]], - [160, [1.0, 1.0, 0.0]], - [161, [1.0, 0.7843137254901961, 0.0]], - [170, [1.0, 0.7843137254901961, 0.0]], - [171, [1.0, 0.5882352941176471, 0.19607843137254902]], - [180, [1.0, 0.5882352941176471, 0.19607843137254902]], - [181, [1.0, 0.39215686274509803, 0.0]], - [190, [1.0, 0.39215686274509803, 0.0]], - [191, [1.0, 0.0, 0.0]], - [200, [1.0, 0.0, 0.0]], - [201, [0.0, 0.0, 0.0]]], - min_value: 0, - max_value: 201} -# palettes: {colors: -# [[14, [0.0, 0.0, 0.0, 0.0]], -# [15, 
[0.0, 0.0, 0.39215686274509803, 1.0]], -# [16, [0.7686274509803922, 0.6352941176470588, 0.4470588235294118, 1.0]], -# [17, [0.7686274509803922, 0.6352941176470588, 0.4470588235294118, 1.0]], -# [18, [0.0, 0.0, 1.0, 1.0]], -# [20, [1.0, 1.0, 1.0, 1.0]], -# [27, [0.0, 1.0, 1.0, 1.0]], -# [30, [0.7843137254901961, 0.7843137254901961, 0.7843137254901961, 1.0]], -# [31, [0.39215686274509803, 0.39215686274509803, 0.39215686274509803, 1.0]], -# [88, [0.7058823529411765, 0.0, 0.9019607843137255, 1.0]], -# [100, [0.19607843137254902, 1.0, 0.39215686274509803, 1.0]], -# [120, [0.19607843137254902, 1.0, 0.39215686274509803, 1.0]], -# [121, [0.0, 1.0, 0.0, 1.0]], -# [130, [0.0, 1.0, 0.0, 1.0]], -# [131, [0.7843137254901961, 1.0, 0.0, 1.0]], -# [140, [0.7843137254901961, 1.0, 0.0, 1.0]], -# [141, [1.0, 1.0, 0.5882352941176471, 1.0]], -# [150, [1.0, 1.0, 0.5882352941176471, 1.0]], -# [151, [1.0, 1.0, 0.0, 1.0]], -# [160, [1.0, 1.0, 0.0, 1.0]], -# [161, [1.0, 0.7843137254901961, 0.0, 1.0]], -# [170, [1.0, 0.7843137254901961, 0.0, 1.0]], -# [171, [1.0, 0.5882352941176471, 0.19607843137254902, 1.0]], -# [180, [1.0, 0.5882352941176471, 0.19607843137254902, 1.0]], -# [181, [1.0, 0.39215686274509803, 0.0, 1.0]], -# [190, [1.0, 0.39215686274509803, 0.0, 1.0]], -# [191, [1.0, 0.0, 0.0, 1.0]], -# [200, [1.0, 0.0, 0.0, 1.0]], -# [201, [0.0, 0.0, 0.0, 0.0]]], -# min_value: 0, -# max_value: 201} + palettes: { + values: [ + 14, + 15, + 16, + 17, + 18, + 20, + 27, + 30, + 31, + 88, + 100, + 120, + 121, + 130, + 131, + 140, + 141, + 150, + 151, + 160, + 161, + 170, + 171, + 180, + 181, + 190, + 191, + 200, + 201, + ], + colors: [ + [0.0, 0.0, 0.0], + [0.0, 0.0, 0.39215686274509803], + [0.7686274509803922, 0.6352941176470588, 0.4470588235294118], + [0.7686274509803922, 0.6352941176470588, 0.4470588235294118], + [0.0, 0.0, 1.0], + [1.0, 1.0, 1.0], + [0.0, 1.0, 1.0], + [0.7843137254901961, 0.7843137254901961, 0.7843137254901961], + [0.39215686274509803, 0.39215686274509803, 0.39215686274509803], + [0.7058823529411765, 0.0, 0.9019607843137255], + [0.19607843137254902, 1.0, 0.39215686274509803], + [0.19607843137254902, 1.0, 0.39215686274509803], + [0.0, 1.0, 0.0], + [0.0, 1.0, 0.0], + [0.7843137254901961, 1.0, 0.0], + [0.7843137254901961, 1.0, 0.0], + [1.0, 1.0, 0.5882352941176471], + [1.0, 1.0, 0.5882352941176471], + [1.0, 1.0, 0.0], + [1.0, 1.0, 0.0], + [1.0, 0.7843137254901961, 0.0], + [1.0, 0.7843137254901961, 0.0], + [1.0, 0.5882352941176471, 0.19607843137254902], + [1.0, 0.5882352941176471, 0.19607843137254902], + [1.0, 0.39215686274509803, 0.0], + [1.0, 0.39215686274509803, 0.0], + [1.0, 0.0, 0.0], + [1.0, 0.0, 0.0], + [0.0, 0.0, 0.0], + ], + min_value: 0, + max_value: 201} From 04fe3e05a025892c52b450f1a3139ba853c00b1e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 16 Aug 2023 15:16:24 -0500 Subject: [PATCH 145/702] Remove non-existent M06 surface reflectance product --- satpy/etc/readers/viirs_edr.yaml | 9 --------- 1 file changed, 9 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 4f33bcc184..c078e754aa 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -144,15 +144,6 @@ datasets: coordinates: [longitude_750, latitude_750] units: '%' standard_name: "surface_bidirectional_reflectance" - surf_refl_M06: - name: surf_refl_M06 - resolution: 750 - wavelength: [0.739, 0.746, 0.754] - file_type: [jrr_surfref] - file_key: "750m Surface Reflectance Band M6" - coordinates: [longitude_750, latitude_750] - units: '%' - standard_name: 
"surface_bidirectional_reflectance" surf_refl_M07: name: surf_refl_M07 resolution: 750 From 44c5b3970ea927e2a72f95096042a51ed5fe4e82 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 17 Aug 2023 13:03:40 +0200 Subject: [PATCH 146/702] Use normalize_chunks for ahi hsd chunk sizes --- satpy/readers/ahi_hsd.py | 10 ++- satpy/tests/reader_tests/test_ahi_hsd.py | 85 ++++++++++++++++++++++++ 2 files changed, 92 insertions(+), 3 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 86e9aa9e0c..e06f7ebc50 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -78,9 +78,8 @@ np2str, unzip_file, ) -from satpy.utils import get_legacy_chunk_size +from satpy.utils import get_chunk_size_limit -CHUNK_SIZE = get_legacy_chunk_size() AHI_CHANNEL_NAMES = ("1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16") @@ -620,9 +619,14 @@ def _read_data(self, fp_, header): """Read data block.""" nlines = int(header["block2"]['number_of_lines'][0]) ncols = int(header["block2"]['number_of_columns'][0]) + chunks = da.core.normalize_chunks("auto", + shape=(nlines, ncols), + limit=get_chunk_size_limit(), + dtype='f8', + previous_chunks=(550, 550)) return da.from_array(np.memmap(self.filename, offset=fp_.tell(), dtype=' Date: Thu, 17 Aug 2023 09:24:57 -0500 Subject: [PATCH 147/702] Fix swath definitions not having all lon/lat metadata --- satpy/readers/viirs_edr.py | 13 +++++++++++++ satpy/tests/reader_tests/test_viirs_edr.py | 8 +++++++- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index b007e710b3..da61114fca 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -130,6 +130,11 @@ def get_dataset(self, dataset_id: DataID, info: dict) -> xr.DataArray: data_arr.attrs["platform_name"] = self.platform_name data_arr.attrs["sensor"] = self.sensor_name data_arr.attrs["rows_per_scan"] = self.rows_per_scans(data_arr) + if data_arr.attrs.get("standard_name") in ("longitude", "latitude"): + # recursive swath definitions are a problem for the base reader right now + # delete the coordinates here so the base reader doesn't try to + # make a SwathDefinition + data_arr = data_arr.reset_coords(drop=True) return data_arr def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: @@ -201,6 +206,10 @@ def available_datasets(self, configured_datasets=None): handled_var_names = set() for is_avail, ds_info in (configured_datasets or []): file_key = ds_info.get("file_key", ds_info["name"]) + # we must add all variables here even if another file handler has + # claimed the variable. It could be another instance of this file + # type and we don't want to add that variable dynamically if the + # other file handler defined it by the YAML definition. 
handled_var_names.add(file_key) if is_avail is not None: # some other file handler said it has this dataset @@ -246,10 +255,14 @@ def _dynamic_variables_from_file(self, handled_var_names: set) -> Iterable[tuple ds_info["standard_name"] = "longitude" ds_info["units"] = "degrees_east" ds_info["name"] = m_lon_name if res == 750 else i_lon_name + # recursive coordinate/SwathDefinitions are not currently handled well in the base reader + del ds_info["coordinates"] elif is_lat: ds_info["standard_name"] = "latitude" ds_info["units"] = "degrees_north" ds_info["name"] = m_lat_name if res == 750 else i_lat_name + # recursive coordinate/SwathDefinitions are not currently handled well in the base reader + del ds_info["coordinates"] yield True, ds_info diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 2acbd0d55a..146a0cd2c4 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -450,8 +450,14 @@ def _array_checks(data_arr: xr.DataArray, dtype: npt.Dtype = np.float32, multipl def _shared_metadata_checks(data_arr: xr.DataArray) -> None: is_mband_res = _is_mband_res(data_arr) + exp_rps = 16 if is_mband_res else 32 assert data_arr.attrs["sensor"] == "viirs" - assert data_arr.attrs["rows_per_scan"] == 16 if is_mband_res else 32 + assert data_arr.attrs["rows_per_scan"] == exp_rps + + lons = data_arr.attrs["area"].lons + lats = data_arr.attrs["area"].lats + assert lons.attrs["rows_per_scan"] == exp_rps + assert lats.attrs["rows_per_scan"] == exp_rps def _is_mband_res(data_arr: xr.DataArray) -> bool: From 3bb0920fe68b50274baa58051ddb67e4a2f09381 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 21 Aug 2023 09:46:47 +0200 Subject: [PATCH 148/702] Allow using s3 files for AMI reader --- satpy/readers/ami_l1b.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/readers/ami_l1b.py b/satpy/readers/ami_l1b.py index 4569b996fa..9adeaf76f1 100644 --- a/satpy/readers/ami_l1b.py +++ b/satpy/readers/ami_l1b.py @@ -26,6 +26,7 @@ import xarray as xr from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp +from satpy.readers import open_file_or_filename from satpy.readers._geos_area import get_area_definition, get_area_extent from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import apply_rad_correction, get_user_calibration_factors @@ -93,7 +94,8 @@ def __init__(self, filename, filename_info, filetype_info, user_calibration=None): """Open the NetCDF file with xarray and prepare the Dataset for reading.""" super(AMIL1bNetCDF, self).__init__(filename, filename_info, filetype_info) - self.nc = xr.open_dataset(self.filename, + f_obj = open_file_or_filename(self.filename) + self.nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, chunks={'dim_image_x': CHUNK_SIZE, 'dim_image_y': CHUNK_SIZE}) From 54e5d19aaf09b574fc749f0bd7a6657817fe295b Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 21 Aug 2023 09:54:59 +0200 Subject: [PATCH 149/702] Fix doc --- satpy/etc/readers/ami_l1b.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/readers/ami_l1b.yaml b/satpy/etc/readers/ami_l1b.yaml index 8366f12117..e1990f9871 100644 --- a/satpy/etc/readers/ami_l1b.yaml +++ b/satpy/etc/readers/ami_l1b.yaml @@ -8,7 +8,7 @@ reader: `here `_. 
sensors: [ami] status: Beta - supports_fsspec: false + supports_fsspec: true default_channels: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader # file pattern keys to sort files by with 'satpy.utils.group_files' From aef188b283164f5fcf12fd94fb71702fed61c235 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 21 Aug 2023 08:20:01 -0500 Subject: [PATCH 150/702] Bump xarray expected version in test_cf.py --- satpy/tests/writer_tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index baeb45a4e4..54770b9176 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -1448,5 +1448,5 @@ def _should_use_compression_keyword(): versions = _get_backend_versions() return ( versions["libnetcdf"] >= Version("4.9.0") and - versions["xarray"] >= Version("2023.8") + versions["xarray"] >= Version("2023.9") ) From bd1c7f3e9e56b389dfff5738e1f7961b233f1b71 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 22 Aug 2023 05:52:31 +0000 Subject: [PATCH 151/702] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.5.0 → v1.5.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.5.0...v1.5.1) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1b7889838a..3999be8b04 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,7 +20,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.5.0' # Use the sha / tag you want to point at + rev: 'v1.5.1' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: From 6cd7f5ca4c98b9b925cbcb7a2709082164b5f55a Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 22 Aug 2023 10:50:33 +0100 Subject: [PATCH 152/702] Add OLCI enhancement YAML to cope with the `mask` dataset. --- satpy/etc/enhancements/olci.yaml | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 satpy/etc/enhancements/olci.yaml diff --git a/satpy/etc/enhancements/olci.yaml b/satpy/etc/enhancements/olci.yaml new file mode 100644 index 0000000000..9724b986aa --- /dev/null +++ b/satpy/etc/enhancements/olci.yaml @@ -0,0 +1,4 @@ +enhancements: + mask: + name: mask + operations: [] From 38bda954fc2a7b80bf394ea6bf71a4616bfb332f Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 23 Aug 2023 14:28:32 +0200 Subject: [PATCH 153/702] Implement non-linearity term for NDVI-weighted hybrid-green correction when converting NDVI to blend fraction. --- satpy/composites/spectral.py | 27 ++++++++++++++++--- satpy/tests/compositor_tests/test_spectral.py | 20 ++++++++++---- 2 files changed, 38 insertions(+), 9 deletions(-) diff --git a/satpy/composites/spectral.py b/satpy/composites/spectral.py index c0ccaff64f..5e6e03c148 100644 --- a/satpy/composites/spectral.py +++ b/satpy/composites/spectral.py @@ -114,8 +114,8 @@ class NDVIHybridGreen(SpectralBlender): This green band correction follows the same approach as the HybridGreen compositor, but with a dynamic blend factor `f` that depends on the pixel-level Normalized Differece Vegetation Index (NDVI). 
The higher the NDVI, the - smaller the contribution from the nir channel will be, following a liner relationship between the two ranges - `[ndvi_min, ndvi_max]` and `limits`. + smaller the contribution from the nir channel will be, following a liner (default) or non-linear relationship + between the two ranges `[ndvi_min, ndvi_max]` and `limits`. As an example, a new green channel using e.g. FCI data and the NDVIHybridGreen compositor can be defined like:: @@ -124,6 +124,7 @@ class NDVIHybridGreen(SpectralBlender): ndvi_min: 0.0 ndvi_max: 1.0 limits: [0.15, 0.05] + strength: 1.0 prerequisites: - name: vis_05 modifiers: [sunz_corrected, rayleigh_corrected] @@ -138,17 +139,29 @@ class NDVIHybridGreen(SpectralBlender): pixels with NDVI=1.0 will be a weighted average with 5% contribution from the near-infrared vis_08 channel and the remaining 95% from the native green vis_05 channel. For other values of NDVI a linear interpolation between these values will be performed. + + A strength larger or smaller than 1.0 will introduce a non-linear relationship between the two ranges + `[ndvi_min, ndvi_max]` and `limits`. Hence, a higher strength (> 1.0) will result in a slower transition + to higher/lower fractions at the NDVI extremes. Similarly, a lower strength (< 1.0) will result in a + faster transition to higher/lower fractions at the NDVI extremes. """ - def __init__(self, *args, ndvi_min=0.0, ndvi_max=1.0, limits=(0.15, 0.05), **kwargs): - """Initialize class and set the NDVI limits and the corresponding blending fraction limits.""" + def __init__(self, *args, ndvi_min=0.0, ndvi_max=1.0, limits=(0.15, 0.05), strength=1.0, **kwargs): + """Initialize class and set the NDVI limits, blending fraction limits and strength.""" + if strength <= 0.0: + raise ValueError(f"Expected stength greater than 0.0, got {strength}.") + self.ndvi_min = ndvi_min self.ndvi_max = ndvi_max self.limits = limits + self.strength = strength super().__init__(*args, **kwargs) def __call__(self, projectables, optional_datasets=None, **attrs): """Construct the hybrid green channel weighted by NDVI.""" + LOG.info(f"Applying NDVI-weighted hybrid-green correction with limits [{self.limits[0]}, " + f"{self.limits[1]}] and strength {self.strength}.") + ndvi_input = self.match_data_arrays([projectables[1], projectables[2]]) ndvi = (ndvi_input[1] - ndvi_input[0]) / (ndvi_input[1] + ndvi_input[0]) @@ -156,6 +169,12 @@ def __call__(self, projectables, optional_datasets=None, **attrs): ndvi.data = da.where(ndvi > self.ndvi_min, ndvi, self.ndvi_min) ndvi.data = da.where(ndvi < self.ndvi_max, ndvi, self.ndvi_max) + # Apply non-linearity to the ndvi for a non-linear conversion from ndvi to fraction. This can be used for a + # slower or faster transision to higher/lower fractions at the ndvi extremes. If strength equals 1.0, this + # operation has no effect on the ndvi. + ndvi = ndvi ** self.strength / (ndvi ** self.strength + (1 - ndvi) ** self.strength) + + # Compute blending fraction from ndvi fraction = (ndvi - self.ndvi_min) / (self.ndvi_max - self.ndvi_min) * (self.limits[1] - self.limits[0]) \ + self.limits[0] self.fractions = (1 - fraction, fraction) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 467adf119b..03e51a5043 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -14,7 +14,6 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for spectral correction compositors.""" -import warnings import dask.array as da import numpy as np @@ -78,6 +77,7 @@ def test_ndvi_hybrid_green(self): comp = NDVIHybridGreen('ndvi_hybrid_green', limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), standard_name='toa_bidirectional_reflectance') + # Test General functionality with linear strength (=1.0) res = comp((self.c01, self.c02, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) @@ -86,12 +86,22 @@ def test_ndvi_hybrid_green(self): data = res.values np.testing.assert_array_almost_equal(data, np.array([[0.2633, 0.3071], [0.2115, 0.3420]]), decimal=4) + # Test invalid strength + with pytest.raises(ValueError): + _ = NDVIHybridGreen('ndvi_hybrid_green', strength=0.0, prerequisites=(0.51, 0.65, 0.85), + standard_name='toa_bidirectional_reflectance') + + # Test non-linear strength + comp = NDVIHybridGreen('ndvi_hybrid_green', limits=(0.15, 0.05), strength=2.0, prerequisites=(0.51, 0.65, 0.85), + standard_name='toa_bidirectional_reflectance') + + res = comp((self.c01, self.c02, self.c03)) + np.testing.assert_array_almost_equal(res.values, np.array([[0.2646, 0.3075], [0.2120, 0.3471]]), decimal=4) + def test_green_corrector(self): """Test the deprecated class for green corrections.""" - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=UserWarning, message=r'.*deprecated.*') - comp = GreenCorrector('blended_channel', fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), - standard_name='toa_bidirectional_reflectance') + comp = GreenCorrector('blended_channel', fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), + standard_name='toa_bidirectional_reflectance') res = comp((self.c01, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) From 478a3b5ef6e81bbfb8489cca5df50ef78698670e Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 23 Aug 2023 14:32:47 +0200 Subject: [PATCH 154/702] Modify default strength for FCI green band correction to be in line with EUMETSAT recipe used for first public images. --- satpy/etc/composites/fci.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 193415656b..30860fbb28 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -7,10 +7,11 @@ composites: The FCI green band at 0.51 µm deliberately misses the chlorophyll band, such that the signal comes from aerosols and ash rather than vegetation. An effect is that vegetation in a true colour RGB looks rather brown than green and barren rather red. Mixing in - some part of the NIR 0.8 channel reduced this effect. Note that the fractions + some part of the NIR 0.8 channel reduced this effect. Note that the fractions and non-linear strength currently implemented are experimental and may change in future versions of Satpy. compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen limits: [0.15, 0.05] + strength: 3.0 prerequisites: - name: vis_05 modifiers: [sunz_corrected, rayleigh_corrected] @@ -25,6 +26,7 @@ composites: Alternative to ndvi_hybrid_green, but without solar zenith or rayleigh correction. 
compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen limits: [0.15, 0.05] + strength: 3.0 prerequisites: - name: vis_05 - name: vis_06 From 2c0406010d49b69add2de73621c7b5c77adeb680 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 23 Aug 2023 16:19:14 +0200 Subject: [PATCH 155/702] Specify name of GeoColor enhancements. Add enhancement for GeoColor day-night blend. --- satpy/etc/enhancements/generic.yaml | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 362625fa15..fd2c02fe48 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -838,8 +838,8 @@ enhancements: kwargs: weight: 1.0 - ir_high_cloud: - standard_name: ir_high_cloud + geo_color_high_clouds: + standard_name: geo_color_high_clouds operations: - name: inverse method: !!python/name:satpy.enhancements.invert @@ -849,11 +849,9 @@ enhancements: method: !!python/name:satpy.enhancements.stretch kwargs: stretch: linear - - name: 3d - method: !!python/name:satpy.enhancements.three_d_effect - ir_low_cloud: - standard_name: ir_low_cloud + geo_color_low_clouds: + standard_name: geo_color_low_clouds operations: - name: inverse method: !!python/name:satpy.enhancements.invert @@ -869,6 +867,16 @@ enhancements: palettes: - {colors: [[140.25, 191.25, 249.9]]} + geo_color_day_night_blend: + standard_name: geo_color_day_night_blend + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: [ 0,0,0 ] + max_stretch: [ 1,1,1 ] + colorized_ir_clouds: standard_name: colorized_ir_clouds operations: From 2c4f086e2a5d65ea4bf712bc745d930b611e5a7b Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 23 Aug 2023 16:20:15 +0200 Subject: [PATCH 156/702] Add FCI GeoColor recipes. 
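
The geo_color composite blends the daytime true_color RGB with a night-time
layer (high clouds from ir_105, low clouds from the ir_105 - ir_38 difference,
and a static background) through the DayNightCompositor between the two solar
zenith angle limits. A rough usage sketch, assuming the FCI L1c files are read
with the fci_l1c_nc reader and that the static background and land-sea-mask
images referenced in the recipe are available locally:

    # Sketch only; the file glob and the target area are placeholders.
    from glob import glob
    from satpy import Scene

    scn = Scene(filenames=glob("/path/to/fci/*.nc"), reader="fci_l1c_nc")
    scn.load(["geo_color"])
    resampled = scn.resample("eurol")
    resampled.save_dataset("geo_color", filename="geo_color.png")
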
--- satpy/etc/composites/fci.yaml | 47 +++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 4e450a9779..4c091bd012 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -64,3 +64,50 @@ composites: - name: ndvi_hybrid_green_raw - name: vis_04 standard_name: true_color_raw + + # GeoColor + geo_color: + compositor: !!python/name:satpy.composites.DayNightCompositor + lim_low: 73 + lim_high: 82 + standard_name: geo_color_day_night_blend + prerequisites: + - true_color + - geo_color_night + + # GeoColor Night-time + geo_color_high_clouds: + standard_name: geo_color_high_clouds + compositor: !!python/name:satpy.composites.HighCloudCompositor + prerequisites: + - name: ir_105 + + geo_color_low_clouds: + standard_name: geo_color_low_clouds + compositor: !!python/name:satpy.composites.LowCloudCompositor + values_sea: 0 + values_land: 100 + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: ir_105 + - name: ir_38 + - name: ir_105 + - compositor: !!python/name:satpy.composites.StaticImageCompositor + standard_name: land_sea_mask + # TODO Change filename + filename: "/tcenas/proj/optcalimg/strandgren/GeoColor/static_data/gshhs_land_sea_mask_3km_i.tif" + + geo_color_background_with_low_clouds: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - geo_color_low_clouds + - _night_background_hires + + geo_color_night: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - geo_color_high_clouds + - geo_color_background_with_low_clouds From b0a6172c39a7517b44569d4b0d46c20fb028135c Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 23 Aug 2023 16:29:41 +0200 Subject: [PATCH 157/702] Align AHI GeoColor recipes with FCI developments/recipes. --- satpy/etc/composites/ahi.yaml | 53 ++++++++--------------------------- 1 file changed, 11 insertions(+), 42 deletions(-) diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 5c73eea7e9..c0008366cc 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -14,22 +14,6 @@ modifiers: - solar_azimuth_angle - solar_zenith_angle - geo_color_rayleigh_corrected: - modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance - atmosphere: us-standard - aerosol_type: rayleigh_only - reduce_lim_low: 70 - reduce_lim_high: 105 - reduce_strength: 1.5 - prerequisites: - - name: B03 - modifiers: [sunz_corrected] - optional_prerequisites: - - satellite_azimuth_angle - - satellite_zenith_angle - - solar_azimuth_angle - - solar_zenith_angle - composites: green: deprecation_warning: "'green' is a deprecated composite. Use the equivalent 'hybrid_green' instead." 
@@ -488,30 +472,22 @@ composites: geo_color: compositor: !!python/name:satpy.composites.DayNightCompositor - lim_low: 80 - lim_high: 88 - standard_name: true_color_with_night_ir + lim_low: 73 + lim_high: 82 + standard_name: geo_color_day_night_blend prerequisites: - geo_color_true_color - geo_color_night - geo_color_without_background: - compositor: !!python/name:satpy.composites.DayNightCompositor - lim_low: 80 - lim_high: 88 - standard_name: true_color_with_night_ir - prerequisites: - - geo_color_true_color - - geo_color_night_without_background - # GeoColor Daytime geo_color_green: compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen + limits: [0.15, 0.05] prerequisites: - name: B02 - modifiers: [ sunz_corrected, geo_color_rayleigh_corrected ] + modifiers: [ sunz_corrected, rayleigh_corrected ] - name: B03 - modifiers: [ sunz_corrected, geo_color_rayleigh_corrected ] + modifiers: [ sunz_corrected, rayleigh_corrected ] - name: B04 modifiers: [ sunz_corrected ] standard_name: toa_bidirectional_reflectance @@ -520,22 +496,22 @@ composites: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: B03 - modifiers: [ sunz_corrected, geo_color_rayleigh_corrected ] + modifiers: [ sunz_corrected, rayleigh_corrected ] - name: geo_color_green - name: B01 - modifiers: [ sunz_corrected, geo_color_rayleigh_corrected ] + modifiers: [ sunz_corrected, rayleigh_corrected ] high_resolution_band: red standard_name: true_color # GeoColor Night-time geo_color_high_clouds: - standard_name: ir_high_cloud + standard_name: geo_color_high_clouds compositor: !!python/name:satpy.composites.HighCloudCompositor prerequisites: - name: B13 geo_color_low_clouds: - standard_name: ir_low_cloud + standard_name: geo_color_low_clouds compositor: !!python/name:satpy.composites.LowCloudCompositor values_sea: 0 values_land: 100 @@ -548,7 +524,7 @@ composites: - compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: land_sea_mask # TODO Change filename - filename: "/tcenas/scratch/strandgren/GeoColor/gshhs_land_sea_mask_3km_i.tif" + filename: "/tcenas/proj/optcalimg/strandgren/GeoColor/static_data/gshhs_land_sea_mask_3km_i.tif" geo_color_background_with_low_clouds: compositor: !!python/name:satpy.composites.BackgroundCompositor @@ -563,10 +539,3 @@ composites: prerequisites: - geo_color_high_clouds - geo_color_background_with_low_clouds - - geo_color_night_without_background: - compositor: !!python/name:satpy.composites.BackgroundCompositor - standard_name: night_ir - prerequisites: - - geo_color_low_clouds - - geo_color_high_clouds From c8aa8aeebce0e1fbba99d4d6117582d0bb0792de Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 23 Aug 2023 16:47:54 +0200 Subject: [PATCH 158/702] Modify thresholds of FCI GeoColor low cloud detection in order to reduce false alarms with early FCI data. 
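
The new range_land and range_sea values bound the ir_105 - ir_38 brightness
temperature difference that feeds the LowCloudCompositor, and raising the
lower bounds makes the night-time low-cloud detection less eager. A small
illustration (not the compositor code itself) of how such a window could map
the BTD to a 0-1 low-cloud weight with the new land limits, assuming a simple
linear scaling inside the window:

    # Illustration only; see satpy.composites.LowCloudCompositor for the
    # actual behaviour.
    import numpy as np

    btd = np.array([3.0, 4.35, 5.5, 6.75, 8.0])            # ir_105 - ir_38 in K
    low, high = 4.35, 6.75                                  # new range_land
    weight = np.clip((btd - low) / (high - low), 0.0, 1.0)
    # values below the window stay 0, above it saturate at 1,
    # and the middle value (5.5 K) scales to about 0.48
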
---
 satpy/etc/composites/fci.yaml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml
index 4c091bd012..caa8858734 100644
--- a/satpy/etc/composites/fci.yaml
+++ b/satpy/etc/composites/fci.yaml
@@ -87,6 +87,8 @@ composites:
     compositor: !!python/name:satpy.composites.LowCloudCompositor
     values_sea: 0
     values_land: 100
+    range_land: [4.35, 6.75]
+    range_sea: [1.35, 5.0]
     prerequisites:
       - compositor: !!python/name:satpy.composites.DifferenceCompositor
         prerequisites:

From 6e410f4aa8483b79cdd5900e5e64a93cbcff8882 Mon Sep 17 00:00:00 2001
From: Johan Strandgren 
Date: Thu, 24 Aug 2023 08:33:12 +0200
Subject: [PATCH 159/702] Add non-linearity term to AHI NDVI green correction.

---
 satpy/etc/composites/ahi.yaml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml
index 9e36bf7d7f..9f771b712c 100644
--- a/satpy/etc/composites/ahi.yaml
+++ b/satpy/etc/composites/ahi.yaml
@@ -105,6 +105,8 @@ composites:

   ndvi_hybrid_green:
     compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen
+    limits: [0.15, 0.05]
+    strength: 3.0
     prerequisites:
       - name: B02
         modifiers: [sunz_corrected, rayleigh_corrected]

From 1d26f79489149b37e61be97d5adaf2209651d1e8 Mon Sep 17 00:00:00 2001
From: Simon Proud 
Date: Thu, 24 Aug 2023 11:45:48 +0100
Subject: [PATCH 160/702] Update AMI `true_color_reproduction` composites to
 use the hybrid NDVI green method rather than the deprecated green method.

---
 satpy/etc/composites/ami.yaml | 33 +++++++++++++++++++++++++++++++--
 1 file changed, 31 insertions(+), 2 deletions(-)

diff --git a/satpy/etc/composites/ami.yaml b/satpy/etc/composites/ami.yaml
index 55466ea7e1..5a8608d795 100644
--- a/satpy/etc/composites/ami.yaml
+++ b/satpy/etc/composites/ami.yaml
@@ -60,6 +60,35 @@ composites:
     standard_name: toa_reflectance
     fraction: 0.15

+  ndvi_hybrid_green:
+    description: >
+      The FCI green band at 0.51 µm deliberately misses the chlorophyll band, such that
+      the signal comes from aerosols and ash rather than vegetation. An effect
+      is that vegetation in a true colour RGB looks rather brown than green and barren rather red. Mixing in
+      some part of the NIR 0.8 channel reduced this effect. Note that the fractions
+      currently implemented are experimental and may change in future versions of Satpy.
+    compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen
+    limits: [0.15, 0.05]
+    prerequisites:
+      - name: VI005
+        modifiers: [sunz_corrected, rayleigh_corrected]
+      - name: VI006
+        modifiers: [sunz_corrected, rayleigh_corrected]
+      - name: VI008
+        modifiers: [sunz_corrected ]
+    standard_name: toa_bidirectional_reflectance
+
+  ndvi_hybrid_green_raw:
+    description: >
+      Alternative to ndvi_hybrid_green, but without solar zenith or rayleigh correction.
+ compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen + limits: [0.15, 0.05] + prerequisites: + - name: VI005 + - name: VI006 + - name: VI008 + standard_name: toa_bidirectional_reflectance + true_color_raw: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: @@ -278,7 +307,7 @@ composites: prerequisites: - name: VI006 modifiers: [sunz_corrected, rayleigh_corrected] - - name: green + - name: ndvi_hybrid_green - name: VI004 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color_reproduction_color_stretch @@ -288,6 +317,6 @@ composites: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: VI006 - - name: green_nocorr + - name: ndvi_hybrid_green_raw - name: VI004 standard_name: true_color_reproduction_color_stretch From 813845f863a0ddfd61b85e24978262f7a0183166 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 24 Aug 2023 17:16:17 +0200 Subject: [PATCH 161/702] Restore NDVI true color recipe and use in GeoColor. --- satpy/etc/composites/ahi.yaml | 48 +++++++++++++++++------------------ 1 file changed, 23 insertions(+), 25 deletions(-) diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index c0008366cc..a4e0994a9c 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -103,6 +103,17 @@ composites: - wavelength: 0.85 standard_name: toa_reflectance + ndvi_hybrid_green: + compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen + prerequisites: + - name: B02 + modifiers: [ sunz_corrected, rayleigh_corrected ] + - name: B03 + modifiers: [ sunz_corrected, rayleigh_corrected ] + - name: B04 + modifiers: [ sunz_corrected ] + standard_name: toa_bidirectional_reflectance + airmass: # PDF slides: https://www.eumetsat.int/website/home/News/ConferencesandEvents/DAT_2833302.html # Under session 2 by Akihiro Shimizu (JMA) @@ -260,6 +271,17 @@ composites: high_resolution_band: red standard_name: true_color + true_color_ndvi_green: + compositor: !!python/name:satpy.composites.SelfSharpenedRGB + prerequisites: + - name: B03 + modifiers: [ sunz_corrected, rayleigh_corrected ] + - name: ndvi_hybrid_green + - name: B01 + modifiers: [ sunz_corrected, rayleigh_corrected ] + high_resolution_band: red + standard_name: true_color + natural_color_nocorr: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: @@ -476,33 +498,9 @@ composites: lim_high: 82 standard_name: geo_color_day_night_blend prerequisites: - - geo_color_true_color + - true_color_ndvi_green - geo_color_night - # GeoColor Daytime - geo_color_green: - compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen - limits: [0.15, 0.05] - prerequisites: - - name: B02 - modifiers: [ sunz_corrected, rayleigh_corrected ] - - name: B03 - modifiers: [ sunz_corrected, rayleigh_corrected ] - - name: B04 - modifiers: [ sunz_corrected ] - standard_name: toa_bidirectional_reflectance - - geo_color_true_color: - compositor: !!python/name:satpy.composites.SelfSharpenedRGB - prerequisites: - - name: B03 - modifiers: [ sunz_corrected, rayleigh_corrected ] - - name: geo_color_green - - name: B01 - modifiers: [ sunz_corrected, rayleigh_corrected ] - high_resolution_band: red - standard_name: true_color - # GeoColor Night-time geo_color_high_clouds: standard_name: geo_color_high_clouds From fcbae549dd85353d282004f956860978fa8c7d08 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 24 Aug 2023 21:30:56 +0100 Subject: [PATCH 162/702] Initial commit for Himawari L2 NOAA enterprise cloud 
products. --- satpy/etc/readers/ahi_l2_nc.yaml | 21 ++++++ satpy/readers/ahi_l2_nc.py | 112 +++++++++++++++++++++++++++++++ 2 files changed, 133 insertions(+) create mode 100644 satpy/etc/readers/ahi_l2_nc.yaml create mode 100644 satpy/readers/ahi_l2_nc.py diff --git a/satpy/etc/readers/ahi_l2_nc.yaml b/satpy/etc/readers/ahi_l2_nc.yaml new file mode 100644 index 0000000000..b1f0461838 --- /dev/null +++ b/satpy/etc/readers/ahi_l2_nc.yaml @@ -0,0 +1,21 @@ +reader: + name: ahi_l2_nc + short_name: AHI L2 NetCDF4 + long_name: Himawari-8/9 AHI Level 2 products in netCDF4 format from NOAA enterprise + status: Beta + supports_fsspec: true + sensors: ['ahi'] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + + +file_types: + ahi_l2_cloudmask: + file_reader: !!python/name:satpy.readers.ahi_l2_nc.HIML2NCFileHandler + file_patterns: + - '{sensor:3s}-{product:_4s}_{version:4s}_{platform:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' + +datasets: + cloudmask: + name: cloudmask + file_key: CloudMask + file_type: [ ahi_l2_cloudmask ] \ No newline at end of file diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py new file mode 100644 index 0000000000..68c4f55a38 --- /dev/null +++ b/satpy/readers/ahi_l2_nc.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2018 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
+"""Reader for Himawari L2 cloud products from NOAA's big data programme.""" + +import logging +from datetime import datetime + +import xarray as xr + +from satpy._compat import cached_property +from satpy.readers._geos_area import get_area_definition, get_area_extent +from satpy.readers.file_handlers import BaseFileHandler +from satpy.utils import get_legacy_chunk_size + +logger = logging.getLogger(__name__) + +CHUNK_SIZE = get_legacy_chunk_size() + +EXPECTED_DATA_AREA = 'Full Disk' + + +class HIML2NCFileHandler(BaseFileHandler): + """File handler for Himawari L2 NOAA enterprise data in netCDF format.""" + + def __init__(self, filename, filename_info, filetype_info, geo_data=None): + """Initialize the reader.""" + super(HIML2NCFileHandler, self).__init__(filename, filename_info, + filetype_info) + self.nc = xr.open_dataset(self.filename, + decode_cf=True, + mask_and_scale=False, + chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE}) + + # Check that file is a full disk scene, we don't know the area for anything else + if self.nc.attrs['cdm_data_type'] != EXPECTED_DATA_AREA: + raise ValueError('File is not a full disk scene') + + self.sensor = self.nc.attrs['instrument_name'].lower() + self.nlines = self.nc.dims['Columns'] + self.ncols = self.nc.dims['Rows'] + self.platform_name = self.nc.attrs['satellite_name'] + self.platform_shortname = filename_info['platform'] + self._meta = None + + @property + def start_time(self): + """Start timestamp of the dataset.""" + dt = self.nc.attrs['time_coverage_start'] + return datetime.strptime(dt, '%Y-%m-%dT%H:%M:%SZ') + + @property + def end_time(self): + """End timestamp of the dataset.""" + dt = self.nc.attrs['time_coverage_end'] + return datetime.strptime(dt, '%Y-%m-%dT%H:%M:%SZ') + + def get_dataset(self, key, info): + """Load a dataset.""" + var = info['file_key'] + logger.debug('Reading in get_dataset %s.', var) + variable = self.nc[var] + variable.attrs.update(key.to_dict()) + return variable + + @cached_property + def area(self): + """Get AreaDefinition representing this file's data.""" + return self._get_area_def() + + def get_area_def(self, dsid): + """Get the area definition.""" + del dsid + return self.area + + def _get_area_def(self): + logger.warning('This product misses metadata required to produce an appropriate area definition.' 
+ 'Assuming standard Himawari-8/9 full disk projection.') + pdict = {} + pdict['cfac'] = 20466275 + pdict['lfac'] = 20466275 + pdict['coff'] = 2750.5 + pdict['loff'] = 2750.5 + pdict['a'] = 6378137.0 + pdict['h'] = 35785863.0 + pdict['b'] = 6356752.3 + pdict['ssp_lon'] = 140.7 + pdict['nlines'] = self.nlines + pdict['ncols'] = self.ncols + pdict['scandir'] = 'N2S' + + aex = get_area_extent(pdict) + + pdict['a_name'] = 'Himawari_Area' + pdict['a_desc'] = "AHI Full Disk area" + pdict['p_id'] = f'geos{self.platform_shortname}' + + return get_area_definition(pdict, aex) \ No newline at end of file From fa491a6816cf218f0e65a7cb1dd3c1e47d46f6fa Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 24 Aug 2023 21:01:44 +0000 Subject: [PATCH 163/702] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/etc/readers/ahi_l2_nc.yaml | 2 +- satpy/readers/ahi_l2_nc.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/ahi_l2_nc.yaml b/satpy/etc/readers/ahi_l2_nc.yaml index b1f0461838..04d0571f79 100644 --- a/satpy/etc/readers/ahi_l2_nc.yaml +++ b/satpy/etc/readers/ahi_l2_nc.yaml @@ -18,4 +18,4 @@ datasets: cloudmask: name: cloudmask file_key: CloudMask - file_type: [ ahi_l2_cloudmask ] \ No newline at end of file + file_type: [ ahi_l2_cloudmask ] diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 68c4f55a38..76823017d3 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -109,4 +109,4 @@ def _get_area_def(self): pdict['a_desc'] = "AHI Full Disk area" pdict['p_id'] = f'geos{self.platform_shortname}' - return get_area_definition(pdict, aex) \ No newline at end of file + return get_area_definition(pdict, aex) From 865c7f634c05e1aa2dd3147da0228b766cc655b4 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 25 Aug 2023 08:49:28 +0100 Subject: [PATCH 164/702] Correct typo in AHI L2 code. --- satpy/readers/ahi_l2_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 68c4f55a38..f795eec748 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -106,7 +106,7 @@ def _get_area_def(self): aex = get_area_extent(pdict) pdict['a_name'] = 'Himawari_Area' - pdict['a_desc'] = "AHI Full Disk area" + pdict['a_desc'] = "AHI Full Disk area" pdict['p_id'] = f'geos{self.platform_shortname}' return get_area_definition(pdict, aex) \ No newline at end of file From ad89b9a4ad9a514ab8f2cb2ad593a0c0858eac59 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 25 Aug 2023 08:54:46 +0100 Subject: [PATCH 165/702] Add basic check to ensure we're working on a full disk scene. --- satpy/readers/ahi_l2_nc.py | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 3167896e66..3db9d1528c 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -90,18 +90,13 @@ def get_area_def(self, dsid): def _get_area_def(self): logger.warning('This product misses metadata required to produce an appropriate area definition.' 
'Assuming standard Himawari-8/9 full disk projection.') - pdict = {} - pdict['cfac'] = 20466275 - pdict['lfac'] = 20466275 - pdict['coff'] = 2750.5 - pdict['loff'] = 2750.5 - pdict['a'] = 6378137.0 - pdict['h'] = 35785863.0 - pdict['b'] = 6356752.3 - pdict['ssp_lon'] = 140.7 - pdict['nlines'] = self.nlines - pdict['ncols'] = self.ncols - pdict['scandir'] = 'N2S' + + # Basic check to ensure we're processing a full disk (2km) scene. + if self.nlines != 5500 or self.ncols != 5500: + raise ValueError("Input L2 file is not a full disk Himawari scene. Only full disk data is supported.") + + pdict = {'cfac': 20466275, 'lfac': 20466275, 'coff': 2750.5, 'loff': 2750.5, 'a': 6378137.0, 'h': 35785863.0, + 'b': 6356752.3, 'ssp_lon': 140.7, 'nlines': self.nlines, 'ncols': self.ncols, 'scandir': 'N2S'} aex = get_area_extent(pdict) From 70f30e68bd539e113e2555a7c9bc0b99c2f2b69d Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Fri, 25 Aug 2023 12:18:40 +0200 Subject: [PATCH 166/702] Add Geocolor recipes for ABI. --- satpy/etc/composites/abi.yaml | 47 +++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml index 489ef1f210..2f5d68e09e 100644 --- a/satpy/etc/composites/abi.yaml +++ b/satpy/etc/composites/abi.yaml @@ -705,3 +705,50 @@ composites: prerequisites: - name: C14 standard_name: highlighted_toa_brightness_temperature + + # GeoColor + geo_color: + compositor: !!python/name:satpy.composites.DayNightCompositor + lim_low: 73 + lim_high: 82 + standard_name: geo_color_day_night_blend + prerequisites: + - true_color + - geo_color_night + + # GeoColor Night-time + geo_color_high_clouds: + standard_name: geo_color_high_clouds + compositor: !!python/name:satpy.composites.HighCloudCompositor + prerequisites: + - name: C13 + + geo_color_low_clouds: + standard_name: geo_color_low_clouds + compositor: !!python/name:satpy.composites.LowCloudCompositor + values_sea: 0 + values_land: 100 + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: C13 + - name: C07 + - name: C13 + - compositor: !!python/name:satpy.composites.StaticImageCompositor + standard_name: land_sea_mask + # TODO Change filename + filename: "/tcenas/proj/optcalimg/strandgren/GeoColor/static_data/gshhs_land_sea_mask_3km_i.tif" + + geo_color_background_with_low_clouds: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - geo_color_low_clouds + - _night_background_hires + + geo_color_night: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - geo_color_high_clouds + - geo_color_background_with_low_clouds From 81bb4c8b2b45c36cc6b372504c6205754fb4bdce Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 25 Aug 2023 11:47:52 +0100 Subject: [PATCH 167/702] Add tests for AHI L2 reader. 
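
The tests build small fake CMSK files with h5netcdf and then check the
reported start/end times, the hard-coded full disk area definition and the
data loading. They can be run on their own with something like the following
(assuming a development install of satpy with the test dependencies):

    import pytest
    pytest.main(["-v", "satpy/tests/reader_tests/test_ahi_l2_nc.py"])
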
--- satpy/tests/reader_tests/test_ahi_l2_nc.py | 110 +++++++++++++++++++++ 1 file changed, 110 insertions(+) create mode 100644 satpy/tests/reader_tests/test_ahi_l2_nc.py diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py new file mode 100644 index 0000000000..68f8c3a420 --- /dev/null +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -0,0 +1,110 @@ +"""Tests for the Himawari L2 netCDF reader.""" + +from datetime import datetime + +import h5netcdf +import numpy as np +import pytest + +from satpy.readers.ahi_l2_nc import HIML2NCFileHandler +from satpy.tests.utils import make_dataid + +rng = np.random.default_rng() +clmk_data = rng.integers(0, 3, (5500, 5500), dtype=np.uint16) +cprob_data = rng.uniform(0, 1, (5500, 5500)) + +start_time = datetime(2023, 8, 24, 5, 40, 21) +end_time = datetime(2023, 8, 24, 5, 49, 40) + +dimensions = {'Columns': 5500, 'Rows': 5500} + +exp_ext = (-5499999.9012, -5499999.9012, 5499999.9012, 5499999.9012) + +global_attrs = {"time_coverage_start": start_time.strftime("%Y-%m-%dT%H:%M:%SZ"), + "time_coverage_end": end_time.strftime("%Y-%m-%dT%H:%M:%SZ"), + "instrument_name": "AHI", + "satellite_name": "Himawari-9", + "cdm_data_type": "Full Disk", + } + +badarea_attrs = global_attrs.copy() +badarea_attrs['cdm_data_type'] = 'bad_area' + + +def ahil2_filehandler(fname, platform='h09'): + """Instantiate a Filehandler.""" + fileinfo = {'platform': platform} + filetype = None + fh = HIML2NCFileHandler(fname, fileinfo, filetype) + return fh + + +@pytest.fixture(scope="session") +def himl2_filename(tmp_path_factory): + """Create a fake himawari l2 file.""" + fname = f'{tmp_path_factory.mktemp("data")}/AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc' + with h5netcdf.File(fname, mode="w") as h5f: + h5f.dimensions = dimensions + h5f.attrs.update(global_attrs) + var = h5f.create_variable("CloudMask", ("Rows", "Columns"), np.uint16, chunks=(200, 200)) + var[:] = clmk_data + + return fname + + +@pytest.fixture(scope="session") +def himl2_filename_bad(tmp_path_factory): + """Create a fake himawari l2 file.""" + fname = f'{tmp_path_factory.mktemp("data")}/AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc' + with h5netcdf.File(fname, mode="w") as h5f: + h5f.dimensions = dimensions + h5f.attrs.update(badarea_attrs) + var = h5f.create_variable("CloudMask", ("Rows", "Columns"), np.uint16, chunks=(200, 200)) + var[:] = clmk_data + + return fname + + +def test_startend(himl2_filename): + """Test start and end times are set correctly.""" + fh = ahil2_filehandler(himl2_filename) + assert fh.start_time == start_time + assert fh.end_time == end_time + + +def test_ahi_l2_area_def(himl2_filename, caplog): + """Test reader handles area definition correctly.""" + warntxt = "This product misses metadata" + ps = '+proj=geos +lon_0=140.7 +h=35785863 +x_0=0 +y_0=0 +a=6378137 +rf=298.257024882273 +units=m +no_defs +type=crs' + + # Check case where input data is correct size. + fh = ahil2_filehandler(himl2_filename) + clmk_id = make_dataid(name="cloudmask") + area_def = fh.get_area_def(clmk_id) + assert area_def.width == dimensions['Columns'] + assert area_def.height == dimensions['Rows'] + assert np.allclose(area_def.area_extent, exp_ext) + assert area_def.proj4_string == ps + assert warntxt in caplog.text + + # Check case where input data is incorrect size. 
+ with pytest.raises(ValueError): + fh = ahil2_filehandler(himl2_filename) + fh.nlines = 3000 + fh.get_area_def(clmk_id) + + +def test_bad_area_name(himl2_filename_bad): + """Check case where area name is not correct.""" + global_attrs['cdm_data_type'] = 'bad_area' + with pytest.raises(ValueError): + ahil2_filehandler(himl2_filename_bad) + global_attrs['cdm_data_type'] = 'Full Disk' + + +def test_load_data(himl2_filename): + """Test that data is loaded successfully.""" + fh = ahil2_filehandler(himl2_filename) + clmk_id = make_dataid(name="cloudmask") + clmk = fh.get_dataset(clmk_id, {'file_key': 'CloudMask'}) + assert np.allclose(clmk.data, clmk_data) From c7ce9735a6d48806f1d3de1cd19ce117776dea0d Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 25 Aug 2023 11:48:40 +0100 Subject: [PATCH 168/702] Remove unneeded variable from AHI L2 NC reader. --- satpy/readers/ahi_l2_nc.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 3db9d1528c..3675f4f419 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -37,7 +37,7 @@ class HIML2NCFileHandler(BaseFileHandler): """File handler for Himawari L2 NOAA enterprise data in netCDF format.""" - def __init__(self, filename, filename_info, filetype_info, geo_data=None): + def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(HIML2NCFileHandler, self).__init__(filename, filename_info, filetype_info) @@ -91,7 +91,7 @@ def _get_area_def(self): logger.warning('This product misses metadata required to produce an appropriate area definition.' 'Assuming standard Himawari-8/9 full disk projection.') - # Basic check to ensure we're processing a full disk (2km) scene. + # Basic check to ensure we're processing a full disk (2km) scene.n if self.nlines != 5500 or self.ncols != 5500: raise ValueError("Input L2 file is not a full disk Himawari scene. Only full disk data is supported.") From 71476306cda0cd82345752ea4355c64445fd2bca Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 25 Aug 2023 12:30:35 +0100 Subject: [PATCH 169/702] Add additional AHI L2 netcdf datasets to the YAML file. 
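
With the mask, type/phase and height file types declared, the other simple
variables in the NOAA enterprise cloud files can be loaded next to the cloud
mask. A sketch (untested); the filename is only an example matching the new
ahi_l2_height pattern:

    from satpy import Scene

    fname = "AHI-CHGT_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc"
    scn = Scene(filenames=[fname], reader="ahi_l2_nc")
    scn.load(["cloud_top_temperature", "cloud_top_pressure"])
    print(scn["cloud_top_temperature"].attrs["area"])
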
--- satpy/etc/readers/ahi_l2_nc.yaml | 164 ++++++++++++++++++++++++++++++- 1 file changed, 159 insertions(+), 5 deletions(-) diff --git a/satpy/etc/readers/ahi_l2_nc.yaml b/satpy/etc/readers/ahi_l2_nc.yaml index 04d0571f79..955d41bbcd 100644 --- a/satpy/etc/readers/ahi_l2_nc.yaml +++ b/satpy/etc/readers/ahi_l2_nc.yaml @@ -9,13 +9,167 @@ reader: file_types: - ahi_l2_cloudmask: + ahi_l2_mask: file_reader: !!python/name:satpy.readers.ahi_l2_nc.HIML2NCFileHandler file_patterns: - - '{sensor:3s}-{product:_4s}_{version:4s}_{platform:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' + - '{sensor:3s}-CMSK_{version:4s}_{platform:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' + + ahi_l2_type: + file_reader: !!python/name:satpy.readers.ahi_l2_nc.HIML2NCFileHandler + file_patterns: + - '{sensor:3s}-CPHS_{version:4s}_{platform:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' + + ahi_l2_height: + file_reader: !!python/name:satpy.readers.ahi_l2_nc.HIML2NCFileHandler + file_patterns: + - '{sensor:3s}-CHGT_{version:4s}_{platform:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' datasets: - cloudmask: - name: cloudmask + # Products from the cloud mask files + cloud_mask: + name: cloud_mask file_key: CloudMask - file_type: [ ahi_l2_cloudmask ] + file_type: [ ahi_l2_mask ] + + cloud_mask_binary: + name: cloud_mask_binary + file_key: CloudMaskBinary + file_type: [ ahi_l2_mask ] + + cloud_probability: + name: cloud_probability + file_key: CloudProbability + file_type: [ ahi_l2_mask ] + + ice_cloud_probability: + name: ice_cloud_probability + file_key: IceCloudProbability + file_type: [ ahi_l2_mask ] + + phase_uncertainty: + name: phase_uncertainty + file_key: PhaseUncertainty + file_type: [ ahi_l2_mask ] + + dust_mask: + name: dust_mask + file_key: Dust_Mask + file_type: [ ahi_l2_mask ] + + fire_mask: + name: fire_mask + file_key: Fire_Mask + file_type: [ ahi_l2_mask ] + + smoke_mask: + name: smoke_mask + file_key: Smoke_Mask + file_type: [ ahi_l2_mask ] + + # Products from the cloud phase / type files + cloud_phase: + name: cloud_phase + file_key: CloudPhase + file_type: [ ahi_l2_type ] + + cloud_phase_flag: + name: cloud_phase_flag + file_key: CloudPhaseFlag + file_type: [ ahi_l2_type ] + + cloud_type: + name: cloud_type + file_key: CloudType + file_type: [ ahi_l2_type ] + + # Products from the cloud height files + cloud_optical_depth: + name: cloud_optical_depth + file_key: CldOptDpth + file_type: [ ahi_l2_height ] + + cloud_top_emissivity: + name: cloud_top_emissivity + file_key: CldTopEmss + file_type: [ ahi_l2_height ] + + cloud_top_pressure: + name: cloud_top_pressure + file_key: CldTopPres + file_type: [ ahi_l2_height ] + + cloud_top_pressure_low: + name: cloud_top_pressure_low + file_key: CldTopPresLow + file_type: [ ahi_l2_height ] + + cloud_top_temperature: + name: cloud_top_temperature + file_key: CldTopTemp + file_type: [ ahi_l2_height ] + + cloud_top_temperature_low: + name: cloud_top_temperature_low + file_key: CldTopTempLow + file_type: [ ahi_l2_height ] + + cloud_height_quality: + name: cloud_height_quality + file_key: CloudHgtQF + file_type: [ ahi_l2_height ] + + retrieval_cost: + name: retrieval_cost + file_key: Cost + file_type: [ ahi_l2_height ] + + inversion_flag: + name: inversion_flag + file_key: InverFlag + file_type: [ ahi_l2_height ] + + latitude_parallax_corrected: + name: 
latitude_parallax_corrected + file_key: Latitude_Pc + file_type: [ ahi_l2_height ] + + longitude_parallax_corrected: + name: longitude_parallax_corrected + file_key: Longitude_Pc + file_type: [ ahi_l2_height ] + + cloud_top_pressure_error: + name: cloud_top_pressure_error + file_key: PcError + file_type: [ ahi_l2_height ] + + processing_order: + name: processing_order + file_key: ProcOrder + file_type: [ ahi_l2_height ] + + shadow_mask: + name: shadow_mask + file_key: Shadow_Mask + file_type: [ ahi_l2_height ] + + cloud_top_temperature_error: + name: cloud_top_temperature_error + file_key: TcError + file_type: [ ahi_l2_height ] + + cloud_top_height_error: + name: cloud_top_height_error + file_key: ZcError + file_type: [ ahi_l2_height ] + + # Datasets in all three file types + latitude: + name: latitude + file_key: Latitude + file_type: [ ahi_l2_height, ahi_l2_type, ahi_l2_mask ] + + longitude: + name: longitude + file_key: Longitude + file_type: [ ahi_l2_height, ahi_l2_type, ahi_l2_mask ] From 46a2f2919901a8432e60e8d914fcfcd2b07b3e7c Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 25 Aug 2023 12:35:57 +0100 Subject: [PATCH 170/702] Add some extra documentation to the AHI L2 nc reader. --- satpy/readers/ahi_l2_nc.py | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 3675f4f419..51d73af11f 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -15,7 +15,28 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . -"""Reader for Himawari L2 cloud products from NOAA's big data programme.""" +"""Reader for Himawari L2 cloud products from NOAA's big data programme. + +These products are generated by the NOAA enterprise cloud suite and have filenames like: +AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc + +The second letter grouping (CMSK above) indicates the product type: + CMSK - Cloud mask + CHGT - Cloud height + CPHS - Cloud type and phase +These products are generated from the AHI sensor on Himawari-8 and Himawari-9, and are +produced at the native instrument resolution for the IR channels (2km at nadir). + +NOTE: This reader is currently only compatible with full disk scenes. Unlike level 1 himawari +data, the netCDF files do not contain the required metadata to produce an appropriate area +definition for the data contents, and hence the area definition is hardcoded into the reader. + +A warning is displayed to the user highlighting this. The assumed area definition is a full +disk image at the nominal subsatellite longitude of 140.7 degrees East. + +All the simple data products are supported here, but multidimensional products are not yet +supported. These include the CldHgtFlag and the CloudMaskPacked variables. +""" import logging from datetime import datetime From 09051c8dca2344305e06290d576693c4d538a0b3 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 25 Aug 2023 12:46:49 +0100 Subject: [PATCH 171/702] Update area definition warning message for AHI L2 NC reader. 
--- satpy/readers/ahi_l2_nc.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 51d73af11f..a238f1bb73 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -109,8 +109,8 @@ def get_area_def(self, dsid): return self.area def _get_area_def(self): - logger.warning('This product misses metadata required to produce an appropriate area definition.' - 'Assuming standard Himawari-8/9 full disk projection.') + logger.warning('The AHI L2 cloud products do not have the metadata required to produce an area definition.' + ' Assuming standard Himawari-8/9 full disk projection.') # Basic check to ensure we're processing a full disk (2km) scene.n if self.nlines != 5500 or self.ncols != 5500: From 425256777dff3bd9deae07af916c4451c6d2fe23 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 25 Aug 2023 13:49:53 +0200 Subject: [PATCH 172/702] Remove previous chunks --- satpy/readers/ahi_hsd.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index e06f7ebc50..be0ef31ca0 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -622,8 +622,7 @@ def _read_data(self, fp_, header): chunks = da.core.normalize_chunks("auto", shape=(nlines, ncols), limit=get_chunk_size_limit(), - dtype='f8', - previous_chunks=(550, 550)) + dtype='f8') return da.from_array(np.memmap(self.filename, offset=fp_.tell(), dtype=' Date: Fri, 25 Aug 2023 13:20:06 +0100 Subject: [PATCH 173/702] Fir AHI L2 NC tests for warning message. --- satpy/tests/reader_tests/test_ahi_l2_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index 68f8c3a420..84b3c667d7 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -74,7 +74,7 @@ def test_startend(himl2_filename): def test_ahi_l2_area_def(himl2_filename, caplog): """Test reader handles area definition correctly.""" - warntxt = "This product misses metadata" + warntxt = "The AHI L2 cloud products do not have the metadata" ps = '+proj=geos +lon_0=140.7 +h=35785863 +x_0=0 +y_0=0 +a=6378137 +rf=298.257024882273 +units=m +no_defs +type=crs' # Check case where input data is correct size. From c447480cb0444e506c219b3d654f83768cbcb0a0 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 25 Aug 2023 16:35:16 +0100 Subject: [PATCH 174/702] Fix AHI L2 NC documentation. --- satpy/readers/ahi_l2_nc.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index a238f1bb73..e44c882898 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -22,8 +22,11 @@ The second letter grouping (CMSK above) indicates the product type: CMSK - Cloud mask + CHGT - Cloud height + CPHS - Cloud type and phase + These products are generated from the AHI sensor on Himawari-8 and Himawari-9, and are produced at the native instrument resolution for the IR channels (2km at nadir). 
From ed890dc2de9d156e286d988f207bacf5dbec21b6 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 28 Aug 2023 19:45:52 -0500 Subject: [PATCH 175/702] Allow for more platform shortnames in ABI base reader --- satpy/readers/abi_base.py | 16 ++++++++-------- satpy/readers/ahi_hsd.py | 9 +++++---- 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 5f4cf506fe..4a6bf069c1 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -34,13 +34,13 @@ CHUNK_SIZE = get_legacy_chunk_size() PLATFORM_NAMES = { - 'G16': 'GOES-16', - 'G17': 'GOES-17', - 'G18': 'GOES-18', - 'G19': 'GOES-19', - 'GOES16': 'GOES-16', - 'GOES17': 'GOES-17', - 'GOES18': 'GOES-18', + 'g16': 'GOES-16', + 'g17': 'GOES-17', + 'g18': 'GOES-18', + 'g19': 'GOES-19', + 'goes16': 'GOES-16', + 'goes17': 'GOES-17', + 'goes18': 'GOES-18', } @@ -52,7 +52,7 @@ def __init__(self, filename, filename_info, filetype_info): super(NC_ABI_BASE, self).__init__(filename, filename_info, filetype_info) platform_shortname = filename_info['platform_shortname'] - self.platform_name = PLATFORM_NAMES.get(platform_shortname) + self.platform_name = PLATFORM_NAMES.get(platform_shortname.lower()) self.nlines = self.nc['y'].size self.ncols = self.nc['x'].size diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 86e9aa9e0c..8a2c82ac42 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -616,13 +616,14 @@ def _read_header(self, fp_): return header - def _read_data(self, fp_, header): + def _read_data(self, fp_, header, resolution): """Read data block.""" nlines = int(header["block2"]['number_of_lines'][0]) ncols = int(header["block2"]['number_of_columns'][0]) + chunk_size = CHUNK_SIZE * (500 / resolution) return da.from_array(np.memmap(self.filename, offset=fp_.tell(), dtype=' Date: Mon, 28 Aug 2023 19:50:34 -0500 Subject: [PATCH 176/702] Fix ABI GFLS test to be more accurate --- satpy/tests/reader_tests/test_abi_l2_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py index b05ceb0f64..2f2131461e 100644 --- a/satpy/tests/reader_tests/test_abi_l2_nc.py +++ b/satpy/tests/reader_tests/test_abi_l2_nc.py @@ -125,7 +125,7 @@ def test_get_dataset(self): def test_get_dataset_gfls(self): """Test that Low Cloud and Fog filenames work.""" from satpy.tests.utils import make_dataid - filename_info = {'platform_shortname': 'GOES16', 'scene_abbr': 'FD'} + filename_info = {'platform_shortname': 'g16', 'scene_abbr': 'FD'} key = make_dataid(name='MVFR_Fog_Prob') with _create_reader_for_fake_data("GFLS", _create_cmip_dataset("MVFR_Fog_Prob"), filename_info) as reader: res = reader.get_dataset(key, {'file_key': 'MVFR_Fog_Prob'}) From 238f4796cb4e4703a5a37f5394451e6b9fb1b1e7 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 29 Aug 2023 15:17:09 +0100 Subject: [PATCH 177/702] Add Himawari full disk areas to the default areas YAML. 
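
The new himawari_ahi_fes_* entries make the nominal AHI full disk grids
available by name, for example as resampling targets or as the assumed grid of
L2 products that carry no area metadata of their own. A small, untested sketch
of looking one of them up from the builtin areas:

    from satpy.resample import get_area_def

    area = get_area_def("himawari_ahi_fes_2km")
    print(area.shape)  # expected (5500, 5500) for the 2 km grid
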
--- satpy/etc/areas.yaml | 48 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/satpy/etc/areas.yaml b/satpy/etc/areas.yaml index 6abbc18c82..28129f72d9 100644 --- a/satpy/etc/areas.yaml +++ b/satpy/etc/areas.yaml @@ -268,6 +268,54 @@ msg_seviri_iodc_48km: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] +himawari_ahi_fes_500m: + description: + Himawari-8/9 full disk area definition at 500m resolution + projection: + proj: geos + lon_0: 140.7 + a: 6378137.0 + rf: 298.257024882273 + h: 35785863.0 + shape: + height: 22000 + width: 22000 + area_extent: + lower_left_xy: [-5499999.9684, -5499999.9684] + upper_right_xy: [5499999.9684, 5499999.9684] + +himawari_ahi_fes_1km: + description: + Himawari-8/9 full disk area definition at 1km resolution + projection: + proj: geos + lon_0: 140.7 + a: 6378137.0 + rf: 298.257024882273 + h: 35785863.0 + shape: + height: 11000 + width: 11000 + area_extent: + lower_left_xy: [-5500000.0355, -5500000.0355] + upper_right_xy: [5500000.0355, 5500000.0355] + + himawari_ahi_fes_2km: + description: + Himawari-8/9 full disk area definition at 2km resolution + projection: + proj: geos + lon_0: 140.7 + a: 6378137.0 + rf: 298.257024882273 + h: 35785863.0 + shape: + height: 5500 + width: 5500 + area_extent: + lower_left_xy: [ -5499999.9012, -5499999.9012 ] + upper_right_xy: [ 5499999.9012, 5499999.9012 ] + # Regional From 5fa71c40e5a99f65799c53f3ec3f942405ade416 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 29 Aug 2023 15:18:44 +0100 Subject: [PATCH 178/702] Fix Himawari area indentation. --- satpy/etc/areas.yaml | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/satpy/etc/areas.yaml b/satpy/etc/areas.yaml index 28129f72d9..4f71368375 100644 --- a/satpy/etc/areas.yaml +++ b/satpy/etc/areas.yaml @@ -300,21 +300,21 @@ himawari_ahi_fes_1km: lower_left_xy: [-5500000.0355, -5500000.0355] upper_right_xy: [5500000.0355, 5500000.0355] - himawari_ahi_fes_2km: - description: - Himawari-8/9 full disk area definition at 2km resolution - projection: - proj: geos - lon_0: 140.7 - a: 6378137.0 - rf: 298.257024882273 - h: 35785863.0 - shape: - height: 5500 - width: 5500 - area_extent: - lower_left_xy: [ -5499999.9012, -5499999.9012 ] - upper_right_xy: [ 5499999.9012, 5499999.9012 ] +himawari_ahi_fes_2km: + description: + Himawari-8/9 full disk area definition at 2km resolution + projection: + proj: geos + lon_0: 140.7 + a: 6378137.0 + rf: 298.257024882273 + h: 35785863.0 + shape: + height: 5500 + width: 5500 + area_extent: + lower_left_xy: [ -5499999.9012, -5499999.9012 ] + upper_right_xy: [ 5499999.9012, 5499999.9012 ] # Regional From 4a54a55f0ea756df36a4ed21c5bbdc00a294e524 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 29 Aug 2023 18:37:56 -0500 Subject: [PATCH 179/702] Fix invalid AVHRR product names in AWIPS config --- satpy/etc/writers/awips_tiled.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/etc/writers/awips_tiled.yaml b/satpy/etc/writers/awips_tiled.yaml index 8700b63ad5..f0ee9c1d3b 100644 --- a/satpy/etc/writers/awips_tiled.yaml +++ b/satpy/etc/writers/awips_tiled.yaml @@ -233,42 +233,42 @@ templates: # AVHRR L1B products avhrr_band1_vis: - name: band1_vis + name: "1" var_name: data attributes: physical_element: raw_value: 0.63 um units: {} avhrr_band2_vis: - name: band2_vis + name: "2" var_name: data attributes: physical_element: raw_value: 0.86 um units: {} 
avhrr_band3a_vis: - name: band3a_vis + name: "3a" var_name: data attributes: physical_element: raw_value: 1.61 um units: {} avhrr_band3b_bt: - name: band3b_bt + name: "3b" var_name: data attributes: physical_element: raw_value: 3.74 um units: {} avhrr_band4_bt: - name: band4_bt + name: "4" var_name: data attributes: physical_element: raw_value: 10.8 um units: {} avhrr_band5_bt: - name: band5_bt + name: "5" var_name: data attributes: physical_element: From d75842dfb7088b389c16ed3848b223f871315b34 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 29 Aug 2023 18:43:54 -0500 Subject: [PATCH 180/702] Move MODIS test fixtures to MODIS-specific sub-directory Closes #2559 --- satpy/tests/reader_tests/conftest.py | 17 --------- .../reader_tests/modis_tests/__init__.py | 21 +++++++++++ .../{ => modis_tests}/_modis_fixtures.py | 0 .../reader_tests/modis_tests/conftest.py | 35 +++++++++++++++++++ .../{ => modis_tests}/test_modis_l1b.py | 2 +- .../{ => modis_tests}/test_modis_l2.py | 2 +- setup.cfg | 1 + 7 files changed, 59 insertions(+), 19 deletions(-) create mode 100644 satpy/tests/reader_tests/modis_tests/__init__.py rename satpy/tests/reader_tests/{ => modis_tests}/_modis_fixtures.py (100%) create mode 100644 satpy/tests/reader_tests/modis_tests/conftest.py rename satpy/tests/reader_tests/{ => modis_tests}/test_modis_l1b.py (99%) rename satpy/tests/reader_tests/{ => modis_tests}/test_modis_l2.py (99%) diff --git a/satpy/tests/reader_tests/conftest.py b/satpy/tests/reader_tests/conftest.py index ca9e3fc66e..8f6f572494 100644 --- a/satpy/tests/reader_tests/conftest.py +++ b/satpy/tests/reader_tests/conftest.py @@ -16,20 +16,3 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Setup and configuration for all reader tests.""" - -from ._modis_fixtures import ( - modis_l1b_imapp_1000m_file, - modis_l1b_imapp_geo_file, - modis_l1b_nasa_1km_mod03_files, - modis_l1b_nasa_mod02hkm_file, - modis_l1b_nasa_mod02qkm_file, - modis_l1b_nasa_mod03_file, - modis_l1b_nasa_mod021km_file, - modis_l2_imapp_mask_byte1_file, - modis_l2_imapp_mask_byte1_geo_files, - modis_l2_imapp_snowmask_file, - modis_l2_imapp_snowmask_geo_files, - modis_l2_nasa_mod06_file, - modis_l2_nasa_mod35_file, - modis_l2_nasa_mod35_mod03_files, -) diff --git a/satpy/tests/reader_tests/modis_tests/__init__.py b/satpy/tests/reader_tests/modis_tests/__init__.py new file mode 100644 index 0000000000..45c8d67254 --- /dev/null +++ b/satpy/tests/reader_tests/modis_tests/__init__.py @@ -0,0 +1,21 @@ +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Unit tests for MODIS readers. + +This subdirectory mostly exists to have MODIS-based pytest fixtures only loaded +for MODIS tests. 
+ +""" diff --git a/satpy/tests/reader_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py similarity index 100% rename from satpy/tests/reader_tests/_modis_fixtures.py rename to satpy/tests/reader_tests/modis_tests/_modis_fixtures.py diff --git a/satpy/tests/reader_tests/modis_tests/conftest.py b/satpy/tests/reader_tests/modis_tests/conftest.py new file mode 100644 index 0000000000..09f98049db --- /dev/null +++ b/satpy/tests/reader_tests/modis_tests/conftest.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Setup and configuration for all reader tests.""" + +from ._modis_fixtures import ( + modis_l1b_imapp_1000m_file, + modis_l1b_imapp_geo_file, + modis_l1b_nasa_1km_mod03_files, + modis_l1b_nasa_mod02hkm_file, + modis_l1b_nasa_mod02qkm_file, + modis_l1b_nasa_mod03_file, + modis_l1b_nasa_mod021km_file, + modis_l2_imapp_mask_byte1_file, + modis_l2_imapp_mask_byte1_geo_files, + modis_l2_imapp_snowmask_file, + modis_l2_imapp_snowmask_geo_files, + modis_l2_nasa_mod06_file, + modis_l2_nasa_mod35_file, + modis_l2_nasa_mod35_mod03_files, +) diff --git a/satpy/tests/reader_tests/test_modis_l1b.py b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py similarity index 99% rename from satpy/tests/reader_tests/test_modis_l1b.py rename to satpy/tests/reader_tests/modis_tests/test_modis_l1b.py index 3d8d569ca8..56e8687844 100644 --- a/satpy/tests/reader_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py @@ -25,8 +25,8 @@ from pytest_lazyfixture import lazy_fixture from satpy import Scene, available_readers +from satpy.tests.utils import CustomScheduler, make_dataid -from ..utils import CustomScheduler, make_dataid from ._modis_fixtures import ( AVAILABLE_1KM_PRODUCT_NAMES, AVAILABLE_HKM_PRODUCT_NAMES, diff --git a/satpy/tests/reader_tests/test_modis_l2.py b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py similarity index 99% rename from satpy/tests/reader_tests/test_modis_l2.py rename to satpy/tests/reader_tests/modis_tests/test_modis_l2.py index a1870fe390..222f365d87 100644 --- a/satpy/tests/reader_tests/test_modis_l2.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py @@ -26,8 +26,8 @@ from pytest_lazyfixture import lazy_fixture from satpy import Scene, available_readers +from satpy.tests.utils import CustomScheduler, make_dataid -from ..utils import CustomScheduler, make_dataid from ._modis_fixtures import _shape_for_resolution # NOTE: diff --git a/setup.cfg b/setup.cfg index e2ef375dc0..594f9dc8cd 100644 --- a/setup.cfg +++ b/setup.cfg @@ -20,6 +20,7 @@ exclude = satpy/tests/features per-file-ignores = satpy/tests/*/conftest.py:F401 + satpy/tests/*/*/conftest.py:F401 doc/source/doi_role.py:D103 satpy/tests/features/steps/*.py:F811 From 52de91b02a573e2fece5b8cec9ff4cf1c3b90d46 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 30 Aug 
2023 09:32:02 +0200 Subject: [PATCH 181/702] Put back previous chunks --- satpy/readers/ahi_hsd.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index be0ef31ca0..e06f7ebc50 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -622,7 +622,8 @@ def _read_data(self, fp_, header): chunks = da.core.normalize_chunks("auto", shape=(nlines, ncols), limit=get_chunk_size_limit(), - dtype='f8') + dtype='f8', + previous_chunks=(550, 550)) return da.from_array(np.memmap(self.filename, offset=fp_.tell(), dtype=' Date: Thu, 31 Aug 2023 09:08:34 -0500 Subject: [PATCH 182/702] Fix VIIRS EDR handling of multiple fill values in geolocation --- satpy/readers/viirs_edr.py | 10 ++++----- satpy/tests/reader_tests/test_viirs_edr.py | 25 ++++++++++++++++------ 2 files changed, 23 insertions(+), 12 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index da61114fca..30aeaa6d52 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -62,7 +62,6 @@ import logging from typing import Iterable -import numpy as np import xarray as xr from satpy import DataID @@ -138,14 +137,13 @@ def get_dataset(self, dataset_id: DataID, info: dict) -> xr.DataArray: return data_arr def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: - fill_value = data_arr.encoding.get("_FillValue") - if fill_value is not None and not np.isnan(fill_value): - # xarray auto mask and scale handled this - return data_arr yaml_fill = ds_info.get("_FillValue") + # xarray auto mask and scale handled any fills from the file if yaml_fill is not None: - return data_arr.where(data_arr != yaml_fill) + data_arr = data_arr.where(data_arr != yaml_fill) valid_range = ds_info.get("valid_range", data_arr.attrs.get("valid_range")) + if "valid_min" in data_arr.attrs and valid_range is None: + valid_range = (data_arr.attrs["valid_min"], data_arr.attrs["valid_max"]) if valid_range is not None: return data_arr.where((valid_range[0] <= data_arr) & (data_arr <= valid_range[1])) return data_arr diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 146a0cd2c4..da6dc9a55b 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -128,17 +128,26 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]: dim_x_375 = "Along_Scan_375m" i_dims = (dim_y_375, dim_x_375) - lon_attrs = {"standard_name": "longitude", "units": "degrees_east", "_FillValue": -999.9} - lat_attrs = {"standard_name": "latitude", "units": "degrees_north", "_FillValue": -999.9} + lon_attrs = {"standard_name": "longitude", "units": "degrees_east", "_FillValue": -999.9, + "valid_min": -180.0, "valid_max": 180.0} + lat_attrs = {"standard_name": "latitude", "units": "degrees_north", "_FillValue": -999.9, + "valid_min": -90.0, "valid_max": 90.0} sr_attrs = {"units": "unitless", "_FillValue": -9999, "scale_factor": 0.0001, "add_offset": 0.0} i_data = np.random.random_sample((I_ROWS, I_COLS)).astype(np.float32) m_data = np.random.random_sample((M_ROWS, M_COLS)).astype(np.float32) + lon_i_data = (i_data * 360) - 180.0 + lon_m_data = (m_data * 360) - 180.0 + lat_i_data = (i_data * 180) - 90.0 + lat_m_data = (m_data * 180) - 90.0 + for geo_var in (lon_i_data, lon_m_data, lat_i_data, lat_m_data): + geo_var[0, 0] = -999.9 + geo_var[0, 1] = -999.3 data_arrs = { - "Longitude_at_375m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lon_attrs), - 
"Latitude_at_375m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lat_attrs), - "Longitude_at_750m_resolution": xr.DataArray(m_data, dims=m_dims, attrs=lon_attrs), - "Latitude_at_750m_resolution": xr.DataArray(m_data, dims=m_dims, attrs=lat_attrs), + "Longitude_at_375m_resolution": xr.DataArray(lon_i_data, dims=i_dims, attrs=lon_attrs), + "Latitude_at_375m_resolution": xr.DataArray(lat_i_data, dims=i_dims, attrs=lat_attrs), + "Longitude_at_750m_resolution": xr.DataArray(lon_m_data, dims=m_dims, attrs=lon_attrs), + "Latitude_at_750m_resolution": xr.DataArray(lat_m_data, dims=m_dims, attrs=lat_attrs), "375m Surface Reflectance Band I1": xr.DataArray(i_data, dims=i_dims, attrs=sr_attrs), "750m Surface Reflectance Band M1": xr.DataArray(m_data, dims=m_dims, attrs=sr_attrs), } @@ -458,6 +467,10 @@ def _shared_metadata_checks(data_arr: xr.DataArray) -> None: lats = data_arr.attrs["area"].lats assert lons.attrs["rows_per_scan"] == exp_rps assert lats.attrs["rows_per_scan"] == exp_rps + assert lons.min() >= -180.0 + assert lons.max() <= 180.0 + assert lats.min() >= -90.0 + assert lats.max() <= 90.0 def _is_mband_res(data_arr: xr.DataArray) -> bool: From 99a63d5d3fac72e2f8637549e5e4b060d7c07a4f Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 31 Aug 2023 09:46:39 -0500 Subject: [PATCH 183/702] Remove unnecessary _FillValue from YAML handling --- satpy/readers/viirs_edr.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 30aeaa6d52..a8c6c934b2 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -137,10 +137,7 @@ def get_dataset(self, dataset_id: DataID, info: dict) -> xr.DataArray: return data_arr def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: - yaml_fill = ds_info.get("_FillValue") # xarray auto mask and scale handled any fills from the file - if yaml_fill is not None: - data_arr = data_arr.where(data_arr != yaml_fill) valid_range = ds_info.get("valid_range", data_arr.attrs.get("valid_range")) if "valid_min" in data_arr.attrs and valid_range is None: valid_range = (data_arr.attrs["valid_min"], data_arr.attrs["valid_max"]) From 1686cdd68fc7b26a9238874da52c9b512a3e3ac3 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 31 Aug 2023 10:07:42 -0500 Subject: [PATCH 184/702] Revert accidental commit of AHI experimental resolution-based chunking --- satpy/readers/ahi_hsd.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 8a2c82ac42..86e9aa9e0c 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -616,14 +616,13 @@ def _read_header(self, fp_): return header - def _read_data(self, fp_, header, resolution): + def _read_data(self, fp_, header): """Read data block.""" nlines = int(header["block2"]['number_of_lines'][0]) ncols = int(header["block2"]['number_of_columns'][0]) - chunk_size = CHUNK_SIZE * (500 / resolution) return da.from_array(np.memmap(self.filename, offset=fp_.tell(), dtype=' Date: Thu, 31 Aug 2023 10:11:56 -0500 Subject: [PATCH 185/702] Remove unused low cloud fog file pattern CSPP Geo is no longer producing this filenaming scheme --- satpy/etc/readers/abi_l2_nc.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/etc/readers/abi_l2_nc.yaml b/satpy/etc/readers/abi_l2_nc.yaml index 7c0f1a75ac..3c3a94cc40 100644 --- a/satpy/etc/readers/abi_l2_nc.yaml +++ b/satpy/etc/readers/abi_l2_nc.yaml @@ -576,8 +576,6 @@ file_types: abi_l2_gfls: file_reader: 
!!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: - # AIT scheme: GOES16_ABI_2KM_MESO_2019147_1800_48_AVIATION_FOG_EN.nc - - '{platform_shortname:s}_{mission_id:3s}_2KM_{scene_abbr:s}_{start_time:%Y%j_%H%M}_{file_product:s}_{algorithm_type:2s}.nc' # NDE scheme: ABI-L2-GFLSC-M6_v3r1_g16_s202306071931181_e202306071933554_c202306071934440.nc - '{mission_id:3s}-L2-GFLS{scene_abbr:s}-{scan_mode:2s}_v{sw_version:d}r{sw_revision:d}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' observation_type: "GFLS" From b8bdcf05e9f9cb8b8dc857a8708db2bee7a31a45 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 1 Sep 2023 10:19:03 -0500 Subject: [PATCH 186/702] Fix floating point edge case in elevation indexing in CREFL --- satpy/modifiers/_crefl_utils.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/satpy/modifiers/_crefl_utils.py b/satpy/modifiers/_crefl_utils.py index ddde8c9765..c8d6920056 100644 --- a/satpy/modifiers/_crefl_utils.py +++ b/satpy/modifiers/_crefl_utils.py @@ -390,17 +390,21 @@ def _runner_class_for_sensor(sensor_name: str) -> Type[_CREFLRunner]: def _space_mask_height(lon, lat, avg_elevation): - lat[(lat <= -90) | (lat >= 90)] = np.nan - lon[(lon <= -180) | (lon >= 180)] = np.nan - row = ((90.0 - lat) * avg_elevation.shape[0] / 180.0).astype(np.int32) - col = ((lon + 180.0) * avg_elevation.shape[1] / 360.0).astype(np.int32) - space_mask = np.isnan(lon) | np.isnan(lat) - row[space_mask] = 0 - col[space_mask] = 0 + row = ((90.0 - lat) * avg_elevation.shape[0] / 180.0) + col = ((lon + 180.0) * avg_elevation.shape[1] / 360.0) + np.clip(row, 0, avg_elevation.shape[0] - 1, out=row) + np.clip(col, 0, avg_elevation.shape[1] - 1, out=col) + row = row.astype(np.int32) + col = col.astype(np.int32) + # conditions need to be this way to include NaNs + bad_mask = ~((lon >= -180) | (lon <= 180) | (lat >= -90) | (lat <= 90)) + # convert any NaNs to valid indexes + row[bad_mask] = 0 + col[bad_mask] = 0 height = avg_elevation[row, col] # negative heights aren't allowed, clip to 0 - height[(height < 0.0) | np.isnan(height) | space_mask] = 0.0 + height[(height < 0.0) | np.isnan(height) | bad_mask] = 0.0 return height From bbbd70e15273c696e4ea6a8187de78520e0cfaa0 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 6 Sep 2023 10:29:11 -0500 Subject: [PATCH 187/702] Fix readthedocs creating a dirty git environment --- .readthedocs.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.readthedocs.yml b/.readthedocs.yml index 7a15d5578b..065634995b 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -13,5 +13,8 @@ build: os: "ubuntu-20.04" tools: python: "mambaforge-4.10" + jobs: + pre_install: + - git update-index --assume-unchanged doc/rtd_environment.yml doc/source/conf.py conda: environment: doc/rtd_environment.yml From 8ed4f4a135c92cdf18f29bf9abe0f4dbe3ba6793 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 6 Sep 2023 10:51:30 -0500 Subject: [PATCH 188/702] Debug RTD git state --- .readthedocs.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.readthedocs.yml b/.readthedocs.yml index 065634995b..59229176ec 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -15,6 +15,7 @@ build: python: "mambaforge-4.10" jobs: pre_install: + - git status - git update-index --assume-unchanged doc/rtd_environment.yml doc/source/conf.py conda: environment: doc/rtd_environment.yml From 344beac625d2a9f7a54c2dd797f8c8a164682f5b Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 6 Sep 2023 
17:07:10 +0100 Subject: [PATCH 189/702] Fix AHI L2 nc coordinate names --- satpy/readers/ahi_l2_nc.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index e44c882898..ef3b7611aa 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -98,6 +98,13 @@ def get_dataset(self, key, info): var = info['file_key'] logger.debug('Reading in get_dataset %s.', var) variable = self.nc[var] + + # Data has 'Latitude' and 'Longitude' coords, these must be replaced. + variable = variable.rename({'Rows': 'y', 'Columns': 'x'}) + + variable = variable.drop('Latitude') + variable = variable.drop('Longitude') + variable.attrs.update(key.to_dict()) return variable From 641a738c41e22b96973a30f6f485fbc0d1baa31f Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 6 Sep 2023 11:08:13 -0500 Subject: [PATCH 190/702] Debug RTD git state --- .readthedocs.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.readthedocs.yml b/.readthedocs.yml index 59229176ec..d16daabffc 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -17,5 +17,8 @@ build: pre_install: - git status - git update-index --assume-unchanged doc/rtd_environment.yml doc/source/conf.py + - git status + post_install: + - python -m pip install --no-deps -e . conda: environment: doc/rtd_environment.yml From cce6349ae1bd0da06b8d58f8f0e6b496f7bcf503 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 6 Sep 2023 17:09:26 +0100 Subject: [PATCH 191/702] Update AHI tests for new coord names. --- satpy/tests/reader_tests/test_ahi_l2_nc.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index 84b3c667d7..d2e9c24489 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -16,7 +16,7 @@ start_time = datetime(2023, 8, 24, 5, 40, 21) end_time = datetime(2023, 8, 24, 5, 49, 40) -dimensions = {'Columns': 5500, 'Rows': 5500} +dimensions = {'X': 5500, 'Y': 5500} exp_ext = (-5499999.9012, -5499999.9012, 5499999.9012, 5499999.9012) @@ -46,7 +46,7 @@ def himl2_filename(tmp_path_factory): with h5netcdf.File(fname, mode="w") as h5f: h5f.dimensions = dimensions h5f.attrs.update(global_attrs) - var = h5f.create_variable("CloudMask", ("Rows", "Columns"), np.uint16, chunks=(200, 200)) + var = h5f.create_variable("CloudMask", ("Y", "X"), np.uint16, chunks=(200, 200)) var[:] = clmk_data return fname @@ -59,7 +59,7 @@ def himl2_filename_bad(tmp_path_factory): with h5netcdf.File(fname, mode="w") as h5f: h5f.dimensions = dimensions h5f.attrs.update(badarea_attrs) - var = h5f.create_variable("CloudMask", ("Rows", "Columns"), np.uint16, chunks=(200, 200)) + var = h5f.create_variable("CloudMask", ("Y", "X"), np.uint16, chunks=(200, 200)) var[:] = clmk_data return fname From 1e208e97c6d4ba2fdbc4485b00d6478cdd39693e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 6 Sep 2023 13:34:40 -0500 Subject: [PATCH 192/702] Fix colormap-based tests with new version of trollimage --- .../enhancement_tests/test_enhancements.py | 20 +++++++------------ satpy/tests/test_composites.py | 14 ++++++------- 2 files changed, 14 insertions(+), 20 deletions(-) diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index 4420f4ea9b..e95c55a362 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -149,19 +149,13 @@ def 
test_colorize(self): from trollimage.colormap import brbg from satpy.enhancements import colorize - expected = np.array([[ - [np.nan, 3.29409498e-01, 3.29409498e-01, - 4.35952940e-06, 4.35952940e-06], - [4.35952940e-06, 4.35952940e-06, 4.35952940e-06, - 4.35952940e-06, 4.35952940e-06]], - [[np.nan, 1.88249866e-01, 1.88249866e-01, - 2.35302110e-01, 2.35302110e-01], - [2.35302110e-01, 2.35302110e-01, 2.35302110e-01, - 2.35302110e-01, 2.35302110e-01]], - [[np.nan, 1.96102817e-02, 1.96102817e-02, - 1.88238767e-01, 1.88238767e-01], - [1.88238767e-01, 1.88238767e-01, 1.88238767e-01, - 1.88238767e-01, 1.88238767e-01]]]) + expected = np.array([ + [[np.nan, 3.29411723e-01, 3.29411723e-01, 3.21825881e-08, 3.21825881e-08], + [3.21825881e-08, 3.21825881e-08, 3.21825881e-08, 3.21825881e-08, 3.21825881e-08]], + [[np.nan, 1.88235327e-01, 1.88235327e-01, 2.35294109e-01, 2.35294109e-01], + [2.35294109e-01, 2.35294109e-01, 2.35294109e-01, 2.35294109e-01, 2.35294109e-01]], + [[np.nan, 1.96078164e-02, 1.96078164e-02, 1.88235281e-01, 1.88235281e-01], + [1.88235281e-01, 1.88235281e-01, 1.88235281e-01, 1.88235281e-01, 1.88235281e-01]]]) run_and_check_enhancement(colorize, self.ch1, expected, palettes=brbg) def test_palettize(self): diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index f27c73d849..f056d2fa93 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -773,13 +773,13 @@ def test_colorize_with_interpolation(self): dims=['y', 'x'], attrs={'valid_range': np.array([2, 4])}) res = colormap_composite([data, palette]) - exp = np.array([[[1.0000149, 0.49804664, 0.24907766], - [0., 0.59844028, 1.0000149]], - [[1.00005405, 0.49806613, 0.24902255], - [0., 0.59846373, 1.00005405]], - [[1.00001585, 0.49804711, 0.24896771], - [0., 0.59844073, 1.00001585]]]) - self.assertTrue(np.allclose(res, exp, atol=1e-4)) + exp = np.array([[[1.0, 0.498039, 0.246575], + [0., 0.59309977, 1.0]], + [[1.0, 0.49803924, 0.24657543], + [0., 0.59309983, 1.0]], + [[1.0, 0.4980392, 0.24657541], + [0., 0.59309978, 1.0]]]) + np.testing.assert_allclose(res, exp, atol=1e-4) class TestCloudCompositorWithoutCloudfree: From c83a60aafd578518879b080102a205869186213f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Sep 2023 10:44:20 +0000 Subject: [PATCH 193/702] Bump actions/checkout from 3 to 4 Bumps [actions/checkout](https://github.com/actions/checkout) from 3 to 4. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/ci.yaml | 4 ++-- .github/workflows/deploy-sdist.yaml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 178138fe3e..9897f8886a 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout source - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v4 with: @@ -55,7 +55,7 @@ jobs: steps: - name: Checkout source - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Conda Environment uses: conda-incubator/setup-miniconda@v2 diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 73db646c62..ba8653a9f2 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -11,7 +11,7 @@ jobs: steps: - name: Checkout source - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Create sdist shell: bash -l {0} From 62e8ac3b21d9f9ab5cf2e29aefc35852a7a3e0e7 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 13 Sep 2023 11:39:18 -0500 Subject: [PATCH 194/702] Rename "night_microphsyics_abi" composite to "night_microphysics" --- satpy/etc/composites/abi.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml index d34e86313b..783bd5187a 100644 --- a/satpy/etc/composites/abi.yaml +++ b/satpy/etc/composites/abi.yaml @@ -537,7 +537,7 @@ composites: - name: C13 standard_name: snow - night_microphysics_abi: + night_microphysics: description: > Nighttime Microphysics RGB, for GOESR: NASA, NOAA references: From 9ad3995849bc0688b973895faf20f1c8674f8000 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 14 Sep 2023 10:42:06 -0500 Subject: [PATCH 195/702] Fetch all tags in RTD for versioning --- .readthedocs.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index d16daabffc..a04fbdda78 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -14,11 +14,11 @@ build: tools: python: "mambaforge-4.10" jobs: + post_checkout: + - git fetch --tags pre_install: - - git status - git update-index --assume-unchanged doc/rtd_environment.yml doc/source/conf.py - - git status - post_install: - - python -m pip install --no-deps -e . + # post_install: + # - python -m pip install --no-deps -e . conda: environment: doc/rtd_environment.yml From 6d4fcc2898237dc4e07f24aaa188c4b2649a25c1 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 14 Sep 2023 10:50:34 -0500 Subject: [PATCH 196/702] Remove commented out pip install in RTD config --- .readthedocs.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index a04fbdda78..9f3d7bd1b5 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -18,7 +18,5 @@ build: - git fetch --tags pre_install: - git update-index --assume-unchanged doc/rtd_environment.yml doc/source/conf.py - # post_install: - # - python -m pip install --no-deps -e . conda: environment: doc/rtd_environment.yml From ed1c4a171e703c10dc8d9fa1e4e9260a2bde3abd Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 18 Sep 2023 16:03:28 +0200 Subject: [PATCH 197/702] Add non-linear strength for AMI ndvi_hybrid_green band. 
--- satpy/etc/composites/ami.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/satpy/etc/composites/ami.yaml b/satpy/etc/composites/ami.yaml index 5a8608d795..b0d943c5ca 100644 --- a/satpy/etc/composites/ami.yaml +++ b/satpy/etc/composites/ami.yaml @@ -69,6 +69,7 @@ composites: currently implemented are experimental and may change in future versions of Satpy. compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen limits: [0.15, 0.05] + strength: 3.0 prerequisites: - name: VI005 modifiers: [sunz_corrected, rayleigh_corrected] @@ -83,6 +84,7 @@ composites: Alternative to ndvi_hybrid_green, but without solar zenith or rayleigh correction. compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen limits: [0.15, 0.05] + strength: 3.0 prerequisites: - name: VI005 - name: VI006 From 049a2a3690e07166aa301d68e9d7e9ee569e0d2d Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 18 Sep 2023 16:49:49 +0200 Subject: [PATCH 198/702] Refactor code for applying non-linearity and computing blend fractions. --- satpy/composites/spectral.py | 34 ++++++++++++++++++++++++++++------ 1 file changed, 28 insertions(+), 6 deletions(-) diff --git a/satpy/composites/spectral.py b/satpy/composites/spectral.py index 5e6e03c148..7d05a000d6 100644 --- a/satpy/composites/spectral.py +++ b/satpy/composites/spectral.py @@ -169,17 +169,39 @@ def __call__(self, projectables, optional_datasets=None, **attrs): ndvi.data = da.where(ndvi > self.ndvi_min, ndvi, self.ndvi_min) ndvi.data = da.where(ndvi < self.ndvi_max, ndvi, self.ndvi_max) - # Apply non-linearity to the ndvi for a non-linear conversion from ndvi to fraction. This can be used for a - # slower or faster transision to higher/lower fractions at the ndvi extremes. If strength equals 1.0, this - # operation has no effect on the ndvi. + # Introduce non-linearity to ndvi for non-linear scaling to NIR blend fraction + if self.strength != 1.0: # self._apply_strength() has no effect if strength = 1.0 -> no non-linear behaviour + ndvi = self._apply_strength(ndvi) + + # Compute pixel-level NIR blend fractions from ndvi + fraction = self._compute_blend_fraction(ndvi) + + # Prepare input as required by parent class (SpectralBlender) + self.fractions = (1 - fraction, fraction) + + return super().__call__([projectables[0], projectables[2]], **attrs) + + def _apply_strength(self, ndvi): + """Introduce non-linearity by applying strength factor. + + The method introduces non-linearity to the ndvi for a non-linear scaling from ndvi to blend fraction in + `_compute_blend_fraction`. This can be used for a slower or faster transision to higher/lower fractions + at the ndvi extremes. If strength equals 1.0, this operation has no effect on the ndvi. + """ ndvi = ndvi ** self.strength / (ndvi ** self.strength + (1 - ndvi) ** self.strength) - # Compute blending fraction from ndvi + return ndvi + + def _compute_blend_fraction(self, ndvi): + """Compute pixel-level fraction of NIR signal to blend with native green signal. + + This method linearly scales the input ndvi values to pixel-level blend fractions within the range + `[limits[0], limits[1]]` following this implementation . 
+ """ fraction = (ndvi - self.ndvi_min) / (self.ndvi_max - self.ndvi_min) * (self.limits[1] - self.limits[0]) \ + self.limits[0] - self.fractions = (1 - fraction, fraction) - return super().__call__([projectables[0], projectables[2]], **attrs) + return fraction class GreenCorrector(SpectralBlender): From 3767f61a4b2545843232a97c2b7c02dd04448c33 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 18 Sep 2023 17:03:03 +0200 Subject: [PATCH 199/702] Refactor unit tests for NDVI-weighted hybrid green correction. --- satpy/tests/compositor_tests/test_spectral.py | 46 +++++++++++-------- 1 file changed, 27 insertions(+), 19 deletions(-) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 03e51a5043..b6460c911b 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -65,8 +65,24 @@ def test_hybrid_green(self): data = res.compute() np.testing.assert_allclose(data, 0.23) - def test_ndvi_hybrid_green(self): - """Test NDVI-scaled hybrid green correction of 'green' band.""" + def test_green_corrector(self): + """Test the deprecated class for green corrections.""" + comp = GreenCorrector('blended_channel', fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), + standard_name='toa_bidirectional_reflectance') + res = comp((self.c01, self.c03)) + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs['name'] == 'blended_channel' + assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' + data = res.compute() + np.testing.assert_allclose(data, 0.23) + + +class TestNdviHybridGreenCompositor: + """Test NDVI-weighted hybrid green correction of green band.""" + + def setup_method(self): + """Initialize channels.""" self.c01 = xr.DataArray(da.from_array([[0.25, 0.30], [0.20, 0.30]], chunks=25), dims=('y', 'x'), attrs={'name': 'C02'}) self.c02 = xr.DataArray(da.from_array([[0.25, 0.30], [0.25, 0.35]], chunks=25), @@ -74,6 +90,8 @@ def test_ndvi_hybrid_green(self): self.c03 = xr.DataArray(da.from_array([[0.35, 0.35], [0.28, 0.65]], chunks=25), dims=('y', 'x'), attrs={'name': 'C04'}) + def test_ndvi_hybrid_green(self): + """Test General functionality with linear scaling from ndvi to blend fraction.""" comp = NDVIHybridGreen('ndvi_hybrid_green', limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), standard_name='toa_bidirectional_reflectance') @@ -86,26 +104,16 @@ def test_ndvi_hybrid_green(self): data = res.values np.testing.assert_array_almost_equal(data, np.array([[0.2633, 0.3071], [0.2115, 0.3420]]), decimal=4) - # Test invalid strength - with pytest.raises(ValueError): - _ = NDVIHybridGreen('ndvi_hybrid_green', strength=0.0, prerequisites=(0.51, 0.65, 0.85), - standard_name='toa_bidirectional_reflectance') - - # Test non-linear strength + def test_nonliniear_scaling(self): + """Test non-linear scaling using `strength` term.""" comp = NDVIHybridGreen('ndvi_hybrid_green', limits=(0.15, 0.05), strength=2.0, prerequisites=(0.51, 0.65, 0.85), standard_name='toa_bidirectional_reflectance') res = comp((self.c01, self.c02, self.c03)) np.testing.assert_array_almost_equal(res.values, np.array([[0.2646, 0.3075], [0.2120, 0.3471]]), decimal=4) - def test_green_corrector(self): - """Test the deprecated class for green corrections.""" - comp = GreenCorrector('blended_channel', fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), - standard_name='toa_bidirectional_reflectance') - res = comp((self.c01, self.c03)) - assert isinstance(res, 
xr.DataArray) - assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'blended_channel' - assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' - data = res.compute() - np.testing.assert_allclose(data, 0.23) + def test_invalid_strength(self): + """Test using invalid `strength` term for non-linear scaling.""" + with pytest.raises(ValueError): + _ = NDVIHybridGreen('ndvi_hybrid_green', strength=0.0, prerequisites=(0.51, 0.65, 0.85), + standard_name='toa_bidirectional_reflectance') From 08e833eafbcb0d44a1f436a2eb7fbac12c7739f0 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 24 Aug 2023 14:59:41 +0200 Subject: [PATCH 200/702] Add modifier for reducing measurement signal within a defined sunz interval. --- satpy/modifiers/__init__.py | 1 + satpy/modifiers/angles.py | 46 +++++++++++++++++++++++++++++++++++++ satpy/modifiers/geometry.py | 41 ++++++++++++++++++++++++++++++++- 3 files changed, 87 insertions(+), 1 deletion(-) diff --git a/satpy/modifiers/__init__.py b/satpy/modifiers/__init__.py index a0888167b3..c8d32f246e 100644 --- a/satpy/modifiers/__init__.py +++ b/satpy/modifiers/__init__.py @@ -25,5 +25,6 @@ from .atmosphere import PSPRayleighReflectance # noqa: F401 from .geometry import EffectiveSolarPathLengthCorrector # noqa: F401 from .geometry import SunZenithCorrector # noqa: F401 +from .geometry import SunZenithReducer # noqa: F401 from .spectral import NIREmissivePartFromReflectance # noqa: F401 from .spectral import NIRReflectance # noqa: F401 diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 28adb60028..a914863b04 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -235,6 +235,7 @@ def cache_to_zarr_if( out old entries. It is up to the user to manage the size of the cache. """ + def _decorator(func: Callable) -> Callable: zarr_cacher = ZarrCacheHelper(func, cache_config_key, @@ -242,6 +243,7 @@ def _decorator(func: Callable) -> Callable: sanitize_args_func) wrapper = update_wrapper(zarr_cacher, func) return wrapper + return _decorator @@ -542,3 +544,47 @@ def _sunzen_corr_cos_ndarray(data: np.ndarray, # Force "night" pixels to 0 (where SZA is invalid) corr[np.isnan(cos_zen)] = 0 return data * corr + + +def sunzen_reduction(data: da.Array, + sunz: da.Array, + limit: float = 65., + max_sza: float = 95., + strength: float = 2.) -> da.Array: + """Reduced strength of signal at high sun zenith angles.""" + return da.map_blocks(_sunzen_reduction_ndarray, data, sunz, limit, max_sza, strength, + meta=np.array((), dtype=data.dtype), chunks=data.chunks) + + +def _sunzen_reduction_ndarray(data: np.ndarray, + sunz: np.ndarray, + limit: float, + max_sza: float, + strength: float) -> np.ndarray: + if max_sza is None: + raise ValueError("`max_sza` must be defined when using the SunZenithReducer.") + + # compute reduction factor (0.0 - 1.0) between limit and maz_sza + reduction_factor = (sunz - limit) / (max_sza - limit) + reduction_factor = reduction_factor.clip(0., 1.) + + # invert the reduction factor such that minimum reduction is done at `limit` and gradually increases towards max_sza + with np.errstate(invalid='ignore'): # we expect space pixels to be invalid + reduction_factor = 1. - np.log(reduction_factor + 1) / np.log(2) + + # apply non-linearity to the reduction factor for a non-linear reduction of the signal. This can be used for a + # slower or faster transision to higher/lower fractions at the ndvi extremes. If strength equals 1.0, this + # operation has no effect on the reduction_factor. 
+ reduction_factor = reduction_factor ** strength / ( + reduction_factor ** strength + (1 - reduction_factor) ** strength) + + # compute final correction term, with no reduction for angles < limit + corr = np.where(sunz < limit, 1.0, reduction_factor) + + # force "night" pixels to 0 (where SZA is invalid) + corr[np.isnan(sunz)] = 0 + + # reduce data signal with correction term + res = data * corr + + return res diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py index ecd83f80e5..0022a487a4 100644 --- a/satpy/modifiers/geometry.py +++ b/satpy/modifiers/geometry.py @@ -24,7 +24,7 @@ import numpy as np from satpy.modifiers import ModifierBase -from satpy.modifiers.angles import sunzen_corr_cos +from satpy.modifiers.angles import sunzen_corr_cos, sunzen_reduction from satpy.utils import atmospheric_path_length_correction logger = logging.getLogger(__name__) @@ -159,3 +159,42 @@ def __init__(self, correction_limit=88., **kwargs): def _apply_correction(self, proj, coszen): logger.debug("Apply the effective solar atmospheric path length correction method by Li and Shibata") return atmospheric_path_length_correction(proj, coszen, limit=self.correction_limit, max_sza=self.max_sza) + + +class SunZenithReducer(SunZenithCorrectorBase): + """Reduce signal strength at large sun zenith angles. + + Within a given sunz interval [correction_limit, max_sza] the strength of the signal is reduced following the + formula: + + res = signal * reduction_factor + + where reduction_factor is a pixel-level value ranging from 0 to 1 within the sunz interval. + + The `strength` parameter can be used for a non-linear reduction within the sunz interval. A strength larger + than 1.0 will decelerate the signal reduction towards the sunz interval extremes, whereas a strength + smaller than 1.0 will accelerate the signal reduction towards the sunz interval extremes. + + """ + + def __init__(self, correction_limit=60., strength=2.0, **kwargs): + """Collect custom configuration values. + + Args: + correction_limit (float): Maximum solar zenith angle to apply the correction in degrees. Default 60. + strength (float): The strength of the non-linear signal reduction. Default 2.0 + + """ + self.correction_limit = correction_limit + self.strength = strength + super(SunZenithReducer, self).__init__(**kwargs) + + def _apply_correction(self, proj, coszen): + logger.debug("Apply sun-zenith signal reduction") + res = proj.copy() + sunz = np.rad2deg(np.arccos(coszen.data)) + res.data = sunzen_reduction(proj.data, sunz, + limit=self.correction_limit, + max_sza=self.max_sza, + strength=self.strength) + return res From f3970cd780f41457e7c6485ebc834703b700ed85 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 24 Aug 2023 16:28:44 +0200 Subject: [PATCH 201/702] Modify default values and add default modifier recipe in visir.yaml. 
--- satpy/etc/composites/visir.yaml | 5 +++++ satpy/modifiers/geometry.py | 10 ++++++---- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/satpy/etc/composites/visir.yaml b/satpy/etc/composites/visir.yaml index 0bb177e9ff..d9798057a2 100644 --- a/satpy/etc/composites/visir.yaml +++ b/satpy/etc/composites/visir.yaml @@ -16,6 +16,11 @@ modifiers: optional_prerequisites: - solar_zenith_angle + sunz_reduced: + modifier: !!python/name:satpy.modifiers.SunZenithReducer + optional_prerequisites: + - solar_zenith_angle + co2_corrected: modifier: !!python/name:satpy.modifiers.CO2Corrector prerequisites: diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py index 0022a487a4..e93b34e1a8 100644 --- a/satpy/modifiers/geometry.py +++ b/satpy/modifiers/geometry.py @@ -177,17 +177,19 @@ class SunZenithReducer(SunZenithCorrectorBase): """ - def __init__(self, correction_limit=60., strength=2.0, **kwargs): + def __init__(self, correction_limit=55., max_sza=90, strength=1.5, **kwargs): """Collect custom configuration values. Args: - correction_limit (float): Maximum solar zenith angle to apply the correction in degrees. Default 60. - strength (float): The strength of the non-linear signal reduction. Default 2.0 + correction_limit (float): Solar zenith angle in degrees where to start the signal reduction. Default 60. + max_sza (float): Maximum solar zenith angle in degrees where to apply the signal reduction. Beyond + this solar zenith angle the signal will become zero. Default 90. + strength (float): The strength of the non-linear signal reduction. Default 1.5 """ self.correction_limit = correction_limit self.strength = strength - super(SunZenithReducer, self).__init__(**kwargs) + super(SunZenithReducer, self).__init__(max_sza=max_sza, **kwargs) def _apply_correction(self, proj, coszen): logger.debug("Apply sun-zenith signal reduction") From 2dff5696e2f71128790c919886173cd32923d422 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 24 Aug 2023 16:30:18 +0200 Subject: [PATCH 202/702] Add satz reducer modified in FCI true color recipes. --- satpy/etc/composites/fci.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 193415656b..55876cb98d 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -13,11 +13,11 @@ composites: limits: [0.15, 0.05] prerequisites: - name: vis_05 - modifiers: [sunz_corrected, rayleigh_corrected] + modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced] - name: vis_06 - modifiers: [sunz_corrected, rayleigh_corrected] + modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced] - name: vis_08 - modifiers: [sunz_corrected ] + modifiers: [sunz_corrected, sunz_reduced ] standard_name: toa_bidirectional_reflectance ndvi_hybrid_green_raw: @@ -48,10 +48,10 @@ composites: of the ndvi_hybrid_green composites for details. prerequisites: - name: vis_06 - modifiers: [sunz_corrected, rayleigh_corrected] + modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced] - name: ndvi_hybrid_green - name: vis_04 - modifiers: [sunz_corrected, rayleigh_corrected] + modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced] standard_name: true_color true_color_raw_with_corrected_green: From 25a77c2dbe3ed841115839a3a2d2d969045ff945 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 24 Aug 2023 16:42:39 +0200 Subject: [PATCH 203/702] Add satz reducer modified in AHI NDVI based true color recipe. 
--- satpy/etc/composites/ahi.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 9e36bf7d7f..5b000689dc 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -107,11 +107,11 @@ composites: compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen prerequisites: - name: B02 - modifiers: [sunz_corrected, rayleigh_corrected] + modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced] - name: B03 - modifiers: [sunz_corrected, rayleigh_corrected] + modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced] - name: B04 - modifiers: [sunz_corrected] + modifiers: [sunz_corrected, sunz_reduced] standard_name: toa_bidirectional_reflectance airmass: @@ -275,10 +275,10 @@ composites: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: B03 - modifiers: [sunz_corrected, rayleigh_corrected] + modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced] - name: ndvi_hybrid_green - name: B01 - modifiers: [sunz_corrected, rayleigh_corrected] + modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced] high_resolution_band: red standard_name: true_color From d2d82cf2d4815c76a312ac1403d410f72a89390b Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Fri, 25 Aug 2023 09:58:50 +0200 Subject: [PATCH 204/702] Align default values for sunz reduction. --- satpy/modifiers/angles.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index a914863b04..103ea7362b 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -548,9 +548,9 @@ def _sunzen_corr_cos_ndarray(data: np.ndarray, def sunzen_reduction(data: da.Array, sunz: da.Array, - limit: float = 65., - max_sza: float = 95., - strength: float = 2.) -> da.Array: + limit: float = 55., + max_sza: float = 90., + strength: float = 1.5) -> da.Array: """Reduced strength of signal at high sun zenith angles.""" return da.map_blocks(_sunzen_reduction_ndarray, data, sunz, limit, max_sza, strength, meta=np.array((), dtype=data.dtype), chunks=data.chunks) From 1a22f1c8d43d3840374c818d6763b31ef623f49d Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Fri, 25 Aug 2023 10:03:52 +0200 Subject: [PATCH 205/702] Move check for max_sza=None to class init method. --- satpy/modifiers/angles.py | 3 --- satpy/modifiers/geometry.py | 2 ++ 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 103ea7362b..391278bf8f 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -561,9 +561,6 @@ def _sunzen_reduction_ndarray(data: np.ndarray, limit: float, max_sza: float, strength: float) -> np.ndarray: - if max_sza is None: - raise ValueError("`max_sza` must be defined when using the SunZenithReducer.") - # compute reduction factor (0.0 - 1.0) between limit and maz_sza reduction_factor = (sunz - limit) / (max_sza - limit) reduction_factor = reduction_factor.clip(0., 1.) 
diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py index e93b34e1a8..61c406adb6 100644 --- a/satpy/modifiers/geometry.py +++ b/satpy/modifiers/geometry.py @@ -190,6 +190,8 @@ def __init__(self, correction_limit=55., max_sza=90, strength=1.5, **kwargs): self.correction_limit = correction_limit self.strength = strength super(SunZenithReducer, self).__init__(max_sza=max_sza, **kwargs) + if self.max_sza is None: + raise ValueError("`max_sza` must be defined when using the SunZenithReducer.") def _apply_correction(self, proj, coszen): logger.debug("Apply sun-zenith signal reduction") From a4c9c18a3eb3b2a5e0127c5c8799ee1b4320ca92 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Fri, 25 Aug 2023 10:06:38 +0200 Subject: [PATCH 206/702] Add unit tests for SunZenithReducer modifier. --- satpy/tests/test_modifiers.py | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index c21a514808..0ba788b4d1 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -156,6 +156,35 @@ def test_imcompatible_areas(self, sunz_ds2, sunz_sza): comp((sunz_ds2, sunz_sza), test_attr='test') +class TestSunZenithReducer: + """Test case for the sun zenith reducer.""" + + def test_basic_default_provided(self, sunz_ds1, sunz_sza): + """Test default settings with sza data available.""" + from satpy.modifiers.geometry import SunZenithReducer + comp = SunZenithReducer(name='sza_reduction_test_default', modifiers=tuple()) + res = comp((sunz_ds1, sunz_sza), test_attr='test') + np.testing.assert_allclose(res.values, + np.array([[0.00242814, 0.00235669], [0.00245885, 0.00238707]]), + rtol=1e-5) + + def test_basic_lims_provided(self, sunz_ds1, sunz_sza): + """Test custom settings with sza data available.""" + from satpy.modifiers.geometry import SunZenithReducer + comp = SunZenithReducer(name='sza_reduction_test_custom', modifiers=tuple(), + correction_limit=70, max_sza=95, strength=3.0) + res = comp((sunz_ds1, sunz_sza), test_attr='test') + np.testing.assert_allclose(res.values, + np.array([[0.01041319, 0.01030033], [0.01046164, 0.01034834]]), + rtol=1e-5) + + def test_invalid_max_sza(self, sunz_ds1, sunz_sza): + """Test invalid max_sza with sza data available.""" + from satpy.modifiers.geometry import SunZenithReducer + with pytest.raises(ValueError): + SunZenithReducer(name='sza_reduction_test_invalid', modifiers=tuple(), max_sza=None) + + class TestNIRReflectance(unittest.TestCase): """Test NIR reflectance compositor.""" From ba655df5cd61265a16dfa4d45a9a70fc7ea89385 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Fri, 25 Aug 2023 10:45:35 +0200 Subject: [PATCH 207/702] Refactor test to reduce code duplication. 
--- satpy/tests/test_modifiers.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 0ba788b4d1..f7cc37391d 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -159,21 +159,24 @@ def test_imcompatible_areas(self, sunz_ds2, sunz_sza): class TestSunZenithReducer: """Test case for the sun zenith reducer.""" - def test_basic_default_provided(self, sunz_ds1, sunz_sza): - """Test default settings with sza data available.""" + @classmethod + def setup_class(cls): + """Initialze SunZenithReducer classes that shall be tested.""" from satpy.modifiers.geometry import SunZenithReducer - comp = SunZenithReducer(name='sza_reduction_test_default', modifiers=tuple()) - res = comp((sunz_ds1, sunz_sza), test_attr='test') + cls.default = SunZenithReducer(name='sza_reduction_test_default', modifiers=tuple()) + cls.custom = SunZenithReducer(name='sza_reduction_test_custom', modifiers=tuple(), + correction_limit=70, max_sza=95, strength=3.0) + + def test_default_settings(self, sunz_ds1, sunz_sza): + """Test default settings with sza data available.""" + res = self.default((sunz_ds1, sunz_sza), test_attr='test') np.testing.assert_allclose(res.values, np.array([[0.00242814, 0.00235669], [0.00245885, 0.00238707]]), rtol=1e-5) - def test_basic_lims_provided(self, sunz_ds1, sunz_sza): + def test_custom_settings(self, sunz_ds1, sunz_sza): """Test custom settings with sza data available.""" - from satpy.modifiers.geometry import SunZenithReducer - comp = SunZenithReducer(name='sza_reduction_test_custom', modifiers=tuple(), - correction_limit=70, max_sza=95, strength=3.0) - res = comp((sunz_ds1, sunz_sza), test_attr='test') + res = self.custom((sunz_ds1, sunz_sza), test_attr='test') np.testing.assert_allclose(res.values, np.array([[0.01041319, 0.01030033], [0.01046164, 0.01034834]]), rtol=1e-5) From 2fa3dc871608169d2ea7efb20415d623be309cd0 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 20 Sep 2023 10:07:03 -0500 Subject: [PATCH 208/702] Remove use of deprecated setuptools_scm_git_archive build package --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index e27dcfd9e2..64c68d60eb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=45", "wheel", "setuptools_scm[toml]>=6.2", 'setuptools_scm_git_archive'] +requires = ["setuptools>=60", "wheel", "setuptools_scm[toml]>=8.0"] build-backend = "setuptools.build_meta" [tool.setuptools_scm] From 0fc4c911b77f60285e2a5d97c7e5a262a696cbf9 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 20 Sep 2023 10:46:13 -0500 Subject: [PATCH 209/702] Remove setuptools_scm hack in setup.py and exclude more files in sdist --- MANIFEST.in | 11 ++++++++--- setup.py | 8 -------- 2 files changed, 8 insertions(+), 11 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index 7c8ea0e146..1b2aca456e 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,11 @@ -include doc/Makefile -include doc/source/* -include doc/examples/*.py +prune * +exclude * +graft doc +recursive-exclude doc/build * +graft satpy include LICENSE.txt include README.rst +include AUTHORS.md +include CHANGELOG.md include satpy/version.py +global-exclude *.py[cod] diff --git a/setup.py b/setup.py index 2e6154ea92..b3eb4c5577 100644 --- a/setup.py +++ b/setup.py @@ -22,14 +22,6 @@ from setuptools import find_packages, setup -try: - # HACK: 
https://github.com/pypa/setuptools_scm/issues/190#issuecomment-351181286 - # Stop setuptools_scm from including all repository files - import setuptools_scm.integration - setuptools_scm.integration.find_files = lambda _: [] -except ImportError: - pass - requires = ['numpy >=1.13', 'pillow', 'pyresample >=1.24.0', 'trollsift', 'trollimage >=1.20', 'pykdtree', 'pyyaml >=5.1', 'xarray >=0.10.1, !=0.13.0', 'dask[array] >=0.17.1', 'pyproj>=2.2', 'zarr', 'donfig', 'appdirs', From a6d7b3a492e88266ab68000793c3f60bfe46f049 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 20 Sep 2023 13:47:35 -0500 Subject: [PATCH 210/702] Run sdist CI on pushes and PRs --- .github/workflows/deploy-sdist.yaml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index ba8653a9f2..53168e95a8 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -1,6 +1,8 @@ name: Deploy sdist on: + push: + pull_request: release: types: - published @@ -12,10 +14,14 @@ jobs: steps: - name: Checkout source uses: actions/checkout@v4 + with: + fetch-tags: true - name: Create sdist shell: bash -l {0} - run: python setup.py sdist + run: | + python -m pip install -q build + python -m build -s - name: Publish package to PyPI if: github.event.action == 'published' From a87020477c3eca459799561662be7f590a0d68e9 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 20 Sep 2023 14:26:53 -0500 Subject: [PATCH 211/702] Remove unnecessary fetch tags --- .github/workflows/deploy-sdist.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 53168e95a8..4ed63fefdd 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -14,8 +14,6 @@ jobs: steps: - name: Checkout source uses: actions/checkout@v4 - with: - fetch-tags: true - name: Create sdist shell: bash -l {0} From 72bdca2920ba367c95c3d197aa3fc6e5a4ee3047 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 20 Sep 2023 14:41:19 -0500 Subject: [PATCH 212/702] Add missing python package files in sdist --- MANIFEST.in | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/MANIFEST.in b/MANIFEST.in index 1b2aca456e..3a7cdb0b43 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -7,5 +7,10 @@ include LICENSE.txt include README.rst include AUTHORS.md include CHANGELOG.md +include SECURITY.md +include CITATION include satpy/version.py +include pyproject.toml +include setup.py +include setup.cfg global-exclude *.py[cod] From 1b154fbdd89667f26fdc48f35d104e7cee9c9c6c Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 21 Sep 2023 08:33:33 +0200 Subject: [PATCH 213/702] Remove unneeded performance tracker --- satpy/readers/seviri_l1b_hrit.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 1fedadb0e2..2b153edfcc 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -214,7 +214,7 @@ import copy import logging -from datetime import datetime, timedelta +from datetime import timedelta import dask.array as da import numpy as np @@ -721,7 +721,6 @@ def pad_hrv_data(self, res): def calibrate(self, data, calibration): """Calibrate the data.""" - tic = datetime.now() calib = SEVIRICalibrationHandler( platform_id=self.platform_id, channel_name=self.channel_name, @@ -730,7 +729,6 @@ def calibrate(self, data, calibration): 
scan_time=self.observation_start_time ) res = calib.calibrate(data, calibration) - logger.debug("Calibration time " + str(datetime.now() - tic)) return res def _mask_bad_quality(self, data): From c980ab2b2306b3a7e3490e7398c4bfab89a13369 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 21 Sep 2023 08:47:52 +0200 Subject: [PATCH 214/702] Fix cf tests for numpy 2 --- satpy/tests/writer_tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 54770b9176..82919dc65c 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -537,7 +537,7 @@ def get_test_attrs(self): 'numpy_bool': True, 'numpy_void': np.void(0), 'numpy_bytes': np.bytes_('test'), - 'numpy_string': np.string_('test'), + 'numpy_string': np.str_('test'), 'list': [1, 2, np.float64(3)], 'nested_list': ["1", ["2", [3]]], 'bool': True, From 3bfcc345b61fa7c3c6c72acfa26e5367f34b341b Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 21 Sep 2023 09:08:34 +0200 Subject: [PATCH 215/702] Replace np.string_ with np.bytes_ for numpy 2 in cf writer --- satpy/writers/cf_writer.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index b9a24b9292..ed149391bb 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -200,7 +200,7 @@ np.dtype('int32'), np.dtype('uint32'), np.dtype('int64'), np.dtype('uint64'), np.dtype('float32'), np.dtype('float64'), - np.string_] + np.bytes_] # Unsigned and int64 isn't CF 1.7 compatible # Note: Unsigned and int64 are CF 1.9 compatible @@ -209,7 +209,7 @@ np.dtype('int32'), np.dtype('float32'), np.dtype('float64'), - np.string_] + np.bytes_] CF_VERSION = 'CF-1.7' @@ -582,7 +582,7 @@ def _remove_satpy_attrs(new_data): def _format_prerequisites_attrs(dataarray): """Reformat prerequisites attribute value to string.""" if 'prerequisites' in dataarray.attrs: - dataarray.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in dataarray.attrs['prerequisites']] + dataarray.attrs['prerequisites'] = [np.bytes_(str(prereq)) for prereq in dataarray.attrs['prerequisites']] return dataarray From 88379e47ccd631a80654c4dbf60dc6fccdf7667a Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 21 Sep 2023 10:10:00 +0200 Subject: [PATCH 216/702] Remove other usages of np.string_ --- satpy/readers/hdf4_utils.py | 2 +- satpy/readers/utils.py | 2 +- satpy/tests/reader_tests/test_utils.py | 12 ++++++------ 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/readers/hdf4_utils.py b/satpy/readers/hdf4_utils.py index acc86fd64d..f8bf4ade79 100644 --- a/satpy/readers/hdf4_utils.py +++ b/satpy/readers/hdf4_utils.py @@ -69,7 +69,7 @@ def __init__(self, filename, filename_info, filetype_info): def _collect_attrs(self, name, attrs): for key, value in attrs.items(): value = np.squeeze(value) - if issubclass(value.dtype.type, (np.string_, np.unicode_)) and not value.shape: + if issubclass(value.dtype.type, (np.bytes_, np.unicode_)) and not value.shape: value = value.item() # convert to scalar if not isinstance(value, str): # python 3 - was scalar numpy array of bytes diff --git a/satpy/readers/utils.py b/satpy/readers/utils.py index 31f6dea6d9..e2035af479 100644 --- a/satpy/readers/utils.py +++ b/satpy/readers/utils.py @@ -54,7 +54,7 @@ def np2str(value): """ if hasattr(value, 'dtype') and \ - issubclass(value.dtype.type, (np.str_, np.string_, np.object_)) \ + 
issubclass(value.dtype.type, (np.str_, np.bytes_, np.object_)) \ and value.size == 1: value = value.item() if not isinstance(value, str): diff --git a/satpy/tests/reader_tests/test_utils.py b/satpy/tests/reader_tests/test_utils.py index 54b156e4c5..fc38e36c88 100644 --- a/satpy/tests/reader_tests/test_utils.py +++ b/satpy/tests/reader_tests/test_utils.py @@ -202,20 +202,20 @@ def test_sub_area(self, adef): def test_np2str(self): """Test the np2str function.""" # byte object - npstring = np.string_('hej') - self.assertEqual(hf.np2str(npstring), 'hej') + npbytes = np.bytes_('hej') + self.assertEqual(hf.np2str(npbytes), 'hej') # single element numpy array - np_arr = np.array([npstring]) + np_arr = np.array([npbytes]) self.assertEqual(hf.np2str(np_arr), 'hej') # scalar numpy array - np_arr = np.array(npstring) + np_arr = np.array(npbytes) self.assertEqual(hf.np2str(np_arr), 'hej') # multi-element array - npstring = np.array([npstring, npstring]) - self.assertRaises(ValueError, hf.np2str, npstring) + npbytes = np.array([npbytes, npbytes]) + self.assertRaises(ValueError, hf.np2str, npbytes) # non-array self.assertRaises(ValueError, hf.np2str, 5) From 18da8fdd79b7f3b516ac3408981efb6ad0df9c35 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 21 Sep 2023 10:58:52 +0200 Subject: [PATCH 217/702] Use np.float64 instead of np.float_ --- satpy/readers/aapp_mhs_amsub_l1c.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/aapp_mhs_amsub_l1c.py b/satpy/readers/aapp_mhs_amsub_l1c.py index 39216431f4..f5765545f3 100644 --- a/satpy/readers/aapp_mhs_amsub_l1c.py +++ b/satpy/readers/aapp_mhs_amsub_l1c.py @@ -152,7 +152,7 @@ def _calibrate(data, if calib_type == 'counts': return channel - channel = channel.astype(np.float_) + channel = channel.astype(np.float64) return da.where(mask, channel, np.nan) From 6f77d18191cd1a6816caf114a13ea33636a63202 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 21 Sep 2023 11:01:20 +0200 Subject: [PATCH 218/702] Use np.str_ instead of np.unicode_ --- satpy/readers/hdf4_utils.py | 2 +- satpy/writers/cf_writer.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/hdf4_utils.py b/satpy/readers/hdf4_utils.py index f8bf4ade79..fb20c0ce11 100644 --- a/satpy/readers/hdf4_utils.py +++ b/satpy/readers/hdf4_utils.py @@ -69,7 +69,7 @@ def __init__(self, filename, filename_info, filetype_info): def _collect_attrs(self, name, attrs): for key, value in attrs.items(): value = np.squeeze(value) - if issubclass(value.dtype.type, (np.bytes_, np.unicode_)) and not value.shape: + if issubclass(value.dtype.type, (np.bytes_, np.str_)) and not value.shape: value = value.item() # convert to scalar if not isinstance(value, str): # python 3 - was scalar numpy array of bytes diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index ed149391bb..702e25c2fa 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -192,7 +192,7 @@ if netCDF4 is None and h5netcdf is None: raise ImportError('Ensure that the netCDF4 or h5netcdf package is installed.') -# Numpy datatypes compatible with all netCDF4 backends. ``np.unicode_`` is +# Numpy datatypes compatible with all netCDF4 backends. 
``np.str_`` is # excluded because h5py (and thus h5netcdf) has problems with unicode, see # https://github.com/h5py/h5py/issues/624.""" NC4_DTYPES = [np.dtype('int8'), np.dtype('uint8'), From d1a433d6fc39dec0c28713cd1e96e6acd89a9a35 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 26 Sep 2023 10:44:17 -0500 Subject: [PATCH 219/702] Remove unused chunk size in modis readers --- satpy/readers/hdfeos_base.py | 2 -- satpy/readers/modis_l1b.py | 2 -- 2 files changed, 4 deletions(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 172fd6d808..8ad2944dd6 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -33,10 +33,8 @@ from satpy import DataID from satpy.readers.file_handlers import BaseFileHandler -from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) -CHUNK_SIZE = get_legacy_chunk_size() def interpolate(clons, clats, csatz, src_resolution, dst_resolution): diff --git a/satpy/readers/modis_l1b.py b/satpy/readers/modis_l1b.py index 4b11560936..1d0e209d57 100644 --- a/satpy/readers/modis_l1b.py +++ b/satpy/readers/modis_l1b.py @@ -78,10 +78,8 @@ from satpy.readers.hdf4_utils import from_sds from satpy.readers.hdfeos_base import HDFEOSBaseFileReader, HDFEOSGeoReader -from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) -CHUNK_SIZE = get_legacy_chunk_size() class HDFEOSBandReader(HDFEOSBaseFileReader): From d4ce372ba7a0048cf1dfd70911ca71e4096d6ed2 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 26 Sep 2023 15:34:55 -0500 Subject: [PATCH 220/702] Fix chunk checks in MODIS L1b tests --- satpy/tests/reader_tests/modis_tests/test_modis_l1b.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py index 92d7962cef..53f0ca46ce 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py @@ -195,7 +195,8 @@ def test_load_vis_saturation(self, mask_saturated, modis_l1b_nasa_mod021km_file) scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file, reader_kwargs={"mask_saturated": mask_saturated}) dataset_name = '2' - scene.load([dataset_name]) + with dask.config.set({'array.chunk-size': '1 MiB'}): + scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset.shape == _shape_for_resolution(1000) assert dataset.attrs['resolution'] == 1000 From 0300434fbcf20a9cc8e1d0ef55e13090e24b3c09 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 27 Sep 2023 14:04:00 -0500 Subject: [PATCH 221/702] Remove compatibility with Python 3.8 and below --- .pre-commit-config.yaml | 2 +- satpy/_compat.py | 71 +---------------------------------------- satpy/_config.py | 13 +------- satpy/resample.py | 9 +----- 4 files changed, 4 insertions(+), 91 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3999be8b04..3ce81859ae 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -28,7 +28,7 @@ repos: - types-pkg-resources - types-PyYAML - types-requests - args: ["--python-version", "3.8", "--ignore-missing-imports"] + args: ["--python-version", "3.9", "--ignore-missing-imports"] - repo: https://github.com/pycqa/isort rev: 5.12.0 hooks: diff --git a/satpy/_compat.py b/satpy/_compat.py index b49b5a961b..aad2009db3 100644 --- a/satpy/_compat.py +++ b/satpy/_compat.py @@ -17,70 +17,7 @@ # satpy. If not, see . 
"""Backports and compatibility fixes for satpy.""" -from threading import RLock - -_NOT_FOUND = object() - - -class CachedPropertyBackport: - """Backport of cached_property from Python-3.8. - - Source: https://github.com/python/cpython/blob/v3.8.0/Lib/functools.py#L930 - """ - - def __init__(self, func): # noqa - self.func = func - self.attrname = None - self.__doc__ = func.__doc__ - self.lock = RLock() - - def __set_name__(self, owner, name): # noqa - if self.attrname is None: - self.attrname = name - elif name != self.attrname: - raise TypeError( - "Cannot assign the same cached_property to two different names " - f"({self.attrname!r} and {name!r})." - ) - - def __get__(self, instance, owner=None): # noqa - if instance is None: - return self - if self.attrname is None: - raise TypeError( - "Cannot use cached_property instance without calling __set_name__ on it.") - try: - cache = instance.__dict__ # noqa - except AttributeError: # not all objects have __dict__ (e.g. class defines slots) - msg = ( - f"No '__dict__' attribute on {type(instance).__name__!r} " - f"instance to cache {self.attrname!r} property." - ) - raise TypeError(msg) from None - val = cache.get(self.attrname, _NOT_FOUND) - if val is _NOT_FOUND: - with self.lock: - # check if another thread filled cache while we awaited lock - val = cache.get(self.attrname, _NOT_FOUND) - if val is _NOT_FOUND: - val = self.func(instance) - try: - cache[self.attrname] = val - except TypeError: - msg = ( - f"The '__dict__' attribute on {type(instance).__name__!r} instance " - f"does not support item assignment for caching {self.attrname!r} property." - ) - raise TypeError(msg) from None - return val - - -try: - from functools import cached_property # type: ignore -except ImportError: - # for python < 3.8 - cached_property = CachedPropertyBackport # type: ignore - +from functools import cache, cached_property # noqa try: from numpy.typing import ArrayLike, DTypeLike # noqa @@ -88,9 +25,3 @@ def __get__(self, instance, owner=None): # noqa # numpy <1.20 from numpy import dtype as DTypeLike # noqa from numpy import ndarray as ArrayLike # noqa - - -try: - from functools import cache # type: ignore -except ImportError: - from functools import lru_cache as cache # noqa diff --git a/satpy/_config.py b/satpy/_config.py index 4abc00aba2..7a0d7aaac3 100644 --- a/satpy/_config.py +++ b/satpy/_config.py @@ -26,20 +26,9 @@ import tempfile from collections import OrderedDict from importlib.metadata import EntryPoint, entry_points -from pathlib import Path +from importlib.resources import files as impr_files from typing import Iterable -try: - from importlib.resources import files as impr_files # type: ignore -except ImportError: - # Python 3.8 - def impr_files(module_name: str) -> Path: - """Get path to module as a backport for Python 3.8.""" - from importlib.resources import path as impr_path - - with impr_path(module_name, "__init__.py") as pkg_init_path: - return pkg_init_path.parent - import appdirs from donfig import Config diff --git a/satpy/resample.py b/satpy/resample.py index b124c84933..289371d8cb 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -143,6 +143,7 @@ import os import warnings from logging import getLogger +from math import lcm # type: ignore from weakref import WeakValueDictionary import dask @@ -157,14 +158,6 @@ from satpy.utils import PerformanceWarning, get_legacy_chunk_size -try: - from math import lcm # type: ignore -except ImportError: - def lcm(a, b): - """Get 'Least Common Multiple' with Python 3.8 compatibility.""" - 
from math import gcd - return abs(a * b) // gcd(a, b) - try: from pyresample.resampler import BaseResampler as PRBaseResampler except ImportError: From 482f3e27bacb2b77fa2100d6682e0eecf832fa0d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 27 Sep 2023 14:09:16 -0500 Subject: [PATCH 222/702] Add initial utility function for resolution-based chunking --- satpy/readers/hdfeos_base.py | 24 +++----- satpy/utils.py | 106 +++++++++++++++++++++++++++++++++++ 2 files changed, 114 insertions(+), 16 deletions(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 8ad2944dd6..751d286828 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -25,7 +25,6 @@ from contextlib import suppress from datetime import datetime -import dask.array.core import numpy as np import xarray as xr from pyhdf.error import HDF4Error @@ -33,6 +32,7 @@ from satpy import DataID from satpy.readers.file_handlers import BaseFileHandler +from satpy.utils import chunks_by_resolution logger = logging.getLogger(__name__) @@ -227,21 +227,13 @@ def _chunks_for_variable(self, hdf_dataset): scan_length_250m = 40 var_shape = hdf_dataset.info()[2] res_multiplier = self._get_res_multiplier(var_shape) - non_yx_chunks = tuple() - if len(var_shape) == 3: - # assume (band, y, x) - non_yx_chunks = ((1,) * var_shape[0],) - var_shape = var_shape[-2:] - elif len(var_shape) != 2: - # don't guess - return dask.array.core.normalize_chunks("auto", shape=var_shape, dtype=np.float32) - shape_for_250m = tuple(dim_size * res_multiplier for dim_size in var_shape) - chunks_for_250m = dask.array.core.normalize_chunks(("auto", -1), shape=shape_for_250m, dtype=np.float32) - row_chunks_for_250m = chunks_for_250m[0][0] - scanbased_row_chunks_for_250m = np.round(row_chunks_for_250m / scan_length_250m) * scan_length_250m - var_row_chunks = scanbased_row_chunks_for_250m / res_multiplier - var_row_chunks = max(var_row_chunks, scan_length_250m / res_multiplier) # avoid getting 0 chunk size - return non_yx_chunks + (var_row_chunks, -1) + return chunks_by_resolution( + var_shape, + np.float32, + scan_length_250m, + res_multiplier, + whole_scan_width=True + ) @staticmethod def _get_res_multiplier(var_shape): diff --git a/satpy/utils.py b/satpy/utils.py index a9785a544a..20bd604104 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -35,6 +35,8 @@ import yaml from yaml import BaseLoader, UnsafeLoader +from satpy._compat import DTypeLike + _is_logging_on = False TRACE_LEVEL = 5 @@ -631,6 +633,110 @@ def _get_pytroll_chunk_size(): return None +def chunks_by_resolution( + input_shape: tuple[int, ...], + input_dtype: DTypeLike, + num_high_res_elements: int, + low_res_multiplier: int, + whole_scan_width: bool = False, +) -> tuple[int, ...]: + """Compute dask chunk sizes based on data resolution. + + First, chunks are computed for the highest resolution version of the data. + This is done by multiplying the input array shape by the + ``low_res_multiplier`` and then using Dask's utility functions and + configuration to produce a chunk size to fit into a specific number of + bytes. See :ref:`dask:automatic-chunking` for more information. + Next, the same multiplier is used to reduce the high resolution chunk sizes + to the lower resolution of the input data. The end result of reading + multiple resolutions of data is that each dask chunk covers the same + geographic region. This also means replicating or aggregating one + resolution and then combining arrays should not require any rechunking. + + .. 
note:: + + Only 2 or 3-dimensional shapes are supported. In the case of 3D arrays + the first dimension is assumed to be "bands" and is given a chunk + size of 1. For shapes with other numbers of dimensions, the chunk size + for the entire array is determined by dask's "auto" chunking and + resolution is ignored. + + Args: + input_shape: Shape of the array to compute dask chunk size for. + input_dtype: Dtype for the final unscaled array. This is usually + 32-bit float (``np.float32``) or 64-bit float (``np.float64``) + for non-category data. If this doesn't represent the final data + type of the data then the final size of chunks in memory will not + match the user's request via dask's ``array.chunk-size`` + configuration. + num_high_res_elements: Smallest number of high (fine) resolution + elements that make up a single "unit" or chunk of data. This could + be a multiple or factor of the scan size for some instruments and/or + could be based on the on-disk chunk size. This value ensures that + chunks are aligned to the underlying data structure for best + performance. + low_res_multiplier: Number of high (fine) resolution pixels that fit + in a single low (coarse) resolution pixel. + whole_scan_width: To create the entire width (x dimension) of the + array as a single chunk. This is useful in cases when future + operations will operate on entire instrument scans of data at + a time. For example, polar-orbiter scan geolocation being + interpolated from low resolution to high resolution. + + Returns: + A tuple where each element is the chunk size for that axis/dimension. + + """ + if len(input_shape) not in (2, 3): + # we're not sure about this shape so don't guess + return dask.array.core.normalize_chunks("auto", shape=input_shape, dtype=input_dtype) + + pre_non_yx_chunks, yx_shape, post_non_yx_chunks = _split_non_yx_chunks(input_shape) + high_res_shape = tuple(dim_size * low_res_multiplier for dim_size in yx_shape) + col_chunks = -1 if whole_scan_width else "auto" + chunks_for_high_res = dask.array.core.normalize_chunks( + ("auto", col_chunks), + shape=high_res_shape, + dtype=input_dtype + ) + var_row_chunks = _low_res_chunks_from_high_res( + chunks_for_high_res[0][0], + num_high_res_elements, + low_res_multiplier + ) + var_col_chunks = -1 + if not whole_scan_width: + var_col_chunks = _low_res_chunks_from_high_res( + chunks_for_high_res[1][0], + num_high_res_elements, + low_res_multiplier + ) + return pre_non_yx_chunks + (var_row_chunks, var_col_chunks) + post_non_yx_chunks + + +def _split_non_yx_chunks( + input_shape: tuple[int, ...], +) -> tuple[tuple[int, ...] | tuple[()], tuple[int, int], tuple[int, ...] | tuple[()]]: + pre_non_yx_chunks: tuple[int, ...] = tuple() + post_non_yx_chunks: tuple[int, ...] 
= tuple() + yx_shape = (input_shape[-2], input_shape[-1]) + if len(input_shape) == 3: + # assume (band, y, x) + pre_non_yx_chunks = (1,) + return pre_non_yx_chunks, yx_shape, post_non_yx_chunks + + +def _low_res_chunks_from_high_res( + chunk_size_for_high_res: int, + num_high_res_elements: int, + low_res_multiplier: int +) -> int: + aligned_chunk_size = np.round(chunk_size_for_high_res / num_high_res_elements) * num_high_res_elements + low_res_chunk_size = aligned_chunk_size / low_res_multiplier + # avoid getting 0 chunk size + return max(low_res_chunk_size, num_high_res_elements / low_res_multiplier) + + def convert_remote_files_to_fsspec(filenames, storage_options=None): """Check filenames for transfer protocols, convert to FSFile objects if possible.""" if storage_options is None: From eba9c8221d29ffd7090941f459a52bd3c85c52aa Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 28 Sep 2023 11:18:51 +0200 Subject: [PATCH 223/702] Add mastodon link --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index b3eb4c5577..612db4fa05 100644 --- a/setup.py +++ b/setup.py @@ -142,6 +142,7 @@ def _config_data_files(base_dirs, extensions=(".cfg", )): "Slack": "https://pytroll.slack.com/", "Twitter": "https://twitter.com/hashtag/satpy?src=hashtag_click", "Release Notes": "https://github.com/pytroll/satpy/blob/main/CHANGELOG.md", + "Mastodon": "https://fosstodon.org/tags/satpy", }, packages=find_packages(), # Always use forward '/', even on Windows From 2125eb26706c6a1d6a5193dcb3e6496e90471449 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 28 Sep 2023 14:30:25 +0200 Subject: [PATCH 224/702] Fix cf tests for new xarray release --- satpy/tests/writer_tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 54770b9176..a325cb9cc8 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -1448,5 +1448,5 @@ def _should_use_compression_keyword(): versions = _get_backend_versions() return ( versions["libnetcdf"] >= Version("4.9.0") and - versions["xarray"] >= Version("2023.9") + versions["xarray"] >= Version("2023.10") ) From dcb87ffce0363221eff189cce575b908aed9f69d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 28 Sep 2023 08:40:58 -0500 Subject: [PATCH 225/702] Remove tests for removed compatibility code --- satpy/tests/test_compat.py | 48 -------------------------------------- 1 file changed, 48 deletions(-) delete mode 100644 satpy/tests/test_compat.py diff --git a/satpy/tests/test_compat.py b/satpy/tests/test_compat.py deleted file mode 100644 index f084f88e53..0000000000 --- a/satpy/tests/test_compat.py +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (c) 2022 Satpy developers -# -# This file is part of satpy. -# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see . 
-"""Test backports and compatibility fixes.""" - -import gc - -from satpy._compat import CachedPropertyBackport - - -class ClassWithCachedProperty: # noqa - def __init__(self, x): # noqa - self.x = x - - @CachedPropertyBackport - def property(self): # noqa - return 2 * self.x - - -def test_cached_property_backport(): - """Test cached property backport.""" - c = ClassWithCachedProperty(1) - assert c.property == 2 - - -def test_cached_property_backport_releases_memory(): - """Test that cached property backport releases memory.""" - c1 = ClassWithCachedProperty(2) - del c1 - instances = [ - obj for obj in gc.get_objects() - if isinstance(obj, ClassWithCachedProperty) - ] - assert len(instances) == 0 From e0a8ea4bff608ee316c61f7de58161d20e5e2921 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 28 Sep 2023 13:48:18 -0500 Subject: [PATCH 226/702] Convert some utility tests to pytest --- satpy/tests/test_utils.py | 255 +++++++++++++------------------------- 1 file changed, 87 insertions(+), 168 deletions(-) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 56dbe25324..5babc29804 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -21,6 +21,7 @@ import typing import unittest import warnings +from math import sqrt from unittest import mock import dask.array as da @@ -44,182 +45,100 @@ # - caplog -class TestUtils(unittest.TestCase): - """Testing utils.""" +class TestGeoUtils: + """Testing geo-related utility functions.""" - def test_lonlat2xyz(self): - """Test the lonlat2xyz function.""" - x__, y__, z__ = lonlat2xyz(0, 0) - self.assertAlmostEqual(x__, 1) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = lonlat2xyz(90, 0) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 1) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = lonlat2xyz(0, 90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 1) - - x__, y__, z__ = lonlat2xyz(180, 0) - self.assertAlmostEqual(x__, -1) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = lonlat2xyz(-90, 0) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, -1) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = lonlat2xyz(0, -90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, -1) - - x__, y__, z__ = lonlat2xyz(0, 45) - self.assertAlmostEqual(x__, np.sqrt(2) / 2) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, np.sqrt(2) / 2) - - x__, y__, z__ = lonlat2xyz(0, 60) - self.assertAlmostEqual(x__, np.sqrt(1) / 2) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, np.sqrt(3) / 2) - - def test_angle2xyz(self): + @pytest.mark.parametrize( + ("lonlat", "xyz"), + [ + ((0, 0), (1, 0, 0)), + ((90, 0), (0, 1, 0)), + ((0, 90), (0, 0, 1)), + ((180, 0), (-1, 0, 0)), + ((-90, 0), (0, -1, 0)), + ((0, -90), (0, 0, -1)), + ((0, 45), (sqrt(2) / 2, 0, sqrt(2) / 2)), + ((0, 60), (sqrt(1) / 2, 0, sqrt(3) / 2)), + ], + ) + def test_lonlat2xyz(self, lonlat, xyz): """Test the lonlat2xyz function.""" - x__, y__, z__ = angle2xyz(0, 0) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 1) - - x__, y__, z__ = angle2xyz(90, 0) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 1) - - x__, y__, z__ = angle2xyz(0, 90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 1) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = angle2xyz(180, 0) 
- self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 1) - - x__, y__, z__ = angle2xyz(-90, 0) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 1) - - x__, y__, z__ = angle2xyz(0, -90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, -1) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = angle2xyz(90, 90) - self.assertAlmostEqual(x__, 1) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = angle2xyz(-90, 90) - self.assertAlmostEqual(x__, -1) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = angle2xyz(180, 90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, -1) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = angle2xyz(0, -90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, -1) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = angle2xyz(0, 45) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, np.sqrt(2) / 2) - self.assertAlmostEqual(z__, np.sqrt(2) / 2) - - x__, y__, z__ = angle2xyz(0, 60) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, np.sqrt(3) / 2) - self.assertAlmostEqual(z__, np.sqrt(1) / 2) - - def test_xyz2lonlat(self): - """Test xyz2lonlat.""" - lon, lat = xyz2lonlat(1, 0, 0) - self.assertAlmostEqual(lon, 0) - self.assertAlmostEqual(lat, 0) - - lon, lat = xyz2lonlat(0, 1, 0) - self.assertAlmostEqual(lon, 90) - self.assertAlmostEqual(lat, 0) - - lon, lat = xyz2lonlat(0, 0, 1, asin=True) - self.assertAlmostEqual(lon, 0) - self.assertAlmostEqual(lat, 90) + x__, y__, z__ = lonlat2xyz(*lonlat) + assert x__ == pytest.approx(xyz[0]) + assert y__ == pytest.approx(xyz[1]) + assert z__ == pytest.approx(xyz[2]) - lon, lat = xyz2lonlat(0, 0, 1) - self.assertAlmostEqual(lon, 0) - self.assertAlmostEqual(lat, 90) + @pytest.mark.parametrize( + ("azizen", "xyz"), + [ + ((0, 0), (0, 0, 1)), + ((90, 0), (0, 0, 1)), + ((0, 90), (0, 1, 0)), + ((180, 0), (0, 0, 1)), + ((-90, 0), (0, 0, 1)), + ((0, -90), (0, -1, 0)), + ((90, 90), (1, 0, 0)), + ((-90, 90), (-1, 0, 0)), + ((180, 90), (0, -1, 0)), + ((0, -90), (0, -1, 0)), + ((0, 45), (0, sqrt(2) / 2, sqrt(2) / 2)), + ((0, 60), (0, sqrt(3) / 2, sqrt(1) / 2)), + ], + ) + def test_angle2xyz(self, azizen, xyz): + """Test the angle2xyz function.""" + x__, y__, z__ = angle2xyz(*azizen) + assert x__ == pytest.approx(xyz[0]) + assert y__ == pytest.approx(xyz[1]) + assert z__ == pytest.approx(xyz[2]) - lon, lat = xyz2lonlat(np.sqrt(2) / 2, np.sqrt(2) / 2, 0) - self.assertAlmostEqual(lon, 45) - self.assertAlmostEqual(lat, 0) + @pytest.mark.parametrize( + ("xyz", "asin", "lonlat"), + [ + ((1, 0, 0), False, (0, 0)), + ((0, 1, 0), False, (90, 0)), + ((0, 0, 1), True, (0, 90)), + ((0, 0, 1), False, (0, 90)), + ((sqrt(2) / 2, sqrt(2) / 2, 0), False, (45, 0)), + ], + ) + def test_xyz2lonlat(self, xyz, asin, lonlat): + """Test xyz2lonlat.""" + lon, lat = xyz2lonlat(*xyz, asin=asin) + assert lon == pytest.approx(lonlat[0]) + assert lat == pytest.approx(lonlat[1]) - def test_xyz2angle(self): + @pytest.mark.parametrize( + ("xyz", "acos", "azizen"), + [ + ((1, 0, 0), False, (90, 90)), + ((0, 1, 0), False, (0, 90)), + ((0, 0, 1), False, (0, 0)), + ((0, 0, 1), True, (0, 0)), + ((sqrt(2) / 2, sqrt(2) / 2, 0), False, (45, 90)), + ((-1, 0, 0), False, (-90, 90)), + ((0, -1, 0), False, (180, 90)), + ], + ) + def test_xyz2angle(self, xyz, acos, azizen): """Test xyz2angle.""" - azi, zen = xyz2angle(1, 0, 0) - self.assertAlmostEqual(azi, 90) - 
self.assertAlmostEqual(zen, 90) - - azi, zen = xyz2angle(0, 1, 0) - self.assertAlmostEqual(azi, 0) - self.assertAlmostEqual(zen, 90) - - azi, zen = xyz2angle(0, 0, 1) - self.assertAlmostEqual(azi, 0) - self.assertAlmostEqual(zen, 0) - - azi, zen = xyz2angle(0, 0, 1, acos=True) - self.assertAlmostEqual(azi, 0) - self.assertAlmostEqual(zen, 0) + azi, zen = xyz2angle(*xyz, acos=acos) + assert azi == pytest.approx(azi) + assert zen == pytest.approx(zen) - azi, zen = xyz2angle(np.sqrt(2) / 2, np.sqrt(2) / 2, 0) - self.assertAlmostEqual(azi, 45) - self.assertAlmostEqual(zen, 90) - - azi, zen = xyz2angle(-1, 0, 0) - self.assertAlmostEqual(azi, -90) - self.assertAlmostEqual(zen, 90) - - azi, zen = xyz2angle(0, -1, 0) - self.assertAlmostEqual(azi, 180) - self.assertAlmostEqual(zen, 90) - - def test_proj_units_to_meters(self): + @pytest.mark.parametrize( + ("prj", "exp_prj"), + [ + ("+asd=123123123123", "+asd=123123123123"), + ("+a=6378.137", "+a=6378137.000"), + ("+a=6378.137 +units=km", "+a=6378137.000"), + ("+a=6378.137 +b=6378.137", "+a=6378137.000 +b=6378137.000"), + ("+a=6378.137 +b=6378.137 +h=35785.863", "+a=6378137.000 +b=6378137.000 +h=35785863.000"), + ], + ) + def test_proj_units_to_meters(self, prj, exp_prj): """Test proj units to meters conversion.""" - prj = '+asd=123123123123' - res = proj_units_to_meters(prj) - self.assertEqual(res, prj) - prj = '+a=6378.137' - res = proj_units_to_meters(prj) - self.assertEqual(res, '+a=6378137.000') - prj = '+a=6378.137 +units=km' - res = proj_units_to_meters(prj) - self.assertEqual(res, '+a=6378137.000') - prj = '+a=6378.137 +b=6378.137' - res = proj_units_to_meters(prj) - self.assertEqual(res, '+a=6378137.000 +b=6378137.000') - prj = '+a=6378.137 +b=6378.137 +h=35785.863' - res = proj_units_to_meters(prj) - self.assertEqual(res, '+a=6378137.000 +b=6378137.000 +h=35785863.000') + assert proj_units_to_meters(prj) == exp_prj class TestGetSatPos: From 37170e11f7f9b9ba1d2271c02c7b268aa2a6cc3a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 29 Sep 2023 13:23:33 -0500 Subject: [PATCH 227/702] Add tests for resolution-based chunking utilities --- satpy/tests/test_utils.py | 36 ++++++++++++++++++++++++++++++++++++ satpy/utils.py | 8 +++++--- 2 files changed, 41 insertions(+), 3 deletions(-) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 5babc29804..b45885f312 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -416,6 +416,42 @@ def test_get_legacy_chunk_size(): assert get_legacy_chunk_size() == 2048 +@pytest.mark.parametrize( + ("shape", "chunk_dtype", "num_hr", "lr_mult", "scan_width", "exp_result"), + [ + ((1000, 3200), np.float32, 40, 4, True, (160, -1)), # 1km swath + ((1000 // 5, 3200 // 5), np.float32, 40, 20, True, (160 // 5, -1)), # 5km swath + ((1000 * 4, 3200 * 4), np.float32, 40, 1, True, (160 * 4, -1)), # 250m swath + ((21696 // 2, 21696 // 2), np.float32, 226, 2, False, (1469, 1469)), # 1km area (ABI chunk 226) + ((21696 // 2, 21696 // 2), np.float64, 226, 2, False, (1017, 1017)), # 1km area (64-bit) + ((21696 // 3, 21696 // 3), np.float32, 226, 6, False, (1469 // 3, 1469 // 3)), # 3km area + ((21696, 21696), np.float32, 226, 1, False, (1469 * 2, 1469 * 2)), # 500m area + ((7, 1000 * 4, 3200 * 4), np.float32, 40, 1, True, (1, 160 * 4, -1)), # 250m swath with bands + ((1, 7, 1000, 3200), np.float32, 40, 1, True, ((1,), (7,), (1000,), (1198, 1198, 804))), # lots of dimensions + ], +) +def test_resolution_chunking(shape, chunk_dtype, num_hr, lr_mult, scan_width, exp_result): + """Test 
chunks_by_resolution helper function.""" + import dask.config + + from satpy.utils import chunks_by_resolution + + with dask.config.set({"array.chunk-size": "32MiB"}): + chunk_results = chunks_by_resolution( + shape, + chunk_dtype, + num_hr, + lr_mult, + whole_scan_width=scan_width, + ) + assert chunk_results == exp_result + for chunk_size in chunk_results: + assert isinstance(chunk_size[0], int) if isinstance(chunk_size, tuple) else isinstance(chunk_size, int) + + # make sure the chunks are understandable by dask + da.zeros(shape, dtype=chunk_dtype, chunks=chunk_results) + + def test_convert_remote_files_to_fsspec_local_files(): """Test convertion of remote files to fsspec objects. diff --git a/satpy/utils.py b/satpy/utils.py index 20bd604104..28212c6510 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -639,7 +639,7 @@ def chunks_by_resolution( num_high_res_elements: int, low_res_multiplier: int, whole_scan_width: bool = False, -) -> tuple[int, ...]: +) -> tuple[int | tuple[int, ...], ...]: """Compute dask chunk sizes based on data resolution. First, chunks are computed for the highest resolution version of the data. @@ -668,7 +668,9 @@ def chunks_by_resolution( for non-category data. If this doesn't represent the final data type of the data then the final size of chunks in memory will not match the user's request via dask's ``array.chunk-size`` - configuration. + configuration. Sometimes it is useful to keep this as a single + dtype for all reading functionality (ex. ``np.float32``) in order + to keep all read variable chunks the same size regardless of dtype. num_high_res_elements: Smallest number of high (fine) resolution elements that make up a single "unit" or chunk of data. This could be a multiple or factor of the scan size for some instruments and/or @@ -734,7 +736,7 @@ def _low_res_chunks_from_high_res( aligned_chunk_size = np.round(chunk_size_for_high_res / num_high_res_elements) * num_high_res_elements low_res_chunk_size = aligned_chunk_size / low_res_multiplier # avoid getting 0 chunk size - return max(low_res_chunk_size, num_high_res_elements / low_res_multiplier) + return int(max(low_res_chunk_size, num_high_res_elements / low_res_multiplier)) def convert_remote_files_to_fsspec(filenames, storage_options=None): From 4954d9607e85d345c0119936e6024e7b5490955a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 3 Oct 2023 15:51:00 -0500 Subject: [PATCH 228/702] Fix reference to dask documentation --- satpy/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/utils.py b/satpy/utils.py index 28212c6510..94f7fbf86e 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -646,7 +646,7 @@ def chunks_by_resolution( This is done by multiplying the input array shape by the ``low_res_multiplier`` and then using Dask's utility functions and configuration to produce a chunk size to fit into a specific number of - bytes. See :ref:`dask:automatic-chunking` for more information. + bytes. See :doc:`dask:array-chunks` for more information. Next, the same multiplier is used to reduce the high resolution chunk sizes to the lower resolution of the input data. 
The end result of reading multiple resolutions of data is that each dask chunk covers the same From 82c57c74dec625a339c633c785a32e57375f9757 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 3 Oct 2023 20:46:14 -0500 Subject: [PATCH 229/702] Add night_microphysics_eum alias for ABI composites --- satpy/etc/composites/abi.yaml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml index 783bd5187a..1437b91df4 100644 --- a/satpy/etc/composites/abi.yaml +++ b/satpy/etc/composites/abi.yaml @@ -555,6 +555,22 @@ composites: - name: C13 standard_name: night_microphysics + night_microphysics_eum: + description: > + Nighttime Microphysics RGB following the EUMETSAT recipe + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: C15 + - name: C14 + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: C14 + - name: C07 + - name: C14 + standard_name: night_microphysics + fire_temperature_awips: description: > Fire Temperature RGB, for GOESR: NASA, NOAA From 04cfbd521d905a3bfe72b9f51044654610a3b00b Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 4 Oct 2023 09:12:02 +0200 Subject: [PATCH 230/702] Make caching fail if one of the args is unhashable --- satpy/modifiers/angles.py | 2 +- satpy/tests/modifier_tests/test_angles.py | 12 ++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 28adb60028..2f904a1a02 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -250,7 +250,7 @@ def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES): hashable_args = [] for arg in args: if isinstance(arg, unhashable_types): - continue + raise TypeError(f"Unhashable type in function signature ({type(arg)}), cannot be cached.") if isinstance(arg, HASHABLE_GEOMETRIES): arg = hash(arg) elif isinstance(arg, datetime): diff --git a/satpy/tests/modifier_tests/test_angles.py b/satpy/tests/modifier_tests/test_angles.py index cd5082a5b7..46a8a8443f 100644 --- a/satpy/tests/modifier_tests/test_angles.py +++ b/satpy/tests/modifier_tests/test_angles.py @@ -322,6 +322,18 @@ def _fake_func(shape, chunks): satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)): _fake_func((5, 5), ((5,), (5,))) + def test_caching_with_array_in_args_fails(self, tmp_path): + """Test that trying to cache with non-dask arrays fails.""" + from satpy.modifiers.angles import cache_to_zarr_if + + @cache_to_zarr_if("cache_lonlats") + def _fake_func(array): + return array + 1 + + with pytest.raises(TypeError), \ + satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)): + _fake_func(da.zeros(100)) + def test_no_cache_dir_fails(self, tmp_path): """Test that 'cache_dir' not being set fails.""" from satpy.modifiers.angles import _get_sensor_angles_from_sat_pos, get_angles From 9ad6b161219f87cf94df251390a0dfa2aeab811b Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 4 Oct 2023 09:15:35 +0200 Subject: [PATCH 231/702] Remove redundant hashability check --- satpy/modifiers/angles.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 2f904a1a02..01e7c1ff2d 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -163,8 +163,6 @@ def __call__(self, *args, cache_dir: Optional[str] = None) -> Any: def 
_get_should_cache_and_cache_dir(self, args, cache_dir: Optional[str]) -> tuple[bool, str]: should_cache: bool = satpy.config.get(self._cache_config_key, False) - can_cache = not any(isinstance(arg, self._uncacheable_arg_types) for arg in args) - should_cache = should_cache and can_cache cache_dir = self._get_cache_dir_from_config(cache_dir) return should_cache, cache_dir From 6e87d199e202f14e3f6460f68f92da86489a2909 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 4 Oct 2023 09:33:18 +0200 Subject: [PATCH 232/702] Replace flake8 with ruff in pre-commit --- .github/workflows/ci.yaml | 4 ++-- .pre-commit-config.yaml | 10 +++++----- pyproject.toml | 15 +++++++++++++++ 3 files changed, 22 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9897f8886a..9635d9efb8 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -24,13 +24,13 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install flake8 flake8-docstrings flake8-debugger flake8-bugbear pytest + pip install ruff pytest - name: Install Satpy run: | pip install -e . - name: Run linting run: | - flake8 satpy/ + ruff satpy/ test: runs-on: ${{ matrix.os }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3999be8b04..b5b21a52fa 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,12 +1,12 @@ exclude: '^$' fail_fast: false repos: - - repo: https://github.com/PyCQA/flake8 - rev: 6.1.0 + - repo: https://github.com/charliermarsh/ruff-pre-commit + # Ruff version. + rev: 'v0.0.247' hooks: - - id: flake8 - additional_dependencies: [flake8-docstrings, flake8-debugger, flake8-bugbear, mccabe] - args: [--max-complexity, "10"] + - id: ruff + args: [--fix, --exit-non-zero-on-fix] - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.4.0 hooks: diff --git a/pyproject.toml b/pyproject.toml index 64c68d60eb..7bed2a2fdd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,3 +12,18 @@ skip_gitignore = true default_section = "THIRDPARTY" known_first_party = "satpy" line_length = 120 + +[tool.ruff] +select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10"] +ignore = ["B905"] # only available from python 3.10 +line-length = 120 + +[tool.ruff.per-file-ignores] +"satpy/tests/*" = ["S101"] # assert allowed in tests + +[tool.ruff.pydocstyle] +convention = "google" + +[tool.ruff.mccabe] +# Unlike Flake8, default to a complexity level of 10. 
+max-complexity = 10 From 6a4ae2c2e9609182f67bf23b7c3c1a80cfebe8bf Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 4 Oct 2023 10:10:49 +0200 Subject: [PATCH 233/702] Add pep8-naming checks --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 7bed2a2fdd..d0eac01fe0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ known_first_party = "satpy" line_length = 120 [tool.ruff] -select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10"] +select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"] ignore = ["B905"] # only available from python 3.10 line-length = 120 From c6454b5deaf7993cdc66b58ea3a86f242bf5a874 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 4 Oct 2023 09:27:20 -0500 Subject: [PATCH 234/702] Remove libnetcdf specific build from CI env --- .github/workflows/ci.yaml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9897f8886a..0bd05f273d 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -82,9 +82,6 @@ jobs: - name: Update environment run: mamba env update -n test-environment -f continuous_integration/environment.yaml if: steps.cache.outputs.cache-hit != 'true' - - name: Update environment - libnetcdf - run: mamba install -y -n test-environment libnetcdf=4.9.2=nompi_h5902ca5_107 - if: runner.os == 'Windows' - name: Install unstable dependencies if: matrix.experimental == true From 86997cc5facdfe1afc2d61e8cdba7346357412b9 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 4 Oct 2023 17:41:59 +0200 Subject: [PATCH 235/702] Remove linting from github actions Rely on pre-commit entirely now. --- .github/workflows/ci.yaml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9635d9efb8..ad7a7f96bf 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -28,9 +28,6 @@ jobs: - name: Install Satpy run: | pip install -e . - - name: Run linting - run: | - ruff satpy/ test: runs-on: ${{ matrix.os }} From 014edb6a6e4a3bde4fa5389f7544e83e8391b834 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Tue, 12 Sep 2023 15:12:53 +0200 Subject: [PATCH 236/702] add reader for GERB high-resolution HDF5 files --- satpy/etc/readers/gerb_l2_hr_h5.yaml | 31 ++++++++ satpy/readers/gerb_l2_hr_h5.py | 110 +++++++++++++++++++++++++++ 2 files changed, 141 insertions(+) create mode 100644 satpy/etc/readers/gerb_l2_hr_h5.yaml create mode 100644 satpy/readers/gerb_l2_hr_h5.py diff --git a/satpy/etc/readers/gerb_l2_hr_h5.yaml b/satpy/etc/readers/gerb_l2_hr_h5.yaml new file mode 100644 index 0000000000..b05c662dc9 --- /dev/null +++ b/satpy/etc/readers/gerb_l2_hr_h5.yaml @@ -0,0 +1,31 @@ +reader: + name: gerb_l2_hr_h5 + short_name: GERB HR + long_name: Meteosat Second Generation Geostationary Earth Radiation Budget L2 High-Resolution + description: Reader for the HR product of the Geostationary Earth Radiation Budget instrument + status: Beta + supports_fsspec: false + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [gerb] + +file_types: + gerb_l2_hr_h5: + file_reader: !!python/name:satpy.readers.gerb_l2_hr_h5.GERB_HR_FileHandler + file_patterns: ['{sensor_name}_{seviri_name}_L20_HR_SOL_TH_{sensing_time:%Y%m%d_%H%M%S}_{gerb_version}.hdf'] + +datasets: + Solar_Flux: + name: Solar Flux + sensor: gerb + wavelength: [0.3, 3., 4.] 
+ units: W m-2 + standard_name: toa_outgoing_shortwave_flux + file_type: gerb_l2_hr_h5 + + Thermal_Flux: + name: Thermal Flux + sensor: gerb + wavelength: [4., 22., 40.] + units: W m-2 + standard_name: toa_outgoing_longwave_flux + file_type: gerb_l2_hr_h5 diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py new file mode 100644 index 0000000000..b5e4d0d5e8 --- /dev/null +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . + + +"""A reader for the Top of Atmosphere outgoing fluxes from the Geostationary Earth Radiation +Budget instrument aboard the Meteosat Second Generation satellites.""" + + +import logging +from datetime import timedelta + +import dask.array as da +import h5py +import numpy as np +import xarray as xr + +from satpy.readers.file_handlers import BaseFileHandler +from satpy.resample import get_area_def + +LOG = logging.getLogger(__name__) + +def gerb_get_dataset(hfile, name): + """ + Load a GERB dataset in memory from a HDF5 file + + The routine takes into account the quantisation factor and fill values. + """ + ds = hfile[name] + if 'Quantisation Factor' in ds.attrs and 'Unit' in ds.attrs: + ds_real = ds[...]*ds.attrs['Quantisation Factor'] + else: + ds_real = ds[...]*1. 
+ ds_min = ds[...].min() + if ds_min < 0: + mask = ds == ds_min + ds_real[mask] = np.nan + return ds_real + + +class GERB_HR_FileHandler(BaseFileHandler): + """File handler for GERB L2 High Resolution H5 files.""" + + def __init__(self, filename, filename_info, filetype_info): + """Init the file handler.""" + super(GERB_HR_FileHandler, self).__init__(filename, + filename_info, + filetype_info) + self._h5fh = h5py.File(self.filename, 'r') + self.ssp_lon = self._h5fh["Geolocation"].attrs["Nominal Satellite Longitude (degrees)"][()] + + @property + def end_time(self): + """Get end time.""" + return self.start_time + timedelta(minutes=14, seconds=59) + + + @property + def start_time(self): + """Get start time.""" + return self.filename_info['sensing_time'] + + + def _get_dataset(self, ds_name): + """Access the GERB dataset from the HDF5 file.""" + if ds_name in ['Solar Flux', 'Thermal Flux', 'Solar Radiance', 'Thermal Radiance']: + return gerb_get_dataset(self._h5fh, f'Radiometry/{ds_name}') + else: + raise ValueError + + + def get_dataset(self, ds_id, ds_info): + """Read a HDF5 file into an xarray DataArray.""" + ds = self._get_dataset(ds_id['name']) + ds_info = {} + + ds_info['start_time'] = self.start_time + ds_info['data_time'] = self.start_time + ds_info['end_time'] = self.end_time + + data = da.from_array(ds) + return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) + + + def get_area_def(self, dsid): + """Area definition for the GERB product""" + + if abs(self.ssp_lon) < 1e-6: + return get_area_def("msg_seviri_fes_9km") + elif abs(self.ssp_lon - 9.5) < 1e-6: + return get_area_def("msg_seviri_fes_9km") + elif abs(self.ssp_lon - 45.5) < 1e-6: + return get_area_def("msg_seviri_iodc_9km") + else: + raise ValueError + From 045c1ea1914d0cbded44f5764908fb75b487e066 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 13 Sep 2023 13:12:57 +0200 Subject: [PATCH 237/702] fix style --- satpy/readers/gerb_l2_hr_h5.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index b5e4d0d5e8..686b283907 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -17,8 +17,10 @@ # satpy. If not, see . -"""A reader for the Top of Atmosphere outgoing fluxes from the Geostationary Earth Radiation -Budget instrument aboard the Meteosat Second Generation satellites.""" +""" +A reader for the Top of Atmosphere outgoing fluxes from the Geostationary Earth Radiation +Budget instrument aboard the Meteosat Second Generation satellites. 
+""" import logging @@ -34,6 +36,7 @@ LOG = logging.getLogger(__name__) + def gerb_get_dataset(hfile, name): """ Load a GERB dataset in memory from a HDF5 file @@ -68,13 +71,11 @@ def end_time(self): """Get end time.""" return self.start_time + timedelta(minutes=14, seconds=59) - @property def start_time(self): """Get start time.""" return self.filename_info['sensing_time'] - def _get_dataset(self, ds_name): """Access the GERB dataset from the HDF5 file.""" if ds_name in ['Solar Flux', 'Thermal Flux', 'Solar Radiance', 'Thermal Radiance']: @@ -82,7 +83,6 @@ def _get_dataset(self, ds_name): else: raise ValueError - def get_dataset(self, ds_id, ds_info): """Read a HDF5 file into an xarray DataArray.""" ds = self._get_dataset(ds_id['name']) @@ -95,7 +95,6 @@ def get_dataset(self, ds_id, ds_info): data = da.from_array(ds) return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) - def get_area_def(self, dsid): """Area definition for the GERB product""" @@ -107,4 +106,3 @@ def get_area_def(self, dsid): return get_area_def("msg_seviri_iodc_9km") else: raise ValueError - From 9a6ba0dde134f34ee6fef59bbe103a33ffe089aa Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 13 Sep 2023 13:19:08 +0200 Subject: [PATCH 238/702] flake8 --- satpy/readers/gerb_l2_hr_h5.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index 686b283907..59db186a6f 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -17,7 +17,8 @@ # satpy. If not, see . -""" +"""GERB L2 HR HDF5 reader. + A reader for the Top of Atmosphere outgoing fluxes from the Geostationary Earth Radiation Budget instrument aboard the Meteosat Second Generation satellites. """ @@ -39,7 +40,7 @@ def gerb_get_dataset(hfile, name): """ - Load a GERB dataset in memory from a HDF5 file + Load a GERB dataset in memory from a HDF5 file. The routine takes into account the quantisation factor and fill values. """ @@ -96,8 +97,7 @@ def get_dataset(self, ds_id, ds_info): return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) def get_area_def(self, dsid): - """Area definition for the GERB product""" - + """Area definition for the GERB product.""" if abs(self.ssp_lon) < 1e-6: return get_area_def("msg_seviri_fes_9km") elif abs(self.ssp_lon - 9.5) < 1e-6: From 05038af0f63c2e516dec5f1c826f2306090ff43f Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 14 Sep 2023 11:15:37 +0200 Subject: [PATCH 239/702] remove wavelength entry for GERB reader --- satpy/etc/readers/gerb_l2_hr_h5.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/etc/readers/gerb_l2_hr_h5.yaml b/satpy/etc/readers/gerb_l2_hr_h5.yaml index b05c662dc9..a501fec32c 100644 --- a/satpy/etc/readers/gerb_l2_hr_h5.yaml +++ b/satpy/etc/readers/gerb_l2_hr_h5.yaml @@ -17,7 +17,6 @@ datasets: Solar_Flux: name: Solar Flux sensor: gerb - wavelength: [0.3, 3., 4.] units: W m-2 standard_name: toa_outgoing_shortwave_flux file_type: gerb_l2_hr_h5 @@ -25,7 +24,6 @@ datasets: Thermal_Flux: name: Thermal Flux sensor: gerb - wavelength: [4., 22., 40.] 
units: W m-2 standard_name: toa_outgoing_longwave_flux file_type: gerb_l2_hr_h5 From 078f541b26cdb974b7993b04638f278eb2c30476 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 14 Sep 2023 11:48:27 +0200 Subject: [PATCH 240/702] use fill_value for GERB reader fluxes --- satpy/etc/readers/gerb_l2_hr_h5.yaml | 2 ++ satpy/readers/gerb_l2_hr_h5.py | 28 ++++++++++++++-------------- 2 files changed, 16 insertions(+), 14 deletions(-) diff --git a/satpy/etc/readers/gerb_l2_hr_h5.yaml b/satpy/etc/readers/gerb_l2_hr_h5.yaml index a501fec32c..2d47c19b8f 100644 --- a/satpy/etc/readers/gerb_l2_hr_h5.yaml +++ b/satpy/etc/readers/gerb_l2_hr_h5.yaml @@ -18,6 +18,7 @@ datasets: name: Solar Flux sensor: gerb units: W m-2 + fill_value: -32767 standard_name: toa_outgoing_shortwave_flux file_type: gerb_l2_hr_h5 @@ -25,5 +26,6 @@ datasets: name: Thermal Flux sensor: gerb units: W m-2 + fill_value: -32767 standard_name: toa_outgoing_longwave_flux file_type: gerb_l2_hr_h5 diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index 59db186a6f..4673db4887 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -38,22 +38,22 @@ LOG = logging.getLogger(__name__) -def gerb_get_dataset(hfile, name): +def gerb_get_dataset(hfile, name, ds_info): """ Load a GERB dataset in memory from a HDF5 file. The routine takes into account the quantisation factor and fill values. """ - ds = hfile[name] - if 'Quantisation Factor' in ds.attrs and 'Unit' in ds.attrs: - ds_real = ds[...]*ds.attrs['Quantisation Factor'] + ds = hfile[name][...] + ds_attrs = hfile[name].attrs + ds_fill = ds_info['fill_value'] + fill_mask = ds == ds_fill + if 'Quantisation Factor' in ds_attrs and 'Unit' in ds_attrs: + ds = ds*ds_attrs['Quantisation Factor'] else: - ds_real = ds[...]*1. - ds_min = ds[...].min() - if ds_min < 0: - mask = ds == ds_min - ds_real[mask] = np.nan - return ds_real + ds = ds*1. 
+ ds[fill_mask] = np.nan + return ds class GERB_HR_FileHandler(BaseFileHandler): @@ -77,16 +77,16 @@ def start_time(self): """Get start time.""" return self.filename_info['sensing_time'] - def _get_dataset(self, ds_name): + def _get_dataset(self, ds_name, ds_info): """Access the GERB dataset from the HDF5 file.""" if ds_name in ['Solar Flux', 'Thermal Flux', 'Solar Radiance', 'Thermal Radiance']: - return gerb_get_dataset(self._h5fh, f'Radiometry/{ds_name}') + return gerb_get_dataset(self._h5fh, f'Radiometry/{ds_name}', ds_info) else: - raise ValueError + raise KeyError(f"{ds_name} is an unknown dataset for this reader.") def get_dataset(self, ds_id, ds_info): """Read a HDF5 file into an xarray DataArray.""" - ds = self._get_dataset(ds_id['name']) + ds = self._get_dataset(ds_id['name'], ds_info) ds_info = {} ds_info['start_time'] = self.start_time From 58eaddc78e872842b4ddfd8de95cd2ee0ae34029 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 14 Sep 2023 12:07:48 +0200 Subject: [PATCH 241/702] Add message for ValueError in get_area_def for GERB reader --- satpy/readers/gerb_l2_hr_h5.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index 4673db4887..470db89478 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -105,4 +105,4 @@ def get_area_def(self, dsid): elif abs(self.ssp_lon - 45.5) < 1e-6: return get_area_def("msg_seviri_iodc_9km") else: - raise ValueError + raise ValueError(f"There is no matching grid for SSP longitude {self.ssp_lon}") From 0ab1693154c155759caa005e6328a10528627b23 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 14 Sep 2023 23:02:16 +0200 Subject: [PATCH 242/702] use 15 mins instead of 14:59 in GERB reader --- satpy/readers/gerb_l2_hr_h5.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index 470db89478..c56dc0e9a5 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -70,7 +70,7 @@ def __init__(self, filename, filename_info, filetype_info): @property def end_time(self): """Get end time.""" - return self.start_time + timedelta(minutes=14, seconds=59) + return self.start_time + timedelta(minutes=15) @property def start_time(self): From 3c2e5b0e30c9c8af307ee0acce43c5e69a2d9c93 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 14 Sep 2023 23:02:47 +0200 Subject: [PATCH 243/702] Use xarray "where" instead of NumPy masking --- satpy/readers/gerb_l2_hr_h5.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index c56dc0e9a5..c7ce8f9f37 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -44,15 +44,15 @@ def gerb_get_dataset(hfile, name, ds_info): The routine takes into account the quantisation factor and fill values. """ - ds = hfile[name][...] + ds = xr.DataArray(hfile[name][...]) ds_attrs = hfile[name].attrs ds_fill = ds_info['fill_value'] - fill_mask = ds == ds_fill + fill_mask = ds != ds_fill if 'Quantisation Factor' in ds_attrs and 'Unit' in ds_attrs: ds = ds*ds_attrs['Quantisation Factor'] else: ds = ds*1. 
- ds[fill_mask] = np.nan + ds = ds.where(fill_mask) return ds From ffe5f7f17c0fa6d4ba1d3f0e7d011e941aa3f386 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Mon, 18 Sep 2023 09:29:08 +0200 Subject: [PATCH 244/702] use HDF5FileHandler for GERB reader --- satpy/readers/gerb_l2_hr_h5.py | 29 ++++++++++++++--------------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index c7ce8f9f37..245bab1405 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -32,7 +32,7 @@ import numpy as np import xarray as xr -from satpy.readers.file_handlers import BaseFileHandler +from satpy.readers.hdf5_utils import HDF5FileHandler from satpy.resample import get_area_def LOG = logging.getLogger(__name__) @@ -40,7 +40,7 @@ def gerb_get_dataset(hfile, name, ds_info): """ - Load a GERB dataset in memory from a HDF5 file. + Load a GERB dataset in memory from a HDF5 file or HDF5FileHandler. The routine takes into account the quantisation factor and fill values. """ @@ -56,7 +56,7 @@ def gerb_get_dataset(hfile, name, ds_info): return ds -class GERB_HR_FileHandler(BaseFileHandler): +class GERB_HR_FileHandler(HDF5FileHandler): """File handler for GERB L2 High Resolution H5 files.""" def __init__(self, filename, filename_info, filetype_info): @@ -65,7 +65,6 @@ def __init__(self, filename, filename_info, filetype_info): filename_info, filetype_info) self._h5fh = h5py.File(self.filename, 'r') - self.ssp_lon = self._h5fh["Geolocation"].attrs["Nominal Satellite Longitude (degrees)"][()] @property def end_time(self): @@ -77,16 +76,14 @@ def start_time(self): """Get start time.""" return self.filename_info['sensing_time'] - def _get_dataset(self, ds_name, ds_info): - """Access the GERB dataset from the HDF5 file.""" - if ds_name in ['Solar Flux', 'Thermal Flux', 'Solar Radiance', 'Thermal Radiance']: - return gerb_get_dataset(self._h5fh, f'Radiometry/{ds_name}', ds_info) - else: - raise KeyError(f"{ds_name} is an unknown dataset for this reader.") - def get_dataset(self, ds_id, ds_info): """Read a HDF5 file into an xarray DataArray.""" - ds = self._get_dataset(ds_id['name'], ds_info) + + ds_name = ds_id['name'] + if ds_name not in ['Solar Flux', 'Thermal Flux', 'Solar Radiance', 'Thermal Radiance']: + raise KeyError(f"{ds_name} is an unknown dataset for this reader.") + + ds = gerb_get_dataset(self, f'Radiometry/{ds_name}', ds_info) ds_info = {} ds_info['start_time'] = self.start_time @@ -98,11 +95,13 @@ def get_dataset(self, ds_id, ds_info): def get_area_def(self, dsid): """Area definition for the GERB product.""" - if abs(self.ssp_lon) < 1e-6: + ssp_lon = self.file_content["Geolocation/attr/Nominal Satellite Longitude (degrees)"] + + if abs(ssp_lon) < 1e-6: return get_area_def("msg_seviri_fes_9km") - elif abs(self.ssp_lon - 9.5) < 1e-6: + elif abs(ssp_lon - 9.5) < 1e-6: return get_area_def("msg_seviri_fes_9km") - elif abs(self.ssp_lon - 45.5) < 1e-6: + elif abs(ssp_lon - 45.5) < 1e-6: return get_area_def("msg_seviri_iodc_9km") else: raise ValueError(f"There is no matching grid for SSP longitude {self.ssp_lon}") From 929972aa3d8b7fd4c58df26efdf722d63b2c5702 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Mon, 18 Sep 2023 15:19:41 +0200 Subject: [PATCH 245/702] flake8 --- satpy/readers/gerb_l2_hr_h5.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index 245bab1405..4dad36f0e8 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ 
b/satpy/readers/gerb_l2_hr_h5.py @@ -29,7 +29,6 @@ import dask.array as da import h5py -import numpy as np import xarray as xr from satpy.readers.hdf5_utils import HDF5FileHandler @@ -78,7 +77,6 @@ def start_time(self): def get_dataset(self, ds_id, ds_info): """Read a HDF5 file into an xarray DataArray.""" - ds_name = ds_id['name'] if ds_name not in ['Solar Flux', 'Thermal Flux', 'Solar Radiance', 'Thermal Radiance']: raise KeyError(f"{ds_name} is an unknown dataset for this reader.") From 2f8365491064d2bb19f5a222c07636fc5fb837ef Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 4 Oct 2023 12:50:37 +0200 Subject: [PATCH 246/702] add test for GERB L2 HR HDF5 reader --- .../tests/reader_tests/test_gerb_l2_hr_h5.py | 208 ++++++++++++++++++ 1 file changed, 208 insertions(+) create mode 100644 satpy/tests/reader_tests/test_gerb_l2_hr_h5.py diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py new file mode 100644 index 0000000000..102317937e --- /dev/null +++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py @@ -0,0 +1,208 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2018 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
+"""Unit tests for GERB L2 HR HDF5 reader.""" + +import numpy as np +import pytest +import h5py +from satpy import Scene + +FNAME = "G4_SEV4_L20_HR_SOL_TH_20190606_130000_V000.hdf" + + +@pytest.fixture(scope="session") +def gerb_l2_hr_h5_dummy_file(tmp_path_factory): + """Create a dummy HDF5 file for the GERB L2 HR product.""" + filename = tmp_path_factory.mktemp("data") / FNAME + + with h5py.File(filename, 'w') as fid: + fid.create_group('/Angles') + fid['/Angles/Relative Azimuth'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Angles/Relative Azimuth'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(7) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Angles/Relative Azimuth'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Degree', dtype='|S7')) + fid['/Angles/Solar Zenith'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Angles/Solar Zenith'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(7) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Angles/Solar Zenith'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Degree', dtype='|S7')) + fid['/Angles/Viewing Azimuth'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Angles/Viewing Azimuth'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(7) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Angles/Viewing Azimuth'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Degree', dtype='|S7')) + fid['/Angles/Viewing Zenith'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Angles/Viewing Zenith'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(7) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Angles/Viewing Zenith'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Degree', dtype='|S7')) + fid.create_group('/GERB') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(3) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/GERB'].id, b'Instrument Identifier', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'G4', dtype='|S3')) + fid.create_group('/GGSPS') + fid['/GGSPS'].attrs['L1.5 NANRG Product Version'] = np.array(-1, dtype='int32') + fid.create_group('/Geolocation') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(44) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Geolocation'].id, b'Geolocation File Name', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'G4_SEV4_L20_HR_GEO_20180111_181500_V010.hdf', dtype='|S44')) + fid['/Geolocation'].attrs['Line of Sight North-South Speed'] = np.array(0.0, dtype='float64') + fid['/Geolocation'].attrs['Nominal Satellite Longitude (degrees)'] = np.array(0.0, dtype='float64') + fid.create_group('/Geolocation/Rectified Grid') + fid['/Geolocation/Rectified Grid'].attrs['Grid Orientation'] = np.array(0.0, dtype='float64') + fid['/Geolocation/Rectified Grid'].attrs['Lap'] = np.array(0.0, dtype='float64') + fid['/Geolocation/Rectified Grid'].attrs['Lop'] = np.array(0.0, dtype='float64') + fid['/Geolocation/Rectified Grid'].attrs['Nr'] = np.array(6.610674630916804, dtype='float64') + fid['/Geolocation/Rectified Grid'].attrs['Nx'] = np.array(1237, 
dtype='int32') + fid['/Geolocation/Rectified Grid'].attrs['Ny'] = np.array(1237, dtype='int32') + fid['/Geolocation/Rectified Grid'].attrs['Xp'] = np.array(618.3333333333334, dtype='float64') + fid['/Geolocation/Rectified Grid'].attrs['Yp'] = np.array(617.6666666666666, dtype='float64') + fid['/Geolocation/Rectified Grid'].attrs['dx'] = np.array(1207.4379446281002, dtype='float64') + fid['/Geolocation/Rectified Grid'].attrs['dy'] = np.array(1203.3201568249945, dtype='float64') + fid.create_group('/Geolocation/Rectified Grid/Resolution Flags') + fid['/Geolocation/Rectified Grid/Resolution Flags'].attrs['East West'] = np.array(0.014411607, dtype='float64') + fid['/Geolocation/Rectified Grid/Resolution Flags'].attrs['North South'] = np.array(0.014411607, dtype='float64') + fid.create_group('/Imager') + fid['/Imager'].attrs['Instrument Identifier'] = np.array(4, dtype='int32') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(7) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Imager'].id, b'Type', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'SEVIRI', dtype='|S7')) + fid.create_group('/RMIB') + fid['/RMIB'].attrs['Product Version'] = np.array(10, dtype='int32') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(16) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/RMIB'].id, b'Software Identifier', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'20220812_151631', dtype='|S16')) + fid.create_group('/Radiometry') + fid['/Radiometry'].attrs['SEVIRI Radiance Definition Flag'] = np.array(2, dtype='int32') + fid['/Radiometry/A Values (per GERB detector cell)'] = np.ones(shape=(256,), dtype=np.dtype('>f8')) + fid['/Radiometry/C Values (per GERB detector cell)'] = np.ones(shape=(256,), dtype=np.dtype('>f8')) + fid['/Radiometry/Longwave Correction'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Radiometry/Longwave Correction'].attrs['Offset'] = np.array(1.0, dtype='float64') + fid['/Radiometry/Longwave Correction'].attrs['Quantisation Factor'] = np.array(0.005, dtype='float64') + fid['/Radiometry/Shortwave Correction'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Radiometry/Shortwave Correction'].attrs['Offset'] = np.array(1.0, dtype='float64') + fid['/Radiometry/Shortwave Correction'].attrs['Quantisation Factor'] = np.array(0.005, dtype='float64') + fid['/Radiometry/Solar Flux'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Radiometry/Solar Flux'].attrs['Quantisation Factor'] = np.array(0.25, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(22) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Radiometry/Solar Flux'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Watt per square meter', dtype='|S22')) + fid['/Radiometry/Solar Radiance'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Radiometry/Solar Radiance'].attrs['Quantisation Factor'] = np.array(0.05, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(36) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Radiometry/Solar Radiance'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Watt per square meter per steradian', dtype='|S36')) + fid['/Radiometry/Thermal Flux'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Radiometry/Thermal Flux'].attrs['Quantisation Factor'] = np.array(0.25, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(22) + 
dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Radiometry/Thermal Flux'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Watt per square meter', dtype='|S22')) + fid['/Radiometry/Thermal Radiance'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Radiometry/Thermal Radiance'].attrs['Quantisation Factor'] = np.array(0.05, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(36) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Radiometry/Thermal Radiance'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Watt per square meter per steradian', dtype='|S36')) + fid.create_group('/Scene Identification') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(13) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Scene Identification'].id, b'Solar Angular Dependency Models Set Version', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'CERES_TRMM.1', dtype='|S13')) + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(7) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Scene Identification'].id, b'Thermal Angular Dependency Models Set Version', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'RMIB.3', dtype='|S7')) + fid['/Scene Identification/Aerosol Optical Depth IR 1.6'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Scene Identification/Aerosol Optical Depth IR 1.6'].attrs['Quantisation Factor'] = np.array(0.001, dtype='float64') + fid['/Scene Identification/Aerosol Optical Depth VIS 0.6'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Scene Identification/Aerosol Optical Depth VIS 0.6'].attrs['Quantisation Factor'] = np.array(0.001, dtype='float64') + fid['/Scene Identification/Aerosol Optical Depth VIS 0.8'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Scene Identification/Aerosol Optical Depth VIS 0.8'].attrs['Quantisation Factor'] = np.array(0.001, dtype='float64') + fid['/Scene Identification/Cloud Cover'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) + fid['/Scene Identification/Cloud Cover'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(8) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Scene Identification/Cloud Cover'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Percent', dtype='|S8')) + fid['/Scene Identification/Cloud Optical Depth (logarithm)'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Scene Identification/Cloud Optical Depth (logarithm)'].attrs['Quantisation Factor'] = np.array(0.00025, dtype='float64') + fid['/Scene Identification/Cloud Phase'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) + fid['/Scene Identification/Cloud Phase'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(34) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Scene Identification/Cloud Phase'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Percent (Water=0%,Mixed,Ice=100%)', dtype='|S34')) + fid['/Scene Identification/Dust Detection'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) + fid['/Scene Identification/Dust Detection'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') + fid['/Scene Identification/Solar Angular Dependency Model'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + 
fid['/Scene Identification/Surface Type'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) + fid['/Scene Identification/Thermal Angular Dependency Model'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) + fid.create_group('/Times') + fid['/Times/Time (per row)'] = np.ones(shape=(1237,), dtype=np.dtype('|S22')) + + return filename + + +def test_gerb_solar_flux_dataset(gerb_l2_hr_h5_dummy_file): + """Test the GERB L2 HR HDF5 file. + + Load the solar flux component. + """ + scene = Scene(reader='gerb_l2_hr_h5', filenames=[gerb_l2_hr_h5_dummy_file]) + scene.load(['Solar Flux']) + assert scene['Solar Flux'].shape == (1237, 1237) + assert np.nanmax((scene['Solar Flux'].to_numpy().flatten() - 0.25)) < 1e-6 + + +def test_gerb_thermal_flux_dataset(gerb_l2_hr_h5_dummy_file): + """Test the GERB L2 HR HDF5 file. + + Load the thermal flux component. + """ + scene = Scene(reader='gerb_l2_hr_h5', filenames=[gerb_l2_hr_h5_dummy_file]) + scene.load(['Thermal Flux']) + assert scene['Thermal Flux'].shape == (1237, 1237) + assert np.nanmax((scene['Thermal Flux'].to_numpy().flatten() - 0.25)) < 1e-6 From 7de9d8024cdd8b33956c2f63b618699a8b11b751 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 4 Oct 2023 14:56:34 +0200 Subject: [PATCH 247/702] flake8 --- .../tests/reader_tests/test_gerb_l2_hr_h5.py | 27 ++++++++++++------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py index 102317937e..188c9f1141 100644 --- a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py +++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py @@ -89,7 +89,8 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): fid['/Geolocation/Rectified Grid'].attrs['dy'] = np.array(1203.3201568249945, dtype='float64') fid.create_group('/Geolocation/Rectified Grid/Resolution Flags') fid['/Geolocation/Rectified Grid/Resolution Flags'].attrs['East West'] = np.array(0.014411607, dtype='float64') - fid['/Geolocation/Rectified Grid/Resolution Flags'].attrs['North South'] = np.array(0.014411607, dtype='float64') + fid['/Geolocation/Rectified Grid/Resolution Flags'].attrs['North South'] = \ + np.array(0.014411607, dtype='float64') fid.create_group('/Imager') fid['/Imager'].attrs['Instrument Identifier'] = np.array(4, dtype='int32') dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) @@ -146,19 +147,24 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) dt.set_size(13) dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Scene Identification'].id, b'Solar Angular Dependency Models Set Version', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at = h5py.h5a.create(fid['/Scene Identification'].id, b'Solar Angular Dependency Models Set Version', dt, + h5py.h5s.create(h5py.h5s.SCALAR)) at.write(np.array(b'CERES_TRMM.1', dtype='|S13')) dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) dt.set_size(7) dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Scene Identification'].id, b'Thermal Angular Dependency Models Set Version', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at = h5py.h5a.create(fid['/Scene Identification'].id, b'Thermal Angular Dependency Models Set Version', dt, + h5py.h5s.create(h5py.h5s.SCALAR)) at.write(np.array(b'RMIB.3', dtype='|S7')) fid['/Scene Identification/Aerosol Optical Depth IR 1.6'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Scene Identification/Aerosol Optical Depth IR 1.6'].attrs['Quantisation Factor'] = np.array(0.001, dtype='float64') + fid['/Scene 
Identification/Aerosol Optical Depth IR 1.6'].attrs['Quantisation Factor'] = \ + np.array(0.001, dtype='float64') fid['/Scene Identification/Aerosol Optical Depth VIS 0.6'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Scene Identification/Aerosol Optical Depth VIS 0.6'].attrs['Quantisation Factor'] = np.array(0.001, dtype='float64') + fid['/Scene Identification/Aerosol Optical Depth VIS 0.6'].attrs['Quantisation Factor'] = \ + np.array(0.001, dtype='float64') fid['/Scene Identification/Aerosol Optical Depth VIS 0.8'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Scene Identification/Aerosol Optical Depth VIS 0.8'].attrs['Quantisation Factor'] = np.array(0.001, dtype='float64') + fid['/Scene Identification/Aerosol Optical Depth VIS 0.8'].attrs['Quantisation Factor'] = \ + np.array(0.001, dtype='float64') fid['/Scene Identification/Cloud Cover'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) fid['/Scene Identification/Cloud Cover'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) @@ -166,8 +172,10 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): dt.set_strpad(h5py.h5t.STR_NULLTERM) at = h5py.h5a.create(fid['/Scene Identification/Cloud Cover'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) at.write(np.array(b'Percent', dtype='|S8')) - fid['/Scene Identification/Cloud Optical Depth (logarithm)'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Scene Identification/Cloud Optical Depth (logarithm)'].attrs['Quantisation Factor'] = np.array(0.00025, dtype='float64') + fid['/Scene Identification/Cloud Optical Depth (logarithm)'] = \ + np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Scene Identification/Cloud Optical Depth (logarithm)'].attrs['Quantisation Factor'] = \ + np.array(0.00025, dtype='float64') fid['/Scene Identification/Cloud Phase'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) fid['/Scene Identification/Cloud Phase'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) @@ -179,7 +187,8 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): fid['/Scene Identification/Dust Detection'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') fid['/Scene Identification/Solar Angular Dependency Model'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Scene Identification/Surface Type'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) - fid['/Scene Identification/Thermal Angular Dependency Model'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) + fid['/Scene Identification/Thermal Angular Dependency Model'] = \ + np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) fid.create_group('/Times') fid['/Times/Time (per row)'] = np.ones(shape=(1237,), dtype=np.dtype('|S22')) From 1b16ffe505e2bc980b3deb74cf7000511edfc8f3 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 4 Oct 2023 19:56:48 +0200 Subject: [PATCH 248/702] use pytest.mark.parametrize for GERB reader Co-authored-by: David Hoese --- .../tests/reader_tests/test_gerb_l2_hr_h5.py | 26 +++++-------------- 1 file changed, 6 insertions(+), 20 deletions(-) diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py index 188c9f1141..b6a5deda13 100644 --- a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py +++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py @@ -194,24 +194,10 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): return filename - -def 
test_gerb_solar_flux_dataset(gerb_l2_hr_h5_dummy_file): - """Test the GERB L2 HR HDF5 file. - - Load the solar flux component. - """ - scene = Scene(reader='gerb_l2_hr_h5', filenames=[gerb_l2_hr_h5_dummy_file]) - scene.load(['Solar Flux']) - assert scene['Solar Flux'].shape == (1237, 1237) - assert np.nanmax((scene['Solar Flux'].to_numpy().flatten() - 0.25)) < 1e-6 - - -def test_gerb_thermal_flux_dataset(gerb_l2_hr_h5_dummy_file): - """Test the GERB L2 HR HDF5 file. - - Load the thermal flux component. - """ +@pytest.mark.parametrize("name", ["Solar Flux", "Thermal Flux"]) +def test_dataset_load(gerb_l2_hr_h5_dummy_file, name): + """Test loading the solar flux component.""" scene = Scene(reader='gerb_l2_hr_h5', filenames=[gerb_l2_hr_h5_dummy_file]) - scene.load(['Thermal Flux']) - assert scene['Thermal Flux'].shape == (1237, 1237) - assert np.nanmax((scene['Thermal Flux'].to_numpy().flatten() - 0.25)) < 1e-6 + scene.load([name]) + assert scene[name].shape == (1237, 1237) + assert np.nanmax((scene[name].to_numpy().flatten() - 0.25)) < 1e-6 From 1e858b185b088135c4d087e0abe33d85cdb5d410 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 4 Oct 2023 20:23:21 +0200 Subject: [PATCH 249/702] flake8 --- satpy/tests/reader_tests/test_gerb_l2_hr_h5.py | 1 + 1 file changed, 1 insertion(+) diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py index b6a5deda13..3eb115759f 100644 --- a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py +++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py @@ -194,6 +194,7 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): return filename + @pytest.mark.parametrize("name", ["Solar Flux", "Thermal Flux"]) def test_dataset_load(gerb_l2_hr_h5_dummy_file, name): """Test loading the solar flux component.""" From ee26d3765830d9257021da57ae461951b4eea6ba Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 4 Oct 2023 20:54:22 +0200 Subject: [PATCH 250/702] Reduce code size 1. Use function for common HDF5 operations 2. 
Remove unused parts of the dummy file --- .../tests/reader_tests/test_gerb_l2_hr_h5.py | 146 +++++------------- 1 file changed, 35 insertions(+), 111 deletions(-) diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py index 3eb115759f..65c8583b19 100644 --- a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py +++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py @@ -25,6 +25,23 @@ FNAME = "G4_SEV4_L20_HR_SOL_TH_20190606_130000_V000.hdf" +def make_h5_null_string(length): + """Make a HDF5 type for a NULL terminated string of fixed length.""" + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(7) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + return dt + + +def write_h5_null_string_att(loc_id, name, s): + """Write a NULL terminated string attribute at loc_id.""" + dt = make_h5_null_string(length=7) + name = bytes(name.encode('ascii')) + s = bytes(s.encode('ascii')) + at = h5py.h5a.create(loc_id, name, dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(s, dtype=f'|S{len(s)+1}')) + + @pytest.fixture(scope="session") def gerb_l2_hr_h5_dummy_file(tmp_path_factory): """Create a dummy HDF5 file for the GERB L2 HR product.""" @@ -34,77 +51,30 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): fid.create_group('/Angles') fid['/Angles/Relative Azimuth'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Angles/Relative Azimuth'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(7) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Angles/Relative Azimuth'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'Degree', dtype='|S7')) fid['/Angles/Solar Zenith'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Angles/Solar Zenith'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(7) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Angles/Solar Zenith'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'Degree', dtype='|S7')) + write_h5_null_string_att(fid['/Angles/Relative Azimuth'].id, 'Unit', 'Degree') fid['/Angles/Viewing Azimuth'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Angles/Viewing Azimuth'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(7) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Angles/Viewing Azimuth'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'Degree', dtype='|S7')) + write_h5_null_string_att(fid['/Angles/Viewing Azimuth'].id, 'Unit', 'Degree') fid['/Angles/Viewing Zenith'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Angles/Viewing Zenith'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(7) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Angles/Viewing Zenith'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'Degree', dtype='|S7')) + write_h5_null_string_att(fid['/Angles/Viewing Zenith'].id, 'Unit', 'Degree') fid.create_group('/GERB') dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) dt.set_size(3) dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/GERB'].id, b'Instrument Identifier', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'G4', dtype='|S3')) + write_h5_null_string_att(fid['/GERB'].id, 'Instrument Identifier', 
'G4') fid.create_group('/GGSPS') fid['/GGSPS'].attrs['L1.5 NANRG Product Version'] = np.array(-1, dtype='int32') fid.create_group('/Geolocation') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(44) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Geolocation'].id, b'Geolocation File Name', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'G4_SEV4_L20_HR_GEO_20180111_181500_V010.hdf', dtype='|S44')) - fid['/Geolocation'].attrs['Line of Sight North-South Speed'] = np.array(0.0, dtype='float64') + write_h5_null_string_att(fid['/Geolocation'].id, 'Geolocation File Name', + 'G4_SEV4_L20_HR_GEO_20180111_181500_V010.hdf') fid['/Geolocation'].attrs['Nominal Satellite Longitude (degrees)'] = np.array(0.0, dtype='float64') - fid.create_group('/Geolocation/Rectified Grid') - fid['/Geolocation/Rectified Grid'].attrs['Grid Orientation'] = np.array(0.0, dtype='float64') - fid['/Geolocation/Rectified Grid'].attrs['Lap'] = np.array(0.0, dtype='float64') - fid['/Geolocation/Rectified Grid'].attrs['Lop'] = np.array(0.0, dtype='float64') - fid['/Geolocation/Rectified Grid'].attrs['Nr'] = np.array(6.610674630916804, dtype='float64') - fid['/Geolocation/Rectified Grid'].attrs['Nx'] = np.array(1237, dtype='int32') - fid['/Geolocation/Rectified Grid'].attrs['Ny'] = np.array(1237, dtype='int32') - fid['/Geolocation/Rectified Grid'].attrs['Xp'] = np.array(618.3333333333334, dtype='float64') - fid['/Geolocation/Rectified Grid'].attrs['Yp'] = np.array(617.6666666666666, dtype='float64') - fid['/Geolocation/Rectified Grid'].attrs['dx'] = np.array(1207.4379446281002, dtype='float64') - fid['/Geolocation/Rectified Grid'].attrs['dy'] = np.array(1203.3201568249945, dtype='float64') - fid.create_group('/Geolocation/Rectified Grid/Resolution Flags') - fid['/Geolocation/Rectified Grid/Resolution Flags'].attrs['East West'] = np.array(0.014411607, dtype='float64') - fid['/Geolocation/Rectified Grid/Resolution Flags'].attrs['North South'] = \ - np.array(0.014411607, dtype='float64') fid.create_group('/Imager') fid['/Imager'].attrs['Instrument Identifier'] = np.array(4, dtype='int32') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(7) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Imager'].id, b'Type', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'SEVIRI', dtype='|S7')) + write_h5_null_string_att(fid['/Imager'].id, 'Type', 'SEVIRI') fid.create_group('/RMIB') - fid['/RMIB'].attrs['Product Version'] = np.array(10, dtype='int32') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(16) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/RMIB'].id, b'Software Identifier', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'20220812_151631', dtype='|S16')) fid.create_group('/Radiometry') fid['/Radiometry'].attrs['SEVIRI Radiance Definition Flag'] = np.array(2, dtype='int32') fid['/Radiometry/A Values (per GERB detector cell)'] = np.ones(shape=(256,), dtype=np.dtype('>f8')) @@ -117,78 +87,32 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): fid['/Radiometry/Shortwave Correction'].attrs['Quantisation Factor'] = np.array(0.005, dtype='float64') fid['/Radiometry/Solar Flux'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Radiometry/Solar Flux'].attrs['Quantisation Factor'] = np.array(0.25, dtype='float64') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(22) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Radiometry/Solar Flux'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - 
at.write(np.array(b'Watt per square meter', dtype='|S22')) + write_h5_null_string_att(fid['/Radiometry/Solar Flux'].id, 'Unit', 'Watt per square meter') fid['/Radiometry/Solar Radiance'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Radiometry/Solar Radiance'].attrs['Quantisation Factor'] = np.array(0.05, dtype='float64') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(36) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Radiometry/Solar Radiance'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'Watt per square meter per steradian', dtype='|S36')) + write_h5_null_string_att(fid['/Radiometry/Solar Radiance'].id, 'Unit', 'Watt per square meter per steradian') fid['/Radiometry/Thermal Flux'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Radiometry/Thermal Flux'].attrs['Quantisation Factor'] = np.array(0.25, dtype='float64') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(22) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Radiometry/Thermal Flux'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'Watt per square meter', dtype='|S22')) + write_h5_null_string_att(fid['/Radiometry/Thermal Flux'].id, 'Unit', 'Watt per square meter') fid['/Radiometry/Thermal Radiance'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Radiometry/Thermal Radiance'].attrs['Quantisation Factor'] = np.array(0.05, dtype='float64') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(36) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Radiometry/Thermal Radiance'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'Watt per square meter per steradian', dtype='|S36')) + write_h5_null_string_att(fid['/Radiometry/Thermal Radiance'].id, 'Unit', 'Watt per square meter per steradian') fid.create_group('/Scene Identification') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(13) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Scene Identification'].id, b'Solar Angular Dependency Models Set Version', dt, - h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'CERES_TRMM.1', dtype='|S13')) - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(7) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Scene Identification'].id, b'Thermal Angular Dependency Models Set Version', dt, - h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'RMIB.3', dtype='|S7')) - fid['/Scene Identification/Aerosol Optical Depth IR 1.6'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Scene Identification/Aerosol Optical Depth IR 1.6'].attrs['Quantisation Factor'] = \ - np.array(0.001, dtype='float64') - fid['/Scene Identification/Aerosol Optical Depth VIS 0.6'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Scene Identification/Aerosol Optical Depth VIS 0.6'].attrs['Quantisation Factor'] = \ - np.array(0.001, dtype='float64') - fid['/Scene Identification/Aerosol Optical Depth VIS 0.8'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Scene Identification/Aerosol Optical Depth VIS 0.8'].attrs['Quantisation Factor'] = \ - np.array(0.001, dtype='float64') + write_h5_null_string_att(fid['/Scene Identification'].id, + 'Solar Angular Dependency Models Set Version', 'CERES_TRMM.1') + write_h5_null_string_att(fid['/Scene Identification'].id, + 'Thermal Angular Dependency Models Set Version', 'RMIB.3') fid['/Scene Identification/Cloud Cover'] = np.ones(shape=(1237, 1237), 
dtype=np.dtype('uint8')) fid['/Scene Identification/Cloud Cover'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(8) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Scene Identification/Cloud Cover'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'Percent', dtype='|S8')) + write_h5_null_string_att(fid['/Scene Identification/Cloud Cover'].id, 'Unit', 'Percent') fid['/Scene Identification/Cloud Optical Depth (logarithm)'] = \ np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Scene Identification/Cloud Optical Depth (logarithm)'].attrs['Quantisation Factor'] = \ np.array(0.00025, dtype='float64') fid['/Scene Identification/Cloud Phase'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) fid['/Scene Identification/Cloud Phase'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(34) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Scene Identification/Cloud Phase'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'Percent (Water=0%,Mixed,Ice=100%)', dtype='|S34')) - fid['/Scene Identification/Dust Detection'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) - fid['/Scene Identification/Dust Detection'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') - fid['/Scene Identification/Solar Angular Dependency Model'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Scene Identification/Surface Type'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) - fid['/Scene Identification/Thermal Angular Dependency Model'] = \ - np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) + write_h5_null_string_att(fid['/Scene Identification/Cloud Phase'].id, 'Unit', + 'Percent (Water=0%,Mixed,Ice=100%)') fid.create_group('/Times') fid['/Times/Time (per row)'] = np.ones(shape=(1237,), dtype=np.dtype('|S22')) From b29ce40d18a3a7c4a761fcade8c209977e8111ec Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 4 Oct 2023 21:01:32 +0200 Subject: [PATCH 251/702] add radiance datasets to GERB reader --- satpy/etc/readers/gerb_l2_hr_h5.yaml | 14 ++++++++++++++ satpy/tests/reader_tests/test_gerb_l2_hr_h5.py | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/satpy/etc/readers/gerb_l2_hr_h5.yaml b/satpy/etc/readers/gerb_l2_hr_h5.yaml index 2d47c19b8f..0f53a6c92c 100644 --- a/satpy/etc/readers/gerb_l2_hr_h5.yaml +++ b/satpy/etc/readers/gerb_l2_hr_h5.yaml @@ -29,3 +29,17 @@ datasets: fill_value: -32767 standard_name: toa_outgoing_longwave_flux file_type: gerb_l2_hr_h5 + + Solar_Radiance: + name: Solar Radiance + sensor: gerb + units: W m-2 + fill_value: -32767 + file_type: gerb_l2_hr_h5 + + Thermal_Radiance: + name: Thermal Radiance + sensor: gerb + units: W m-2 + fill_value: -32767 + file_type: gerb_l2_hr_h5 diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py index 65c8583b19..3dadccb11a 100644 --- a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py +++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py @@ -119,7 +119,7 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): return filename -@pytest.mark.parametrize("name", ["Solar Flux", "Thermal Flux"]) +@pytest.mark.parametrize("name", ["Solar Flux", "Thermal Flux", "Solar Radiance", "Thermal Radiance"]) def test_dataset_load(gerb_l2_hr_h5_dummy_file, name): """Test loading the solar flux component.""" scene = Scene(reader='gerb_l2_hr_h5', 
filenames=[gerb_l2_hr_h5_dummy_file]) From e094049cf4c57fb5d63d574aff8013af9f7b6410 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 4 Oct 2023 21:04:34 +0200 Subject: [PATCH 252/702] add missing "sr-1" in units of radiance for GERB reader --- satpy/etc/readers/gerb_l2_hr_h5.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/gerb_l2_hr_h5.yaml b/satpy/etc/readers/gerb_l2_hr_h5.yaml index 0f53a6c92c..7e1ec13fe9 100644 --- a/satpy/etc/readers/gerb_l2_hr_h5.yaml +++ b/satpy/etc/readers/gerb_l2_hr_h5.yaml @@ -33,13 +33,13 @@ datasets: Solar_Radiance: name: Solar Radiance sensor: gerb - units: W m-2 + units: W m-2 sr-1 fill_value: -32767 file_type: gerb_l2_hr_h5 Thermal_Radiance: name: Thermal Radiance sensor: gerb - units: W m-2 + units: W m-2 sr-1 fill_value: -32767 file_type: gerb_l2_hr_h5 From 1123c4d18edb3e9d5926b31d186e867fcd966c22 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 4 Oct 2023 22:10:53 +0200 Subject: [PATCH 253/702] fix import order (isort) --- satpy/tests/reader_tests/test_gerb_l2_hr_h5.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py index 3dadccb11a..eb06362831 100644 --- a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py +++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py @@ -17,9 +17,10 @@ # satpy. If not, see . """Unit tests for GERB L2 HR HDF5 reader.""" +import h5py import numpy as np import pytest -import h5py + from satpy import Scene FNAME = "G4_SEV4_L20_HR_SOL_TH_20190606_130000_V000.hdf" From 9c3094087408d574246d44feedd185c832545917 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 5 Oct 2023 09:37:58 +0200 Subject: [PATCH 254/702] use xarray dataset from HDF5FileHandler properly in GERB reader --- satpy/readers/gerb_l2_hr_h5.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index 4dad36f0e8..c9a5986894 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -43,7 +43,7 @@ def gerb_get_dataset(hfile, name, ds_info): The routine takes into account the quantisation factor and fill values. 
""" - ds = xr.DataArray(hfile[name][...]) + ds = hfile[name] ds_attrs = hfile[name].attrs ds_fill = ds_info['fill_value'] fill_mask = ds != ds_fill @@ -82,14 +82,10 @@ def get_dataset(self, ds_id, ds_info): raise KeyError(f"{ds_name} is an unknown dataset for this reader.") ds = gerb_get_dataset(self, f'Radiometry/{ds_name}', ds_info) - ds_info = {} - ds_info['start_time'] = self.start_time - ds_info['data_time'] = self.start_time - ds_info['end_time'] = self.end_time + ds.attrs.update({'start_time': self.start_time, 'data_time': self.start_time, 'end_time': self.end_time}) - data = da.from_array(ds) - return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) + return ds def get_area_def(self, dsid): """Area definition for the GERB product.""" From 2c79989e64b21ed5cefb11741878430b224846e9 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 5 Oct 2023 10:08:06 +0200 Subject: [PATCH 255/702] flake8 --- satpy/readers/gerb_l2_hr_h5.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index c9a5986894..2cd99d359d 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -27,9 +27,7 @@ import logging from datetime import timedelta -import dask.array as da import h5py -import xarray as xr from satpy.readers.hdf5_utils import HDF5FileHandler from satpy.resample import get_area_def From da74ce4d8d0159ca78dfed9ad243452b25243621 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 5 Oct 2023 06:09:31 -0500 Subject: [PATCH 256/702] Refactor resolution-based chunking to be more flexible --- satpy/readers/hdfeos_base.py | 11 ++-- satpy/tests/test_utils.py | 98 +++++++++++++++++++++--------------- satpy/utils.py | 89 +++++++++++++++----------------- 3 files changed, 103 insertions(+), 95 deletions(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 751d286828..56b15b626d 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -32,7 +32,7 @@ from satpy import DataID from satpy.readers.file_handlers import BaseFileHandler -from satpy.utils import chunks_by_resolution +from satpy.utils import normalize_low_res_chunks logger = logging.getLogger(__name__) @@ -227,12 +227,13 @@ def _chunks_for_variable(self, hdf_dataset): scan_length_250m = 40 var_shape = hdf_dataset.info()[2] res_multiplier = self._get_res_multiplier(var_shape) - return chunks_by_resolution( + num_nonyx_dims = len(var_shape) - 2 + return normalize_low_res_chunks( + (1,) * num_nonyx_dims + ("auto", -1), var_shape, + (1,) * num_nonyx_dims + (scan_length_250m, -1), + (1,) * num_nonyx_dims + (res_multiplier, res_multiplier), np.float32, - scan_length_250m, - res_multiplier, - whole_scan_width=True ) @staticmethod diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index b45885f312..987091a16e 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -192,7 +192,7 @@ def test_get_satpos(self, included_prefixes, preference, expected_result): "attrs", ( {}, - {'orbital_parameters': {'projection_longitude': 1}}, + {'orbital_parameters': {'projection_longitude': 1}}, {'satellite_altitude': 1} ) ) @@ -207,16 +207,17 @@ def test_get_satpos_from_satname(self, caplog): import pyorbital.tlefile data_arr = xr.DataArray( - (), - attrs={ - "platform_name": "Meteosat-42", - "sensor": "irives", - "start_time": datetime.datetime(2031, 11, 20, 19, 18, 17)}) + (), + attrs={ + "platform_name": "Meteosat-42", + "sensor": "irives", + "start_time": datetime.datetime(2031, 11, 20, 19, 18, 
17) + }) with mock.patch("pyorbital.tlefile.read") as plr: plr.return_value = pyorbital.tlefile.Tle( - "Meteosat-42", - line1="1 40732U 15034A 22011.84285506 .00000004 00000+0 00000+0 0 9995", - line2="2 40732 0.2533 325.0106 0000976 118.8734 330.4058 1.00272123 23817") + "Meteosat-42", + line1="1 40732U 15034A 22011.84285506 .00000004 00000+0 00000+0 0 9995", + line2="2 40732 0.2533 325.0106 0000976 118.8734 330.4058 1.00272123 23817") with caplog.at_level(logging.WARNING): (lon, lat, alt) = get_satpos(data_arr, use_tle=True) assert "Orbital parameters missing from metadata" in caplog.text @@ -238,13 +239,15 @@ def test_make_fake_scene(): assert make_fake_scene({}).keys() == [] sc = make_fake_scene({ - "six": np.arange(25).reshape(5, 5)}) + "six": np.arange(25).reshape(5, 5) + }) assert len(sc.keys()) == 1 assert sc.keys().pop()['name'] == "six" assert sc["six"].attrs["area"].shape == (5, 5) sc = make_fake_scene({ - "seven": np.arange(3*7).reshape(3, 7), - "eight": np.arange(3*8).reshape(3, 8)}, + "seven": np.arange(3 * 7).reshape(3, 7), + "eight": np.arange(3 * 8).reshape(3, 8) + }, daskify=True, area=False, common_attrs={"repetency": "fourteen hundred per centimetre"}) @@ -254,9 +257,10 @@ def test_make_fake_scene(): assert isinstance(sc["seven"].data, da.Array) sc = make_fake_scene({ "nine": xr.DataArray( - np.arange(2*9).reshape(2, 9), + np.arange(2 * 9).reshape(2, 9), dims=("y", "x"), - attrs={"please": "preserve", "answer": 42})}, + attrs={"please": "preserve", "answer": 42}) + }, common_attrs={"bad words": "semprini bahnhof veerooster winterbanden"}) assert sc["nine"].attrs.keys() >= {"please", "answer", "bad words", "area"} @@ -295,6 +299,7 @@ def depwarn(): DeprecationWarning, stacklevel=2 ) + warnings.filterwarnings("ignore", category=DeprecationWarning) debug_on(False) filts_before = warnings.filters.copy() @@ -417,32 +422,41 @@ def test_get_legacy_chunk_size(): @pytest.mark.parametrize( - ("shape", "chunk_dtype", "num_hr", "lr_mult", "scan_width", "exp_result"), + ("chunks", "shape", "previous_chunks", "lr_mult", "chunk_dtype", "exp_result"), [ - ((1000, 3200), np.float32, 40, 4, True, (160, -1)), # 1km swath - ((1000 // 5, 3200 // 5), np.float32, 40, 20, True, (160 // 5, -1)), # 5km swath - ((1000 * 4, 3200 * 4), np.float32, 40, 1, True, (160 * 4, -1)), # 250m swath - ((21696 // 2, 21696 // 2), np.float32, 226, 2, False, (1469, 1469)), # 1km area (ABI chunk 226) - ((21696 // 2, 21696 // 2), np.float64, 226, 2, False, (1017, 1017)), # 1km area (64-bit) - ((21696 // 3, 21696 // 3), np.float32, 226, 6, False, (1469 // 3, 1469 // 3)), # 3km area - ((21696, 21696), np.float32, 226, 1, False, (1469 * 2, 1469 * 2)), # 500m area - ((7, 1000 * 4, 3200 * 4), np.float32, 40, 1, True, (1, 160 * 4, -1)), # 250m swath with bands - ((1, 7, 1000, 3200), np.float32, 40, 1, True, ((1,), (7,), (1000,), (1198, 1198, 804))), # lots of dimensions + # 1km swath + (("auto", -1), (1000, 3200), (40, 40), (4, 4), np.float32, (160, -1)), + # 5km swath + (("auto", -1), (1000 // 5, 3200 // 5), (40, 40), (20, 20), np.float32, (160 // 5, -1)), + # 250m swath + (("auto", -1), (1000 * 4, 3200 * 4), (40, 40), (1, 1), np.float32, (160 * 4, -1)), + # 1km area (ABI chunk 226): + (("auto", "auto"), (21696 // 2, 21696 // 2), (226, 226), (2, 2), np.float32, (1469, 1469)), + # 1km area (64-bit) + (("auto", "auto"), (21696 // 2, 21696 // 2), (226, 226), (2, 2), np.float64, (1017, 1017)), + # 3km area + (("auto", "auto"), (21696 // 3, 21696 // 3), (226, 226), (6, 6), np.float32, (1469 // 3, 1469 // 3)), + # 500m area + 
(("auto", "auto"), (21696, 21696), (226, 226), (1, 1), np.float32, (1469 * 2, 1469 * 2)), + # 250m swath with bands: + ((1, "auto", -1), (7, 1000 * 4, 3200 * 4), (1, 40, 40), (1, 1, 1), np.float32, (1, 160 * 4, -1)), + # lots of dimensions: + ((1, 1, "auto", -1), (1, 7, 1000, 3200), (1, 1, 40, 40), (1, 1, 1, 1), np.float32, (1, 1, 1000, -1)), ], ) -def test_resolution_chunking(shape, chunk_dtype, num_hr, lr_mult, scan_width, exp_result): - """Test chunks_by_resolution helper function.""" +def test_resolution_chunking(chunks, shape, previous_chunks, lr_mult, chunk_dtype, exp_result): + """Test normalize_low_res_chunks helper function.""" import dask.config - from satpy.utils import chunks_by_resolution + from satpy.utils import normalize_low_res_chunks with dask.config.set({"array.chunk-size": "32MiB"}): - chunk_results = chunks_by_resolution( + chunk_results = normalize_low_res_chunks( + chunks, shape, - chunk_dtype, - num_hr, + previous_chunks, lr_mult, - whole_scan_width=scan_width, + chunk_dtype, ) assert chunk_results == exp_result for chunk_size in chunk_results: @@ -570,19 +584,21 @@ def test_find_in_ancillary(): """Test finding a dataset in ancillary variables.""" from satpy.utils import find_in_ancillary index_finger = xr.DataArray( - data=np.arange(25).reshape(5, 5), - dims=("y", "x"), - attrs={"name": "index-finger"}) + data=np.arange(25).reshape(5, 5), + dims=("y", "x"), + attrs={"name": "index-finger"}) ring_finger = xr.DataArray( - data=np.arange(25).reshape(5, 5), - dims=("y", "x"), - attrs={"name": "ring-finger"}) + data=np.arange(25).reshape(5, 5), + dims=("y", "x"), + attrs={"name": "ring-finger"}) hand = xr.DataArray( - data=np.arange(25).reshape(5, 5), - dims=("y", "x"), - attrs={"name": "hand", - "ancillary_variables": [index_finger, index_finger, ring_finger]}) + data=np.arange(25).reshape(5, 5), + dims=("y", "x"), + attrs={ + "name": "hand", + "ancillary_variables": [index_finger, index_finger, ring_finger] + }) assert find_in_ancillary(hand, "ring-finger") is ring_finger with pytest.raises( diff --git a/satpy/utils.py b/satpy/utils.py index 94f7fbf86e..6b005026d2 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -26,7 +26,7 @@ import warnings from contextlib import contextmanager from copy import deepcopy -from typing import Mapping, Optional +from typing import Literal, Mapping, Optional from urllib.parse import urlparse import dask.utils @@ -633,13 +633,13 @@ def _get_pytroll_chunk_size(): return None -def chunks_by_resolution( +def normalize_low_res_chunks( + chunks: tuple[int | Literal["auto"], ...], input_shape: tuple[int, ...], + previous_chunks: tuple[int, ...], + low_res_multipliers: tuple[int, ...], input_dtype: DTypeLike, - num_high_res_elements: int, - low_res_multiplier: int, - whole_scan_width: bool = False, -) -> tuple[int | tuple[int, ...], ...]: +) -> tuple[int, ...]: """Compute dask chunk sizes based on data resolution. First, chunks are computed for the highest resolution version of the data. @@ -653,16 +653,22 @@ def chunks_by_resolution( geographic region. This also means replicating or aggregating one resolution and then combining arrays should not require any rechunking. - .. note:: - - Only 2 or 3-dimensional shapes are supported. In the case of 3D arrays - the first dimension is assumed to be "bands" and is given a chunk - size of 1. For shapes with other numbers of dimensions, the chunk size - for the entire array is determined by dask's "auto" chunking and - resolution is ignored. - Args: + chunks: Requested chunk size for each dimension. 
This is passed + directly to dask. Use ``"auto"`` for dimensions that should have + chunks determined for them, ``-1`` for dimensions that should be + whole (not chunked), and ``1`` or any other positive integer for + dimensions that have a known chunk size beforehand. input_shape: Shape of the array to compute dask chunk size for. + previous_chunks: Any previous chunking or structure of the data. This + can also be thought of as the smallest number of high (fine) resolution + elements that make up a single "unit" or chunk of data. This could + be a multiple or factor of the scan size for some instruments and/or + could be based on the on-disk chunk size. This value ensures that + chunks are aligned to the underlying data structure for best + performance. + low_res_multipliers: Number of high (fine) resolution pixels that fit + in a single low (coarse) resolution pixel. input_dtype: Dtype for the final unscaled array. This is usually 32-bit float (``np.float32``) or 64-bit float (``np.float64``) for non-category data. If this doesn't represent the final data @@ -671,49 +677,34 @@ def chunks_by_resolution( configuration. Sometimes it is useful to keep this as a single dtype for all reading functionality (ex. ``np.float32``) in order to keep all read variable chunks the same size regardless of dtype. - num_high_res_elements: Smallest number of high (fine) resolution - elements that make up a single "unit" or chunk of data. This could - be a multiple or factor of the scan size for some instruments and/or - could be based on the on-disk chunk size. This value ensures that - chunks are aligned to the underlying data structure for best - performance. - low_res_multiplier: Number of high (fine) resolution pixels that fit - in a single low (coarse) resolution pixel. - whole_scan_width: To create the entire width (x dimension) of the - array as a single chunk. This is useful in cases when future - operations will operate on entire instrument scans of data at - a time. For example, polar-orbiter scan geolocation being - interpolated from low resolution to high resolution. Returns: A tuple where each element is the chunk size for that axis/dimension. 
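For orientation, a usage sketch of the new signature; the numbers mirror the 1 km swath case exercised in the updated test_resolution_chunking above, and the resulting row chunk depends on dask's configured chunk size:

    import dask.config
    import numpy as np

    from satpy.utils import normalize_low_res_chunks

    # 1 km array whose 250 m counterpart comes in 40-line scans: 4 fine pixels per coarse pixel.
    with dask.config.set({"array.chunk-size": "32MiB"}):
        row_chunk, col_chunk = normalize_low_res_chunks(
            ("auto", -1),   # chunk the rows automatically, keep each row whole
            (1000, 3200),   # shape of the 1 km array
            (40, 40),       # previous/underlying chunking in fine-resolution pixels
            (4, 4),         # fine pixels per coarse pixel
            np.float32,
        )
    print(row_chunk, col_chunk)  # 160 -1 with this chunk-size setting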
""" - if len(input_shape) not in (2, 3): - # we're not sure about this shape so don't guess - return dask.array.core.normalize_chunks("auto", shape=input_shape, dtype=input_dtype) - - pre_non_yx_chunks, yx_shape, post_non_yx_chunks = _split_non_yx_chunks(input_shape) - high_res_shape = tuple(dim_size * low_res_multiplier for dim_size in yx_shape) - col_chunks = -1 if whole_scan_width else "auto" + if any(len(input_shape) != len(param) for param in (low_res_multipliers, chunks, previous_chunks)): + raise ValueError("Input shape, low res multipliers, chunks, and previous chunks must all be the same size") + high_res_shape = tuple(dim_size * lr_mult for dim_size, lr_mult in zip(input_shape, low_res_multipliers)) chunks_for_high_res = dask.array.core.normalize_chunks( - ("auto", col_chunks), + chunks, shape=high_res_shape, - dtype=input_dtype - ) - var_row_chunks = _low_res_chunks_from_high_res( - chunks_for_high_res[0][0], - num_high_res_elements, - low_res_multiplier + dtype=input_dtype, ) - var_col_chunks = -1 - if not whole_scan_width: - var_col_chunks = _low_res_chunks_from_high_res( - chunks_for_high_res[1][0], - num_high_res_elements, - low_res_multiplier - ) - return pre_non_yx_chunks + (var_row_chunks, var_col_chunks) + post_non_yx_chunks + low_res_chunks: list[int] = [] + for req_chunks, hr_chunks, prev_chunks, lr_mult in zip( + chunks, + chunks_for_high_res, + previous_chunks, low_res_multipliers + ): + if req_chunks != "auto": + low_res_chunks.append(req_chunks) + continue + low_res_chunks.append(_low_res_chunks_from_high_res( + hr_chunks[0], + prev_chunks, + lr_mult, + )) + return tuple(low_res_chunks) def _split_non_yx_chunks( From 785f1c5b68c14b986c022d18fce41e00a8c3bdbe Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Tue, 3 Oct 2023 23:04:50 +0200 Subject: [PATCH 257/702] add meirink calibration method --- satpy/readers/seviri_base.py | 69 +++++++++++++++++++++++++++++++- satpy/readers/seviri_l1b_hrit.py | 1 + 2 files changed, 68 insertions(+), 2 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 131fe39ad4..c1d288f4df 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -166,7 +166,7 @@ """ import warnings -from datetime import timedelta +from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -353,6 +353,62 @@ 'ALPHA': 0.9981, 'BETA': 0.5635}} +# Calibration coefficients from Meirink, J.F., R.A. Roebeling and P. Stammes, 2013: +# Inter-calibration of polar imager solar channels using SEVIRI, Atm. Meas. Tech., 6, +# 2495-2508, doi:10.5194/amt-6-2495-2013 + +# To obtain the slope for the calibration, one should use the routine get_seviri_meirink_slope + +# Epoch for the MEIRINK re-calibration +DATE_2000 = datetime(2000, 1, 1) + +MEIRINK_COEFS = {} + +# Meteosat-8 + +MEIRINK_COEFS[321] = {'VIS006': (24.346, 0.3739), + 'VIS008': (30.989, 0.3111), + 'IR_016': (22.869, 0.0065) + } + +# Meteosat-9 + +MEIRINK_COEFS[322] = {'VIS006': (21.026, 0.3739), + 'VIS008': (26.875, 0.3111), + 'IR_016': (21.394, 0.0065) + } + +# Meteosat-10 + +MEIRINK_COEFS[323] = {'VIS006': (19.829, 0.5856), + 'VIS008': (25.284, 0.6787), + 'IR_016': (23.066, -0.0286) + } + +# Meteosat-11 + +MEIRINK_COEFS[324] = {'VIS006': (20.515, 0.3600), + 'VIS008': (25.803, 0.4844), + 'IR_016': (22.354, -0.0187) + } + + +def get_meirink_slope(meirink_coefs, acquisition_time): + """Compute the slope for the visible channel calibration according to Meirink 2013. 
+ + S = A + B * 1.e-3* Day + + S is here in µW m-2 sr-1 (cm-1)-1 + + EUMETSAT calibration is given in mW m-2 sr-1 (cm-1)-1, so an extra factor of 1/1000 must + be applied. + """ + A = meirink_coefs[0] + B = meirink_coefs[1] + delta_t = (acquisition_time - DATE_2000).total_seconds() + S = A + B * delta_t / (3600*24) / 1000. + return S/1000 + def get_cds_time(days, msecs): """Compute timestamp given the days since epoch and milliseconds of the day. @@ -559,6 +615,11 @@ def __init__(self, platform_id, channel_name, coefs, calib_mode, scan_time): self._platform_id = platform_id self._channel_name = channel_name self._coefs = coefs + if channel_name in ['VIS006', 'VIS008', 'IR_016']: + self._coefs['coefs']['MEIRINK'] = MEIRINK_COEFS[platform_id][channel_name] + else: + self._coefs['coefs']['MEIRINK'] = None + self._calib_mode = calib_mode.upper() self._scan_time = scan_time self._algo = SEVIRICalibrationAlgorithm( @@ -566,7 +627,7 @@ def __init__(self, platform_id, channel_name, coefs, calib_mode, scan_time): scan_time=self._scan_time ) - valid_modes = ('NOMINAL', 'GSICS') + valid_modes = ('NOMINAL', 'GSICS', 'MEIRINK') if self._calib_mode not in valid_modes: raise ValueError( 'Invalid calibration mode: {}. Choose one of {}'.format( @@ -622,6 +683,10 @@ def get_gain_offset(self): internal_gain = gsics_gain internal_offset = gsics_offset + if self._calib_mode == 'MEIRINK': + if coefs['MEIRINK'] is not None: + internal_gain = get_meirink_slope(coefs['MEIRINK'], self._scan_time) + # Override with external coefficients, if any. gain = coefs['EXTERNAL'].get('gain', internal_gain) offset = coefs['EXTERNAL'].get('offset', internal_offset) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 2b153edfcc..4480afdbfb 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -247,6 +247,7 @@ mask_bad_quality, pad_data_horizontally, round_nom_time, + MEIRINK_CALIB, ) from satpy.readers.seviri_l1b_native_hdr import hrit_epilogue, hrit_prologue, impf_configuration from satpy.utils import get_legacy_chunk_size From dfe1dc9d169d08aac16d63acffad1c4ce81c3acc Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 5 Oct 2023 06:51:37 -0500 Subject: [PATCH 258/702] Ensure resolution chunks are disk-chunk aligned --- satpy/tests/test_utils.py | 6 +++--- satpy/utils.py | 18 ++---------------- 2 files changed, 5 insertions(+), 19 deletions(-) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 987091a16e..4b22d51f93 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -431,13 +431,13 @@ def test_get_legacy_chunk_size(): # 250m swath (("auto", -1), (1000 * 4, 3200 * 4), (40, 40), (1, 1), np.float32, (160 * 4, -1)), # 1km area (ABI chunk 226): - (("auto", "auto"), (21696 // 2, 21696 // 2), (226, 226), (2, 2), np.float32, (1469, 1469)), + (("auto", "auto"), (21696 // 2, 21696 // 2), (226, 226), (2, 2), np.float32, (1356, 1356)), # 1km area (64-bit) (("auto", "auto"), (21696 // 2, 21696 // 2), (226, 226), (2, 2), np.float64, (1017, 1017)), # 3km area - (("auto", "auto"), (21696 // 3, 21696 // 3), (226, 226), (6, 6), np.float32, (1469 // 3, 1469 // 3)), + (("auto", "auto"), (21696 // 3, 21696 // 3), (226, 226), (6, 6), np.float32, (1356 // 3, 1356 // 3)), # 500m area - (("auto", "auto"), (21696, 21696), (226, 226), (1, 1), np.float32, (1469 * 2, 1469 * 2)), + (("auto", "auto"), (21696, 21696), (226, 226), (1, 1), np.float32, (1356 * 2, 1356 * 2)), # 250m swath with bands: ((1, "auto", -1), (7, 1000 * 4, 3200 * 4), (1, 
40, 40), (1, 1, 1), np.float32, (1, 160 * 4, -1)), # lots of dimensions: diff --git a/satpy/utils.py b/satpy/utils.py index 6b005026d2..73257d0e4f 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -689,6 +689,7 @@ def normalize_low_res_chunks( chunks, shape=high_res_shape, dtype=input_dtype, + previous_chunks=previous_chunks, ) low_res_chunks: list[int] = [] for req_chunks, hr_chunks, prev_chunks, lr_mult in zip( @@ -699,11 +700,7 @@ def normalize_low_res_chunks( if req_chunks != "auto": low_res_chunks.append(req_chunks) continue - low_res_chunks.append(_low_res_chunks_from_high_res( - hr_chunks[0], - prev_chunks, - lr_mult, - )) + low_res_chunks.append(int(max(hr_chunks[0] / lr_mult, prev_chunks / lr_mult))) return tuple(low_res_chunks) @@ -719,17 +716,6 @@ def _split_non_yx_chunks( return pre_non_yx_chunks, yx_shape, post_non_yx_chunks -def _low_res_chunks_from_high_res( - chunk_size_for_high_res: int, - num_high_res_elements: int, - low_res_multiplier: int -) -> int: - aligned_chunk_size = np.round(chunk_size_for_high_res / num_high_res_elements) * num_high_res_elements - low_res_chunk_size = aligned_chunk_size / low_res_multiplier - # avoid getting 0 chunk size - return int(max(low_res_chunk_size, num_high_res_elements / low_res_multiplier)) - - def convert_remote_files_to_fsspec(filenames, storage_options=None): """Check filenames for transfer protocols, convert to FSFile objects if possible.""" if storage_options is None: From 04b0161f3239fcc608faa5a885307e17031db3ba Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 5 Oct 2023 06:56:52 -0500 Subject: [PATCH 259/702] Add lower-limit of on-disk chunk size for resolution-based chunking --- satpy/tests/test_utils.py | 4 ++-- satpy/utils.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 4b22d51f93..d25b307675 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -426,8 +426,8 @@ def test_get_legacy_chunk_size(): [ # 1km swath (("auto", -1), (1000, 3200), (40, 40), (4, 4), np.float32, (160, -1)), - # 5km swath - (("auto", -1), (1000 // 5, 3200 // 5), (40, 40), (20, 20), np.float32, (160 // 5, -1)), + # 5km swath - 160 / 5 == 32 which is smaller than our on-disk chunk size of 40 + (("auto", -1), (1000 // 5, 3200 // 5), (40, 40), (20, 20), np.float32, (40, -1)), # 250m swath (("auto", -1), (1000 * 4, 3200 * 4), (40, 40), (1, 1), np.float32, (160 * 4, -1)), # 1km area (ABI chunk 226): diff --git a/satpy/utils.py b/satpy/utils.py index 73257d0e4f..7587617165 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -700,7 +700,7 @@ def normalize_low_res_chunks( if req_chunks != "auto": low_res_chunks.append(req_chunks) continue - low_res_chunks.append(int(max(hr_chunks[0] / lr_mult, prev_chunks / lr_mult))) + low_res_chunks.append(int(max(hr_chunks[0] / lr_mult, prev_chunks))) return tuple(low_res_chunks) From 984a7331ecccabb5fd976d8f5899e7eb91ea5e7a Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 5 Oct 2023 15:00:56 +0200 Subject: [PATCH 260/702] add test for the slope of the Meirink coefficients --- satpy/tests/reader_tests/test_seviri_base.py | 32 ++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index 157ed88bbf..f7c8525ab7 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -37,6 +37,9 @@ pad_data_horizontally, pad_data_vertically, round_nom_time, + 
SEVIRICalibrationHandler, + MEIRINK_COEFS, + DATE_2000, ) from satpy.utils import get_legacy_chunk_size @@ -358,3 +361,32 @@ def test_get_orbit_polynomial_exceptions(self, orbit_polynomials, time): finder = OrbitPolynomialFinder(orbit_polynomials) with pytest.raises(NoValidOrbitParams): finder.get_orbit_polynomial(time=time) + + +class TestMeirinkSlope: + """Unit tests for the slope of Meirink calibration.""" + + @pytest.mark.parametrize('platform_id', [321, 322, 323, 324]) + @pytest.mark.parametrize('channel_name', ['VIS006', 'VIS008', 'IR_016']) + def test_get_meirink_slope_epoch(self, platform_id, channel_name): + """Test the value of the slope of the Meirink calibration on 2000-01-01.""" + coefs = {'coefs': {}} + coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} + coefs['coefs']['EXTERNAL'] = {} + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK', DATE_2000) + assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS[platform_id][channel_name][0]/1000. + + @pytest.mark.parametrize('platform_id', [321, 322, 323, 324]) + @pytest.mark.parametrize('channel_name', ['VIS006', 'VIS008', 'IR_016']) + def test_get_meirink_slope_2020(self, platform_id, channel_name): + """Test the value of the slope of the Meirink calibration on 2020-01-01.""" + DATE_2020 = datetime(2020, 1, 1) + coefs = {'coefs': {}} + coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} + coefs['coefs']['EXTERNAL'] = {} + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK', DATE_2020) + A, B = MEIRINK_COEFS[platform_id][channel_name] + delta_t = (DATE_2020 - DATE_2000).total_seconds() + S = A + B * delta_t / (3600*24) / 1000. + S = S/1000 + assert calibration_handler.get_gain_offset()[0] == S From a9521ef3f9af36a7bd7aa8686443649c8f2e12a5 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 5 Oct 2023 16:45:28 +0200 Subject: [PATCH 261/702] remove un-necessary import --- satpy/readers/seviri_l1b_hrit.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 4480afdbfb..2b153edfcc 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -247,7 +247,6 @@ mask_bad_quality, pad_data_horizontally, round_nom_time, - MEIRINK_CALIB, ) from satpy.readers.seviri_l1b_native_hdr import hrit_epilogue, hrit_prologue, impf_configuration from satpy.utils import get_legacy_chunk_size From 2036251d4b868bda8b51b13c8bf3a8a1e6e881dd Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 5 Oct 2023 13:03:17 -0500 Subject: [PATCH 262/702] Add info about handling on-disk chunking --- .../modis_tests/_modis_fixtures.py | 2 +- satpy/tests/test_utils.py | 14 +++++++----- satpy/utils.py | 22 +++++++------------ 3 files changed, 17 insertions(+), 21 deletions(-) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index dfc8f0aec6..49331f5421 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -62,7 +62,7 @@ def _shape_for_resolution(resolution: int) -> tuple[int, int]: return factor * shape_1km[0], factor * shape_1km[1] -def _generate_lonlat_data(resolution: int) -> np.ndarray: +def _generate_lonlat_data(resolution: int) -> tuple[np.ndarray, np.ndarray]: shape = _shape_for_resolution(resolution) lat = np.repeat(np.linspace(35., 45., shape[0])[:, None], shape[1], 1) lat *= np.linspace(0.9, 1.1, shape[1]) diff 
--git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index d25b307675..ef6a359cdd 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -426,18 +426,20 @@ def test_get_legacy_chunk_size(): [ # 1km swath (("auto", -1), (1000, 3200), (40, 40), (4, 4), np.float32, (160, -1)), - # 5km swath - 160 / 5 == 32 which is smaller than our on-disk chunk size of 40 - (("auto", -1), (1000 // 5, 3200 // 5), (40, 40), (20, 20), np.float32, (40, -1)), + # 5km swath + (("auto", -1), (1000 // 5, 3200 // 5), (40, 40), (20, 20), np.float32, (160 / 5, -1)), # 250m swath (("auto", -1), (1000 * 4, 3200 * 4), (40, 40), (1, 1), np.float32, (160 * 4, -1)), # 1km area (ABI chunk 226): - (("auto", "auto"), (21696 // 2, 21696 // 2), (226, 226), (2, 2), np.float32, (1356, 1356)), + (("auto", "auto"), (21696 // 2, 21696 // 2), (226*4, 226*4), (2, 2), np.float32, (1356, 1356)), # 1km area (64-bit) - (("auto", "auto"), (21696 // 2, 21696 // 2), (226, 226), (2, 2), np.float64, (1017, 1017)), + (("auto", "auto"), (21696 // 2, 21696 // 2), (226*4, 226*4), (2, 2), np.float64, (904, 904)), # 3km area - (("auto", "auto"), (21696 // 3, 21696 // 3), (226, 226), (6, 6), np.float32, (1356 // 3, 1356 // 3)), + (("auto", "auto"), (21696 // 3, 21696 // 3), (226*4, 226*4), (6, 6), np.float32, (452, 452)), # 500m area - (("auto", "auto"), (21696, 21696), (226, 226), (1, 1), np.float32, (1356 * 2, 1356 * 2)), + (("auto", "auto"), (21696, 21696), (226*4, 226*4), (1, 1), np.float32, (1356 * 2, 1356 * 2)), + # 500m area (64-bit) + (("auto", "auto"), (21696, 21696), (226*4, 226*4), (1, 1), np.float64, (904 * 2, 904 * 2)), # 250m swath with bands: ((1, "auto", -1), (7, 1000 * 4, 3200 * 4), (1, 40, 40), (1, 1, 1), np.float32, (1, 160 * 4, -1)), # lots of dimensions: diff --git a/satpy/utils.py b/satpy/utils.py index 7587617165..67150fed9d 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -666,7 +666,13 @@ def normalize_low_res_chunks( be a multiple or factor of the scan size for some instruments and/or could be based on the on-disk chunk size. This value ensures that chunks are aligned to the underlying data structure for best - performance. + performance. On-disk chunk sizes should be multiplied by the + largest low resolution multiplier if it is the same between all + files (ex. 500m file has 226 chunk size, 1km file has 226 chunk + size, etc).. Otherwise, the resulting low resolution chunks may + not be aligned to the on-disk chunks. For example, if dask decides + on a chunk size of 226 * 3 for 500m data, that becomes 226 * 3 / 2 + for 1km data which is not aligned to the on-disk chunk size of 226. low_res_multipliers: Number of high (fine) resolution pixels that fit in a single low (coarse) resolution pixel. input_dtype: Dtype for the final unscaled array. This is usually @@ -700,22 +706,10 @@ def normalize_low_res_chunks( if req_chunks != "auto": low_res_chunks.append(req_chunks) continue - low_res_chunks.append(int(max(hr_chunks[0] / lr_mult, prev_chunks))) + low_res_chunks.append(round(max(hr_chunks[0] / lr_mult, prev_chunks / lr_mult))) return tuple(low_res_chunks) -def _split_non_yx_chunks( - input_shape: tuple[int, ...], -) -> tuple[tuple[int, ...] | tuple[()], tuple[int, int], tuple[int, ...] | tuple[()]]: - pre_non_yx_chunks: tuple[int, ...] = tuple() - post_non_yx_chunks: tuple[int, ...] 
= tuple() - yx_shape = (input_shape[-2], input_shape[-1]) - if len(input_shape) == 3: - # assume (band, y, x) - pre_non_yx_chunks = (1,) - return pre_non_yx_chunks, yx_shape, post_non_yx_chunks - - def convert_remote_files_to_fsspec(filenames, storage_options=None): """Check filenames for transfer protocols, convert to FSFile objects if possible.""" if storage_options is None: From f5ddc6e11af8924c7a859651dd5ec1243e05408d Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 5 Oct 2023 23:41:17 +0200 Subject: [PATCH 263/702] simplify GERB reader --- satpy/readers/gerb_l2_hr_h5.py | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index 2cd99d359d..64f3fb8d71 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -35,14 +35,13 @@ LOG = logging.getLogger(__name__) -def gerb_get_dataset(hfile, name, ds_info): +def gerb_get_dataset(ds, ds_info): """ Load a GERB dataset in memory from a HDF5 file or HDF5FileHandler. The routine takes into account the quantisation factor and fill values. """ - ds = hfile[name] - ds_attrs = hfile[name].attrs + ds_attrs = ds.attrs ds_fill = ds_info['fill_value'] fill_mask = ds != ds_fill if 'Quantisation Factor' in ds_attrs and 'Unit' in ds_attrs: @@ -56,13 +55,6 @@ def gerb_get_dataset(hfile, name, ds_info): class GERB_HR_FileHandler(HDF5FileHandler): """File handler for GERB L2 High Resolution H5 files.""" - def __init__(self, filename, filename_info, filetype_info): - """Init the file handler.""" - super(GERB_HR_FileHandler, self).__init__(filename, - filename_info, - filetype_info) - self._h5fh = h5py.File(self.filename, 'r') - @property def end_time(self): """Get end time.""" @@ -79,7 +71,7 @@ def get_dataset(self, ds_id, ds_info): if ds_name not in ['Solar Flux', 'Thermal Flux', 'Solar Radiance', 'Thermal Radiance']: raise KeyError(f"{ds_name} is an unknown dataset for this reader.") - ds = gerb_get_dataset(self, f'Radiometry/{ds_name}', ds_info) + ds = gerb_get_dataset(self[f'Radiometry/{ds_name}'], ds_info) ds.attrs.update({'start_time': self.start_time, 'data_time': self.start_time, 'end_time': self.end_time}) From 39c7d56df8d010ffa37895883bf8a8c351cd48df Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 5 Oct 2023 23:45:25 +0200 Subject: [PATCH 264/702] flake8 --- satpy/readers/gerb_l2_hr_h5.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index 64f3fb8d71..f663b3040f 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -27,8 +27,6 @@ import logging from datetime import timedelta -import h5py - from satpy.readers.hdf5_utils import HDF5FileHandler from satpy.resample import get_area_def From e6e5c6e5f0085c8c8216951b80a46a468b0f75a2 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 2 Oct 2023 15:44:47 -0500 Subject: [PATCH 265/702] Fix sunz correction converting 32-bit floats to 64-bit floats Also fixes that input data types were inconsistent between dask arrays and computed numpy results. 
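A minimal sketch of the promotion being avoided (illustrative values only): the
cosine of the solar zenith angle is derived from 64-bit lon/lats, so multiplying
32-bit data by the raw correction promotes it to 64-bit unless the correction is
cast back to the data's dtype first.

    import numpy as np
    data = np.ones((2, 2), dtype=np.float32)
    cos_zen = np.full((2, 2), 0.5, dtype=np.float64)    # lon/lat-derived angles are 64-bit
    (data * (1. / cos_zen)).dtype                       # float64: the unwanted promotion
    (data * (1. / cos_zen).astype(data.dtype)).dtype    # float32: dtype preserved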
--- satpy/modifiers/angles.py | 9 +++++++-- satpy/tests/test_modifiers.py | 10 ++++++++-- 2 files changed, 15 insertions(+), 4 deletions(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 28adb60028..47aedcea4d 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -399,6 +399,7 @@ def get_cos_sza(data_arr: xr.DataArray) -> xr.DataArray: @cache_to_zarr_if("cache_lonlats", sanitize_args_func=_sanitize_args_with_chunks) def _get_valid_lonlats(area: PRGeometry, chunks: Union[int, str, tuple] = "auto") -> tuple[da.Array, da.Array]: with ignore_invalid_float_warnings(): + # NOTE: This defaults to 64-bit floats due to needed precision for X/Y coordinates lons, lats = area.get_lonlats(chunks=chunks) lons = da.where(lons >= 1e30, np.nan, lons) lats = da.where(lats >= 1e30, np.nan, lats) @@ -526,7 +527,7 @@ def _sunzen_corr_cos_ndarray(data: np.ndarray, max_sza_rad = np.deg2rad(max_sza) if max_sza is not None else max_sza # Cosine correction - corr = 1. / cos_zen + corr = (1. / cos_zen).astype(data.dtype, copy=False) if max_sza is not None: # gradually fall off for larger zenith angle grad_factor = (np.arccos(cos_zen) - limit_rad) / (max_sza_rad - limit_rad) @@ -538,7 +539,11 @@ def _sunzen_corr_cos_ndarray(data: np.ndarray, else: # Use constant value (the limit) for larger zenith angles grad_factor = 1. - corr = np.where(cos_zen > limit_cos, corr, grad_factor / limit_cos) + corr = np.where( + cos_zen > limit_cos, + corr, + (grad_factor / limit_cos).astype(data.dtype, copy=False) + ) # Force "night" pixels to 0 (where SZA is invalid) corr[np.isnan(cos_zen)] = 0 return data * corr diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index c21a514808..ec69a7e05e 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -110,9 +110,13 @@ def sunz_sza(): class TestSunZenithCorrector: """Test case for the zenith corrector.""" - def test_basic_default_not_provided(self, sunz_ds1): + @pytest.mark.parametrize("as_32bit", [False, True]) + def test_basic_default_not_provided(self, sunz_ds1, as_32bit): """Test default limits when SZA isn't provided.""" from satpy.modifiers.geometry import SunZenithCorrector + + if as_32bit: + sunz_ds1 = sunz_ds1.astype(np.float32) comp = SunZenithCorrector(name='sza_test', modifiers=tuple()) res = comp((sunz_ds1,), test_attr='test') np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) @@ -120,7 +124,9 @@ def test_basic_default_not_provided(self, sunz_ds1): assert 'x' in res.coords ds1 = sunz_ds1.copy().drop_vars(('y', 'x')) res = comp((ds1,), test_attr='test') - np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) + res_np = res.compute() + np.testing.assert_allclose(res_np.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) + assert res.dtype == res_np.dtype assert 'y' not in res.coords assert 'x' not in res.coords From 72379235805731ded6b28665eab9d7c86e6c7de2 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 2 Oct 2023 15:57:24 -0500 Subject: [PATCH 266/702] Update AHI HSD reader to have resolution-based chunking --- satpy/readers/ahi_hsd.py | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index e06f7ebc50..12032e8395 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -78,7 +78,7 @@ np2str, unzip_file, ) -from satpy.utils import get_chunk_size_limit +from 
satpy.utils import chunks_by_resolution AHI_CHANNEL_NAMES = ("1", "2", "3", "4", "5", "6", "7", "8", "9", "10", @@ -615,15 +615,16 @@ def _read_header(self, fp_): return header - def _read_data(self, fp_, header): + def _read_data(self, fp_, header, resolution): """Read data block.""" nlines = int(header["block2"]['number_of_lines'][0]) ncols = int(header["block2"]['number_of_columns'][0]) - chunks = da.core.normalize_chunks("auto", - shape=(nlines, ncols), - limit=get_chunk_size_limit(), - dtype='f8', - previous_chunks=(550, 550)) + chunks = chunks_by_resolution( + (nlines, ncols), + np.float32, + 550, + int(resolution / 500), + ) return da.from_array(np.memmap(self.filename, offset=fp_.tell(), dtype=' Date: Mon, 2 Oct 2023 20:02:59 -0500 Subject: [PATCH 267/702] Fix AHI tests now that resolution is properly used --- satpy/tests/reader_tests/test_ahi_hsd.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 9832c3eb29..82ef5b905b 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -293,11 +293,12 @@ def test_bad_calibration(self): def test_actual_satellite_position(self, round_actual_position, expected_result): """Test that rounding of the actual satellite position can be controlled.""" with _fake_hsd_handler(fh_kwargs={"round_actual_position": round_actual_position}) as fh: - ds_id = make_dataid(name="B01") + ds_id = make_dataid(name="B01", resolution=1000) ds_info = { "units": "%", "standard_name": "some_name", "wavelength": (0.1, 0.2, 0.3), + "resolution": 1000, } metadata = fh._get_metadata(ds_id, ds_info) orb_params = metadata["orbital_parameters"] @@ -365,10 +366,17 @@ def test_read_band_from_actual_file(self, hsd_file_jp01): filename_info = {"segment": 1, "total_segments": 1} filetype_info = {"file_type": "blahB01"} fh = AHIHSDFileHandler(hsd_file_jp01, filename_info, filetype_info) - key = {"name": "B01", "calibration": "counts"} + key = {"name": "B01", "calibration": "counts", "resolution": 1000} import dask with dask.config.set({"array.chunk-size": "16MiB"}): - data = fh.read_band(key, {"units": "%", "standard_name": "toa_bidirectional_reflectance", "wavelength": 2}) + data = fh.read_band( + key, + { + "units": "%", + "standard_name": "toa_bidirectional_reflectance", + "wavelength": 2, + "resolution": 1000, + }) assert data.chunks == ((1100,) * 10, (1100,) * 10) @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._read_data') From 9169abe37891bde09adca693cef2f21b7e9b2d77 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 2 Oct 2023 20:52:28 -0500 Subject: [PATCH 268/702] Add tests for dtype checks --- satpy/tests/reader_tests/test_ahi_hsd.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 82ef5b905b..1a0e44ff4e 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -378,6 +378,8 @@ def test_read_band_from_actual_file(self, hsd_file_jp01): "resolution": 1000, }) assert data.chunks == ((1100,) * 10, (1100,) * 10) + assert data.dtype == data.compute().dtype + assert data.dtype == np.float32 @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._read_data') @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._mask_invalid') @@ -503,8 +505,8 @@ def setUp(self, *mocks): 'cali_offset_count2rad_conversion': [self.upd_cali[1]]}, } - 
self.counts = da.array(np.array([[0., 1000.], - [2000., 5000.]])) + self.counts = da.array(np.array([[0, 1000], + [2000, 5000]], dtype=np.uint16)) self.fh = fh def test_default_calibrate(self, *mocks): @@ -572,7 +574,10 @@ def test_user_calibration(self): self.fh.user_calibration = {'B13': {'slope': 0.95, 'offset': -0.1}} self.fh.band_name = 'B13' - rad = self.fh.calibrate(data=self.counts, calibration='radiance').compute() + rad = self.fh.calibrate(data=self.counts, calibration='radiance') + rad_np = rad.compute() + assert rad.dtype == rad_np.dtype + assert rad.dtype == np.float32 rad_exp = np.array([[16.10526316, 12.21052632], [8.31578947, -3.36842105]]) self.assertTrue(np.allclose(rad, rad_exp)) @@ -582,7 +587,10 @@ def test_user_calibration(self): 'offset': 15.20}, 'type': 'DN'} self.fh.band_name = 'B13' - rad = self.fh.calibrate(data=self.counts, calibration='radiance').compute() + rad = self.fh.calibrate(data=self.counts, calibration='radiance') + rad_np = rad.compute() + assert rad.dtype == rad_np.dtype + assert rad.dtype == np.float32 rad_exp = np.array([[15.2, 12.], [8.8, -0.8]]) self.assertTrue(np.allclose(rad, rad_exp)) From ec20ba9341439a72d77ba9edf3668095baad6755 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 5 Oct 2023 19:23:54 -0500 Subject: [PATCH 269/702] Update AHI tests to use new resolution-based chunking normalization --- satpy/readers/ahi_hsd.py | 10 ++++++---- satpy/tests/reader_tests/test_ahi_hsd.py | 2 +- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 12032e8395..dffafaa97b 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -78,7 +78,7 @@ np2str, unzip_file, ) -from satpy.utils import chunks_by_resolution +from satpy.utils import normalize_low_res_chunks AHI_CHANNEL_NAMES = ("1", "2", "3", "4", "5", "6", "7", "8", "9", "10", @@ -619,11 +619,13 @@ def _read_data(self, fp_, header, resolution): """Read data block.""" nlines = int(header["block2"]['number_of_lines'][0]) ncols = int(header["block2"]['number_of_columns'][0]) - chunks = chunks_by_resolution( + chunks = normalize_low_res_chunks( + ("auto", "auto"), (nlines, ncols), + # 1100 minimum chunk size for 500m, 550 for 1km, 225 for 2km + (1100, 1100), + (int(resolution / 500), int(resolution / 500)), np.float32, - 550, - int(resolution / 500), ) return da.from_array(np.memmap(self.filename, offset=fp_.tell(), dtype=' Date: Fri, 6 Oct 2023 11:18:56 +0200 Subject: [PATCH 270/702] fix MEIRINK calibration coefficients (typos from copying the coefficients over) --- satpy/readers/seviri_base.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index c1d288f4df..c54a7586dc 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -373,9 +373,9 @@ # Meteosat-9 -MEIRINK_COEFS[322] = {'VIS006': (21.026, 0.3739), - 'VIS008': (26.875, 0.3111), - 'IR_016': (21.394, 0.0065) +MEIRINK_COEFS[322] = {'VIS006': (21.026, 0.2556), + 'VIS008': (26.875, 0.1835), + 'IR_016': (21.394, 0.0498) } # Meteosat-10 From 0abbb9a10376271c228839f53e59d70037aec413 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Fri, 6 Oct 2023 14:04:45 +0200 Subject: [PATCH 271/702] used fixed values to test meirink calibration --- satpy/tests/reader_tests/test_seviri_base.py | 26 +++++++++++--------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index 
f7c8525ab7..b924929d84 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -376,17 +376,21 @@ def test_get_meirink_slope_epoch(self, platform_id, channel_name): calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK', DATE_2000) assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS[platform_id][channel_name][0]/1000. - @pytest.mark.parametrize('platform_id', [321, 322, 323, 324]) - @pytest.mark.parametrize('channel_name', ['VIS006', 'VIS008', 'IR_016']) - def test_get_meirink_slope_2020(self, platform_id, channel_name): - """Test the value of the slope of the Meirink calibration on 2020-01-01.""" - DATE_2020 = datetime(2020, 1, 1) + @pytest.mark.parametrize('platform_id,time,expected', ( + (321, datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]), + (321, datetime(2010, 12, 31, 0, 0), [0.0258479563, 0.0322386887, 0.022895110500000003]), + (322, datetime(2010, 1, 18, 0, 0), [0.021964051999999998, 0.027548445, 0.021576766]), + (322, datetime(2015, 6, 1, 0, 0), [0.022465028, 0.027908105, 0.021674373999999996]), + (323, datetime(2005, 1, 18, 0, 0), [0.0209088464, 0.0265355228, 0.0230132616]), + (323, datetime(2010, 12, 31, 0, 0), [0.022181355200000002, 0.0280103379, 0.0229511138]), + (324, datetime(2010, 1, 18, 0, 0), [0.0218362, 0.027580748, 0.022285370999999998]), + (324, datetime(2015, 6, 1, 0, 0), [0.0225418, 0.028530172, 0.022248718999999997]), + )) + def test_get_meirink_slope_2020(self, platform_id, time, expected): + """Test the value of the slope of the Meirink calibration.""" coefs = {'coefs': {}} coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} coefs['coefs']['EXTERNAL'] = {} - calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK', DATE_2020) - A, B = MEIRINK_COEFS[platform_id][channel_name] - delta_t = (DATE_2020 - DATE_2000).total_seconds() - S = A + B * delta_t / (3600*24) / 1000. 
- S = S/1000 - assert calibration_handler.get_gain_offset()[0] == S + for i, channel_name in enumerate(['VIS006', 'VIS008', 'IR_016']): + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK', time) + assert abs(calibration_handler.get_gain_offset()[0] - expected[i]) < 1e-6 From 909fb05bac0235ef80b58b11646495c12f44540e Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Fri, 6 Oct 2023 14:24:41 +0200 Subject: [PATCH 272/702] isort --- satpy/tests/reader_tests/test_seviri_base.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index b924929d84..f5ba38cea5 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -26,9 +26,12 @@ import xarray as xr from satpy.readers.seviri_base import ( + DATE_2000, + MEIRINK_COEFS, NoValidOrbitParams, OrbitPolynomial, OrbitPolynomialFinder, + SEVIRICalibrationHandler, chebyshev, dec10216, get_cds_time, @@ -37,9 +40,6 @@ pad_data_horizontally, pad_data_vertically, round_nom_time, - SEVIRICalibrationHandler, - MEIRINK_COEFS, - DATE_2000, ) from satpy.utils import get_legacy_chunk_size From 5c2234223f9e9fb44739830ffb24ce1ac9cf520f Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Fri, 6 Oct 2023 15:22:46 +0200 Subject: [PATCH 273/702] fix "multiple spaces after ','" for codefactor --- satpy/tests/reader_tests/test_seviri_base.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index f5ba38cea5..252da43e75 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -377,14 +377,14 @@ def test_get_meirink_slope_epoch(self, platform_id, channel_name): assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS[platform_id][channel_name][0]/1000. 
@pytest.mark.parametrize('platform_id,time,expected', ( - (321, datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]), - (321, datetime(2010, 12, 31, 0, 0), [0.0258479563, 0.0322386887, 0.022895110500000003]), - (322, datetime(2010, 1, 18, 0, 0), [0.021964051999999998, 0.027548445, 0.021576766]), - (322, datetime(2015, 6, 1, 0, 0), [0.022465028, 0.027908105, 0.021674373999999996]), - (323, datetime(2005, 1, 18, 0, 0), [0.0209088464, 0.0265355228, 0.0230132616]), - (323, datetime(2010, 12, 31, 0, 0), [0.022181355200000002, 0.0280103379, 0.0229511138]), - (324, datetime(2010, 1, 18, 0, 0), [0.0218362, 0.027580748, 0.022285370999999998]), - (324, datetime(2015, 6, 1, 0, 0), [0.0225418, 0.028530172, 0.022248718999999997]), + (321, datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]), + (321, datetime(2010, 12, 31, 0, 0), [0.0258479563, 0.0322386887, 0.022895110500000003]), + (322, datetime(2010, 1, 18, 0, 0), [0.021964051999999998, 0.027548445, 0.021576766]), + (322, datetime(2015, 6, 1, 0, 0), [0.022465028, 0.027908105, 0.021674373999999996]), + (323, datetime(2005, 1, 18, 0, 0), [0.0209088464, 0.0265355228, 0.0230132616]), + (323, datetime(2010, 12, 31, 0, 0), [0.022181355200000002, 0.0280103379, 0.0229511138]), + (324, datetime(2010, 1, 18, 0, 0), [0.0218362, 0.027580748, 0.022285370999999998]), + (324, datetime(2015, 6, 1, 0, 0), [0.0225418, 0.028530172, 0.022248718999999997]), )) def test_get_meirink_slope_2020(self, platform_id, time, expected): """Test the value of the slope of the Meirink calibration.""" From 09efba8ef643ae2a0af9ba61778a9bf4c6912a5d Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Mon, 9 Oct 2023 14:18:58 +0200 Subject: [PATCH 274/702] add comment with the units of the Meirink coefficients --- satpy/readers/seviri_base.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index c54a7586dc..3f548aa8ea 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -356,7 +356,10 @@ # Calibration coefficients from Meirink, J.F., R.A. Roebeling and P. Stammes, 2013: # Inter-calibration of polar imager solar channels using SEVIRI, Atm. Meas. 
Tech., 6, # 2495-2508, doi:10.5194/amt-6-2495-2013 - +# +# The coefficients are stored in pairs of A, B (see function `get_meirink_slope`) where the +# units of A are µW m-2 sr-1 (cm-1)-1 and those of B are µW m-2 sr-1 (cm-1)-1 (86400 s)-1 +# # To obtain the slope for the calibration, one should use the routine get_seviri_meirink_slope # Epoch for the MEIRINK re-calibration From 8193298786f788c81b3fddc8b161679fb982c4d1 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Mon, 9 Oct 2023 16:11:57 +0200 Subject: [PATCH 275/702] Use MeirinkCalibrationHandler to manage the coefficients Co-authored-by: Stephan Finkensieper --- satpy/readers/seviri_base.py | 61 +++++++++++++++++++++--------------- 1 file changed, 36 insertions(+), 25 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 3f548aa8ea..394540cb8c 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -366,34 +366,35 @@ DATE_2000 = datetime(2000, 1, 1) MEIRINK_COEFS = {} +MEIRINK_COEFS['2013'] = {} # Meteosat-8 -MEIRINK_COEFS[321] = {'VIS006': (24.346, 0.3739), - 'VIS008': (30.989, 0.3111), - 'IR_016': (22.869, 0.0065) - } +MEIRINK_COEFS['2013'][321] = {'VIS006': (24.346, 0.3739), + 'VIS008': (30.989, 0.3111), + 'IR_016': (22.869, 0.0065) + } # Meteosat-9 -MEIRINK_COEFS[322] = {'VIS006': (21.026, 0.2556), - 'VIS008': (26.875, 0.1835), - 'IR_016': (21.394, 0.0498) - } +MEIRINK_COEFS['2013'][322] = {'VIS006': (21.026, 0.2556), + 'VIS008': (26.875, 0.1835), + 'IR_016': (21.394, 0.0498) + } # Meteosat-10 -MEIRINK_COEFS[323] = {'VIS006': (19.829, 0.5856), - 'VIS008': (25.284, 0.6787), - 'IR_016': (23.066, -0.0286) - } +MEIRINK_COEFS['2013'][323] = {'VIS006': (19.829, 0.5856), + 'VIS008': (25.284, 0.6787), + 'IR_016': (23.066, -0.0286) + } # Meteosat-11 -MEIRINK_COEFS[324] = {'VIS006': (20.515, 0.3600), - 'VIS008': (25.803, 0.4844), - 'IR_016': (22.354, -0.0187) - } +MEIRINK_COEFS['2013'][324] = {'VIS006': (20.515, 0.3600), + 'VIS008': (25.803, 0.4844), + 'IR_016': (22.354, -0.0187) + } def get_meirink_slope(meirink_coefs, acquisition_time): @@ -413,6 +414,21 @@ def get_meirink_slope(meirink_coefs, acquisition_time): return S/1000 +class MeirinkCalibrationHandler: + """Re-calibration of the SEVIRI visible channels slop (see Meirink 2013).""" + + def __init__(self, coefs=MEIRINK_COEFS, calib_mode=None): + """Initialize the calibration handler.""" + if calib_mode is None: + raise ValueError("Missing calib_mode") + self.coefs = coefs[calib_mode.split('-')[1]] + + def get_slope(self, platform, channel, time): + """Return the slope using the provided calibration coefficients.""" + coefs = self.coefs[platform][channel] + return get_meirink_slope(coefs, time) + + def get_cds_time(days, msecs): """Compute timestamp given the days since epoch and milliseconds of the day. 
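    # delta_t is in seconds: /(3600*24) converts it to days and the first /1000
    # applies the 1.e-3 factor from "S = A + B * 1.e-3 * Day", so S here is in
    # µW m-2 sr-1 (cm-1)-1; the final S/1000 converts to the mW m-2 sr-1 (cm-1)-1
    # used by the EUMETSAT calibration.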
@@ -618,11 +634,6 @@ def __init__(self, platform_id, channel_name, coefs, calib_mode, scan_time): self._platform_id = platform_id self._channel_name = channel_name self._coefs = coefs - if channel_name in ['VIS006', 'VIS008', 'IR_016']: - self._coefs['coefs']['MEIRINK'] = MEIRINK_COEFS[platform_id][channel_name] - else: - self._coefs['coefs']['MEIRINK'] = None - self._calib_mode = calib_mode.upper() self._scan_time = scan_time self._algo = SEVIRICalibrationAlgorithm( @@ -630,7 +641,7 @@ def __init__(self, platform_id, channel_name, coefs, calib_mode, scan_time): scan_time=self._scan_time ) - valid_modes = ('NOMINAL', 'GSICS', 'MEIRINK') + valid_modes = ('NOMINAL', 'GSICS', 'MEIRINK-2013') if self._calib_mode not in valid_modes: raise ValueError( 'Invalid calibration mode: {}. Choose one of {}'.format( @@ -686,9 +697,9 @@ def get_gain_offset(self): internal_gain = gsics_gain internal_offset = gsics_offset - if self._calib_mode == 'MEIRINK': - if coefs['MEIRINK'] is not None: - internal_gain = get_meirink_slope(coefs['MEIRINK'], self._scan_time) + if "MEIRINK" in self._calib_mode and self._channel_name in ['VIS006', 'VIS008', 'IR_016']: + meirink = MeirinkCalibrationHandler(calib_mode=self._calib_mode) + internal_gain = meirink.get_slope(self._platform_id, self._channel_name, self._scan_time) # Override with external coefficients, if any. gain = coefs['EXTERNAL'].get('gain', internal_gain) From 55cba4f24904bef98f56f21f17cf777568575be1 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 9 Oct 2023 20:56:50 +0100 Subject: [PATCH 276/702] Update AHI L2 NOAA tests. --- satpy/tests/reader_tests/test_ahi_l2_nc.py | 27 +++++++++++----------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index d2e9c24489..ff2b5a3d53 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -2,9 +2,9 @@ from datetime import datetime -import h5netcdf import numpy as np import pytest +import xarray as xr from satpy.readers.ahi_l2_nc import HIML2NCFileHandler from satpy.tests.utils import make_dataid @@ -12,11 +12,13 @@ rng = np.random.default_rng() clmk_data = rng.integers(0, 3, (5500, 5500), dtype=np.uint16) cprob_data = rng.uniform(0, 1, (5500, 5500)) +lat_data = rng.uniform(-90, 90, (5500, 5500)) +lon_data = rng.uniform(-180, 180, (5500, 5500)) start_time = datetime(2023, 8, 24, 5, 40, 21) end_time = datetime(2023, 8, 24, 5, 49, 40) -dimensions = {'X': 5500, 'Y': 5500} +dimensions = {'Columns': 5500, 'Rows': 5500} exp_ext = (-5499999.9012, -5499999.9012, 5499999.9012, 5499999.9012) @@ -43,12 +45,11 @@ def ahil2_filehandler(fname, platform='h09'): def himl2_filename(tmp_path_factory): """Create a fake himawari l2 file.""" fname = f'{tmp_path_factory.mktemp("data")}/AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc' - with h5netcdf.File(fname, mode="w") as h5f: - h5f.dimensions = dimensions - h5f.attrs.update(global_attrs) - var = h5f.create_variable("CloudMask", ("Y", "X"), np.uint16, chunks=(200, 200)) - var[:] = clmk_data - + ds = xr.Dataset({'CloudMask': (['Rows', 'Columns'], clmk_data)}, + coords={'Latitude': (['Rows', 'Columns'], lat_data), + 'Longitude': (['Rows', 'Columns'], lon_data)}, + attrs=global_attrs) + ds.to_netcdf(fname) return fname @@ -56,11 +57,11 @@ def himl2_filename(tmp_path_factory): def himl2_filename_bad(tmp_path_factory): """Create a fake himawari l2 file.""" fname = 
f'{tmp_path_factory.mktemp("data")}/AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc' - with h5netcdf.File(fname, mode="w") as h5f: - h5f.dimensions = dimensions - h5f.attrs.update(badarea_attrs) - var = h5f.create_variable("CloudMask", ("Y", "X"), np.uint16, chunks=(200, 200)) - var[:] = clmk_data + ds = xr.Dataset({'CloudMask': (['Rows', 'Columns'], clmk_data)}, + coords={'Latitude': (['Rows', 'Columns'], lat_data), + 'Longitude': (['Rows', 'Columns'], lon_data)}, + attrs=badarea_attrs) + ds.to_netcdf(fname) return fname From f2410b55ae1c48c733c24ece3e585d682fe5f797 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 9 Oct 2023 20:27:20 +0000 Subject: [PATCH 277/702] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/pre-commit-hooks: v4.4.0 → v4.5.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.4.0...v4.5.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3ce81859ae..cd26d096fe 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,7 +8,7 @@ repos: additional_dependencies: [flake8-docstrings, flake8-debugger, flake8-bugbear, mccabe] args: [--max-complexity, "10"] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer From ad1b690d01609b2b50692dda3cebca14de2adf12 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Mon, 9 Oct 2023 23:22:17 +0200 Subject: [PATCH 278/702] Move criterion for application of meirink calibration in method Also fix typo --- satpy/readers/seviri_base.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 394540cb8c..f41b3cec40 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -414,8 +414,14 @@ def get_meirink_slope(meirink_coefs, acquisition_time): return S/1000 +def should_apply_meirink(calib_mode, channel_name): + """Decide whether to use the Meirink calibration coefficients.""" + + return "MEIRINK" in calib_mode and channel_name in ['VIS006', 'VIS008', 'IR_016'] + + class MeirinkCalibrationHandler: - """Re-calibration of the SEVIRI visible channels slop (see Meirink 2013).""" + """Re-calibration of the SEVIRI visible channels slope (see Meirink 2013).""" def __init__(self, coefs=MEIRINK_COEFS, calib_mode=None): """Initialize the calibration handler.""" @@ -697,7 +703,7 @@ def get_gain_offset(self): internal_gain = gsics_gain internal_offset = gsics_offset - if "MEIRINK" in self._calib_mode and self._channel_name in ['VIS006', 'VIS008', 'IR_016']: + if should_apply_meirink(self._calib_mode, self._channel_name): meirink = MeirinkCalibrationHandler(calib_mode=self._calib_mode) internal_gain = meirink.get_slope(self._platform_id, self._channel_name, self._scan_time) From 65503a00ae7cfa7492f811fd6eac7a5de2012676 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Tue, 10 Oct 2023 10:12:56 +0200 Subject: [PATCH 279/702] flake8 --- satpy/readers/seviri_base.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index f41b3cec40..bd4ce962ad 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -416,7 +416,6 @@ def get_meirink_slope(meirink_coefs, 
acquisition_time): def should_apply_meirink(calib_mode, channel_name): """Decide whether to use the Meirink calibration coefficients.""" - return "MEIRINK" in calib_mode and channel_name in ['VIS006', 'VIS008', 'IR_016'] From 6a59c85b7b2408f7350a2803ce2f19faed1f30b4 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 10 Oct 2023 15:22:36 +0200 Subject: [PATCH 280/702] Make caching using wrong types a warning instead of an error --- satpy/modifiers/angles.py | 3 ++- satpy/tests/modifier_tests/test_angles.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 01e7c1ff2d..02ffadfa87 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -248,7 +248,8 @@ def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES): hashable_args = [] for arg in args: if isinstance(arg, unhashable_types): - raise TypeError(f"Unhashable type in function signature ({type(arg)}), cannot be cached.") + warnings.warn(f"Unhashable type in function signature ({type(arg)}), cannot be cached.", stacklevel=2) + continue if isinstance(arg, HASHABLE_GEOMETRIES): arg = hash(arg) elif isinstance(arg, datetime): diff --git a/satpy/tests/modifier_tests/test_angles.py b/satpy/tests/modifier_tests/test_angles.py index 46a8a8443f..4c7e295e14 100644 --- a/satpy/tests/modifier_tests/test_angles.py +++ b/satpy/tests/modifier_tests/test_angles.py @@ -330,7 +330,7 @@ def test_caching_with_array_in_args_fails(self, tmp_path): def _fake_func(array): return array + 1 - with pytest.raises(TypeError), \ + with pytest.warns(UserWarning), \ satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)): _fake_func(da.zeros(100)) From b729ca36fefa57a1699ddce97cfe5819cedc161e Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 10 Oct 2023 15:24:19 +0200 Subject: [PATCH 281/702] Require numpy > 1.20 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 2e6154ea92..f5c81ee34c 100644 --- a/setup.py +++ b/setup.py @@ -30,7 +30,7 @@ except ImportError: pass -requires = ['numpy >=1.13', 'pillow', 'pyresample >=1.24.0', 'trollsift', +requires = ['numpy >1.20', 'pillow', 'pyresample >=1.24.0', 'trollsift', 'trollimage >=1.20', 'pykdtree', 'pyyaml >=5.1', 'xarray >=0.10.1, !=0.13.0', 'dask[array] >=0.17.1', 'pyproj>=2.2', 'zarr', 'donfig', 'appdirs', 'packaging', 'pooch', 'pyorbital'] From dbefc0fbe67270ba04a42aae6f71426c95a010b6 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Tue, 10 Oct 2023 16:00:08 +0200 Subject: [PATCH 282/702] rename DATE_2000 to MEIRINK_EPOCH --- satpy/readers/seviri_base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index bd4ce962ad..88994d3f18 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -363,7 +363,7 @@ # To obtain the slope for the calibration, one should use the routine get_seviri_meirink_slope # Epoch for the MEIRINK re-calibration -DATE_2000 = datetime(2000, 1, 1) +MEIRINK_EPOCH = datetime(2000, 1, 1) MEIRINK_COEFS = {} MEIRINK_COEFS['2013'] = {} @@ -409,7 +409,7 @@ def get_meirink_slope(meirink_coefs, acquisition_time): """ A = meirink_coefs[0] B = meirink_coefs[1] - delta_t = (acquisition_time - DATE_2000).total_seconds() + delta_t = (acquisition_time - MEIRINK_EPOCH).total_seconds() S = A + B * delta_t / (3600*24) / 1000. 
return S/1000 From 60bb30f76fb13fa95b957e6168a1ec1e1c938403 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Tue, 10 Oct 2023 16:01:26 +0200 Subject: [PATCH 283/702] remove coef argument in MeirinkCalibrationHandler --- satpy/readers/seviri_base.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 88994d3f18..6471c42639 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -422,11 +422,9 @@ def should_apply_meirink(calib_mode, channel_name): class MeirinkCalibrationHandler: """Re-calibration of the SEVIRI visible channels slope (see Meirink 2013).""" - def __init__(self, coefs=MEIRINK_COEFS, calib_mode=None): + def __init__(self, calib_mode): """Initialize the calibration handler.""" - if calib_mode is None: - raise ValueError("Missing calib_mode") - self.coefs = coefs[calib_mode.split('-')[1]] + self.coefs = MEIRINK_COEFS[calib_mode.split('-')[1]] def get_slope(self, platform, channel, time): """Return the slope using the provided calibration coefficients.""" From fe43fc8e1be7047aaebbe23e82a40cfbd771b5f1 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 10 Oct 2023 16:43:20 +0200 Subject: [PATCH 284/702] Dummy changes --- satpy/tests/writer_tests/cf_tests/test_attrs.py | 2 +- satpy/tests/writer_tests/cf_tests/test_dataaarray.py | 2 +- satpy/writers/cf/attrs.py | 2 +- satpy/writers/cf/encoding.py | 8 ++++---- satpy/writers/cf_writer.py | 10 +++++----- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/satpy/tests/writer_tests/cf_tests/test_attrs.py b/satpy/tests/writer_tests/cf_tests/test_attrs.py index 87cdfd173d..5895f115c3 100644 --- a/satpy/tests/writer_tests/cf_tests/test_attrs.py +++ b/satpy/tests/writer_tests/cf_tests/test_attrs.py @@ -45,7 +45,7 @@ def get_test_attrs(self): 'numpy_bool': True, 'numpy_void': np.void(0), 'numpy_bytes': np.bytes_('test'), - 'numpy_string': np.string_('test'), + 'numpy_string': np.str_('test'), 'list': [1, 2, np.float64(3)], 'nested_list': ["1", ["2", [3]]], 'bool': True, diff --git a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py index 896de5c55b..a67cae9ca2 100644 --- a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py +++ b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py @@ -90,7 +90,7 @@ def get_test_attrs(self): 'numpy_bool': True, 'numpy_void': np.void(0), 'numpy_bytes': np.bytes_('test'), - 'numpy_string': np.string_('test'), + 'numpy_string': np.str_('test'), 'list': [1, 2, np.float64(3)], 'nested_list': ["1", ["2", [3]]], 'bool': True, diff --git a/satpy/writers/cf/attrs.py b/satpy/writers/cf/attrs.py index 153d645594..fad60fe97e 100644 --- a/satpy/writers/cf/attrs.py +++ b/satpy/writers/cf/attrs.py @@ -166,7 +166,7 @@ def _remove_satpy_attrs(new_data): def _format_prerequisites_attrs(dataarray): """Reformat prerequisites attribute value to string.""" if 'prerequisites' in dataarray.attrs: - dataarray.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in dataarray.attrs['prerequisites']] + dataarray.attrs['prerequisites'] = [np.bytes_(str(prereq)) for prereq in dataarray.attrs['prerequisites']] return dataarray diff --git a/satpy/writers/cf/encoding.py b/satpy/writers/cf/encoding.py index 55a48f70fd..4e244e82f9 100644 --- a/satpy/writers/cf/encoding.py +++ b/satpy/writers/cf/encoding.py @@ -98,16 +98,16 @@ def _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix): return encoding -def update_encoding(dataset, 
to_netcdf_kwargs, numeric_name_prefix='CHANNEL_'): +def update_encoding(dataset, to_engine_kwargs, numeric_name_prefix='CHANNEL_'): """Update encoding. Preserve dask chunks, avoid fill values in coordinate variables and make sure that time & time bounds have the same units. """ - other_to_netcdf_kwargs = to_netcdf_kwargs.copy() - encoding = other_to_netcdf_kwargs.pop('encoding', {}).copy() + other_to_engine_kwargs = to_engine_kwargs.copy() + encoding = other_to_engine_kwargs.pop('encoding', {}).copy() encoding = _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix) encoding = _set_default_chunks(encoding, dataset) encoding = _set_default_fill_value(encoding, dataset) encoding = _set_default_time_encoding(encoding, dataset) - return encoding, other_to_netcdf_kwargs + return encoding, other_to_engine_kwargs diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 85c6fe999b..30ca7e784e 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -186,7 +186,7 @@ CF_VERSION = 'CF-1.7' -# Numpy datatypes compatible with all netCDF4 backends. ``np.unicode_`` is +# Numpy datatypes compatible with all netCDF4 backends. ``np.str_`` is # excluded because h5py (and thus h5netcdf) has problems with unicode, see # https://github.com/h5py/h5py/issues/624.""" NC4_DTYPES = [np.dtype('int8'), np.dtype('uint8'), @@ -194,7 +194,7 @@ np.dtype('int32'), np.dtype('uint32'), np.dtype('int64'), np.dtype('uint64'), np.dtype('float32'), np.dtype('float64'), - np.string_] + np.bytes_] # Unsigned and int64 isn't CF 1.7 compatible # Note: Unsigned and int64 are CF 1.9 compatible @@ -203,7 +203,7 @@ np.dtype('int32'), np.dtype('float32'), np.dtype('float64'), - np.string_] + np.bytes_] def _sanitize_writer_kwargs(writer_kwargs): @@ -314,7 +314,7 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, # - If single netCDF, it write directly for group_name, ds in grouped_datasets.items(): encoding, other_to_netcdf_kwargs = update_encoding(ds, - to_netcdf_kwargs=to_netcdf_kwargs, + to_engine_kwargs=to_netcdf_kwargs, numeric_name_prefix=numeric_name_prefix) res = ds.to_netcdf(filename, engine=engine, @@ -346,7 +346,7 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, """ from satpy.writers.cf.dataarray import make_cf_dataarray warnings.warn('CFWriter.da2cf is deprecated.' 
- 'Use satpy.writers.cf_writer.make_cf_dataarray instead.', + 'Use satpy.writers.cf.dataarray.make_cf_dataarray instead.', DeprecationWarning, stacklevel=3) return make_cf_dataarray(dataarray=dataarray, epoch=epoch, From c37fcb763110e0c3fc4bbfc5bb72df249a4b6017 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 10 Oct 2023 17:08:21 +0200 Subject: [PATCH 285/702] Set private functions --- satpy/readers/amsr2_l2_gaasp.py | 4 ++-- satpy/tests/writer_tests/cf_tests/test_area.py | 18 +++++++++--------- .../tests/writer_tests/cf_tests/test_attrs.py | 6 +++--- satpy/writers/cf/area.py | 10 +++++----- satpy/writers/cf/attrs.py | 16 ++++++++-------- satpy/writers/cf/dataarray.py | 4 ++-- satpy/writers/cf/datasets.py | 6 +++--- satpy/writers/cf/time.py | 2 +- 8 files changed, 33 insertions(+), 33 deletions(-) diff --git a/satpy/readers/amsr2_l2_gaasp.py b/satpy/readers/amsr2_l2_gaasp.py index 5f91e2d965..4f045057b4 100644 --- a/satpy/readers/amsr2_l2_gaasp.py +++ b/satpy/readers/amsr2_l2_gaasp.py @@ -189,7 +189,7 @@ def _available_if_this_file_type(self, configured_datasets): continue yield self.file_type_matches(ds_info['file_type']), ds_info - def _add_lonlat_coords(self, data_arr, ds_info): + def __add_lonlat_coords(self, data_arr, ds_info): lat_coord = None lon_coord = None for coord_name in data_arr.coords: @@ -209,7 +209,7 @@ def _get_ds_info_for_data_arr(self, var_name, data_arr): if x_dim_name in self.dim_resolutions: ds_info['resolution'] = self.dim_resolutions[x_dim_name] if not self.is_gridded and data_arr.coords: - self._add_lonlat_coords(data_arr, ds_info) + self.__add_lonlat_coords(data_arr, ds_info) return ds_info def _is_2d_yx_data_array(self, data_arr): diff --git a/satpy/tests/writer_tests/cf_tests/test_area.py b/satpy/tests/writer_tests/cf_tests/test_area.py index 1dd82ddd9d..92088f6d68 100644 --- a/satpy/tests/writer_tests/cf_tests/test_area.py +++ b/satpy/tests/writer_tests/cf_tests/test_area.py @@ -356,9 +356,9 @@ def _gm_matches(gmapping, expected): assert new_ds.attrs['grid_mapping'] == 'geos' _gm_matches(grid_mapping, geos_expected) - def test_add_lonlat_coords(self): + def test__add_lonlat_coords(self): """Test the conversion from areas to lon/lat.""" - from satpy.writers.cf.area import add_lonlat_coords + from satpy.writers.cf.area import _add_lonlat_coords area = AreaDefinition( 'seviri', @@ -371,7 +371,7 @@ def test_add_lonlat_coords(self): lons_ref, lats_ref = area.get_lonlats() dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), attrs={'area': area}) - res = add_lonlat_coords(dataarray) + res = _add_lonlat_coords(dataarray) # original should be unmodified assert 'longitude' not in dataarray.coords @@ -394,7 +394,7 @@ def test_add_lonlat_coords(self): lons_ref, lats_ref = area.get_lonlats() dataarray = xr.DataArray(data=da.from_array(np.arange(3 * 10 * 10).reshape(3, 10, 10), chunks=(1, 5, 5)), dims=('bands', 'y', 'x'), attrs={'area': area}) - res = add_lonlat_coords(dataarray) + res = _add_lonlat_coords(dataarray) # original should be unmodified assert 'longitude' not in dataarray.coords @@ -469,12 +469,12 @@ def datasets(self): datasets['lon'].attrs['name'] = 'lon' return datasets - def test_is_lon_or_lat_dataarray(self, datasets): - """Test the is_lon_or_lat_dataarray function.""" - from satpy.writers.cf.area import is_lon_or_lat_dataarray + def test__is_lon_or_lat_dataarray(self, datasets): + """Test the _is_lon_or_lat_dataarray function.""" + from satpy.writers.cf.area import _is_lon_or_lat_dataarray - assert is_lon_or_lat_dataarray(datasets['lat']) - 
assert not is_lon_or_lat_dataarray(datasets['var1']) + assert _is_lon_or_lat_dataarray(datasets['lat']) + assert not _is_lon_or_lat_dataarray(datasets['var1']) def test_has_projection_coords(self, datasets): """Test the has_projection_coords function.""" diff --git a/satpy/tests/writer_tests/cf_tests/test_attrs.py b/satpy/tests/writer_tests/cf_tests/test_attrs.py index 5895f115c3..a969765181 100644 --- a/satpy/tests/writer_tests/cf_tests/test_attrs.py +++ b/satpy/tests/writer_tests/cf_tests/test_attrs.py @@ -124,14 +124,14 @@ def assertDictWithArraysEqual(self, d1, d2): assert isinstance(val2, np.generic) assert val1.dtype == val2.dtype - def test_encode_attrs_nc(self): + def test__encode_attrs_nc(self): """Test attributes encoding.""" - from satpy.writers.cf.attrs import encode_attrs_nc + from satpy.writers.cf.attrs import _encode_attrs_nc attrs, expected, _ = self.get_test_attrs() # Test encoding - encoded = encode_attrs_nc(attrs) + encoded = _encode_attrs_nc(attrs) self.assertDictWithArraysEqual(expected, encoded) # Test decoding of json-encoded attributes diff --git a/satpy/writers/cf/area.py b/satpy/writers/cf/area.py index 68113c1ee2..5916083d62 100644 --- a/satpy/writers/cf/area.py +++ b/satpy/writers/cf/area.py @@ -28,7 +28,7 @@ logger = logging.getLogger(__name__) -def add_lonlat_coords(dataarray): +def _add_lonlat_coords(dataarray): """Add 'longitude' and 'latitude' coordinates to DataArray.""" dataarray = dataarray.copy() area = dataarray.attrs['area'] @@ -73,7 +73,7 @@ def area2cf(dataarray, include_lonlats=False, got_lonlats=False): """Convert an area to at CF grid mapping or lon and lats.""" res = [] if not got_lonlats and (isinstance(dataarray.attrs['area'], SwathDefinition) or include_lonlats): - dataarray = add_lonlat_coords(dataarray) + dataarray = _add_lonlat_coords(dataarray) if isinstance(dataarray.attrs['area'], AreaDefinition): dataarray, gmapping = _add_grid_mapping(dataarray) res.append(gmapping) @@ -81,7 +81,7 @@ def area2cf(dataarray, include_lonlats=False, got_lonlats=False): return res -def is_lon_or_lat_dataarray(dataarray): +def _is_lon_or_lat_dataarray(dataarray): """Check if the DataArray represents the latitude or longitude coordinate.""" if 'standard_name' in dataarray.attrs and dataarray.attrs['standard_name'] in ['longitude', 'latitude']: return True @@ -91,7 +91,7 @@ def is_lon_or_lat_dataarray(dataarray): def has_projection_coords(ds_collection): """Check if DataArray collection has a "longitude" or "latitude" DataArray.""" for dataarray in ds_collection.values(): - if is_lon_or_lat_dataarray(dataarray): + if _is_lon_or_lat_dataarray(dataarray): return True return False @@ -124,7 +124,7 @@ def make_alt_coords_unique(datas, pretty=False): tokens = defaultdict(set) for dataset in datas.values(): for coord_name in dataset.coords: - if not is_lon_or_lat_dataarray(dataset[coord_name]) and coord_name not in dataset.dims: + if not _is_lon_or_lat_dataarray(dataset[coord_name]) and coord_name not in dataset.dims: tokens[coord_name].add(tokenize(dataset[coord_name].data)) coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) diff --git a/satpy/writers/cf/attrs.py b/satpy/writers/cf/attrs.py index fad60fe97e..d4a41f2bfc 100644 --- a/satpy/writers/cf/attrs.py +++ b/satpy/writers/cf/attrs.py @@ -65,7 +65,7 @@ def _encode(self, obj): return str(obj) -def _encode_nc(obj): +def __encode_nc(obj): """Try to encode `obj` as a netCDF/Zarr compatible datatype which most closely resembles the object's nature. 
Raises: @@ -90,7 +90,7 @@ def _encode_nc(obj): raise ValueError('Unable to encode') -def encode_nc(obj): +def _encode_nc(obj): """Encode the given object as a netcdf compatible datatype.""" try: return obj.to_cf() @@ -104,9 +104,9 @@ def _encode_python_objects(obj): If on failure, encode as a string. Plain lists are encoded recursively. """ if isinstance(obj, (list, tuple)) and all([not isinstance(item, (list, tuple)) for item in obj]): - return [encode_nc(item) for item in obj] + return [_encode_nc(item) for item in obj] try: - dump = _encode_nc(obj) + dump = __encode_nc(obj) except ValueError: try: # Decode byte-strings @@ -117,7 +117,7 @@ def _encode_python_objects(obj): return dump -def encode_attrs_nc(attrs): +def _encode_attrs_nc(attrs): """Encode dataset attributes in a netcdf compatible datatype. Args: @@ -130,7 +130,7 @@ def encode_attrs_nc(attrs): encoded_attrs = [] for key, val in sorted(attrs.items()): if val is not None: - encoded_attrs.append((key, encode_nc(val))) + encoded_attrs.append((key, _encode_nc(val))) return OrderedDict(encoded_attrs) @@ -193,7 +193,7 @@ def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs): if flatten_attrs: dataarray.attrs = flatten_dict(dataarray.attrs) - dataarray.attrs = encode_attrs_nc(dataarray.attrs) + dataarray.attrs = _encode_attrs_nc(dataarray.attrs) return dataarray @@ -215,7 +215,7 @@ def preprocess_header_attrs(header_attrs, flatten_attrs=False): if header_attrs is not None: if flatten_attrs: header_attrs = flatten_dict(header_attrs) - header_attrs = encode_attrs_nc(header_attrs) # OrderedDict + header_attrs = _encode_attrs_nc(header_attrs) # OrderedDict else: header_attrs = {} header_attrs = _add_history(header_attrs) diff --git a/satpy/writers/cf/dataarray.py b/satpy/writers/cf/dataarray.py index fd9b20be5e..a5322cfee4 100644 --- a/satpy/writers/cf/dataarray.py +++ b/satpy/writers/cf/dataarray.py @@ -21,7 +21,7 @@ from satpy.writers.cf.attrs import preprocess_datarray_attrs from satpy.writers.cf.coords_attrs import add_xy_coords_attrs -from satpy.writers.cf.time import EPOCH, _process_time_coord +from satpy.writers.cf.time import EPOCH, process_time_coord logger = logging.getLogger(__name__) @@ -96,5 +96,5 @@ def make_cf_dataarray(dataarray, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs) dataarray = add_xy_coords_attrs(dataarray) - dataarray = _process_time_coord(dataarray, epoch=epoch) + dataarray = process_time_coord(dataarray, epoch=epoch) return dataarray diff --git a/satpy/writers/cf/datasets.py b/satpy/writers/cf/datasets.py index 4baabbc894..c87e6673d4 100644 --- a/satpy/writers/cf/datasets.py +++ b/satpy/writers/cf/datasets.py @@ -28,7 +28,7 @@ logger = logging.getLogger(__name__) -def get_extra_ds(dataarray, keys=None): +def _get_extra_ds(dataarray, keys=None): """Get the ancillary_variables DataArrays associated to a dataset.""" ds_collection = {} # Retrieve ancillary variable datarrays @@ -36,7 +36,7 @@ def get_extra_ds(dataarray, keys=None): ancillary_variable = ancillary_dataarray.name if keys and ancillary_variable not in keys: keys.append(ancillary_variable) - ds_collection.update(get_extra_ds(ancillary_dataarray, keys=keys)) + ds_collection.update(_get_extra_ds(ancillary_dataarray, keys=keys)) # Add input dataarray ds_collection[dataarray.attrs['name']] = dataarray return ds_collection @@ -111,7 +111,7 @@ def _collect_cf_dataset(list_dataarrays, # --> Since keys=None, it doesn't never retrieve ancillary variables !!! 
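# Illustrative sketch of the attribute-encoding helper made private above
# (assumption: its behaviour is unchanged from the former public
# encode_attrs_nc; the underscore name is internal API, used here only to
# make the rename concrete):
from collections import OrderedDict

import numpy as np

from satpy.writers.cf.attrs import _encode_attrs_nc

attrs = {"sensor": "seviri",
         "counts": np.array([1, 2, 3], dtype="uint8"),
         "raw_metadata": {"scan": 1},
         "dropped": None}
encoded = _encode_attrs_nc(attrs)
assert isinstance(encoded, OrderedDict)          # keys are returned sorted
assert "dropped" not in encoded                  # None-valued attributes are dropped
assert isinstance(encoded["raw_metadata"], str)  # dict attributes are JSON-encoded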
ds_collection = {} for dataarray in list_dataarrays: - ds_collection.update(get_extra_ds(dataarray)) + ds_collection.update(_get_extra_ds(dataarray)) # Check if one DataArray in the collection has 'longitude' or 'latitude' got_lonlats = has_projection_coords(ds_collection) diff --git a/satpy/writers/cf/time.py b/satpy/writers/cf/time.py index 05b90c4641..4c5cbf5bc9 100644 --- a/satpy/writers/cf/time.py +++ b/satpy/writers/cf/time.py @@ -47,7 +47,7 @@ def add_time_bounds_dimension(ds, time="time"): return ds -def _process_time_coord(dataarray, epoch): +def process_time_coord(dataarray, epoch): """Process the 'time' coordinate, if existing. It expand the DataArray with a time dimension if does not yet exists. From 22a8d097b698dd87039a13d36a336ccb21fcf277 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 10 Oct 2023 18:11:46 +0200 Subject: [PATCH 286/702] Reorg files --- satpy/_scene_converters.py | 4 +- satpy/scene.py | 2 +- satpy/tests/utils.py | 15 ++ .../tests/writer_tests/cf_tests/test_area.py | 32 ----- .../tests/writer_tests/cf_tests/test_attrs.py | 18 +-- .../{test_time_coords.py => test_coords.py} | 41 +++++- .../writer_tests/cf_tests/test_dataaarray.py | 22 +-- .../writer_tests/cf_tests/test_datasets.py | 27 ++-- .../writer_tests/cf_tests/test_encoding.py | 4 +- satpy/writers/cf/__init__.py | 2 + satpy/writers/cf/coords.py | 128 ++++++++++++++++++ satpy/writers/cf/coords_attrs.py | 46 ------- satpy/writers/cf/crs.py | 47 ------- satpy/writers/cf/dataarray.py | 4 +- satpy/writers/cf/datasets.py | 8 +- satpy/writers/cf/time.py | 69 ---------- satpy/writers/cf_writer.py | 2 +- 17 files changed, 219 insertions(+), 252 deletions(-) rename satpy/tests/writer_tests/cf_tests/{test_time_coords.py => test_coords.py} (56%) create mode 100644 satpy/writers/cf/coords.py delete mode 100644 satpy/writers/cf/coords_attrs.py delete mode 100644 satpy/writers/cf/crs.py delete mode 100644 satpy/writers/cf/time.py diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py index ba4432a58f..c5c0b1c896 100644 --- a/satpy/_scene_converters.py +++ b/satpy/_scene_converters.py @@ -66,7 +66,7 @@ def to_xarray(scn, epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using "from satpy.cf_writer import EPOCH" + If None, the default reference time is retrieved using "from satpy.writers.cf import EPOCH" flatten_attrs (bool): If True, flatten dict-type attributes. exclude_attrs (list): @@ -90,8 +90,8 @@ def to_xarray(scn, A CF-compliant xr.Dataset """ + from satpy.writers.cf import EPOCH from satpy.writers.cf.datasets import collect_cf_datasets - from satpy.writers.cf.time import EPOCH if epoch is None: epoch = EPOCH diff --git a/satpy/scene.py b/satpy/scene.py index e3e71811e9..52580d14e8 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1128,7 +1128,7 @@ def to_xarray(self, epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using "from satpy.cf_writer import EPOCH" + If None, the default reference time is retrieved using "from satpy.writers.cf import EPOCH" flatten_attrs (bool): If True, flatten dict-type attributes. 
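# Illustrative usage sketch of the converter documented above (hypothetical
# input file; any supported reader and dataset name would do):
from satpy import Scene

scn = Scene(filenames=["/path/to/seviri_file.nat"], reader="seviri_l1b_native")
scn.load(["IR_108"])
ds = scn.to_xarray(flatten_attrs=True)  # returns a CF-compliant xr.Dataset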
exclude_attrs (list): diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index c87cd1055c..155916aca1 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -407,3 +407,18 @@ def assert_attrs_equal(attrs, attrs_exp, tolerance=0): ) except TypeError: assert attrs[key] == attrs_exp[key], err_msg + + +def assert_dict_array_equality(d1, d2): + """Check that dicts containing arrays are equal.""" + assert set(d1.keys()) == set(d2.keys()) + for key, val1 in d1.items(): + val2 = d2[key] + if isinstance(val1, np.ndarray): + np.testing.assert_array_equal(val1, val2) + assert val1.dtype == val2.dtype + else: + assert val1 == val2 + if isinstance(val1, (np.floating, np.integer, np.bool_)): + assert isinstance(val2, np.generic) + assert val1.dtype == val2.dtype diff --git a/satpy/tests/writer_tests/cf_tests/test_area.py b/satpy/tests/writer_tests/cf_tests/test_area.py index 92088f6d68..5b7dd86d38 100644 --- a/satpy/tests/writer_tests/cf_tests/test_area.py +++ b/satpy/tests/writer_tests/cf_tests/test_area.py @@ -16,18 +16,12 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the CF Area.""" -import logging - import dask.array as da import numpy as np import pytest import xarray as xr from pyresample import AreaDefinition, SwathDefinition -# NOTE: -# The following fixtures are not defined in this file, but are used and injected by Pytest: -# - caplog - class TestCFArea: """Test case for CF Area.""" @@ -406,32 +400,6 @@ def test__add_lonlat_coords(self): assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() - def test_is_projected(self, caplog): - """Tests for private _is_projected function.""" - from satpy.writers.cf.crs import _is_projected - - # test case with units but no area - da = xr.DataArray( - np.arange(25).reshape(5, 5), - dims=("y", "x"), - coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "m"}), - "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "m"})}) - assert _is_projected(da) - - da = xr.DataArray( - np.arange(25).reshape(5, 5), - dims=("y", "x"), - coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "degrees_east"}), - "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "degrees_north"})}) - assert not _is_projected(da) - - da = xr.DataArray( - np.arange(25).reshape(5, 5), - dims=("y", "x")) - with caplog.at_level(logging.WARNING): - assert _is_projected(da) - assert "Failed to tell if data are projected." 
in caplog.text - @pytest.fixture def datasets(self): """Create test dataset.""" diff --git a/satpy/tests/writer_tests/cf_tests/test_attrs.py b/satpy/tests/writer_tests/cf_tests/test_attrs.py index a969765181..6988e761ee 100644 --- a/satpy/tests/writer_tests/cf_tests/test_attrs.py +++ b/satpy/tests/writer_tests/cf_tests/test_attrs.py @@ -109,30 +109,16 @@ def get_test_attrs(self): 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} return attrs, encoded, encoded_flat - def assertDictWithArraysEqual(self, d1, d2): - """Check that dicts containing arrays are equal.""" - # TODO: this is also used by test_da2cf - assert set(d1.keys()) == set(d2.keys()) - for key, val1 in d1.items(): - val2 = d2[key] - if isinstance(val1, np.ndarray): - np.testing.assert_array_equal(val1, val2) - assert val1.dtype == val2.dtype - else: - assert val1 == val2 - if isinstance(val1, (np.floating, np.integer, np.bool_)): - assert isinstance(val2, np.generic) - assert val1.dtype == val2.dtype - def test__encode_attrs_nc(self): """Test attributes encoding.""" + from satpy.tests.utils import assert_dict_array_equality from satpy.writers.cf.attrs import _encode_attrs_nc attrs, expected, _ = self.get_test_attrs() # Test encoding encoded = _encode_attrs_nc(attrs) - self.assertDictWithArraysEqual(expected, encoded) + assert_dict_array_equality(expected, encoded) # Test decoding of json-encoded attributes raw_md_roundtrip = {'recarray': [[0, 0], [0, 0], [0, 0]], diff --git a/satpy/tests/writer_tests/cf_tests/test_time_coords.py b/satpy/tests/writer_tests/cf_tests/test_coords.py similarity index 56% rename from satpy/tests/writer_tests/cf_tests/test_time_coords.py rename to satpy/tests/writer_tests/cf_tests/test_coords.py index ce7845dcca..9e9d8c4607 100644 --- a/satpy/tests/writer_tests/cf_tests/test_time_coords.py +++ b/satpy/tests/writer_tests/cf_tests/test_coords.py @@ -16,16 +16,22 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
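# Illustrative usage sketch of the shared helper added to satpy/tests/utils.py
# above; it replaces the per-class assertDictWithArraysEqual methods removed
# from the attrs and dataarray tests:
import numpy as np

from satpy.tests.utils import assert_dict_array_equality

expected = {"name": "swath_data", "counts": np.array([1, 2, 3], dtype="uint8")}
result = {"name": "swath_data", "counts": np.array([1, 2, 3], dtype="uint8")}
assert_dict_array_equality(expected, result)  # mismatching keys, values or dtypes raise AssertionError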
"""CF processing of time information (coordinates and dimensions).""" +import logging + import numpy as np import xarray as xr +# NOTE: +# The following fixtures are not defined in this file, but are used and injected by Pytest: +# - caplog + class TestCFtime: """Test cases for CF time dimension and coordinates.""" def test_add_time_bounds_dimension(self): """Test addition of CF-compliant time attributes.""" - from satpy.writers.cf.time import add_time_bounds_dimension + from satpy.writers.cf.coords import add_time_bounds_dimension test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01', @@ -42,3 +48,36 @@ def test_add_time_bounds_dimension(self): assert "time_bnds" in list(ds.data_vars) assert "bounds" in ds["time"].attrs assert "standard_name" in ds["time"].attrs + + +class TestCFcoords: + """Test cases for CF spatial dimension and coordinates.""" + + def test_is_projected(self, caplog): + """Tests for private _is_projected function.""" + from satpy.writers.cf.coords import _is_projected + + # test case with units but no area + da = xr.DataArray( + np.arange(25).reshape(5, 5), + dims=("y", "x"), + coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "m"}), + "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "m"})}) + assert _is_projected(da) + + da = xr.DataArray( + np.arange(25).reshape(5, 5), + dims=("y", "x"), + coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "degrees_east"}), + "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "degrees_north"})}) + assert not _is_projected(da) + + da = xr.DataArray( + np.arange(25).reshape(5, 5), + dims=("y", "x")) + with caplog.at_level(logging.WARNING): + assert _is_projected(da) + assert "Failed to tell if data are projected." 
in caplog.text + + # add_xy_coords_attrs + # process_time_coord diff --git a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py index a67cae9ca2..43b87cfc20 100644 --- a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py +++ b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py @@ -154,23 +154,9 @@ def get_test_attrs(self): 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} return attrs, encoded, encoded_flat - def assertDictWithArraysEqual(self, d1, d2): - """Check that dicts containing arrays are equal.""" - # TODO: also used by cf/test_attrs.py - assert set(d1.keys()) == set(d2.keys()) - for key, val1 in d1.items(): - val2 = d2[key] - if isinstance(val1, np.ndarray): - np.testing.assert_array_equal(val1, val2) - assert val1.dtype == val2.dtype - else: - assert val1 == val2 - if isinstance(val1, (np.floating, np.integer, np.bool_)): - assert isinstance(val2, np.generic) - assert val1.dtype == val2.dtype - def test_make_cf_dataarray(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" + from satpy.tests.utils import assert_dict_array_equality from satpy.writers.cf.dataarray import make_cf_dataarray # Create set of test attributes @@ -200,12 +186,12 @@ def test_make_cf_dataarray(self): np.testing.assert_array_equal(res['acq_time'], arr['acq_time']) assert res['x'].attrs == {'units': 'm', 'standard_name': 'projection_x_coordinate'} assert res['y'].attrs == {'units': 'm', 'standard_name': 'projection_y_coordinate'} - self.assertDictWithArraysEqual(res.attrs, attrs_expected) + assert_dict_array_equality(res.attrs, attrs_expected) # Test attribute kwargs res_flat = make_cf_dataarray(arr, flatten_attrs=True, exclude_attrs=['int']) attrs_expected_flat.pop('int') - self.assertDictWithArraysEqual(res_flat.attrs, attrs_expected_flat) + assert_dict_array_equality(res_flat.attrs, attrs_expected_flat) def test_make_cf_dataarray_one_dimensional_array(self): """Test the conversion of an 1d DataArray to a CF-compatible DataArray.""" @@ -214,3 +200,5 @@ def test_make_cf_dataarray_one_dimensional_array(self): arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=('y',), coords={'y': [0, 1, 2, 3], 'acq_time': ('y', [0, 1, 2, 3])}) _ = make_cf_dataarray(arr) + + # _handle_dataarray_name diff --git a/satpy/tests/writer_tests/cf_tests/test_datasets.py b/satpy/tests/writer_tests/cf_tests/test_datasets.py index b094feecbc..d92099e869 100644 --- a/satpy/tests/writer_tests/cf_tests/test_datasets.py +++ b/satpy/tests/writer_tests/cf_tests/test_datasets.py @@ -15,7 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
-"""Tests CF-compliant DataArray creation.""" +"""Tests CF-compliant Dataset(s) creation.""" import datetime import numpy as np @@ -24,18 +24,10 @@ from pyresample import AreaDefinition, create_area_def -def test_empty_collect_cf_datasets(): - """Test that if no DataArrays, collect_cf_datasets raise error.""" - from satpy.writers.cf.datasets import collect_cf_datasets - - with pytest.raises(RuntimeError): - collect_cf_datasets(list_dataarrays=[]) - - -class TestCollectCfDatasets: +class TestCollectCfDataset: """Test case for collect_cf_dataset.""" - def test_collect_cf_dataarrays(self): + def test_collect_cf_dataset(self): """Test collecting CF datasets from a DataArray objects.""" from satpy.writers.cf.datasets import _collect_cf_dataset @@ -75,7 +67,7 @@ def test_collect_cf_dataarrays(self): assert 'grid_mapping' not in da_var2.attrs assert da_var2.attrs['long_name'] == 'variable 2' - def test_collect_cf_dataarrays_with_latitude_named_lat(self): + def test_collect_cf_dataset_with_latitude_named_lat(self): """Test collecting CF datasets with latitude named lat.""" from satpy.writers.cf.datasets import _collect_cf_dataset @@ -148,3 +140,14 @@ def test_geographic_area_coords_attrs(self): assert ds["mavas"].attrs["longitude_of_prime_meridian"] == 0.0 np.testing.assert_allclose(ds["mavas"].attrs["semi_major_axis"], 6378137.0) np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563) + + +class TestCollectCfDatasets: + """Test case for collect_cf_datasets.""" + + def test_empty_collect_cf_datasets(self): + """Test that if no DataArrays, collect_cf_datasets raise error.""" + from satpy.writers.cf.datasets import collect_cf_datasets + + with pytest.raises(RuntimeError): + collect_cf_datasets(list_dataarrays=[]) diff --git a/satpy/tests/writer_tests/cf_tests/test_encoding.py b/satpy/tests/writer_tests/cf_tests/test_encoding.py index 66f7c72a48..125c7eec94 100644 --- a/satpy/tests/writer_tests/cf_tests/test_encoding.py +++ b/satpy/tests/writer_tests/cf_tests/test_encoding.py @@ -22,8 +22,8 @@ import xarray as xr -class TestUpdateDatasetEncodings: - """Test update of Dataset encodings.""" +class TestUpdateEncoding: + """Test update of dataset encodings.""" @pytest.fixture def fake_ds(self): diff --git a/satpy/writers/cf/__init__.py b/satpy/writers/cf/__init__.py index f597a9264c..c48acebcf9 100644 --- a/satpy/writers/cf/__init__.py +++ b/satpy/writers/cf/__init__.py @@ -1,3 +1,5 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- """Code for generation of CF-compliant datasets.""" + +EPOCH = u"seconds since 1970-01-01 00:00:00" diff --git a/satpy/writers/cf/coords.py b/satpy/writers/cf/coords.py new file mode 100644 index 0000000000..dee28952b5 --- /dev/null +++ b/satpy/writers/cf/coords.py @@ -0,0 +1,128 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +"""Set CF-compliant spatial and temporal coordinates.""" + +import logging +from contextlib import suppress + +import numpy as np +import xarray as xr +from pyresample.geometry import AreaDefinition, SwathDefinition + +logger = logging.getLogger(__name__) + + +def add_xy_coords_attrs(dataarray): + """Add relevant attributes to x, y coordinates.""" + # If there are no coords, return dataarray + if not dataarray.coords.keys() & {"x", "y", "crs"}: + return dataarray + # If projected area + if _is_projected(dataarray): + dataarray = _add_xy_projected_coords_attrs(dataarray) + else: + dataarray = _add_xy_geographic_coords_attrs(dataarray) + if 'crs' in dataarray.coords: + dataarray = dataarray.drop_vars('crs') + return 
dataarray + + +def _is_projected(dataarray): + """Guess whether data are projected or not.""" + crs = _try_to_get_crs(dataarray) + if crs: + return crs.is_projected + units = _try_get_units_from_coords(dataarray) + if units: + if units.endswith("m"): + return True + if units.startswith("degrees"): + return False + logger.warning("Failed to tell if data are projected. Assuming yes.") + return True + + +def _try_to_get_crs(dataarray): + """Try to get a CRS from attributes.""" + if "area" in dataarray.attrs: + if isinstance(dataarray.attrs["area"], AreaDefinition): + return dataarray.attrs["area"].crs + if not isinstance(dataarray.attrs["area"], SwathDefinition): + logger.warning( + f"Could not tell CRS from area of type {type(dataarray.attrs['area']).__name__:s}. " + "Assuming projected CRS.") + if "crs" in dataarray.coords: + return dataarray.coords["crs"].item() + + +def _try_get_units_from_coords(dataarray): + """Try to retrieve coordinate x/y units.""" + for c in ["x", "y"]: + with suppress(KeyError): + # If the data has only 1 dimension, it has only one of x or y coords + if "units" in dataarray.coords[c].attrs: + return dataarray.coords[c].attrs["units"] + + +def _add_xy_projected_coords_attrs(dataarray, x='x', y='y'): + """Add relevant attributes to x, y coordinates of a projected CRS.""" + if x in dataarray.coords: + dataarray[x].attrs['standard_name'] = 'projection_x_coordinate' + dataarray[x].attrs['units'] = 'm' + if y in dataarray.coords: + dataarray[y].attrs['standard_name'] = 'projection_y_coordinate' + dataarray[y].attrs['units'] = 'm' + return dataarray + + +def _add_xy_geographic_coords_attrs(dataarray, x='x', y='y'): + """Add relevant attributes to x, y coordinates of a geographic CRS.""" + if x in dataarray.coords: + dataarray[x].attrs['standard_name'] = 'longitude' + dataarray[x].attrs['units'] = 'degrees_east' + if y in dataarray.coords: + dataarray[y].attrs['standard_name'] = 'latitude' + dataarray[y].attrs['units'] = 'degrees_north' + return dataarray + + +def add_time_bounds_dimension(ds, time="time"): + """Add time bound dimension to xr.Dataset.""" + start_times = [] + end_times = [] + for _var_name, data_array in ds.items(): + start_times.append(data_array.attrs.get("start_time", None)) + end_times.append(data_array.attrs.get("end_time", None)) + + start_time = min(start_time for start_time in start_times + if start_time is not None) + end_time = min(end_time for end_time in end_times + if end_time is not None) + ds['time_bnds'] = xr.DataArray([[np.datetime64(start_time), + np.datetime64(end_time)]], + dims=['time', 'bnds_1d']) + ds[time].attrs['bounds'] = "time_bnds" + ds[time].attrs['standard_name'] = "time" + return ds + + +def process_time_coord(dataarray, epoch): + """Process the 'time' coordinate, if existing. + + It expand the DataArray with a time dimension if does not yet exists. 
+ + The function assumes + + - that x and y dimensions have at least shape > 1 + - the time coordinate has size 1 + + """ + if 'time' in dataarray.coords: + dataarray['time'].encoding['units'] = epoch + dataarray['time'].attrs['standard_name'] = 'time' + dataarray['time'].attrs.pop('bounds', None) + + if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape: + dataarray = dataarray.expand_dims('time') + + return dataarray diff --git a/satpy/writers/cf/coords_attrs.py b/satpy/writers/cf/coords_attrs.py deleted file mode 100644 index c7e559adc2..0000000000 --- a/satpy/writers/cf/coords_attrs.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -"""Set CF-compliant attributes to x and y spatial dimensions.""" - -import logging - -from satpy.writers.cf.crs import _is_projected - -logger = logging.getLogger(__name__) - - -def add_xy_coords_attrs(dataarray): - """Add relevant attributes to x, y coordinates.""" - # If there are no coords, return dataarray - if not dataarray.coords.keys() & {"x", "y", "crs"}: - return dataarray - # If projected area - if _is_projected(dataarray): - dataarray = _add_xy_projected_coords_attrs(dataarray) - else: - dataarray = _add_xy_geographic_coords_attrs(dataarray) - if 'crs' in dataarray.coords: - dataarray = dataarray.drop_vars('crs') - return dataarray - - -def _add_xy_projected_coords_attrs(dataarray, x='x', y='y'): - """Add relevant attributes to x, y coordinates of a projected CRS.""" - if x in dataarray.coords: - dataarray[x].attrs['standard_name'] = 'projection_x_coordinate' - dataarray[x].attrs['units'] = 'm' - if y in dataarray.coords: - dataarray[y].attrs['standard_name'] = 'projection_y_coordinate' - dataarray[y].attrs['units'] = 'm' - return dataarray - - -def _add_xy_geographic_coords_attrs(dataarray, x='x', y='y'): - """Add relevant attributes to x, y coordinates of a geographic CRS.""" - if x in dataarray.coords: - dataarray[x].attrs['standard_name'] = 'longitude' - dataarray[x].attrs['units'] = 'degrees_east' - if y in dataarray.coords: - dataarray[y].attrs['standard_name'] = 'latitude' - dataarray[y].attrs['units'] = 'degrees_north' - return dataarray diff --git a/satpy/writers/cf/crs.py b/satpy/writers/cf/crs.py deleted file mode 100644 index e6952a484f..0000000000 --- a/satpy/writers/cf/crs.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -"""CRS utility.""" - -import logging -from contextlib import suppress - -from pyresample.geometry import AreaDefinition, SwathDefinition - -logger = logging.getLogger(__name__) - - -def _is_projected(dataarray): - """Guess whether data are projected or not.""" - crs = _try_to_get_crs(dataarray) - if crs: - return crs.is_projected - units = _try_get_units_from_coords(dataarray) - if units: - if units.endswith("m"): - return True - if units.startswith("degrees"): - return False - logger.warning("Failed to tell if data are projected. Assuming yes.") - return True - - -def _try_to_get_crs(dataarray): - """Try to get a CRS from attributes.""" - if "area" in dataarray.attrs: - if isinstance(dataarray.attrs["area"], AreaDefinition): - return dataarray.attrs["area"].crs - if not isinstance(dataarray.attrs["area"], SwathDefinition): - logger.warning( - f"Could not tell CRS from area of type {type(dataarray.attrs['area']).__name__:s}. 
" - "Assuming projected CRS.") - if "crs" in dataarray.coords: - return dataarray.coords["crs"].item() - - -def _try_get_units_from_coords(dataarray): - """Try to retrieve coordinate x/y units.""" - for c in ["x", "y"]: - with suppress(KeyError): - # If the data has only 1 dimension, it has only one of x or y coords - if "units" in dataarray.coords[c].attrs: - return dataarray.coords[c].attrs["units"] diff --git a/satpy/writers/cf/dataarray.py b/satpy/writers/cf/dataarray.py index a5322cfee4..df52406f96 100644 --- a/satpy/writers/cf/dataarray.py +++ b/satpy/writers/cf/dataarray.py @@ -19,9 +19,9 @@ import logging import warnings +from satpy.writers.cf import EPOCH from satpy.writers.cf.attrs import preprocess_datarray_attrs -from satpy.writers.cf.coords_attrs import add_xy_coords_attrs -from satpy.writers.cf.time import EPOCH, process_time_coord +from satpy.writers.cf.coords import add_xy_coords_attrs, process_time_coord logger = logging.getLogger(__name__) diff --git a/satpy/writers/cf/datasets.py b/satpy/writers/cf/datasets.py index c87e6673d4..0cdf2b8210 100644 --- a/satpy/writers/cf/datasets.py +++ b/satpy/writers/cf/datasets.py @@ -22,7 +22,7 @@ import xarray as xr -from satpy.writers.cf.time import EPOCH +from satpy.writers.cf import EPOCH from satpy.writers.cf_writer import CF_DTYPES, CF_VERSION logger = logging.getLogger(__name__) @@ -77,7 +77,7 @@ def _collect_cf_dataset(list_dataarrays, epoch : str Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using `from satpy.cf_writer import EPOCH` + If None, the default reference time is retrieved using `from satpy.writers.cf import EPOCH` flatten_attrs : bool, optional If True, flatten dict-type attributes. exclude_attrs : list, optional @@ -197,7 +197,7 @@ def collect_cf_datasets(list_dataarrays, epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using `from satpy.cf_writer import EPOCH` + If None, the default reference time is retrieved using `from satpy.writers.cf import EPOCH` flatten_attrs (bool): If True, flatten dict-type attributes. exclude_attrs (list): @@ -228,7 +228,7 @@ def collect_cf_datasets(list_dataarrays, Global attributes to be attached to the xr.Dataset / netCDF4. """ from satpy.writers.cf.attrs import preprocess_header_attrs - from satpy.writers.cf.time import add_time_bounds_dimension + from satpy.writers.cf.coords import add_time_bounds_dimension if not list_dataarrays: raise RuntimeError("None of the requested datasets have been " diff --git a/satpy/writers/cf/time.py b/satpy/writers/cf/time.py deleted file mode 100644 index 4c5cbf5bc9..0000000000 --- a/satpy/writers/cf/time.py +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (c) 2017-2023 Satpy developers -# -# This file is part of satpy. -# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see . -"""CF processing of time dimension and coordinates.""" -import logging - -import numpy as np -import xarray as xr - -logger = logging.getLogger(__name__) - - -EPOCH = u"seconds since 1970-01-01 00:00:00" - - -def add_time_bounds_dimension(ds, time="time"): - """Add time bound dimension to xr.Dataset.""" - start_times = [] - end_times = [] - for _var_name, data_array in ds.items(): - start_times.append(data_array.attrs.get("start_time", None)) - end_times.append(data_array.attrs.get("end_time", None)) - - start_time = min(start_time for start_time in start_times - if start_time is not None) - end_time = min(end_time for end_time in end_times - if end_time is not None) - ds['time_bnds'] = xr.DataArray([[np.datetime64(start_time), - np.datetime64(end_time)]], - dims=['time', 'bnds_1d']) - ds[time].attrs['bounds'] = "time_bnds" - ds[time].attrs['standard_name'] = "time" - return ds - - -def process_time_coord(dataarray, epoch): - """Process the 'time' coordinate, if existing. - - It expand the DataArray with a time dimension if does not yet exists. - - The function assumes - - - that x and y dimensions have at least shape > 1 - - the time coordinate has size 1 - - """ - if 'time' in dataarray.coords: - dataarray['time'].encoding['units'] = epoch - dataarray['time'].attrs['standard_name'] = 'time' - dataarray['time'].attrs.pop('bounds', None) - - if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape: - dataarray = dataarray.expand_dims('time') - - return dataarray diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 30ca7e784e..096293e2b7 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -163,7 +163,7 @@ from packaging.version import Version from satpy.writers import Writer -from satpy.writers.cf.time import EPOCH +from satpy.writers.cf import EPOCH logger = logging.getLogger(__name__) From e3df20e32171257eeb999bcf272de5d0ae08c7bf Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 10 Oct 2023 18:27:48 +0200 Subject: [PATCH 287/702] Unest cf directories --- doc/source/writers.rst | 4 ++-- satpy/_scene_converters.py | 6 ++--- satpy/{writers => }/cf/__init__.py | 0 satpy/{writers => }/cf/area.py | 0 satpy/{writers => }/cf/attrs.py | 2 +- satpy/{writers => }/cf/coords.py | 0 satpy/{writers => }/cf/dataarray.py | 6 ++--- satpy/{writers => }/cf/datasets.py | 22 +++++++------------ satpy/{writers => }/cf/encoding.py | 0 satpy/etc/writers/cf.yaml | 2 +- satpy/scene.py | 2 +- .../{writer_tests => }/cf_tests/__init__.py | 0 .../{writer_tests => }/cf_tests/test_area.py | 16 +++++++------- .../{writer_tests => }/cf_tests/test_attrs.py | 2 +- .../cf_tests/test_coords.py | 4 ++-- .../cf_tests/test_dataaarray.py | 8 +++---- .../cf_tests/test_datasets.py | 8 +++---- .../cf_tests/test_encoding.py | 6 ++--- satpy/tests/writer_tests/test_cf.py | 8 +++---- satpy/writers/cf_writer.py | 14 ++++++------ 20 files changed, 52 insertions(+), 58 deletions(-) rename satpy/{writers => }/cf/__init__.py (100%) rename satpy/{writers => }/cf/area.py (100%) rename satpy/{writers => }/cf/attrs.py (99%) rename satpy/{writers => }/cf/coords.py (100%) rename satpy/{writers => }/cf/dataarray.py (95%) rename satpy/{writers => }/cf/datasets.py (95%) rename satpy/{writers => }/cf/encoding.py (100%) rename satpy/tests/{writer_tests => }/cf_tests/__init__.py (100%) rename satpy/tests/{writer_tests => }/cf_tests/test_area.py (97%) rename 
satpy/tests/{writer_tests => }/cf_tests/test_attrs.py (99%) rename satpy/tests/{writer_tests => }/cf_tests/test_coords.py (96%) rename satpy/tests/{writer_tests => }/cf_tests/test_dataaarray.py (97%) rename satpy/tests/{writer_tests => }/cf_tests/test_datasets.py (95%) rename satpy/tests/{writer_tests => }/cf_tests/test_encoding.py (96%) diff --git a/doc/source/writers.rst b/doc/source/writers.rst index f453f4d5a5..e5c33ecba2 100644 --- a/doc/source/writers.rst +++ b/doc/source/writers.rst @@ -44,9 +44,9 @@ One common parameter across almost all Writers is ``filename`` and - Deprecated from NinJo 7 (use ninjogeotiff) - * - NetCDF (Standard CF) - - :class:`cf ` + - :class:`cf ` - Beta - - :mod:`Usage example ` + - :mod:`Usage example ` * - AWIPS II Tiled NetCDF4 - :class:`awips_tiled ` - Beta diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py index c5c0b1c896..a620817236 100644 --- a/satpy/_scene_converters.py +++ b/satpy/_scene_converters.py @@ -66,7 +66,7 @@ def to_xarray(scn, epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using "from satpy.writers.cf import EPOCH" + If None, the default reference time is retrieved using "from satpy.cf import EPOCH" flatten_attrs (bool): If True, flatten dict-type attributes. exclude_attrs (list): @@ -90,8 +90,8 @@ def to_xarray(scn, A CF-compliant xr.Dataset """ - from satpy.writers.cf import EPOCH - from satpy.writers.cf.datasets import collect_cf_datasets + from satpy.cf import EPOCH + from satpy.cf.datasets import collect_cf_datasets if epoch is None: epoch = EPOCH diff --git a/satpy/writers/cf/__init__.py b/satpy/cf/__init__.py similarity index 100% rename from satpy/writers/cf/__init__.py rename to satpy/cf/__init__.py diff --git a/satpy/writers/cf/area.py b/satpy/cf/area.py similarity index 100% rename from satpy/writers/cf/area.py rename to satpy/cf/area.py diff --git a/satpy/writers/cf/attrs.py b/satpy/cf/attrs.py similarity index 99% rename from satpy/writers/cf/attrs.py rename to satpy/cf/attrs.py index d4a41f2bfc..28cc41be98 100644 --- a/satpy/writers/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -71,7 +71,7 @@ def __encode_nc(obj): Raises: ValueError if no such datatype could be found """ - from satpy.writers.cf_writer import NC4_DTYPES + from satpy.cf_writer import NC4_DTYPES if isinstance(obj, int) and not isinstance(obj, (bool, np.bool_)): return obj diff --git a/satpy/writers/cf/coords.py b/satpy/cf/coords.py similarity index 100% rename from satpy/writers/cf/coords.py rename to satpy/cf/coords.py diff --git a/satpy/writers/cf/dataarray.py b/satpy/cf/dataarray.py similarity index 95% rename from satpy/writers/cf/dataarray.py rename to satpy/cf/dataarray.py index df52406f96..661981681b 100644 --- a/satpy/writers/cf/dataarray.py +++ b/satpy/cf/dataarray.py @@ -19,9 +19,9 @@ import logging import warnings -from satpy.writers.cf import EPOCH -from satpy.writers.cf.attrs import preprocess_datarray_attrs -from satpy.writers.cf.coords import add_xy_coords_attrs, process_time_coord +from satpy.cf import EPOCH +from satpy.cf.attrs import preprocess_datarray_attrs +from satpy.cf.coords import add_xy_coords_attrs, process_time_coord logger = logging.getLogger(__name__) diff --git a/satpy/writers/cf/datasets.py b/satpy/cf/datasets.py similarity index 95% rename from satpy/writers/cf/datasets.py rename to satpy/cf/datasets.py index 0cdf2b8210..bcfe622f18 100644 --- a/satpy/writers/cf/datasets.py +++ 
b/satpy/cf/datasets.py @@ -22,8 +22,8 @@ import xarray as xr -from satpy.writers.cf import EPOCH -from satpy.writers.cf_writer import CF_DTYPES, CF_VERSION +from satpy.cf import EPOCH +from satpy.cf_writer import CF_DTYPES, CF_VERSION logger = logging.getLogger(__name__) @@ -77,7 +77,7 @@ def _collect_cf_dataset(list_dataarrays, epoch : str Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using `from satpy.writers.cf import EPOCH` + If None, the default reference time is retrieved using `from satpy.cf import EPOCH` flatten_attrs : bool, optional If True, flatten dict-type attributes. exclude_attrs : list, optional @@ -98,14 +98,8 @@ def _collect_cf_dataset(list_dataarrays, ds : xr.Dataset A partially CF-compliant xr.Dataset """ - from satpy.writers.cf.area import ( - area2cf, - assert_xy_unique, - has_projection_coords, - link_coords, - make_alt_coords_unique, - ) - from satpy.writers.cf.dataarray import make_cf_dataarray + from satpy.cf.area import area2cf, assert_xy_unique, has_projection_coords, link_coords, make_alt_coords_unique + from satpy.cf.dataarray import make_cf_dataarray # Create dictionary of input datarrays # --> Since keys=None, it doesn't never retrieve ancillary variables !!! @@ -197,7 +191,7 @@ def collect_cf_datasets(list_dataarrays, epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using `from satpy.writers.cf import EPOCH` + If None, the default reference time is retrieved using `from satpy.cf import EPOCH` flatten_attrs (bool): If True, flatten dict-type attributes. exclude_attrs (list): @@ -227,8 +221,8 @@ def collect_cf_datasets(list_dataarrays, header_attrs : dict Global attributes to be attached to the xr.Dataset / netCDF4. """ - from satpy.writers.cf.attrs import preprocess_header_attrs - from satpy.writers.cf.coords import add_time_bounds_dimension + from satpy.cf.attrs import preprocess_header_attrs + from satpy.cf.coords import add_time_bounds_dimension if not list_dataarrays: raise RuntimeError("None of the requested datasets have been " diff --git a/satpy/writers/cf/encoding.py b/satpy/cf/encoding.py similarity index 100% rename from satpy/writers/cf/encoding.py rename to satpy/cf/encoding.py diff --git a/satpy/etc/writers/cf.yaml b/satpy/etc/writers/cf.yaml index 5c4a875bec..b141a68c09 100644 --- a/satpy/etc/writers/cf.yaml +++ b/satpy/etc/writers/cf.yaml @@ -1,7 +1,7 @@ writer: name: cf description: Generic netCDF4/CF Writer - writer: !!python/name:satpy.writers.cf_writer.CFWriter + writer: !!python/name:satpy.cf_writer.CFWriter filename: '{name}_{start_time:%Y%m%d_%H%M%S}.nc' compress: DEFLATE zlevel: 6 diff --git a/satpy/scene.py b/satpy/scene.py index 52580d14e8..cebf57253b 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1128,7 +1128,7 @@ def to_xarray(self, epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using "from satpy.writers.cf import EPOCH" + If None, the default reference time is retrieved using "from satpy.cf import EPOCH" flatten_attrs (bool): If True, flatten dict-type attributes. 
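# Illustrative sketch: after un-nesting, the default reference time referenced
# in the docstrings above is importable from the new package location, and
# callers can still pass their own epoch (hypothetical Scene object `scn`):
from satpy.cf import EPOCH

assert EPOCH == "seconds since 1970-01-01 00:00:00"
# ds = scn.to_xarray(epoch="seconds since 2000-01-01 00:00:00")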
exclude_attrs (list): diff --git a/satpy/tests/writer_tests/cf_tests/__init__.py b/satpy/tests/cf_tests/__init__.py similarity index 100% rename from satpy/tests/writer_tests/cf_tests/__init__.py rename to satpy/tests/cf_tests/__init__.py diff --git a/satpy/tests/writer_tests/cf_tests/test_area.py b/satpy/tests/cf_tests/test_area.py similarity index 97% rename from satpy/tests/writer_tests/cf_tests/test_area.py rename to satpy/tests/cf_tests/test_area.py index 5b7dd86d38..352bf35a2e 100644 --- a/satpy/tests/writer_tests/cf_tests/test_area.py +++ b/satpy/tests/cf_tests/test_area.py @@ -28,7 +28,7 @@ class TestCFArea: def test_assert_xy_unique(self): """Test that the x and y coordinates are unique.""" - from satpy.writers.cf.area import assert_xy_unique + from satpy.cf.area import assert_xy_unique dummy = [[1, 2], [3, 4]] datas = {'a': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), @@ -42,7 +42,7 @@ def test_assert_xy_unique(self): def test_link_coords(self): """Check that coordinates link has been established correctly.""" - from satpy.writers.cf.area import link_coords + from satpy.cf.area import link_coords data = [[1, 2], [3, 4]] lon = np.zeros((2, 2)) @@ -77,7 +77,7 @@ def test_link_coords(self): def test_make_alt_coords_unique(self): """Test that created coordinate variables are unique.""" - from satpy.writers.cf.area import make_alt_coords_unique + from satpy.cf.area import make_alt_coords_unique data = [[1, 2], [3, 4]] y = [1, 2] @@ -122,7 +122,7 @@ def test_make_alt_coords_unique(self): def test_area2cf(self): """Test the conversion of an area to CF standards.""" - from satpy.writers.cf.area import area2cf + from satpy.cf.area import area2cf ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, attrs={'name': 'var1'}) @@ -168,7 +168,7 @@ def test_area2cf(self): def test__add_grid_mapping(self): """Test the conversion from pyresample area object to CF grid mapping.""" - from satpy.writers.cf.area import _add_grid_mapping + from satpy.cf.area import _add_grid_mapping def _gm_matches(gmapping, expected): """Assert that all keys in ``expected`` match the values in ``gmapping``.""" @@ -352,7 +352,7 @@ def _gm_matches(gmapping, expected): def test__add_lonlat_coords(self): """Test the conversion from areas to lon/lat.""" - from satpy.writers.cf.area import _add_lonlat_coords + from satpy.cf.area import _add_lonlat_coords area = AreaDefinition( 'seviri', @@ -439,14 +439,14 @@ def datasets(self): def test__is_lon_or_lat_dataarray(self, datasets): """Test the _is_lon_or_lat_dataarray function.""" - from satpy.writers.cf.area import _is_lon_or_lat_dataarray + from satpy.cf.area import _is_lon_or_lat_dataarray assert _is_lon_or_lat_dataarray(datasets['lat']) assert not _is_lon_or_lat_dataarray(datasets['var1']) def test_has_projection_coords(self, datasets): """Test the has_projection_coords function.""" - from satpy.writers.cf.area import has_projection_coords + from satpy.cf.area import has_projection_coords assert has_projection_coords(datasets) datasets['lat'].attrs['standard_name'] = 'dummy' diff --git a/satpy/tests/writer_tests/cf_tests/test_attrs.py b/satpy/tests/cf_tests/test_attrs.py similarity index 99% rename from satpy/tests/writer_tests/cf_tests/test_attrs.py rename to satpy/tests/cf_tests/test_attrs.py index 6988e761ee..7eb86e172b 100644 --- a/satpy/tests/writer_tests/cf_tests/test_attrs.py +++ b/satpy/tests/cf_tests/test_attrs.py @@ -111,8 +111,8 @@ def get_test_attrs(self): def 
test__encode_attrs_nc(self): """Test attributes encoding.""" + from satpy.cf.attrs import _encode_attrs_nc from satpy.tests.utils import assert_dict_array_equality - from satpy.writers.cf.attrs import _encode_attrs_nc attrs, expected, _ = self.get_test_attrs() diff --git a/satpy/tests/writer_tests/cf_tests/test_coords.py b/satpy/tests/cf_tests/test_coords.py similarity index 96% rename from satpy/tests/writer_tests/cf_tests/test_coords.py rename to satpy/tests/cf_tests/test_coords.py index 9e9d8c4607..1361c0d5a4 100644 --- a/satpy/tests/writer_tests/cf_tests/test_coords.py +++ b/satpy/tests/cf_tests/test_coords.py @@ -31,7 +31,7 @@ class TestCFtime: def test_add_time_bounds_dimension(self): """Test addition of CF-compliant time attributes.""" - from satpy.writers.cf.coords import add_time_bounds_dimension + from satpy.cf.coords import add_time_bounds_dimension test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01', @@ -55,7 +55,7 @@ class TestCFcoords: def test_is_projected(self, caplog): """Tests for private _is_projected function.""" - from satpy.writers.cf.coords import _is_projected + from satpy.cf.coords import _is_projected # test case with units but no area da = xr.DataArray( diff --git a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py b/satpy/tests/cf_tests/test_dataaarray.py similarity index 97% rename from satpy/tests/writer_tests/cf_tests/test_dataaarray.py rename to satpy/tests/cf_tests/test_dataaarray.py index 43b87cfc20..0fd3a9d41c 100644 --- a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py +++ b/satpy/tests/cf_tests/test_dataaarray.py @@ -29,7 +29,7 @@ def test_preprocess_dataarray_name(): """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix include orig name.""" from satpy import Scene - from satpy.writers.cf.dataarray import _preprocess_dataarray_name + from satpy.cf.dataarray import _preprocess_dataarray_name scn = Scene() scn['1'] = xr.DataArray([1, 2, 3]) @@ -53,8 +53,8 @@ def test_make_cf_dataarray_lonlat(): """Test correct CF encoding for area with lon/lat units.""" from pyresample import create_area_def + from satpy.cf.dataarray import make_cf_dataarray from satpy.resample import add_crs_xy_coords - from satpy.writers.cf.dataarray import make_cf_dataarray area = create_area_def("mavas", 4326, shape=(5, 5), center=(0, 0), resolution=(1, 1)) @@ -156,8 +156,8 @@ def get_test_attrs(self): def test_make_cf_dataarray(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" + from satpy.cf.dataarray import make_cf_dataarray from satpy.tests.utils import assert_dict_array_equality - from satpy.writers.cf.dataarray import make_cf_dataarray # Create set of test attributes attrs, attrs_expected, attrs_expected_flat = self.get_test_attrs() @@ -195,7 +195,7 @@ def test_make_cf_dataarray(self): def test_make_cf_dataarray_one_dimensional_array(self): """Test the conversion of an 1d DataArray to a CF-compatible DataArray.""" - from satpy.writers.cf.dataarray import make_cf_dataarray + from satpy.cf.dataarray import make_cf_dataarray arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=('y',), coords={'y': [0, 1, 2, 3], 'acq_time': ('y', [0, 1, 2, 3])}) diff --git a/satpy/tests/writer_tests/cf_tests/test_datasets.py b/satpy/tests/cf_tests/test_datasets.py similarity index 95% rename from satpy/tests/writer_tests/cf_tests/test_datasets.py rename to satpy/tests/cf_tests/test_datasets.py index d92099e869..dc30d1b9d1 100644 --- 
a/satpy/tests/writer_tests/cf_tests/test_datasets.py +++ b/satpy/tests/cf_tests/test_datasets.py @@ -29,7 +29,7 @@ class TestCollectCfDataset: def test_collect_cf_dataset(self): """Test collecting CF datasets from a DataArray objects.""" - from satpy.writers.cf.datasets import _collect_cf_dataset + from satpy.cf.datasets import _collect_cf_dataset geos = AreaDefinition( area_id='geos', @@ -69,7 +69,7 @@ def test_collect_cf_dataset(self): def test_collect_cf_dataset_with_latitude_named_lat(self): """Test collecting CF datasets with latitude named lat.""" - from satpy.writers.cf.datasets import _collect_cf_dataset + from satpy.cf.datasets import _collect_cf_dataset data = [[75, 2], [3, 4]] y = [1, 2] @@ -123,8 +123,8 @@ def test_collect_cf_dataset_with_latitude_named_lat(self): def test_geographic_area_coords_attrs(self): """Test correct storage for area with lon/lat units.""" + from satpy.cf.datasets import _collect_cf_dataset from satpy.tests.utils import make_fake_scene - from satpy.writers.cf.datasets import _collect_cf_dataset scn = make_fake_scene( {"ketolysis": np.arange(25).reshape(5, 5)}, @@ -147,7 +147,7 @@ class TestCollectCfDatasets: def test_empty_collect_cf_datasets(self): """Test that if no DataArrays, collect_cf_datasets raise error.""" - from satpy.writers.cf.datasets import collect_cf_datasets + from satpy.cf.datasets import collect_cf_datasets with pytest.raises(RuntimeError): collect_cf_datasets(list_dataarrays=[]) diff --git a/satpy/tests/writer_tests/cf_tests/test_encoding.py b/satpy/tests/cf_tests/test_encoding.py similarity index 96% rename from satpy/tests/writer_tests/cf_tests/test_encoding.py rename to satpy/tests/cf_tests/test_encoding.py index 125c7eec94..5d49ebc647 100644 --- a/satpy/tests/writer_tests/cf_tests/test_encoding.py +++ b/satpy/tests/cf_tests/test_encoding.py @@ -47,7 +47,7 @@ def fake_ds_digit(self): def test_dataset_name_digit(self, fake_ds_digit): """Test data with dataset name staring with a digit.""" - from satpy.writers.cf.encoding import update_encoding + from satpy.cf.encoding import update_encoding # Dataset with name staring with digit ds_digit = fake_ds_digit @@ -66,7 +66,7 @@ def test_dataset_name_digit(self, fake_ds_digit): def test_without_time(self, fake_ds): """Test data with no time dimension.""" - from satpy.writers.cf.encoding import update_encoding + from satpy.cf.encoding import update_encoding # Without time dimension ds = fake_ds.chunk(2) @@ -98,7 +98,7 @@ def test_without_time(self, fake_ds): def test_with_time(self, fake_ds): """Test data with a time dimension.""" - from satpy.writers.cf.encoding import update_encoding + from satpy.cf.encoding import update_encoding # With time dimension ds = fake_ds.chunk(8).expand_dims({'time': [datetime.datetime(2009, 7, 1, 12, 15)]}) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index f50a8e99d3..ba00ae4545 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -28,8 +28,8 @@ from packaging.version import Version from satpy import Scene +from satpy.cf_writer import _get_backend_versions from satpy.tests.utils import make_dsq -from satpy.writers.cf_writer import _get_backend_versions try: from pyproj import CRS @@ -66,8 +66,8 @@ class TestCFWriter: def test_init(self): """Test initializing the CFWriter class.""" + from satpy.cf_writer import CFWriter from satpy.writers import configs_for_writer - from satpy.writers.cf_writer import CFWriter CFWriter(config_files=list(configs_for_writer('cf'))[0]) @@ -403,11 
+403,11 @@ def test_load_module_with_old_pyproj(self): old_version = sys.modules['pyproj'].__version__ sys.modules['pyproj'].__version__ = "1.9.6" try: - importlib.reload(sys.modules['satpy.writers.cf_writer']) + importlib.reload(sys.modules['satpy.cf_writer']) finally: # Tear down sys.modules['pyproj'].__version__ = old_version - importlib.reload(sys.modules['satpy.writers.cf_writer']) + importlib.reload(sys.modules['satpy.cf_writer']) def test_global_attr_default_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 096293e2b7..4093b7877b 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -162,8 +162,8 @@ import xarray as xr from packaging.version import Version +from satpy.cf import EPOCH from satpy.writers import Writer -from satpy.writers.cf import EPOCH logger = logging.getLogger(__name__) @@ -270,8 +270,8 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. """ - from satpy.writers.cf.datasets import collect_cf_datasets - from satpy.writers.cf.encoding import update_encoding + from satpy.cf.datasets import collect_cf_datasets + from satpy.cf.encoding import update_encoding logger.info('Saving datasets to NetCDF4/CF.') _check_backend_versions() @@ -344,9 +344,9 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, numeric_name_prefix (str): Prepend dataset name with this if starting with a digit """ - from satpy.writers.cf.dataarray import make_cf_dataarray + from satpy.cf.dataarray import make_cf_dataarray warnings.warn('CFWriter.da2cf is deprecated.' - 'Use satpy.writers.cf.dataarray.make_cf_dataarray instead.', + 'Use satpy.cf.dataarray.make_cf_dataarray instead.', DeprecationWarning, stacklevel=3) return make_cf_dataarray(dataarray=dataarray, epoch=epoch, @@ -358,10 +358,10 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, @staticmethod def update_encoding(dataset, to_netcdf_kwargs): """Update encoding info (deprecated).""" - from satpy.writers.cf.encoding import update_encoding + from satpy.cf.encoding import update_encoding warnings.warn('CFWriter.update_encoding is deprecated. 
' - 'Use satpy.writers.cf.encoding.update_encoding instead.', + 'Use satpy.cf.encoding.update_encoding instead.', DeprecationWarning, stacklevel=3) return update_encoding(dataset, to_netcdf_kwargs) From bf337195456cab9c541d633bf0b35d6d866149d2 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 10 Oct 2023 18:50:16 +0200 Subject: [PATCH 288/702] Fix imports error --- doc/source/writers.rst | 4 ++-- satpy/cf/attrs.py | 2 +- satpy/cf/datasets.py | 2 +- satpy/readers/amsr2_l2_gaasp.py | 4 ++-- satpy/tests/writer_tests/test_cf.py | 4 ++-- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/doc/source/writers.rst b/doc/source/writers.rst index e5c33ecba2..f453f4d5a5 100644 --- a/doc/source/writers.rst +++ b/doc/source/writers.rst @@ -44,9 +44,9 @@ One common parameter across almost all Writers is ``filename`` and - Deprecated from NinJo 7 (use ninjogeotiff) - * - NetCDF (Standard CF) - - :class:`cf ` + - :class:`cf ` - Beta - - :mod:`Usage example ` + - :mod:`Usage example ` * - AWIPS II Tiled NetCDF4 - :class:`awips_tiled ` - Beta diff --git a/satpy/cf/attrs.py b/satpy/cf/attrs.py index 28cc41be98..d4a41f2bfc 100644 --- a/satpy/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -71,7 +71,7 @@ def __encode_nc(obj): Raises: ValueError if no such datatype could be found """ - from satpy.cf_writer import NC4_DTYPES + from satpy.writers.cf_writer import NC4_DTYPES if isinstance(obj, int) and not isinstance(obj, (bool, np.bool_)): return obj diff --git a/satpy/cf/datasets.py b/satpy/cf/datasets.py index bcfe622f18..c2799ab8d1 100644 --- a/satpy/cf/datasets.py +++ b/satpy/cf/datasets.py @@ -23,7 +23,7 @@ import xarray as xr from satpy.cf import EPOCH -from satpy.cf_writer import CF_DTYPES, CF_VERSION +from satpy.writers.cf_writer import CF_DTYPES, CF_VERSION logger = logging.getLogger(__name__) diff --git a/satpy/readers/amsr2_l2_gaasp.py b/satpy/readers/amsr2_l2_gaasp.py index 4f045057b4..5f91e2d965 100644 --- a/satpy/readers/amsr2_l2_gaasp.py +++ b/satpy/readers/amsr2_l2_gaasp.py @@ -189,7 +189,7 @@ def _available_if_this_file_type(self, configured_datasets): continue yield self.file_type_matches(ds_info['file_type']), ds_info - def __add_lonlat_coords(self, data_arr, ds_info): + def _add_lonlat_coords(self, data_arr, ds_info): lat_coord = None lon_coord = None for coord_name in data_arr.coords: @@ -209,7 +209,7 @@ def _get_ds_info_for_data_arr(self, var_name, data_arr): if x_dim_name in self.dim_resolutions: ds_info['resolution'] = self.dim_resolutions[x_dim_name] if not self.is_gridded and data_arr.coords: - self.__add_lonlat_coords(data_arr, ds_info) + self._add_lonlat_coords(data_arr, ds_info) return ds_info def _is_2d_yx_data_array(self, data_arr): diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index ba00ae4545..31e59a2fb7 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -28,8 +28,8 @@ from packaging.version import Version from satpy import Scene -from satpy.cf_writer import _get_backend_versions from satpy.tests.utils import make_dsq +from satpy.writers.cf_writer import _get_backend_versions try: from pyproj import CRS @@ -66,8 +66,8 @@ class TestCFWriter: def test_init(self): """Test initializing the CFWriter class.""" - from satpy.cf_writer import CFWriter from satpy.writers import configs_for_writer + from satpy.writers.cf_writer import CFWriter CFWriter(config_files=list(configs_for_writer('cf'))[0]) From 0dae7746f5180d7f7ab9f960dd7150e63b710ef5 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 10 Oct 2023 
20:23:25 +0200 Subject: [PATCH 289/702] Rename functions and refactor --- CHANGELOG.md | 2 +- satpy/cf/area.py | 114 ---------------- satpy/cf/coords.py | 168 ++++++++++++++++++++---- satpy/cf/dataarray.py | 5 +- satpy/cf/datasets.py | 45 ++++--- satpy/etc/writers/cf.yaml | 2 +- satpy/tests/cf_tests/_test_data.py | 111 ++++++++++++++++ satpy/tests/cf_tests/test_area.py | 147 --------------------- satpy/tests/cf_tests/test_attrs.py | 90 +------------ satpy/tests/cf_tests/test_coords.py | 151 ++++++++++++++++++++- satpy/tests/cf_tests/test_dataaarray.py | 90 +------------ 11 files changed, 442 insertions(+), 483 deletions(-) create mode 100644 satpy/tests/cf_tests/_test_data.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 799ae0a867..12c8e50194 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1143,7 +1143,7 @@ In this release 6 issues were closed. * [PR 1606](https://github.com/pytroll/satpy/pull/1606) - Make FSFile hashable again ([1605](https://github.com/pytroll/satpy/issues/1605), [1604](https://github.com/pytroll/satpy/issues/1604)) * [PR 1603](https://github.com/pytroll/satpy/pull/1603) - Update slstr_l2.yaml * [PR 1600](https://github.com/pytroll/satpy/pull/1600) - When setting `upper_right_corner` make sure that all dataset coordinates are flipped -* [PR 1588](https://github.com/pytroll/satpy/pull/1588) - Bugfix of link_coords ([1493](https://github.com/pytroll/satpy/issues/1493)) +* [PR 1588](https://github.com/pytroll/satpy/pull/1588) - Bugfix of add_coordinates_attrs_coords ([1493](https://github.com/pytroll/satpy/issues/1493)) #### Features added diff --git a/satpy/cf/area.py b/satpy/cf/area.py index 5916083d62..2eec0efd4a 100644 --- a/satpy/cf/area.py +++ b/satpy/cf/area.py @@ -17,11 +17,8 @@ # satpy. If not, see . """CF processing of pyresample area information.""" import logging -import warnings -from collections import defaultdict import xarray as xr -from dask.base import tokenize from packaging.version import Version from pyresample.geometry import AreaDefinition, SwathDefinition @@ -79,114 +76,3 @@ def area2cf(dataarray, include_lonlats=False, got_lonlats=False): res.append(gmapping) res.append(dataarray) return res - - -def _is_lon_or_lat_dataarray(dataarray): - """Check if the DataArray represents the latitude or longitude coordinate.""" - if 'standard_name' in dataarray.attrs and dataarray.attrs['standard_name'] in ['longitude', 'latitude']: - return True - return False - - -def has_projection_coords(ds_collection): - """Check if DataArray collection has a "longitude" or "latitude" DataArray.""" - for dataarray in ds_collection.values(): - if _is_lon_or_lat_dataarray(dataarray): - return True - return False - - -def make_alt_coords_unique(datas, pretty=False): - """Make non-dimensional coordinates unique among all datasets. - - Non-dimensional (or alternative) coordinates, such as scanline timestamps, - may occur in multiple datasets with the same name and dimension - but different values. - - In order to avoid conflicts, prepend the dataset name to the coordinate name. - If a non-dimensional coordinate is unique among all datasets and ``pretty=True``, - its name will not be modified. - - Since all datasets must have the same projection coordinates, - this is not applied to latitude and longitude. - - Args: - datas (dict): - Dictionary of (dataset name, dataset) - pretty (bool): - Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. 
- - Returns: - Dictionary holding the updated datasets - - """ - # Determine which non-dimensional coordinates are unique - tokens = defaultdict(set) - for dataset in datas.values(): - for coord_name in dataset.coords: - if not _is_lon_or_lat_dataarray(dataset[coord_name]) and coord_name not in dataset.dims: - tokens[coord_name].add(tokenize(dataset[coord_name].data)) - coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) - - # Prepend dataset name, if not unique or no pretty-format desired - new_datas = datas.copy() - for coord_name, unique in coords_unique.items(): - if not pretty or not unique: - if pretty: - warnings.warn( - 'Cannot pretty-format "{}" coordinates because they are ' - 'not identical among the given datasets'.format(coord_name), - stacklevel=2 - ) - for ds_name, dataset in datas.items(): - if coord_name in dataset.coords: - rename = {coord_name: '{}_{}'.format(ds_name, coord_name)} - new_datas[ds_name] = new_datas[ds_name].rename(rename) - - return new_datas - - -def assert_xy_unique(datas): - """Check that all datasets share the same projection coordinates x/y.""" - unique_x = set() - unique_y = set() - for dataset in datas.values(): - if 'y' in dataset.dims: - token_y = tokenize(dataset['y'].data) - unique_y.add(token_y) - if 'x' in dataset.dims: - token_x = tokenize(dataset['x'].data) - unique_x.add(token_x) - if len(unique_x) > 1 or len(unique_y) > 1: - raise ValueError('Datasets to be saved in one file (or one group) must have identical projection coordinates. ' - 'Please group them by area or save them in separate files.') - - -def link_coords(datas): - """Link dataarrays and coordinates. - - If the `coordinates` attribute of a data array links to other dataarrays in the scene, for example - `coordinates='lon lat'`, add them as coordinates to the data array and drop that attribute. In the final call to - `xr.Dataset.to_netcdf()` all coordinate relations will be resolved and the `coordinates` attributes be set - automatically. - - """ - for da_name, data in datas.items(): - declared_coordinates = data.attrs.get('coordinates', []) - if isinstance(declared_coordinates, str): - declared_coordinates = declared_coordinates.split(' ') - for coord in declared_coordinates: - if coord not in data.coords: - try: - dimensions_not_in_data = list(set(datas[coord].dims) - set(data.dims)) - data[coord] = datas[coord].squeeze(dimensions_not_in_data, drop=True) - except KeyError: - warnings.warn( - 'Coordinate "{}" referenced by dataarray {} does not ' - 'exist, dropping reference.'.format(coord, da_name), - stacklevel=2 - ) - continue - - # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() - data.attrs.pop('coordinates', None) diff --git a/satpy/cf/coords.py b/satpy/cf/coords.py index dee28952b5..0c5acc7df4 100644 --- a/satpy/cf/coords.py +++ b/satpy/cf/coords.py @@ -3,10 +3,13 @@ """Set CF-compliant spatial and temporal coordinates.""" import logging +import warnings +from collections import defaultdict from contextlib import suppress import numpy as np import xarray as xr +from dask.base import tokenize from pyresample.geometry import AreaDefinition, SwathDefinition logger = logging.getLogger(__name__) @@ -86,6 +89,149 @@ def _add_xy_geographic_coords_attrs(dataarray, x='x', y='y'): return dataarray +def set_cf_time_info(dataarray, epoch): + """Set CF time attributes and encoding. + + It expand the DataArray with a time dimension if does not yet exists. 
+ + The function assumes + + - that x and y dimensions have at least shape > 1 + - the time coordinate has size 1 + + """ + dataarray['time'].encoding['units'] = epoch + dataarray['time'].attrs['standard_name'] = 'time' + dataarray['time'].attrs.pop('bounds', None) + + if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape: + dataarray = dataarray.expand_dims('time') + + return dataarray + + +def _is_lon_or_lat_dataarray(dataarray): + """Check if the DataArray represents the latitude or longitude coordinate.""" + if 'standard_name' in dataarray.attrs and dataarray.attrs['standard_name'] in ['longitude', 'latitude']: + return True + return False + + +def has_projection_coords(dict_datarrays): + """Check if DataArray collection has a "longitude" or "latitude" DataArray.""" + for dataarray in dict_datarrays.values(): + if _is_lon_or_lat_dataarray(dataarray): + return True + return False + + +def ensure_unique_nondimensional_coords(dict_dataarrays, pretty=False): + """Make non-dimensional coordinates unique among all datasets. + + Non-dimensional coordinates, such as scanline timestamps, + may occur in multiple datasets with the same name and dimension + but different values. + + In order to avoid conflicts, prepend the dataset name to the coordinate name. + If a non-dimensional coordinate is unique among all datasets and ``pretty=True``, + its name will not be modified. + + Since all datasets must have the same projection coordinates, + this is not applied to latitude and longitude. + + Args: + datas (dict): + Dictionary of (dataset name, dataset) + pretty (bool): + Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. + + Returns: + Dictionary holding the updated datasets + + """ + # Determine which non-dimensional coordinates are unique + # - coords_unique has structure: {coord_name: True/False} + tokens = defaultdict(set) + for dataarray in dict_dataarrays.values(): + for coord_name in dataarray.coords: + if not _is_lon_or_lat_dataarray(dataarray[coord_name]) and coord_name not in dataarray.dims: + tokens[coord_name].add(tokenize(dataarray[coord_name].data)) + coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) + + # Prepend dataset name, if not unique or no pretty-format desired + new_dict_dataarrays = dict_dataarrays.copy() + for coord_name, unique in coords_unique.items(): + if not pretty or not unique: + if pretty: + warnings.warn( + 'Cannot pretty-format "{}" coordinates because they are ' + 'not identical among the given datasets'.format(coord_name), + stacklevel=2 + ) + for name, dataarray in dict_dataarrays.items(): + if coord_name in dataarray.coords: + rename = {coord_name: '{}_{}'.format(name, coord_name)} + new_dict_dataarrays[name] = new_dict_dataarrays[name].rename(rename) + + return new_dict_dataarrays + + +def check_unique_projection_coords(dict_dataarrays): + """Check that all datasets share the same projection coordinates x/y.""" + unique_x = set() + unique_y = set() + for dataarray in dict_dataarrays.values(): + if 'y' in dataarray.dims: + token_y = tokenize(dataarray['y'].data) + unique_y.add(token_y) + if 'x' in dataarray.dims: + token_x = tokenize(dataarray['x'].data) + unique_x.add(token_x) + if len(unique_x) > 1 or len(unique_y) > 1: + raise ValueError('Datasets to be saved in one file (or one group) must have identical projection coordinates. 
' + 'Please group them by area or save them in separate files.') + + +def add_coordinates_attrs_coords(dict_dataarrays): + """Add to DataArrays the coordinates specified in the 'coordinates' attribute. + + It deal with the 'coordinates' attributes indicating lat/lon coords + The 'coordinates' attribute is dropped from each DataArray + + If the `coordinates` attribute of a data array links to other dataarrays in the scene, for example + `coordinates='lon lat'`, add them as coordinates to the data array and drop that attribute. + + In the final call to `xr.Dataset.to_netcdf()` all coordinate relations will be resolved + and the `coordinates` attributes be set automatically. + """ + for da_name, dataarray in dict_dataarrays.items(): + declared_coordinates = _get_coordinates_list(dataarray) + for coord in declared_coordinates: + if coord not in dataarray.coords: + try: + dimensions_not_in_data = list(set(dict_dataarrays[coord].dims) - set(dataarray.dims)) + dataarray[coord] = dict_dataarrays[coord].squeeze(dimensions_not_in_data, drop=True) + except KeyError: + warnings.warn( + 'Coordinate "{}" referenced by dataarray {} does not ' + 'exist, dropping reference.'.format(coord, da_name), + stacklevel=2 + ) + continue + + # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() + dataarray.attrs.pop('coordinates', None) + return dict_dataarrays + + +def _get_coordinates_list(dataarray): + """Return a list with the coordinates names specified in the 'coordinates' attribute.""" + declared_coordinates = dataarray.attrs.get('coordinates', []) + if isinstance(declared_coordinates, str): + declared_coordinates = declared_coordinates.split(' ') + return declared_coordinates + + def add_time_bounds_dimension(ds, time="time"): """Add time bound dimension to xr.Dataset.""" start_times = [] @@ -104,25 +250,3 @@ def add_time_bounds_dimension(ds, time="time"): ds[time].attrs['bounds'] = "time_bnds" ds[time].attrs['standard_name'] = "time" return ds - - -def process_time_coord(dataarray, epoch): - """Process the 'time' coordinate, if existing. - - It expand the DataArray with a time dimension if does not yet exists. 
- - The function assumes - - - that x and y dimensions have at least shape > 1 - - the time coordinate has size 1 - - """ - if 'time' in dataarray.coords: - dataarray['time'].encoding['units'] = epoch - dataarray['time'].attrs['standard_name'] = 'time' - dataarray['time'].attrs.pop('bounds', None) - - if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape: - dataarray = dataarray.expand_dims('time') - - return dataarray diff --git a/satpy/cf/dataarray.py b/satpy/cf/dataarray.py index 661981681b..8a3c78a476 100644 --- a/satpy/cf/dataarray.py +++ b/satpy/cf/dataarray.py @@ -21,7 +21,7 @@ from satpy.cf import EPOCH from satpy.cf.attrs import preprocess_datarray_attrs -from satpy.cf.coords import add_xy_coords_attrs, process_time_coord +from satpy.cf.coords import add_xy_coords_attrs, set_cf_time_info logger = logging.getLogger(__name__) @@ -96,5 +96,6 @@ def make_cf_dataarray(dataarray, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs) dataarray = add_xy_coords_attrs(dataarray) - dataarray = process_time_coord(dataarray, epoch=epoch) + if 'time' in dataarray.coords: + dataarray = set_cf_time_info(dataarray, epoch=epoch) return dataarray diff --git a/satpy/cf/datasets.py b/satpy/cf/datasets.py index c2799ab8d1..3dcbb33985 100644 --- a/satpy/cf/datasets.py +++ b/satpy/cf/datasets.py @@ -30,16 +30,16 @@ def _get_extra_ds(dataarray, keys=None): """Get the ancillary_variables DataArrays associated to a dataset.""" - ds_collection = {} + dict_datarrays = {} # Retrieve ancillary variable datarrays for ancillary_dataarray in dataarray.attrs.get('ancillary_variables', []): ancillary_variable = ancillary_dataarray.name if keys and ancillary_variable not in keys: keys.append(ancillary_variable) - ds_collection.update(_get_extra_ds(ancillary_dataarray, keys=keys)) + dict_datarrays.update(_get_extra_ds(ancillary_dataarray, keys=keys)) # Add input dataarray - ds_collection[dataarray.attrs['name']] = dataarray - return ds_collection + dict_datarrays[dataarray.attrs['name']] = dataarray + return dict_datarrays def _get_groups(groups, list_datarrays): @@ -98,23 +98,29 @@ def _collect_cf_dataset(list_dataarrays, ds : xr.Dataset A partially CF-compliant xr.Dataset """ - from satpy.cf.area import area2cf, assert_xy_unique, has_projection_coords, link_coords, make_alt_coords_unique + from satpy.cf.area import area2cf + from satpy.cf.coords import ( + add_coordinates_attrs_coords, + check_unique_projection_coords, + ensure_unique_nondimensional_coords, + has_projection_coords, + ) from satpy.cf.dataarray import make_cf_dataarray # Create dictionary of input datarrays # --> Since keys=None, it doesn't never retrieve ancillary variables !!! 
- ds_collection = {} + dict_dataarrays = {} for dataarray in list_dataarrays: - ds_collection.update(_get_extra_ds(dataarray)) + dict_dataarrays.update(_get_extra_ds(dataarray)) # Check if one DataArray in the collection has 'longitude' or 'latitude' - got_lonlats = has_projection_coords(ds_collection) + got_lonlats = has_projection_coords(dict_dataarrays) # Sort dictionary by keys name - ds_collection = dict(sorted(ds_collection.items())) + dict_dataarrays = dict(sorted(dict_dataarrays.items())) - dict_dataarrays = {} - for dataarray in ds_collection.values(): + dict_cf_dataarrays = {} + for dataarray in dict_dataarrays.values(): dataarray_type = dataarray.dtype if dataarray_type not in CF_DTYPES: warnings.warn( @@ -147,23 +153,24 @@ def _collect_cf_dataset(list_dataarrays, exclude_attrs=exclude_attrs, include_orig_name=include_orig_name, numeric_name_prefix=numeric_name_prefix) - dict_dataarrays[new_dataarray.name] = new_dataarray + dict_cf_dataarrays[new_dataarray.name] = new_dataarray - # Check all DataArray have same size - assert_xy_unique(dict_dataarrays) + # Check all DataArrays have same projection coordinates + check_unique_projection_coords(dict_cf_dataarrays) - # Deal with the 'coordinates' attributes indicating lat/lon coords - # NOTE: this currently is dropped by default !!! - link_coords(dict_dataarrays) + # Add to DataArrays the coordinates specified in the 'coordinates' attribute + # - Deal with the 'coordinates' attributes indicating lat/lon coords + # - The 'coordinates' attribute is dropped from each DataArray + dict_cf_dataarrays = add_coordinates_attrs_coords(dict_cf_dataarrays) # Ensure non-dimensional coordinates to be unique across DataArrays # --> If not unique, prepend the DataArray name to the coordinate # --> If unique, does not prepend the DataArray name only if pretty=True # --> 'longitude' and 'latitude' coordinates are not prepended - dict_dataarrays = make_alt_coords_unique(dict_dataarrays, pretty=pretty) + dict_cf_dataarrays = ensure_unique_nondimensional_coords(dict_cf_dataarrays, pretty=pretty) # Create a xr.Dataset - ds = xr.Dataset(dict_dataarrays) + ds = xr.Dataset(dict_cf_dataarrays) return ds diff --git a/satpy/etc/writers/cf.yaml b/satpy/etc/writers/cf.yaml index b141a68c09..5c4a875bec 100644 --- a/satpy/etc/writers/cf.yaml +++ b/satpy/etc/writers/cf.yaml @@ -1,7 +1,7 @@ writer: name: cf description: Generic netCDF4/CF Writer - writer: !!python/name:satpy.cf_writer.CFWriter + writer: !!python/name:satpy.writers.cf_writer.CFWriter filename: '{name}_{start_time:%Y%m%d_%H%M%S}.nc' compress: DEFLATE zlevel: 6 diff --git a/satpy/tests/cf_tests/_test_data.py b/satpy/tests/cf_tests/_test_data.py new file mode 100644 index 0000000000..2ea13afd2e --- /dev/null +++ b/satpy/tests/cf_tests/_test_data.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
+"""Functions and fixture to test CF code.""" +import datetime +from collections import OrderedDict + +import numpy as np + + +def get_test_attrs(): + """Create some dataset attributes for testing purpose. + + Returns: + Attributes, encoded attributes, encoded and flattened attributes + + """ + attrs = { + 'name': 'IR_108', + 'start_time': datetime.datetime(2018, 1, 1, 0), + 'end_time': datetime.datetime(2018, 1, 1, 0, 15), + 'int': 1, + 'float': 1.0, + 'none': None, # should be dropped + 'numpy_int': np.uint8(1), + 'numpy_float': np.float32(1), + 'numpy_bool': True, + 'numpy_void': np.void(0), + 'numpy_bytes': np.bytes_('test'), + 'numpy_string': np.str_('test'), + 'list': [1, 2, np.float64(3)], + 'nested_list': ["1", ["2", [3]]], + 'bool': True, + 'array': np.array([1, 2, 3], dtype='uint8'), + 'array_bool': np.array([True, False, True]), + 'array_2d': np.array([[1, 2], [3, 4]]), + 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), + 'dict': {'a': 1, 'b': 2}, + 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, + 'raw_metadata': OrderedDict([ + ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), + ('flag', np.bool_(True)), + ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) + ]) + } + encoded = { + 'name': 'IR_108', + 'start_time': '2018-01-01 00:00:00', + 'end_time': '2018-01-01 00:15:00', + 'int': 1, + 'float': 1.0, + 'numpy_int': np.uint8(1), + 'numpy_float': np.float32(1), + 'numpy_bool': 'true', + 'numpy_void': '[]', + 'numpy_bytes': 'test', + 'numpy_string': 'test', + 'list': [1, 2, np.float64(3)], + 'nested_list': '["1", ["2", [3]]]', + 'bool': 'true', + 'array': np.array([1, 2, 3], dtype='uint8'), + 'array_bool': ['true', 'false', 'true'], + 'array_2d': '[[1, 2], [3, 4]]', + 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', + 'dict': '{"a": 1, "b": 2}', + 'nested_dict': '{"l1": {"l2": {"l3": [1, 2, 3]}}}', + 'raw_metadata': '{"recarray": [[0, 0], [0, 0], [0, 0]], ' + '"flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}' + } + encoded_flat = { + 'name': 'IR_108', + 'start_time': '2018-01-01 00:00:00', + 'end_time': '2018-01-01 00:15:00', + 'int': 1, + 'float': 1.0, + 'numpy_int': np.uint8(1), + 'numpy_float': np.float32(1), + 'numpy_bool': 'true', + 'numpy_void': '[]', + 'numpy_bytes': 'test', + 'numpy_string': 'test', + 'list': [1, 2, np.float64(3)], + 'nested_list': '["1", ["2", [3]]]', + 'bool': 'true', + 'array': np.array([1, 2, 3], dtype='uint8'), + 'array_bool': ['true', 'false', 'true'], + 'array_2d': '[[1, 2], [3, 4]]', + 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', + 'dict_a': 1, + 'dict_b': 2, + 'nested_dict_l1_l2_l3': np.array([1, 2, 3], dtype='uint8'), + 'raw_metadata_recarray': '[[0, 0], [0, 0], [0, 0]]', + 'raw_metadata_flag': 'true', + 'raw_metadata_dict_a': 1, + 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8') + } + return attrs, encoded, encoded_flat diff --git a/satpy/tests/cf_tests/test_area.py b/satpy/tests/cf_tests/test_area.py index 352bf35a2e..e34118c7cb 100644 --- a/satpy/tests/cf_tests/test_area.py +++ b/satpy/tests/cf_tests/test_area.py @@ -18,7 +18,6 @@ """Tests for the CF Area.""" import dask.array as da import numpy as np -import pytest import xarray as xr from pyresample import AreaDefinition, SwathDefinition @@ -26,100 +25,6 @@ class TestCFArea: """Test case for CF Area.""" - def test_assert_xy_unique(self): - """Test that the x and y coordinates are unique.""" - from satpy.cf.area import assert_xy_unique - - dummy = [[1, 2], [3, 4]] - datas = {'a': 
xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), - 'b': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), - 'n': xr.DataArray(data=dummy, dims=('v', 'w'), coords={'v': [1, 2], 'w': [3, 4]})} - assert_xy_unique(datas) - - datas['c'] = xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 3], 'x': [3, 4]}) - with pytest.raises(ValueError): - assert_xy_unique(datas) - - def test_link_coords(self): - """Check that coordinates link has been established correctly.""" - from satpy.cf.area import link_coords - - data = [[1, 2], [3, 4]] - lon = np.zeros((2, 2)) - lon2 = np.zeros((1, 2, 2)) - lat = np.ones((2, 2)) - datasets = { - 'var1': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon lat'}), - 'var2': xr.DataArray(data=data, dims=('y', 'x')), - 'var3': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon2 lat'}), - 'var4': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'not_exist lon lat'}), - 'lon': xr.DataArray(data=lon, dims=('y', 'x')), - 'lon2': xr.DataArray(data=lon2, dims=('time', 'y', 'x')), - 'lat': xr.DataArray(data=lat, dims=('y', 'x')) - } - - link_coords(datasets) - - # Check that link has been established correctly and 'coordinate' atrribute has been dropped - assert 'lon' in datasets['var1'].coords - assert 'lat' in datasets['var1'].coords - np.testing.assert_array_equal(datasets['var1']['lon'].data, lon) - np.testing.assert_array_equal(datasets['var1']['lat'].data, lat) - assert 'coordinates' not in datasets['var1'].attrs - - # There should be no link if there was no 'coordinate' attribute - assert 'lon' not in datasets['var2'].coords - assert 'lat' not in datasets['var2'].coords - - # The non-existent dimension or coordinate should be dropped - assert 'time' not in datasets['var3'].coords - assert 'not_exist' not in datasets['var4'].coords - - def test_make_alt_coords_unique(self): - """Test that created coordinate variables are unique.""" - from satpy.cf.area import make_alt_coords_unique - - data = [[1, 2], [3, 4]] - y = [1, 2] - x = [1, 2] - time1 = [1, 2] - time2 = [3, 4] - datasets = {'var1': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x, 'acq_time': ('y', time1)}), - 'var2': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x, 'acq_time': ('y', time2)})} - - # Test that dataset names are prepended to alternative coordinates - res = make_alt_coords_unique(datasets) - np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) - np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) - assert 'acq_time' not in res['var1'].coords - assert 'acq_time' not in res['var2'].coords - - # Make sure nothing else is modified - np.testing.assert_array_equal(res['var1']['x'], x) - np.testing.assert_array_equal(res['var1']['y'], y) - np.testing.assert_array_equal(res['var2']['x'], x) - np.testing.assert_array_equal(res['var2']['y'], y) - - # Coords not unique -> Dataset names must be prepended, even if pretty=True - with pytest.warns(UserWarning, match='Cannot pretty-format "acq_time"'): - res = make_alt_coords_unique(datasets, pretty=True) - np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) - np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) - assert 'acq_time' not in res['var1'].coords - assert 'acq_time' not in res['var2'].coords - - # Coords unique and pretty=True -> Don't modify coordinate names - datasets['var2']['acq_time'] = ('y', time1) - res = make_alt_coords_unique(datasets, 
pretty=True) - np.testing.assert_array_equal(res['var1']['acq_time'], time1) - np.testing.assert_array_equal(res['var2']['acq_time'], time1) - assert 'var1_acq_time' not in res['var1'].coords - assert 'var2_acq_time' not in res['var2'].coords - def test_area2cf(self): """Test the conversion of an area to CF standards.""" from satpy.cf.area import area2cf @@ -399,55 +304,3 @@ def test__add_lonlat_coords(self): np.testing.assert_array_equal(lon.data, lons_ref) assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() - - @pytest.fixture - def datasets(self): - """Create test dataset.""" - data = [[75, 2], [3, 4]] - y = [1, 2] - x = [1, 2] - geos = AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, - width=2, height=2, - area_extent=[-1, -1, 1, 1]) - datasets = { - 'var1': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'var2': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'lat': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'lon': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x})} - datasets['lat'].attrs['standard_name'] = 'latitude' - datasets['var1'].attrs['standard_name'] = 'dummy' - datasets['var2'].attrs['standard_name'] = 'dummy' - datasets['var2'].attrs['area'] = geos - datasets['var1'].attrs['area'] = geos - datasets['lat'].attrs['name'] = 'lat' - datasets['var1'].attrs['name'] = 'var1' - datasets['var2'].attrs['name'] = 'var2' - datasets['lon'].attrs['name'] = 'lon' - return datasets - - def test__is_lon_or_lat_dataarray(self, datasets): - """Test the _is_lon_or_lat_dataarray function.""" - from satpy.cf.area import _is_lon_or_lat_dataarray - - assert _is_lon_or_lat_dataarray(datasets['lat']) - assert not _is_lon_or_lat_dataarray(datasets['var1']) - - def test_has_projection_coords(self, datasets): - """Test the has_projection_coords function.""" - from satpy.cf.area import has_projection_coords - - assert has_projection_coords(datasets) - datasets['lat'].attrs['standard_name'] = 'dummy' - assert not has_projection_coords(datasets) diff --git a/satpy/tests/cf_tests/test_attrs.py b/satpy/tests/cf_tests/test_attrs.py index 7eb86e172b..787d1dc82d 100644 --- a/satpy/tests/cf_tests/test_attrs.py +++ b/satpy/tests/cf_tests/test_attrs.py @@ -16,105 +16,19 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for CF-compatible attributes encoding.""" -import datetime import json -from collections import OrderedDict - -import numpy as np class TestCFAttributeEncoding: """Test case for CF attribute encodings.""" - def get_test_attrs(self): - """Create some dataset attributes for testing purpose. 
- - Returns: - Attributes, encoded attributes, encoded and flattened attributes - - """ - # TODO: this is also used by test_da2cf - attrs = {'name': 'IR_108', - 'start_time': datetime.datetime(2018, 1, 1, 0), - 'end_time': datetime.datetime(2018, 1, 1, 0, 15), - 'int': 1, - 'float': 1.0, - 'none': None, # should be dropped - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': True, - 'numpy_void': np.void(0), - 'numpy_bytes': np.bytes_('test'), - 'numpy_string': np.str_('test'), - 'list': [1, 2, np.float64(3)], - 'nested_list': ["1", ["2", [3]]], - 'bool': True, - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': np.array([True, False, True]), - 'array_2d': np.array([[1, 2], [3, 4]]), - 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), - 'dict': {'a': 1, 'b': 2}, - 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, - 'raw_metadata': OrderedDict([ - ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), - ('flag', np.bool_(True)), - ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) - ])} - encoded = {'name': 'IR_108', - 'start_time': '2018-01-01 00:00:00', - 'end_time': '2018-01-01 00:15:00', - 'int': 1, - 'float': 1.0, - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': 'true', - 'numpy_void': '[]', - 'numpy_bytes': 'test', - 'numpy_string': 'test', - 'list': [1, 2, np.float64(3)], - 'nested_list': '["1", ["2", [3]]]', - 'bool': 'true', - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': ['true', 'false', 'true'], - 'array_2d': '[[1, 2], [3, 4]]', - 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', - 'dict': '{"a": 1, "b": 2}', - 'nested_dict': '{"l1": {"l2": {"l3": [1, 2, 3]}}}', - 'raw_metadata': '{"recarray": [[0, 0], [0, 0], [0, 0]], ' - '"flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}'} - encoded_flat = {'name': 'IR_108', - 'start_time': '2018-01-01 00:00:00', - 'end_time': '2018-01-01 00:15:00', - 'int': 1, - 'float': 1.0, - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': 'true', - 'numpy_void': '[]', - 'numpy_bytes': 'test', - 'numpy_string': 'test', - 'list': [1, 2, np.float64(3)], - 'nested_list': '["1", ["2", [3]]]', - 'bool': 'true', - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': ['true', 'false', 'true'], - 'array_2d': '[[1, 2], [3, 4]]', - 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', - 'dict_a': 1, - 'dict_b': 2, - 'nested_dict_l1_l2_l3': np.array([1, 2, 3], dtype='uint8'), - 'raw_metadata_recarray': '[[0, 0], [0, 0], [0, 0]]', - 'raw_metadata_flag': 'true', - 'raw_metadata_dict_a': 1, - 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} - return attrs, encoded, encoded_flat - def test__encode_attrs_nc(self): """Test attributes encoding.""" from satpy.cf.attrs import _encode_attrs_nc + from satpy.tests.cf_tests._test_data import get_test_attrs from satpy.tests.utils import assert_dict_array_equality - attrs, expected, _ = self.get_test_attrs() + attrs, expected, _ = get_test_attrs() # Test encoding encoded = _encode_attrs_nc(attrs) diff --git a/satpy/tests/cf_tests/test_coords.py b/satpy/tests/cf_tests/test_coords.py index 1361c0d5a4..0d0b5436cd 100644 --- a/satpy/tests/cf_tests/test_coords.py +++ b/satpy/tests/cf_tests/test_coords.py @@ -19,7 +19,9 @@ import logging import numpy as np +import pytest import xarray as xr +from pyresample import AreaDefinition # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -49,10 +51,106 @@ def 
test_add_time_bounds_dimension(self): assert "bounds" in ds["time"].attrs assert "standard_name" in ds["time"].attrs + # set_cf_time_info + class TestCFcoords: """Test cases for CF spatial dimension and coordinates.""" + def test_check_unique_projection_coords(self): + """Test that the x and y coordinates are unique.""" + from satpy.cf.coords import check_unique_projection_coords + + dummy = [[1, 2], [3, 4]] + datas = {'a': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), + 'b': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), + 'n': xr.DataArray(data=dummy, dims=('v', 'w'), coords={'v': [1, 2], 'w': [3, 4]})} + check_unique_projection_coords(datas) + + datas['c'] = xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 3], 'x': [3, 4]}) + with pytest.raises(ValueError): + check_unique_projection_coords(datas) + + def test_add_coordinates_attrs_coords(self): + """Check that coordinates link has been established correctly.""" + from satpy.cf.coords import add_coordinates_attrs_coords + + data = [[1, 2], [3, 4]] + lon = np.zeros((2, 2)) + lon2 = np.zeros((1, 2, 2)) + lat = np.ones((2, 2)) + datasets = { + 'var1': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon lat'}), + 'var2': xr.DataArray(data=data, dims=('y', 'x')), + 'var3': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon2 lat'}), + 'var4': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'not_exist lon lat'}), + 'lon': xr.DataArray(data=lon, dims=('y', 'x')), + 'lon2': xr.DataArray(data=lon2, dims=('time', 'y', 'x')), + 'lat': xr.DataArray(data=lat, dims=('y', 'x')) + } + + datasets = add_coordinates_attrs_coords(datasets) + + # Check that link has been established correctly and 'coordinate' atrribute has been dropped + assert 'lon' in datasets['var1'].coords + assert 'lat' in datasets['var1'].coords + np.testing.assert_array_equal(datasets['var1']['lon'].data, lon) + np.testing.assert_array_equal(datasets['var1']['lat'].data, lat) + assert 'coordinates' not in datasets['var1'].attrs + + # There should be no link if there was no 'coordinate' attribute + assert 'lon' not in datasets['var2'].coords + assert 'lat' not in datasets['var2'].coords + + # The non-existent dimension or coordinate should be dropped + assert 'time' not in datasets['var3'].coords + assert 'not_exist' not in datasets['var4'].coords + + def test_ensure_unique_nondimensional_coords(self): + """Test that created coordinate variables are unique.""" + from satpy.cf.coords import ensure_unique_nondimensional_coords + + data = [[1, 2], [3, 4]] + y = [1, 2] + x = [1, 2] + time1 = [1, 2] + time2 = [3, 4] + datasets = {'var1': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x, 'acq_time': ('y', time1)}), + 'var2': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x, 'acq_time': ('y', time2)})} + + # Test that dataset names are prepended to alternative coordinates + res = ensure_unique_nondimensional_coords(datasets) + np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) + np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) + assert 'acq_time' not in res['var1'].coords + assert 'acq_time' not in res['var2'].coords + + # Make sure nothing else is modified + np.testing.assert_array_equal(res['var1']['x'], x) + np.testing.assert_array_equal(res['var1']['y'], y) + np.testing.assert_array_equal(res['var2']['x'], x) + np.testing.assert_array_equal(res['var2']['y'], y) + + # Coords not unique -> Dataset 
names must be prepended, even if pretty=True + with pytest.warns(UserWarning, match='Cannot pretty-format "acq_time"'): + res = ensure_unique_nondimensional_coords(datasets, pretty=True) + np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) + np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) + assert 'acq_time' not in res['var1'].coords + assert 'acq_time' not in res['var2'].coords + + # Coords unique and pretty=True -> Don't modify coordinate names + datasets['var2']['acq_time'] = ('y', time1) + res = ensure_unique_nondimensional_coords(datasets, pretty=True) + np.testing.assert_array_equal(res['var1']['acq_time'], time1) + np.testing.assert_array_equal(res['var2']['acq_time'], time1) + assert 'var1_acq_time' not in res['var1'].coords + assert 'var2_acq_time' not in res['var2'].coords + def test_is_projected(self, caplog): """Tests for private _is_projected function.""" from satpy.cf.coords import _is_projected @@ -79,5 +177,56 @@ def test_is_projected(self, caplog): assert _is_projected(da) assert "Failed to tell if data are projected." in caplog.text + @pytest.fixture + def datasets(self): + """Create test dataset.""" + data = [[75, 2], [3, 4]] + y = [1, 2] + x = [1, 2] + geos = AreaDefinition( + area_id='geos', + description='geos', + proj_id='geos', + projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, + width=2, height=2, + area_extent=[-1, -1, 1, 1]) + datasets = { + 'var1': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'var2': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'lat': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'lon': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x})} + datasets['lat'].attrs['standard_name'] = 'latitude' + datasets['var1'].attrs['standard_name'] = 'dummy' + datasets['var2'].attrs['standard_name'] = 'dummy' + datasets['var2'].attrs['area'] = geos + datasets['var1'].attrs['area'] = geos + datasets['lat'].attrs['name'] = 'lat' + datasets['var1'].attrs['name'] = 'var1' + datasets['var2'].attrs['name'] = 'var2' + datasets['lon'].attrs['name'] = 'lon' + return datasets + + def test__is_lon_or_lat_dataarray(self, datasets): + """Test the _is_lon_or_lat_dataarray function.""" + from satpy.cf.coords import _is_lon_or_lat_dataarray + + assert _is_lon_or_lat_dataarray(datasets['lat']) + assert not _is_lon_or_lat_dataarray(datasets['var1']) + + def test_has_projection_coords(self, datasets): + """Test the has_projection_coords function.""" + from satpy.cf.coords import has_projection_coords + + assert has_projection_coords(datasets) + datasets['lat'].attrs['standard_name'] = 'dummy' + assert not has_projection_coords(datasets) + # add_xy_coords_attrs - # process_time_coord diff --git a/satpy/tests/cf_tests/test_dataaarray.py b/satpy/tests/cf_tests/test_dataaarray.py index 0fd3a9d41c..90fbae4558 100644 --- a/satpy/tests/cf_tests/test_dataaarray.py +++ b/satpy/tests/cf_tests/test_dataaarray.py @@ -16,10 +16,6 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests CF-compliant DataArray creation.""" - -import datetime -from collections import OrderedDict - import numpy as np import xarray as xr @@ -71,96 +67,14 @@ def test_make_cf_dataarray_lonlat(): class TestCfDataArray: """Test creation of CF DataArray.""" - def get_test_attrs(self): - """Create some dataset attributes for testing purpose. 
- - Returns: - Attributes, encoded attributes, encoded and flattened attributes - - """ - # TODO: also used by cf/test_attrs.py - attrs = {'name': 'IR_108', - 'start_time': datetime.datetime(2018, 1, 1, 0), - 'end_time': datetime.datetime(2018, 1, 1, 0, 15), - 'int': 1, - 'float': 1.0, - 'none': None, # should be dropped - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': True, - 'numpy_void': np.void(0), - 'numpy_bytes': np.bytes_('test'), - 'numpy_string': np.str_('test'), - 'list': [1, 2, np.float64(3)], - 'nested_list': ["1", ["2", [3]]], - 'bool': True, - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': np.array([True, False, True]), - 'array_2d': np.array([[1, 2], [3, 4]]), - 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), - 'dict': {'a': 1, 'b': 2}, - 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, - 'raw_metadata': OrderedDict([ - ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), - ('flag', np.bool_(True)), - ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) - ])} - encoded = {'name': 'IR_108', - 'start_time': '2018-01-01 00:00:00', - 'end_time': '2018-01-01 00:15:00', - 'int': 1, - 'float': 1.0, - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': 'true', - 'numpy_void': '[]', - 'numpy_bytes': 'test', - 'numpy_string': 'test', - 'list': [1, 2, np.float64(3)], - 'nested_list': '["1", ["2", [3]]]', - 'bool': 'true', - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': ['true', 'false', 'true'], - 'array_2d': '[[1, 2], [3, 4]]', - 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', - 'dict': '{"a": 1, "b": 2}', - 'nested_dict': '{"l1": {"l2": {"l3": [1, 2, 3]}}}', - 'raw_metadata': '{"recarray": [[0, 0], [0, 0], [0, 0]], ' - '"flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}'} - encoded_flat = {'name': 'IR_108', - 'start_time': '2018-01-01 00:00:00', - 'end_time': '2018-01-01 00:15:00', - 'int': 1, - 'float': 1.0, - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': 'true', - 'numpy_void': '[]', - 'numpy_bytes': 'test', - 'numpy_string': 'test', - 'list': [1, 2, np.float64(3)], - 'nested_list': '["1", ["2", [3]]]', - 'bool': 'true', - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': ['true', 'false', 'true'], - 'array_2d': '[[1, 2], [3, 4]]', - 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', - 'dict_a': 1, - 'dict_b': 2, - 'nested_dict_l1_l2_l3': np.array([1, 2, 3], dtype='uint8'), - 'raw_metadata_recarray': '[[0, 0], [0, 0], [0, 0]]', - 'raw_metadata_flag': 'true', - 'raw_metadata_dict_a': 1, - 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} - return attrs, encoded, encoded_flat - def test_make_cf_dataarray(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" from satpy.cf.dataarray import make_cf_dataarray + from satpy.tests.cf_tests._test_data import get_test_attrs from satpy.tests.utils import assert_dict_array_equality # Create set of test attributes - attrs, attrs_expected, attrs_expected_flat = self.get_test_attrs() + attrs, attrs_expected, attrs_expected_flat = get_test_attrs() attrs['area'] = 'some_area' attrs['prerequisites'] = [make_dsq(name='hej')] attrs['_satpy_id_name'] = 'myname' From 57afb82959ac68129e89ba4179880fecc9f1fa59 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 10 Oct 2023 20:45:32 +0200 Subject: [PATCH 290/702] Fix cf_writer module path --- satpy/tests/writer_tests/test_cf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff 
--git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 31e59a2fb7..f50a8e99d3 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -403,11 +403,11 @@ def test_load_module_with_old_pyproj(self): old_version = sys.modules['pyproj'].__version__ sys.modules['pyproj'].__version__ = "1.9.6" try: - importlib.reload(sys.modules['satpy.cf_writer']) + importlib.reload(sys.modules['satpy.writers.cf_writer']) finally: # Tear down sys.modules['pyproj'].__version__ = old_version - importlib.reload(sys.modules['satpy.cf_writer']) + importlib.reload(sys.modules['satpy.writers.cf_writer']) def test_global_attr_default_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" From 2474b9860ed44903887a9f26a2eb431f1102534d Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 10 Oct 2023 20:00:43 +0100 Subject: [PATCH 291/702] Update doc/source/writers.rst Co-authored-by: Martin Raspaud --- doc/source/writers.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/writers.rst b/doc/source/writers.rst index 12ee786f56..e70f6ffb2b 100644 --- a/doc/source/writers.rst +++ b/doc/source/writers.rst @@ -160,7 +160,7 @@ to be added. Where `my_text` is the text you wish to add and `` is the location of the font file you wish to use, often in `/usr/share/fonts/` -This dictionary can then be passed to the `save_dataset` or `save_datasets` command. +This dictionary can then be passed to the :meth:`~satpy.scene.Scene.save_dataset` or :meth:`~satpy.scene.Scene.save_datasets` command. .. code-block:: python From 8cbe04a411ed3cffdf5aa5309623fefef486b860 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 10 Oct 2023 20:07:26 +0100 Subject: [PATCH 292/702] Minor updates to AHI L2 reader and the tests. --- satpy/readers/ahi_l2_nc.py | 16 +++++++--------- satpy/tests/reader_tests/test_ahi_l2_nc.py | 2 -- 2 files changed, 7 insertions(+), 11 deletions(-) diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index ef3b7611aa..5159931819 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2018 Satpy developers +# Copyright (c) 2023 Satpy developers # # This file is part of satpy. # @@ -17,6 +17,8 @@ # satpy. If not, see . """Reader for Himawari L2 cloud products from NOAA's big data programme. +For more information about the data, see: . 
+ These products are generated by the NOAA enterprise cloud suite and have filenames like: AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc @@ -49,12 +51,9 @@ from satpy._compat import cached_property from satpy.readers._geos_area import get_area_definition, get_area_extent from satpy.readers.file_handlers import BaseFileHandler -from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) -CHUNK_SIZE = get_legacy_chunk_size() - EXPECTED_DATA_AREA = 'Full Disk' @@ -63,12 +62,11 @@ class HIML2NCFileHandler(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" - super(HIML2NCFileHandler, self).__init__(filename, filename_info, - filetype_info) + super().__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, - chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE}) + chunks={"xc": "auto", "yc": "auto"}) # Check that file is a full disk scene, we don't know the area for anything else if self.nc.attrs['cdm_data_type'] != EXPECTED_DATA_AREA: @@ -119,8 +117,8 @@ def get_area_def(self, dsid): return self.area def _get_area_def(self): - logger.warning('The AHI L2 cloud products do not have the metadata required to produce an area definition.' - ' Assuming standard Himawari-8/9 full disk projection.') + logger.info('The AHI L2 cloud products do not have the metadata required to produce an area definition.' + ' Assuming standard Himawari-8/9 full disk projection.') # Basic check to ensure we're processing a full disk (2km) scene.n if self.nlines != 5500 or self.ncols != 5500: diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index ff2b5a3d53..39de4e1053 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -75,7 +75,6 @@ def test_startend(himl2_filename): def test_ahi_l2_area_def(himl2_filename, caplog): """Test reader handles area definition correctly.""" - warntxt = "The AHI L2 cloud products do not have the metadata" ps = '+proj=geos +lon_0=140.7 +h=35785863 +x_0=0 +y_0=0 +a=6378137 +rf=298.257024882273 +units=m +no_defs +type=crs' # Check case where input data is correct size. @@ -86,7 +85,6 @@ def test_ahi_l2_area_def(himl2_filename, caplog): assert area_def.height == dimensions['Rows'] assert np.allclose(area_def.area_extent, exp_ext) assert area_def.proj4_string == ps - assert warntxt in caplog.text # Check case where input data is incorrect size. with pytest.raises(ValueError): From e0d7a2a9373143ce14509dcc98bd5ac89c4f8abb Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 10 Oct 2023 20:14:08 +0100 Subject: [PATCH 293/702] Change names of `readers` and `writers` in the docs to `reading` and `writing`. --- doc/source/{readers.rst => reading.rst} | 10 +++++----- doc/source/{writers.rst => writing.rst} | 8 ++++---- 2 files changed, 9 insertions(+), 9 deletions(-) rename doc/source/{readers.rst => reading.rst} (97%) rename doc/source/{writers.rst => writing.rst} (95%) diff --git a/doc/source/readers.rst b/doc/source/reading.rst similarity index 97% rename from doc/source/readers.rst rename to doc/source/reading.rst index fa7cfecea1..eefa4ccba1 100644 --- a/doc/source/readers.rst +++ b/doc/source/reading.rst @@ -1,5 +1,5 @@ ======= -Readers +Reading ======= .. todo:: @@ -7,10 +7,10 @@ Readers How to read cloud products from NWCSAF software. (separate document?) 
Satpy supports reading and loading data from many input file formats and -schemes. The :class:`~satpy.scene.Scene` object provides a simple interface -around all the complexity of these various formats through its ``load`` -method. The following sections describe the different way data can be loaded, -requested, or added to a Scene object. +schemes through the concept of *readers*. Each reader supports a specific type of input data. +The :class:`~satpy.scene.Scene` object provides a simple interface around all the complexity of +these various formats through its ``load``method. +The following sections describe the different way data can be loaded, requested, or added to a Scene object. Available Readers ================= diff --git a/doc/source/writers.rst b/doc/source/writing.rst similarity index 95% rename from doc/source/writers.rst rename to doc/source/writing.rst index 12ee786f56..19518b2755 100644 --- a/doc/source/writers.rst +++ b/doc/source/writing.rst @@ -1,10 +1,10 @@ ======= -Writers +Writing ======= -Satpy makes it possible to save datasets in multiple formats. For details -on additional arguments and features available for a specific Writer see -the table below. Most use cases will want to save datasets using the +Satpy makes it possible to save datasets in multiple formats, with *writers* designed to save in a given format. +For details on additional arguments and features available for a specific Writer see the table below. +Most use cases will want to save datasets using the :meth:`~satpy.scene.Scene.save_datasets` method:: >>> scn.save_datasets(writer='simple_image') From 700ac96958c381da0eb2ff4c6d4a0eaa5b494687 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 10 Oct 2023 22:00:13 +0200 Subject: [PATCH 294/702] Update setup.py Co-authored-by: David Hoese --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 7b321ecbcd..555f299b19 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ from setuptools import find_packages, setup -requires = ['numpy >1.20', 'pillow', 'pyresample >=1.24.0', 'trollsift', +requires = ['numpy >=1.21', 'pillow', 'pyresample >=1.24.0', 'trollsift', 'trollimage >=1.20', 'pykdtree', 'pyyaml >=5.1', 'xarray >=0.10.1, !=0.13.0', 'dask[array] >=0.17.1', 'pyproj>=2.2', 'zarr', 'donfig', 'appdirs', 'packaging', 'pooch', 'pyorbital'] From 69c5751dacd76501569ddef56cc37bf7c52faa33 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Tue, 10 Oct 2023 22:13:01 +0200 Subject: [PATCH 295/702] add documentation about Meirink calibration --- satpy/readers/seviri_base.py | 34 +++++++++++++++++++++++++++------- 1 file changed, 27 insertions(+), 7 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 6471c42639..73a1454cb8 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -61,14 +61,22 @@ reader_kwargs={'calib_mode': 'GSICS'}) scene.load(['VIS006', 'IR_108']) -Furthermore, it is possible to specify external calibration coefficients -for the conversion from counts to radiances. External coefficients take -precedence over internal coefficients, but you can also mix internal and -external coefficients: If external calibration coefficients are specified -for only a subset of channels, the remaining channels will be calibrated -using the chosen file-internal coefficients (nominal or GSICS). +In addition, two other calibration methods are available: + +1. 
It is possible to specify external calibration coefficients for the + conversion from counts to radiances. External coefficients take + precedence over internal coefficients and over the Meirink + coefficients, but you can also mix internal and external coefficients: + If external calibration coefficients are specified for only a subset + of channels, the remaining channels will be calibrated using the + chosen file-internal coefficients (nominal or GSICS). Calibration + coefficients must be specified in [mW m-2 sr-1 (cm-1)-1]. + +2. The calibration mode ``meirink-2013`` uses coefficients based on an + intercalibration with Aqua-MODIS for the visible channels, as found in + `Inter-calibration of polar imager solar channels using SEVIRI`_ + (2013) by J. F. Meirink, R. A. Roebeling, and P. Stammes. -Calibration coefficients must be specified in [mW m-2 sr-1 (cm-1)-1]. In the following example we use external calibration coefficients for the ``VIS006`` & ``IR_108`` channels, and nominal coefficients for the @@ -93,6 +101,15 @@ 'ext_calib_coefs': coefs}) scene.load(['VIS006', 'VIS008', 'IR_108', 'IR_120']) +In the next example we use the mode ``meirink-2013`` calibration +coefficients for all visible channels and nominal coefficients for the +rest:: + + scene = satpy.Scene(filenames, + reader='seviri_l1b_...', + reader_kwargs={'calib_mode': 'meirink-2013'}) + scene.load(['VIS006', 'VIS008', 'IR_016']) + Calibration to reflectance ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -163,6 +180,9 @@ .. _Radiometric Calibration of MSG SEVIRI Level 1.5 Image Data in Equivalent Spectral Blackbody Radiance: https://www-cdn.eumetsat.int/files/2020-04/pdf_ten_msg_seviri_rad_calib.pdf +.. _Inter-calibration of polar imager solar channels using SEVIRI: + http://dx.doi.org/10.5194/amt-6-2495-2013 + """ import warnings From 714130cba99751d3b604e9ff8252662b9334dddb Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 11 Oct 2023 09:59:30 +0200 Subject: [PATCH 296/702] Update test for changes in Meirink calibration --- satpy/tests/reader_tests/test_seviri_base.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index 252da43e75..3b357512e4 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -26,7 +26,7 @@ import xarray as xr from satpy.readers.seviri_base import ( - DATE_2000, + MEIRINK_EPOCH, MEIRINK_COEFS, NoValidOrbitParams, OrbitPolynomial, @@ -373,8 +373,8 @@ def test_get_meirink_slope_epoch(self, platform_id, channel_name): coefs = {'coefs': {}} coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} coefs['coefs']['EXTERNAL'] = {} - calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK', DATE_2000) - assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS[platform_id][channel_name][0]/1000. + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK-2013', MEIRINK_EPOCH) + assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS['2013'][platform_id][channel_name][0]/1000. 
@pytest.mark.parametrize('platform_id,time,expected', ( (321, datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]), @@ -392,5 +392,5 @@ def test_get_meirink_slope_2020(self, platform_id, time, expected): coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} coefs['coefs']['EXTERNAL'] = {} for i, channel_name in enumerate(['VIS006', 'VIS008', 'IR_016']): - calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK', time) + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK-2013', time) assert abs(calibration_handler.get_gain_offset()[0] - expected[i]) < 1e-6 From b2ef4b4dd549d47e35e73f44af4a986d79a20f84 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 11 Oct 2023 10:27:16 +0200 Subject: [PATCH 297/702] remove setuptools_scm_git_archive from requirement The package's functionality is now found in setuptools_scm (see https://github.com/pytroll/satpy/issues/2549 and https://github.com/Changaco/setuptools_scm_git_archive ). Minimal changes done: 1. Remove setuptools_scm_git_archive from setup.cfg and doc/rtd_environment.yml 2. Change the content of .git_archival.txt according to https://setuptools-scm.readthedocs.io/en/latest/usage/#builtin-mechanisms-for-obtaining-version-numbers fixes #2549 --- .git_archival.txt | 3 +++ doc/rtd_environment.yml | 1 - setup.cfg | 1 - 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.git_archival.txt b/.git_archival.txt index 95cb3eea4e..3994ec0a83 100644 --- a/.git_archival.txt +++ b/.git_archival.txt @@ -1 +1,4 @@ +node: $Format:%H$ +node-date: $Format:%cI$ +describe-name: $Format:%(describe:tags=true)$ ref-names: $Format:%D$ diff --git a/doc/rtd_environment.yml b/doc/rtd_environment.yml index ce147a1644..1e40cbb73a 100644 --- a/doc/rtd_environment.yml +++ b/doc/rtd_environment.yml @@ -25,7 +25,6 @@ dependencies: - rioxarray - setuptools - setuptools_scm - - setuptools_scm_git_archive - sphinx - sphinx_rtd_theme - sphinxcontrib-apidoc diff --git a/setup.cfg b/setup.cfg index 594f9dc8cd..3e09909a6a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,7 +1,6 @@ [options] setup_requires = setuptools_scm - setuptools_scm_git_archive [bdist_rpm] requires=h5py pyresample python2-numexpr pyhdf xarray dask h5netcdf From aaec89a6f3a3edbbf34eeb1488915d99347f07ce Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 11 Oct 2023 11:05:28 +0200 Subject: [PATCH 298/702] Issue warning for caching only when caching is requested --- satpy/modifiers/angles.py | 44 +++++++++++++---------- satpy/tests/modifier_tests/test_angles.py | 14 +++++++- 2 files changed, 39 insertions(+), 19 deletions(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 02ffadfa87..8a3fe4910a 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -138,27 +138,36 @@ def _zarr_pattern(self, arg_hash, cache_version: Union[None, int, str] = None) - def __call__(self, *args, cache_dir: Optional[str] = None) -> Any: """Call the decorated function.""" - new_args = self._sanitize_args_func(*args) if self._sanitize_args_func is not None else args - arg_hash = _hash_args(*new_args, unhashable_types=self._uncacheable_arg_types) - should_cache, cache_dir = self._get_should_cache_and_cache_dir(new_args, cache_dir) + sanitized_args = self._sanitize_args_func(*args) if self._sanitize_args_func is not None else args + should_cache, cache_dir = self._get_should_cache_and_cache_dir(sanitized_args, cache_dir) + if should_cache: + try: + arg_hash = _hash_args(*sanitized_args, 
unhashable_types=self._uncacheable_arg_types) + except TypeError as err: + warnings.warn("Cannot cache function because of unhashable argument: " + str(err), stacklevel=2) + should_cache = False + + if not should_cache: + return self._func(*args) + zarr_fn = self._zarr_pattern(arg_hash) zarr_format = os.path.join(cache_dir, zarr_fn) zarr_paths = glob(zarr_format.format("*")) - if not should_cache or not zarr_paths: - # use sanitized arguments if we are caching, otherwise use original arguments - args_to_use = new_args if should_cache else args + + if not zarr_paths: + # use sanitized arguments + args_to_use = sanitized_args res = self._func(*args_to_use) - if should_cache and not zarr_paths: - self._warn_if_irregular_input_chunks(args, args_to_use) - self._cache_results(res, zarr_format) + self._warn_if_irregular_input_chunks(args, args_to_use) + self._cache_results(res, zarr_format) + # if we did any caching, let's load from the zarr files - if should_cache: - # re-calculate the cached paths - zarr_paths = sorted(glob(zarr_format.format("*"))) - if not zarr_paths: - raise RuntimeError("Data was cached to disk but no files were found") - new_chunks = _get_output_chunks_from_func_arguments(args) - res = tuple(da.from_zarr(zarr_path, chunks=new_chunks) for zarr_path in zarr_paths) + # re-calculate the cached paths + zarr_paths = sorted(glob(zarr_format.format("*"))) + if not zarr_paths: + raise RuntimeError("Data was cached to disk but no files were found") + new_chunks = _get_output_chunks_from_func_arguments(args) + res = tuple(da.from_zarr(zarr_path, chunks=new_chunks) for zarr_path in zarr_paths) return res def _get_should_cache_and_cache_dir(self, args, cache_dir: Optional[str]) -> tuple[bool, str]: @@ -248,8 +257,7 @@ def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES): hashable_args = [] for arg in args: if isinstance(arg, unhashable_types): - warnings.warn(f"Unhashable type in function signature ({type(arg)}), cannot be cached.", stacklevel=2) - continue + raise TypeError(f"Unhashable type ({type(arg)}).") if isinstance(arg, HASHABLE_GEOMETRIES): arg = hash(arg) elif isinstance(arg, datetime): diff --git a/satpy/tests/modifier_tests/test_angles.py b/satpy/tests/modifier_tests/test_angles.py index 4c7e295e14..f51ab229c8 100644 --- a/satpy/tests/modifier_tests/test_angles.py +++ b/satpy/tests/modifier_tests/test_angles.py @@ -322,7 +322,7 @@ def _fake_func(shape, chunks): satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)): _fake_func((5, 5), ((5,), (5,))) - def test_caching_with_array_in_args_fails(self, tmp_path): + def test_caching_with_array_in_args_warns(self, tmp_path): """Test that trying to cache with non-dask arrays fails.""" from satpy.modifiers.angles import cache_to_zarr_if @@ -334,6 +334,18 @@ def _fake_func(array): satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)): _fake_func(da.zeros(100)) + def test_caching_with_array_in_args_does_not_warn_when_caching_is_not_enabled(self, tmp_path, recwarn): + """Test that trying to cache with non-dask arrays fails.""" + from satpy.modifiers.angles import cache_to_zarr_if + + @cache_to_zarr_if("cache_lonlats") + def _fake_func(array): + return array + 1 + + with satpy.config.set(cache_lonlats=False, cache_dir=str(tmp_path)): + _fake_func(da.zeros(100)) + assert len(recwarn) == 0 + def test_no_cache_dir_fails(self, tmp_path): """Test that 'cache_dir' not being set fails.""" from satpy.modifiers.angles import _get_sensor_angles_from_sat_pos, get_angles From 51a3331206c158cdf464eab219da121da34cd869 Mon Sep 
17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 11 Oct 2023 11:56:39 +0200 Subject: [PATCH 299/702] isort --- satpy/tests/reader_tests/test_seviri_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index 3b357512e4..910623bcd5 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -26,8 +26,8 @@ import xarray as xr from satpy.readers.seviri_base import ( - MEIRINK_EPOCH, MEIRINK_COEFS, + MEIRINK_EPOCH, NoValidOrbitParams, OrbitPolynomial, OrbitPolynomialFinder, From 3b465a0dcbac71f3e007a8e825e1b0c1aa5b6910 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 11 Oct 2023 14:06:12 +0200 Subject: [PATCH 300/702] Change version of Meirink coefficients to 2023. The coefficients on the webpage https://msgcpp.knmi.nl/solar-channel-calibration.html were updated in place. The current set of coefficients were obtained in 2023, the code now reflects this correctly. --- satpy/readers/seviri_base.py | 21 +++++++++++--------- satpy/tests/reader_tests/test_seviri_base.py | 6 +++--- 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 73a1454cb8..83977bf40c 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -72,7 +72,7 @@ chosen file-internal coefficients (nominal or GSICS). Calibration coefficients must be specified in [mW m-2 sr-1 (cm-1)-1]. -2. The calibration mode ``meirink-2013`` uses coefficients based on an +2. The calibration mode ``meirink-2023`` uses coefficients based on an intercalibration with Aqua-MODIS for the visible channels, as found in `Inter-calibration of polar imager solar channels using SEVIRI`_ (2013) by J. F. Meirink, R. A. Roebeling, and P. Stammes. @@ -101,13 +101,13 @@ 'ext_calib_coefs': coefs}) scene.load(['VIS006', 'VIS008', 'IR_108', 'IR_120']) -In the next example we use the mode ``meirink-2013`` calibration +In the next example we use the mode ``meirink-2023`` calibration coefficients for all visible channels and nominal coefficients for the rest:: scene = satpy.Scene(filenames, reader='seviri_l1b_...', - reader_kwargs={'calib_mode': 'meirink-2013'}) + reader_kwargs={'calib_mode': 'meirink-2023'}) scene.load(['VIS006', 'VIS008', 'IR_016']) @@ -377,6 +377,9 @@ # Inter-calibration of polar imager solar channels using SEVIRI, Atm. Meas. Tech., 6, # 2495-2508, doi:10.5194/amt-6-2495-2013 # +# The coeffients in the 2023 entry have been obtained from the webpage +# https://msgcpp.knmi.nl/solar-channel-calibration.html on 2023-10-11. 
+# # The coefficients are stored in pairs of A, B (see function `get_meirink_slope`) where the # units of A are µW m-2 sr-1 (cm-1)-1 and those of B are µW m-2 sr-1 (cm-1)-1 (86400 s)-1 # @@ -386,32 +389,32 @@ MEIRINK_EPOCH = datetime(2000, 1, 1) MEIRINK_COEFS = {} -MEIRINK_COEFS['2013'] = {} +MEIRINK_COEFS['2023'] = {} # Meteosat-8 -MEIRINK_COEFS['2013'][321] = {'VIS006': (24.346, 0.3739), +MEIRINK_COEFS['2023'][321] = {'VIS006': (24.346, 0.3739), 'VIS008': (30.989, 0.3111), 'IR_016': (22.869, 0.0065) } # Meteosat-9 -MEIRINK_COEFS['2013'][322] = {'VIS006': (21.026, 0.2556), +MEIRINK_COEFS['2023'][322] = {'VIS006': (21.026, 0.2556), 'VIS008': (26.875, 0.1835), 'IR_016': (21.394, 0.0498) } # Meteosat-10 -MEIRINK_COEFS['2013'][323] = {'VIS006': (19.829, 0.5856), +MEIRINK_COEFS['2023'][323] = {'VIS006': (19.829, 0.5856), 'VIS008': (25.284, 0.6787), 'IR_016': (23.066, -0.0286) } # Meteosat-11 -MEIRINK_COEFS['2013'][324] = {'VIS006': (20.515, 0.3600), +MEIRINK_COEFS['2023'][324] = {'VIS006': (20.515, 0.3600), 'VIS008': (25.803, 0.4844), 'IR_016': (22.354, -0.0187) } @@ -664,7 +667,7 @@ def __init__(self, platform_id, channel_name, coefs, calib_mode, scan_time): scan_time=self._scan_time ) - valid_modes = ('NOMINAL', 'GSICS', 'MEIRINK-2013') + valid_modes = ('NOMINAL', 'GSICS', 'MEIRINK-2023') if self._calib_mode not in valid_modes: raise ValueError( 'Invalid calibration mode: {}. Choose one of {}'.format( diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index 910623bcd5..32918ea45b 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -373,8 +373,8 @@ def test_get_meirink_slope_epoch(self, platform_id, channel_name): coefs = {'coefs': {}} coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} coefs['coefs']['EXTERNAL'] = {} - calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK-2013', MEIRINK_EPOCH) - assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS['2013'][platform_id][channel_name][0]/1000. + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK-2023', MEIRINK_EPOCH) + assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS['2023'][platform_id][channel_name][0]/1000. 
@pytest.mark.parametrize('platform_id,time,expected', ( (321, datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]), @@ -392,5 +392,5 @@ def test_get_meirink_slope_2020(self, platform_id, time, expected): coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} coefs['coefs']['EXTERNAL'] = {} for i, channel_name in enumerate(['VIS006', 'VIS008', 'IR_016']): - calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK-2013', time) + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK-2023', time) assert abs(calibration_handler.get_gain_offset()[0] - expected[i]) < 1e-6 From 761d273385808a20696af3baaa95cd95c5d314d7 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 11 Oct 2023 14:39:17 +0200 Subject: [PATCH 301/702] Update pyproject.toml Co-authored-by: Gerrit Holl --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index d0eac01fe0..e3a3a2efbe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,6 +14,7 @@ known_first_party = "satpy" line_length = 120 [tool.ruff] +# See https://docs.astral.sh/ruff/rules/ select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"] ignore = ["B905"] # only available from python 3.10 line-length = 120 From 49a156804b12faa92fd266659acaad5aea948963 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 11 Oct 2023 09:18:01 -0500 Subject: [PATCH 302/702] Remove unused setup section in setup.cfg --- setup.cfg | 4 ---- 1 file changed, 4 deletions(-) diff --git a/setup.cfg b/setup.cfg index 3e09909a6a..205f924b33 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,7 +1,3 @@ -[options] -setup_requires = - setuptools_scm - [bdist_rpm] requires=h5py pyresample python2-numexpr pyhdf xarray dask h5netcdf release=1 From 056a3a9da5c8de9d811655441c234f735482f1a8 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 11 Oct 2023 10:47:00 -0500 Subject: [PATCH 303/702] Fix type annotations in seviri_base.py --- satpy/readers/seviri_base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 83977bf40c..5e6d69ea68 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -184,6 +184,7 @@ http://dx.doi.org/10.5194/amt-6-2495-2013 """ +from __future__ import annotations import warnings from datetime import datetime, timedelta @@ -388,7 +389,7 @@ # Epoch for the MEIRINK re-calibration MEIRINK_EPOCH = datetime(2000, 1, 1) -MEIRINK_COEFS = {} +MEIRINK_COEFS: dict[str, dict[int, dict[str, tuple[float, float]]]] = {} MEIRINK_COEFS['2023'] = {} # Meteosat-8 From 1d63b698e0ce7126434a72b562f61d353a639032 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 11 Oct 2023 19:11:14 +0200 Subject: [PATCH 304/702] Improve comment --- satpy/modifiers/angles.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 8a3fe4910a..a11a3d2cd7 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -161,7 +161,7 @@ def __call__(self, *args, cache_dir: Optional[str] = None) -> Any: self._warn_if_irregular_input_chunks(args, args_to_use) self._cache_results(res, zarr_format) - # if we did any caching, let's load from the zarr files + # if we did any caching, let's load from the zarr files, so that future calls have the same name # re-calculate the cached paths zarr_paths = sorted(glob(zarr_format.format("*"))) if not zarr_paths: From 
e1ea7cedb6b945bfca31a8078fd0e55c74c3abc0 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 12 Oct 2023 08:14:12 +0100 Subject: [PATCH 305/702] Update documentation index for renamed files and replace some single quotes with double quotes. --- doc/source/index.rst | 4 ++-- doc/source/writing.rst | 30 +++++++++++++++--------------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/doc/source/index.rst b/doc/source/index.rst index 413f7864a1..052a7e2d03 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -63,12 +63,12 @@ Documentation data_download examples/index quickstart - readers + reading remote_reading composites resample enhancements - writers + writing multiscene dev_guide/index diff --git a/doc/source/writing.rst b/doc/source/writing.rst index 779a550761..426be0b458 100644 --- a/doc/source/writing.rst +++ b/doc/source/writing.rst @@ -7,20 +7,20 @@ For details on additional arguments and features available for a specific Writer Most use cases will want to save datasets using the :meth:`~satpy.scene.Scene.save_datasets` method:: - >>> scn.save_datasets(writer='simple_image') + >>> scn.save_datasets(writer="simple_image") The ``writer`` parameter defaults to using the ``geotiff`` writer. One common parameter across almost all Writers is ``filename`` and ``base_dir`` to help automate saving files with custom filenames:: >>> scn.save_datasets( - ... filename='{name}_{start_time:%Y%m%d_%H%M%S}.tif', - ... base_dir='/tmp/my_ouput_dir') + ... filename="{name}_{start_time:%Y%m%d_%H%M%S}.tif", + ... base_dir="/tmp/my_ouput_dir") .. versionchanged:: 0.10 The `file_pattern` keyword argument was renamed to `filename` to match - the `save_dataset` method's keyword argument. + the `save_dataset` method"s keyword argument. .. _writer_table: @@ -129,10 +129,10 @@ and save them all at once. >>> from satpy.writers import compute_writer_results >>> res1 = scn.save_datasets(filename="/tmp/{name}.png", - ... writer='simple_image', + ... writer="simple_image", ... compute=False) >>> res2 = scn.save_datasets(filename="/tmp/{name}.tif", - ... writer='geotiff', + ... writer="geotiff", ... compute=False) >>> results = [res1, res2] >>> compute_writer_results(results) @@ -147,14 +147,14 @@ to be added. .. code-block:: python - >>> decodict = {'decorate': [{'text': {'txt': f' {my_text}', - ... 'align': {'top_bottom': 'top', 'left_right': 'left'}, - ... 'font': , - ... 'font_size': 48, - ... 'line': 'white', - ... 'bg_opacity': 255, - ... 'bg': 'black', - ... 'height': 30, + >>> decodict = {"decorate": [{"text": {"txt": "my_text", + ... "align": {"top_bottom": "top", "left_right": "left"}, + ... "font": , + ... "font_size": 48, + ... "line": "white", + ... "bg_opacity": 255, + ... "bg": "black", + ... "height": 30, ... }}]} Where `my_text` is the text you wish to add and `` is the @@ -164,5 +164,5 @@ This dictionary can then be passed to the :meth:`~satpy.scene.Scene.save_dataset .. code-block:: python - >>> scene.save_dataset(my_dataset, writer='simple_image', fill_value=False, + >>> scene.save_dataset(my_dataset, writer="simple_image", fill_value=False, ... decorate=decodict) From d130317b2cac98d7ba25c335646c856c1055862a Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 12 Oct 2023 08:50:06 +0100 Subject: [PATCH 306/702] Add some global EPSG 4326 gridded lat/lon areas. 
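These named grids can then be requested directly when resampling. A minimal
sketch of the intended usage (the reader and file list below are placeholders
for illustration, not part of this change):

    from satpy import Scene

    scn = Scene(filenames=my_files, reader="abi_l1b")  # hypothetical input files/reader
    scn.load(["C01"])
    # resample to the 0.05 degree global lat/lon grid defined below
    global_scn = scn.resample("EPSG_4326_7200x3600")
    global_scn.save_datasets()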
--- satpy/etc/areas.yaml | 67 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 67 insertions(+) diff --git a/satpy/etc/areas.yaml b/satpy/etc/areas.yaml index 4f71368375..68f771d1d9 100644 --- a/satpy/etc/areas.yaml +++ b/satpy/etc/areas.yaml @@ -2030,3 +2030,70 @@ msg_resample_area: area_extent: lower_left_xy: [-5570248.477339261, -5567248.074173444] upper_right_xy: [5567248.074173444, 5570248.477339261] + +# Global lat / lon gridded areas +EPSG_4326_36000x18000: + description: Global equal latitude/longitude grid at 0.01 degree resolution + projection: + EPSG:4326 + shape: + height: 18000 + width: 36000 + area_extent: + lower_left_xy: [-180.0, -90.0] + upper_right_xy: [180.0, 90.0] + +EPSG_4326_7200x3600: + description: Global equal latitude/longitude grid at 0.05 degree resolution + projection: + EPSG:4326 + shape: + height: 3600 + width: 7200 + area_extent: + lower_left_xy: [-180.0, -90.0] + upper_right_xy: [180.0, 90.0] + +EPSG_4326_1440x720: + description: Global equal latitude/longitude grid at 0.25 degree resolution + projection: + EPSG:4326 + shape: + height: 720 + width: 1440 + area_extent: + lower_left_xy: [-180.0, -90.0] + upper_right_xy: [180.0, 90.0] + +EPSG_4326_720x360: + description: Global equal latitude/longitude grid at 0.5 degree resolution + projection: + EPSG:4326 + shape: + height: 360 + width: 720 + area_extent: + lower_left_xy: [-180.0, -90.0] + upper_right_xy: [180.0, 90.0] + +EPSG_4326_3600x1800: + description: Global equal latitude/longitude grid at 0.1 degree resolution + projection: + EPSG:4326 + shape: + height: 1800 + width: 3600 + area_extent: + lower_left_xy: [-180.0, -90.0] + upper_right_xy: [180.0, 90.0] + +EPSG_4326_360x180: + description: Global equal latitude/longitude grid at 1 degree resolution + projection: + EPSG:4326 + shape: + height: 180 + width: 360 + area_extent: + lower_left_xy: [-180.0, -90.0] + upper_right_xy: [180.0, 90.0] \ No newline at end of file From d5ab7e47dc58953b4b554dab67f85a7aff9da640 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 12 Oct 2023 07:54:05 +0000 Subject: [PATCH 307/702] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/etc/areas.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/areas.yaml b/satpy/etc/areas.yaml index 68f771d1d9..ec5cc5d254 100644 --- a/satpy/etc/areas.yaml +++ b/satpy/etc/areas.yaml @@ -2096,4 +2096,4 @@ EPSG_4326_360x180: width: 360 area_extent: lower_left_xy: [-180.0, -90.0] - upper_right_xy: [180.0, 90.0] \ No newline at end of file + upper_right_xy: [180.0, 90.0] From 81af83893c483900c37898ef35a06241bdf88040 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 12 Oct 2023 09:00:09 +0100 Subject: [PATCH 308/702] Update areas.yaml Re-order some of the area definitions. 
--- satpy/etc/areas.yaml | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/satpy/etc/areas.yaml b/satpy/etc/areas.yaml index ec5cc5d254..28805855d9 100644 --- a/satpy/etc/areas.yaml +++ b/satpy/etc/areas.yaml @@ -2054,6 +2054,17 @@ EPSG_4326_7200x3600: lower_left_xy: [-180.0, -90.0] upper_right_xy: [180.0, 90.0] +EPSG_4326_3600x1800: + description: Global equal latitude/longitude grid at 0.1 degree resolution + projection: + EPSG:4326 + shape: + height: 1800 + width: 3600 + area_extent: + lower_left_xy: [-180.0, -90.0] + upper_right_xy: [180.0, 90.0] + EPSG_4326_1440x720: description: Global equal latitude/longitude grid at 0.25 degree resolution projection: @@ -2076,17 +2087,6 @@ EPSG_4326_720x360: lower_left_xy: [-180.0, -90.0] upper_right_xy: [180.0, 90.0] -EPSG_4326_3600x1800: - description: Global equal latitude/longitude grid at 0.1 degree resolution - projection: - EPSG:4326 - shape: - height: 1800 - width: 3600 - area_extent: - lower_left_xy: [-180.0, -90.0] - upper_right_xy: [180.0, 90.0] - EPSG_4326_360x180: description: Global equal latitude/longitude grid at 1 degree resolution projection: From 06ad2e573285e535cca3731761af4358e7240bd9 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 12 Oct 2023 11:35:15 +0100 Subject: [PATCH 309/702] Fix error in reading.rst file. --- doc/source/reading.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/reading.rst b/doc/source/reading.rst index eefa4ccba1..8553b52e76 100644 --- a/doc/source/reading.rst +++ b/doc/source/reading.rst @@ -116,7 +116,7 @@ Starting with Satpy version 0.25.1 with supported readers it is possible to load data from remote file systems like ``s3fs`` or ``fsspec``. For example: -:: +.. code-block:: python >>> from satpy import Scene >>> from satpy.readers import FSFile From d012c2180d1b76d83dc2b7e96a131ff104e8957c Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 12 Oct 2023 11:42:57 +0100 Subject: [PATCH 310/702] Rename 'readers' page in documentation within two satpy readers. --- satpy/readers/modis_l2.py | 2 +- satpy/readers/seviri_base.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/modis_l2.py b/satpy/readers/modis_l2.py index 1a7fc3ae38..ac1522dfe9 100644 --- a/satpy/readers/modis_l2.py +++ b/satpy/readers/modis_l2.py @@ -28,7 +28,7 @@ - m[o/y]d35_l2: cloud_mask dataset - some datasets in m[o/y]d06 files -To get a list of the available datasets for a given file refer to the "Load data" section in :doc:`../readers`. +To get a list of the available datasets for a given file refer to the "Load data" section in :doc:`../reading`. Geolocation files diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 131fe39ad4..7870e591a0 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -122,7 +122,7 @@ * The ``orbital_parameters`` attribute provides the nominal and actual satellite position, as well as the projection centre. See the `Metadata` section in - the :doc:`../readers` chapter for more information. + the :doc:`../reading` chapter for more information. * The ``acq_time`` coordinate provides the mean acquisition time for each scanline. 
Use a ``MultiIndex`` to enable selection by acquisition time: From 16ab6869e551f84090a00de6b04056c87ced79e0 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 12 Oct 2023 09:33:52 -0500 Subject: [PATCH 311/702] Fix typo in doc/source/reading.rst --- doc/source/reading.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/reading.rst b/doc/source/reading.rst index 8553b52e76..b7264eeb6e 100644 --- a/doc/source/reading.rst +++ b/doc/source/reading.rst @@ -9,7 +9,7 @@ Reading Satpy supports reading and loading data from many input file formats and schemes through the concept of *readers*. Each reader supports a specific type of input data. The :class:`~satpy.scene.Scene` object provides a simple interface around all the complexity of -these various formats through its ``load``method. +these various formats through its ``load`` method. The following sections describe the different way data can be loaded, requested, or added to a Scene object. Available Readers From 67d4b415a3b7490b381b49652903b448f6ab4737 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 16 Oct 2023 20:13:03 +0100 Subject: [PATCH 312/702] Change names of `readers` and `writers` in the docs to `reading` and `writing`. --- doc/source/overview.rst | 6 +++--- doc/source/quickstart.rst | 4 ++-- satpy/dataset/dataid.py | 1 + satpy/scene.py | 4 ++-- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/doc/source/overview.rst b/doc/source/overview.rst index 8282c40aa3..73923a63cd 100644 --- a/doc/source/overview.rst +++ b/doc/source/overview.rst @@ -127,7 +127,7 @@ Reading One of the biggest advantages of using Satpy is the large number of input file formats that it can read. It encapsulates this functionality into -individual :doc:`readers`. Satpy Readers handle all of the complexity of +individual :doc:`reading`. Satpy Readers handle all of the complexity of reading whatever format they represent. Meteorological Satellite file formats can be extremely complex and formats are rarely reused across satellites or instruments. No matter the format, Satpy's Reader interface is meant to @@ -174,7 +174,7 @@ should look. Satpy tries to hide the complexity of all the possible enhancement methods from the user and just provide the best looking image by default. Satpy still makes it possible to customize these procedures, but in most cases it shouldn't be necessary. See the documentation on -:doc:`writers` for more information on what's possible for output formats +:doc:`writing` for more information on what's possible for output formats and enhancing images. Writing @@ -187,4 +187,4 @@ users to save data in image formats like PNG or GeoTIFF as well as data file formats like NetCDF. Each format's complexity is hidden behind the interface of individual Writer objects and includes keyword arguments for accessing specific format features like compression and output data type. See the -:doc:`writers` documentation for the available writers and how to use them. +:doc:`writing` documentation for the available writers and how to use them. diff --git a/doc/source/quickstart.rst b/doc/source/quickstart.rst index 9f9885a750..83ca60770b 100644 --- a/doc/source/quickstart.rst +++ b/doc/source/quickstart.rst @@ -122,7 +122,7 @@ To access the loaded data use the wavelength or name: >>> print(global_scene[0.8]) For more information on loading datasets by resolution, calibration, or other -advanced loading methods see the :doc:`readers` documentation. +advanced loading methods see the :doc:`reading` documentation. 
Calculating measurement values and navigation coordinates @@ -255,7 +255,7 @@ Or to save an individual dataset: Datasets are automatically scaled or "enhanced" to be compatible with the output format and to provide the best looking image. For more information on saving datasets and customizing enhancements see the documentation on -:doc:`writers`. +:doc:`writing`. Slicing and subsetting scenes diff --git a/satpy/dataset/dataid.py b/satpy/dataset/dataid.py index ded6cec146..135b2af35b 100644 --- a/satpy/dataset/dataid.py +++ b/satpy/dataset/dataid.py @@ -253,6 +253,7 @@ def __hash__(self): 'reflectance', 'brightness_temperature', 'radiance', + 'radiance_wavenumber', 'counts' ], 'transitive': True, diff --git a/satpy/scene.py b/satpy/scene.py index d43c9d80d2..308f02163d 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1119,7 +1119,7 @@ def save_dataset(self, dataset_id, filename=None, writer=None, :doc:`dask:delayed` object or two lists to be passed to a `dask.array.store` call. See return values below for more details. - kwargs: Additional writer arguments. See :doc:`../writers` for more + kwargs: Additional writer arguments. See :doc:`../writing` for more information. Returns: @@ -1173,7 +1173,7 @@ def save_datasets(self, writer=None, filename=None, datasets=None, compute=True, :doc:`dask:delayed` object or two lists to be passed to a `dask.array.store` call. See return values below for more details. - kwargs: Additional writer arguments. See :doc:`../writers` for more + kwargs: Additional writer arguments. See :doc:`../writing` for more information. Returns: From 97de220c1014684824ae558a404cbbadc02a28a7 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 16 Oct 2023 19:58:13 +0000 Subject: [PATCH 313/702] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.5.1 → v1.6.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.5.1...v1.6.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cd26d096fe..07c9509616 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,7 +20,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.5.1' # Use the sha / tag you want to point at + rev: 'v1.6.0' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: From 5038b50a52d15445eb62134edd85ff60d864bd8a Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 17 Oct 2023 13:02:00 +0200 Subject: [PATCH 314/702] Refactor cache helper's call --- satpy/modifiers/angles.py | 38 ++++++++++++++++++++------------------ 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index a11a3d2cd7..f4146b60d5 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -138,42 +138,44 @@ def _zarr_pattern(self, arg_hash, cache_version: Union[None, int, str] = None) - def __call__(self, *args, cache_dir: Optional[str] = None) -> Any: """Call the decorated function.""" - sanitized_args = self._sanitize_args_func(*args) if self._sanitize_args_func is not None else args - should_cache, cache_dir = self._get_should_cache_and_cache_dir(sanitized_args, cache_dir) - if should_cache: - try: - arg_hash = _hash_args(*sanitized_args, 
unhashable_types=self._uncacheable_arg_types) - except TypeError as err: - warnings.warn("Cannot cache function because of unhashable argument: " + str(err), stacklevel=2) - should_cache = False - + should_cache: bool = satpy.config.get(self._cache_config_key, False) if not should_cache: return self._func(*args) - zarr_fn = self._zarr_pattern(arg_hash) - zarr_format = os.path.join(cache_dir, zarr_fn) + try: + return self._cache_and_read(args, cache_dir) + except TypeError as err: + warnings.warn("Cannot cache function because of unhashable argument: " + str(err), stacklevel=2) + return self._func(*args) + + def _cache_and_read(self, args, cache_dir): + sanitized_args = self._sanitize_args_func(*args) if self._sanitize_args_func is not None else args + + zarr_format = self._get_zarr_format(sanitized_args, cache_dir) zarr_paths = glob(zarr_format.format("*")) if not zarr_paths: # use sanitized arguments - args_to_use = sanitized_args - res = self._func(*args_to_use) - self._warn_if_irregular_input_chunks(args, args_to_use) - self._cache_results(res, zarr_format) + self._warn_if_irregular_input_chunks(args, sanitized_args) + res_to_cache = self._func(*(sanitized_args)) + self._cache_results(res_to_cache, zarr_format) # if we did any caching, let's load from the zarr files, so that future calls have the same name # re-calculate the cached paths zarr_paths = sorted(glob(zarr_format.format("*"))) if not zarr_paths: raise RuntimeError("Data was cached to disk but no files were found") + new_chunks = _get_output_chunks_from_func_arguments(args) res = tuple(da.from_zarr(zarr_path, chunks=new_chunks) for zarr_path in zarr_paths) return res - def _get_should_cache_and_cache_dir(self, args, cache_dir: Optional[str]) -> tuple[bool, str]: - should_cache: bool = satpy.config.get(self._cache_config_key, False) + def _get_zarr_format(self, sanitized_args, cache_dir): + arg_hash = _hash_args(*sanitized_args, unhashable_types=self._uncacheable_arg_types) + zarr_filename = self._zarr_pattern(arg_hash) cache_dir = self._get_cache_dir_from_config(cache_dir) - return should_cache, cache_dir + zarr_format = os.path.join(cache_dir, zarr_filename) + return zarr_format @staticmethod def _get_cache_dir_from_config(cache_dir: Optional[str]) -> str: From 3078273c33bf3113dff9643c686da44ececa7b91 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 17 Oct 2023 13:05:29 +0200 Subject: [PATCH 315/702] Update changelog for v0.44.0 --- CHANGELOG.md | 53 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 799ae0a867..4b9ab4e1b7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,56 @@ +## Version 0.44.0 (2023/10/17) + +### Issues Closed + +* [Issue 2593](https://github.com/pytroll/satpy/issues/2593) - FY4A REGC data resampling return all nan +* [Issue 2591](https://github.com/pytroll/satpy/issues/2591) - Is there a corresponding reader for S3A_SL_2_WST? +* [Issue 2581](https://github.com/pytroll/satpy/issues/2581) - Can reader 'modis_l1b' correct MODIS Bow Tie Effect? +* [Issue 2580](https://github.com/pytroll/satpy/issues/2580) - Does python3.8 and below seem to fail to install via the command line "conda install -c conda-forge satpy"? 
+* [Issue 2571](https://github.com/pytroll/satpy/issues/2571) - Add Calibration by Meirink et al for SEVIRI ([PR 2589](https://github.com/pytroll/satpy/pull/2589) by [@pdebuyl](https://github.com/pdebuyl)) +* [Issue 2549](https://github.com/pytroll/satpy/issues/2549) - setuptools-scm-git-archive is obsolete -- use setuptools-scm >= 7 ([PR 2598](https://github.com/pytroll/satpy/pull/2598) by [@pdebuyl](https://github.com/pdebuyl)) +* [Issue 2266](https://github.com/pytroll/satpy/issues/2266) - AGRI data fails with `native` resampling` + +In this release 7 issues were closed. + +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2598](https://github.com/pytroll/satpy/pull/2598) - remove setuptools_scm_git_archive from requirement ([2549](https://github.com/pytroll/satpy/issues/2549), [2549](https://github.com/pytroll/satpy/issues/2549)) +* [PR 2579](https://github.com/pytroll/satpy/pull/2579) - Adapt satpy to numpy 2 +* [PR 2575](https://github.com/pytroll/satpy/pull/2575) - Remove use of deprecated setuptools_scm_git_archive build package + +#### Features added + +* [PR 2600](https://github.com/pytroll/satpy/pull/2600) - Add some global EPSG 4326 gridded lat/lon areas. +* [PR 2589](https://github.com/pytroll/satpy/pull/2589) - Add meirink calib ([2571](https://github.com/pytroll/satpy/issues/2571)) +* [PR 2584](https://github.com/pytroll/satpy/pull/2584) - Convert AHI HSD dask chunking to be based on band resolution +* [PR 2574](https://github.com/pytroll/satpy/pull/2574) - Rename ABI "night_microphysics_abi" composite to "night_microphysics" +* [PR 2572](https://github.com/pytroll/satpy/pull/2572) - Add reader for GERB high-resolution HDF5 files +* [PR 2558](https://github.com/pytroll/satpy/pull/2558) - New reader for Himawari L2 NOAA enterprise cloud products. +* [PR 2556](https://github.com/pytroll/satpy/pull/2556) - Implement modifier for reducing signal as a function of sunz angle +* [PR 2554](https://github.com/pytroll/satpy/pull/2554) - Implement non-linear scaling for NDVI hybrid green correction +* [PR 2488](https://github.com/pytroll/satpy/pull/2488) - Add a blend method to create temporal RGB from MultiScene +* [PR 2052](https://github.com/pytroll/satpy/pull/2052) - Add resolution dependent chunk sizing to 'modis_l1b' reader + +#### Documentation changes + +* [PR 2582](https://github.com/pytroll/satpy/pull/2582) - Add mastodon link +* [PR 2517](https://github.com/pytroll/satpy/pull/2517) - Add documentation on putting text onto images + +#### Backward incompatible changes + +* [PR 2574](https://github.com/pytroll/satpy/pull/2574) - Rename ABI "night_microphysics_abi" composite to "night_microphysics" + +#### Clean ups + +* [PR 2587](https://github.com/pytroll/satpy/pull/2587) - Remove libnetcdf specific build from CI env +* [PR 2578](https://github.com/pytroll/satpy/pull/2578) - Remove unneeded performance tracker in seviri reader +* [PR 2575](https://github.com/pytroll/satpy/pull/2575) - Remove use of deprecated setuptools_scm_git_archive build package + +In this release 19 pull requests were closed. + + ## Version 0.43.0 (2023/07/03) ### Issues Closed From e09e047cfeeacf3dceba57eb021a59ca9ffe6d4d Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 20 Oct 2023 14:50:31 +0100 Subject: [PATCH 316/702] Change platform name for EPIC (DSCOVR) to upper case. 
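The `platform_name` attribute propagates into dataset metadata and into
attribute-based output filenames, so the new spelling is what users see
downstream. A rough sketch, assuming the reader is registered as
`epic_l1b_h5` and `epic_files` points at EPIC L1B files:

    from satpy import Scene

    scn = Scene(filenames=epic_files, reader="epic_l1b_h5")
    name = scn.available_dataset_names()[0]
    scn.load([name])
    print(scn[name].attrs["platform_name"])  # "DSCOVR" rather than "dscovr"
    # writers substitute attributes into filename patterns:
    scn.save_datasets(filename="{platform_name}_{name}.png", writer="simple_image")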
--- satpy/readers/epic_l1b_h5.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/epic_l1b_h5.py b/satpy/readers/epic_l1b_h5.py index 55c020ee21..d83bf6893d 100644 --- a/satpy/readers/epic_l1b_h5.py +++ b/satpy/readers/epic_l1b_h5.py @@ -69,7 +69,7 @@ def __init__(self, filename, filename_info, filetype_info): super(DscovrEpicL1BH5FileHandler, self).__init__(filename, filename_info, filetype_info) self.sensor = 'epic' - self.platform_name = 'dscovr' + self.platform_name = 'DSCOVR' @property def start_time(self): From 58ddfaa671b0d74c9b23bffc3146bbf1337e4e4e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 20 Oct 2023 10:11:06 -0500 Subject: [PATCH 317/702] Bump xarray expected versions in test_cf.py --- satpy/tests/writer_tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index a034038663..7fdcaeb553 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -1448,5 +1448,5 @@ def _should_use_compression_keyword(): versions = _get_backend_versions() return ( versions["libnetcdf"] >= Version("4.9.0") and - versions["xarray"] >= Version("2023.10") + versions["xarray"] >= Version("2023.11") ) From 7495e06d49d26f5259b631437bcdc09067bc4de5 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 22 Oct 2023 19:56:16 -0500 Subject: [PATCH 318/702] Fix ABI L2 reader to produce reflectances as percentages --- satpy/readers/abi_l2_nc.py | 6 ++ satpy/tests/reader_tests/test_abi_l2_nc.py | 69 +++++++++++++--------- 2 files changed, 48 insertions(+), 27 deletions(-) diff --git a/satpy/readers/abi_l2_nc.py b/satpy/readers/abi_l2_nc.py index a152790197..62aad5d08c 100644 --- a/satpy/readers/abi_l2_nc.py +++ b/satpy/readers/abi_l2_nc.py @@ -41,6 +41,12 @@ def get_dataset(self, key, info): variable.attrs.update(key.to_dict()) self._update_data_arr_with_filename_attrs(variable) self._remove_problem_attrs(variable) + + # convert to satpy standard units + if variable.attrs['units'] == '1' and key['calibration'] != 'counts': + variable *= 100.0 + variable.attrs['units'] = '%' + return variable def _update_data_arr_with_filename_attrs(self, variable): diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py index 2f2131461e..a0ec5b92e4 100644 --- a/satpy/tests/reader_tests/test_abi_l2_nc.py +++ b/satpy/tests/reader_tests/test_abi_l2_nc.py @@ -20,6 +20,7 @@ from unittest import mock import numpy as np +import pytest import xarray as xr @@ -87,7 +88,12 @@ def _assert_orbital_parameters(orb_params): def _create_mcmip_dataset(): - return _create_cmip_dataset("CMI_C14") + ds1 = _create_cmip_dataset("CMI_C01") + ds2 = _create_cmip_dataset("CMI_C14") + ds1["CMI_C01"].attrs["units"] = "1" + ds2["CMI_C14"].attrs["units"] = "K" + ds1["CMI_C14"] = ds2["CMI_C14"] + return ds1 class Test_NC_ABI_L2_get_dataset: @@ -135,46 +141,55 @@ def test_get_dataset_gfls(self): class TestMCMIPReading: """Test cases of the MCMIP file format.""" + @pytest.mark.parametrize( + ("product", "exp_metadata"), + [ + ("C14", {"calibration": "brightness_temperature", "wavelength": (10.8, 11.2, 11.6), "units": "K"}), + ("C01", {"calibration": "reflectance", "wavelength": (0.45, 0.47, 0.49), "units": "%"}), + ] + ) @mock.patch('satpy.readers.abi_base.xr') - def test_mcmip_get_dataset(self, xr_): + def test_mcmip_get_dataset(self, xr_, product, exp_metadata): """Test getting channel from MCMIP file.""" from datetime import datetime from 
pyresample.geometry import AreaDefinition from satpy import Scene - from satpy.dataset.dataid import WavelengthRange - xr_.open_dataset.return_value = _create_mcmip_dataset() + fake_ds = _create_mcmip_dataset() + xr_.open_dataset.return_value = fake_ds fn = "OR_ABI-L2-MCMIPF-M6_G16_s20192600241149_e20192600243534_c20192600245360.nc" scn = Scene(reader='abi_l2_nc', filenames=[fn]) - scn.load(['C14']) + scn.load([product]) exp_data = np.array([[2 * 0.3052037, np.nan], [32768 * 0.3052037, 32767 * 0.3052037]]) + if "C01" in product: + exp_data *= 100 + + exp_attrs = { + 'instrument_ID': None, + 'modifiers': (), + 'name': product, + 'observation_type': 'MCMIP', + 'orbital_slot': None, + 'reader': 'abi_l2_nc', + 'platform_name': 'GOES-16', + 'platform_shortname': 'G16', + 'production_site': None, + 'scan_mode': 'M6', + 'scene_abbr': 'F', + 'scene_id': None, + 'sensor': 'abi', + 'timeline_ID': None, + 'start_time': datetime(2017, 9, 20, 17, 30, 40, 800000), + 'end_time': datetime(2017, 9, 20, 17, 41, 17, 500000), + 'ancillary_variables': [], + } + exp_attrs.update(exp_metadata) - exp_attrs = {'instrument_ID': None, - 'modifiers': (), - 'name': 'C14', - 'observation_type': 'MCMIP', - 'orbital_slot': None, - 'reader': 'abi_l2_nc', - 'platform_name': 'GOES-16', - 'platform_shortname': 'G16', - 'production_site': None, - 'scan_mode': 'M6', - 'scene_abbr': 'F', - 'scene_id': None, - 'sensor': 'abi', - 'timeline_ID': None, - 'start_time': datetime(2017, 9, 20, 17, 30, 40, 800000), - 'end_time': datetime(2017, 9, 20, 17, 41, 17, 500000), - 'calibration': 'brightness_temperature', - 'ancillary_variables': [], - 'wavelength': WavelengthRange(10.8, 11.2, 11.6, unit='µm'), - 'units': 'm'} - - res = scn['C14'] + res = scn[product] np.testing.assert_allclose(res.data, exp_data, equal_nan=True) assert isinstance(res.attrs['area'], AreaDefinition) _compare_subdict(res.attrs, exp_attrs) From bd43ad6be06844eb44e114f8e78a94154a481bec Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 23 Oct 2023 06:43:53 -0500 Subject: [PATCH 319/702] Fix ABI L2 to only convert reflectances to percentages --- satpy/readers/abi_l2_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/abi_l2_nc.py b/satpy/readers/abi_l2_nc.py index 62aad5d08c..d63ba354a6 100644 --- a/satpy/readers/abi_l2_nc.py +++ b/satpy/readers/abi_l2_nc.py @@ -43,7 +43,7 @@ def get_dataset(self, key, info): self._remove_problem_attrs(variable) # convert to satpy standard units - if variable.attrs['units'] == '1' and key['calibration'] != 'counts': + if variable.attrs['units'] == '1' and key['calibration'] == 'reflectance': variable *= 100.0 variable.attrs['units'] = '%' From dc3ee7d30151584caaae05e6fb21401702614cfd Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 23 Oct 2023 13:08:30 -0500 Subject: [PATCH 320/702] Remove legacy resampler code Satpy depends on a new enough version of pyresample that all of theses imports are guaranteed. 
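With pyresample pinned high enough, the conditional imports collapse to the
plain imports added in this diff; repeated here only to make the assumption
explicit:

    from pyresample.ewa import DaskEWAResampler, LegacyDaskEWAResampler
    from pyresample.gradient import GradientSearchResampler
    from pyresample.resampler import BaseResampler as PRBaseResampler

    # the 'ewa' and 'ewa_legacy' entries of RESAMPLERS now point directly at the
    # pyresample classes instead of being registered conditionally.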
--- satpy/resample.py | 340 ++-------------------------------------------- 1 file changed, 12 insertions(+), 328 deletions(-) diff --git a/satpy/resample.py b/satpy/resample.py index 289371d8cb..d6c90b5bcf 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -146,32 +146,19 @@ from math import lcm # type: ignore from weakref import WeakValueDictionary -import dask import dask.array as da import numpy as np import pyresample import xarray as xr import zarr from packaging import version -from pyresample.ewa import fornav, ll2cr +from pyresample.ewa import DaskEWAResampler, LegacyDaskEWAResampler from pyresample.geometry import SwathDefinition - -from satpy.utils import PerformanceWarning, get_legacy_chunk_size - -try: - from pyresample.resampler import BaseResampler as PRBaseResampler -except ImportError: - PRBaseResampler = None -try: - from pyresample.gradient import GradientSearchResampler -except ImportError: - GradientSearchResampler = None -try: - from pyresample.ewa import DaskEWAResampler, LegacyDaskEWAResampler -except ImportError: - DaskEWAResampler = LegacyDaskEWAResampler = None +from pyresample.gradient import GradientSearchResampler +from pyresample.resampler import BaseResampler as PRBaseResampler from satpy._config import config_search_paths, get_config_path +from satpy.utils import PerformanceWarning, get_legacy_chunk_size LOG = getLogger(__name__) @@ -355,100 +342,7 @@ def update_resampled_coords(old_data, new_data, new_area): return new_data -class BaseResampler(object): - """Base abstract resampler class.""" - - def __init__(self, source_geo_def, target_geo_def): - """Initialize resampler with geolocation information. - - Args: - source_geo_def (SwathDefinition, AreaDefinition): - Geolocation definition for the data to be resampled - target_geo_def (CoordinateDefinition, AreaDefinition): - Geolocation definition for the area to resample data to. - - """ - self.source_geo_def = source_geo_def - self.target_geo_def = target_geo_def - - def get_hash(self, source_geo_def=None, target_geo_def=None, **kwargs): - """Get hash for the current resample with the given *kwargs*.""" - if source_geo_def is None: - source_geo_def = self.source_geo_def - if target_geo_def is None: - target_geo_def = self.target_geo_def - the_hash = source_geo_def.update_hash() - target_geo_def.update_hash(the_hash) - hash_dict(kwargs, the_hash) - return the_hash.hexdigest() - - def precompute(self, **kwargs): - """Do the precomputation. - - This is an optional step if the subclass wants to implement more - complex features like caching or can share some calculations - between multiple datasets to be processed. - - """ - return None - - def compute(self, data, **kwargs): - """Do the actual resampling. - - This must be implemented by subclasses. - - """ - raise NotImplementedError - - def resample(self, data, cache_dir=None, mask_area=None, **kwargs): - """Resample `data` by calling `precompute` and `compute` methods. - - Only certain resampling classes may use `cache_dir` and the `mask` - provided when `mask_area` is True. The return value of calling the - `precompute` method is passed as the `cache_id` keyword argument - of the `compute` method, but may not be used directly for caching. It - is up to the individual resampler subclasses to determine how this - is used. - - Args: - data (xarray.DataArray): Data to be resampled - cache_dir (str): directory to cache precomputed results - (default False, optional) - mask_area (bool): Mask geolocation data where data values are - invalid. 
This should be used when data values - may affect what neighbors are considered valid. - - Returns (xarray.DataArray): Data resampled to the target area - - """ - # default is to mask areas for SwathDefinitions - if mask_area is None and isinstance( - self.source_geo_def, SwathDefinition): - mask_area = True - - if mask_area: - if isinstance(self.source_geo_def, SwathDefinition): - geo_dims = self.source_geo_def.lons.dims - else: - geo_dims = ('y', 'x') - flat_dims = [dim for dim in data.dims if dim not in geo_dims] - if np.issubdtype(data.dtype, np.integer): - kwargs['mask'] = data == data.attrs.get('_FillValue', np.iinfo(data.dtype.type).max) - else: - kwargs['mask'] = data.isnull() - kwargs['mask'] = kwargs['mask'].all(dim=flat_dims) - - cache_id = self.precompute(cache_dir=cache_dir, **kwargs) - return self.compute(data, cache_id=cache_id, **kwargs) - - def _create_cache_filename(self, cache_dir, prefix='', - fmt='.zarr', **kwargs): - """Create filename for the cached resampling parameters.""" - hash_str = self.get_hash(**kwargs) - return os.path.join(cache_dir, prefix + hash_str + fmt) - - -class KDTreeResampler(BaseResampler): +class KDTreeResampler(PRBaseResampler): """Resample using a KDTree-based nearest neighbor algorithm. This resampler implements on-disk caching when the `cache_dir` argument @@ -636,209 +530,7 @@ def compute(self, data, weight_funcs=None, fill_value=np.nan, return update_resampled_coords(data, res, self.target_geo_def) -class _LegacySatpyEWAResampler(BaseResampler): - """Resample using an elliptical weighted averaging algorithm. - - This algorithm does **not** use caching or any externally provided data - mask (unlike the 'nearest' resampler). - - This algorithm works under the assumption that the data is observed - one scan line at a time. However, good results can still be achieved - for non-scan based data provided `rows_per_scan` is set to the - number of rows in the entire swath or by setting it to `None`. - - Args: - rows_per_scan (int, None): - Number of data rows for every observed scanline. If None then the - entire swath is treated as one large scanline. - weight_count (int): - number of elements to create in the gaussian weight table. - Default is 10000. Must be at least 2 - weight_min (float): - the minimum value to store in the last position of the - weight table. Default is 0.01, which, with a - `weight_distance_max` of 1.0 produces a weight of 0.01 - at a grid cell distance of 1.0. Must be greater than 0. - weight_distance_max (float): - distance in grid cell units at which to - apply a weight of `weight_min`. Default is - 1.0. Must be greater than 0. - weight_delta_max (float): - maximum distance in grid cells in each grid - dimension over which to distribute a single swath cell. - Default is 10.0. - weight_sum_min (float): - minimum weight sum value. Cells whose weight sums - are less than `weight_sum_min` are set to the grid fill value. - Default is EPSILON. - maximum_weight_mode (bool): - If False (default), a weighted average of - all swath cells that map to a particular grid cell is used. - If True, the swath cell having the maximum weight of all - swath cells that map to a particular grid cell is used. This - option should be used for coded/category data, i.e. snow cover. - - """ - - def __init__(self, source_geo_def, target_geo_def): - """Init _LegacySatpyEWAResampler.""" - warnings.warn( - "A new version of pyresample is available. 
Please " - "upgrade to get access to a newer 'ewa' and " - "'ewa_legacy' resampler.", - stacklevel=2 - ) - super(_LegacySatpyEWAResampler, self).__init__(source_geo_def, target_geo_def) - self.cache = {} - - def resample(self, *args, **kwargs): - """Run precompute and compute methods. - - .. note:: - - This sets the default of 'mask_area' to False since it is - not needed in EWA resampling currently. - - """ - kwargs.setdefault('mask_area', False) - return super(_LegacySatpyEWAResampler, self).resample(*args, **kwargs) - - def _call_ll2cr(self, lons, lats, target_geo_def, swath_usage=0): - """Wrap ll2cr() for handling dask delayed calls better.""" - new_src = SwathDefinition(lons, lats) - - swath_points_in_grid, cols, rows = ll2cr(new_src, target_geo_def) - # FIXME: How do we check swath usage/coverage if we only do this - # per-block - # # Determine if enough of the input swath was used - # grid_name = getattr(self.target_geo_def, "name", "N/A") - # fraction_in = swath_points_in_grid / float(lons.size) - # swath_used = fraction_in > swath_usage - # if not swath_used: - # LOG.info("Data does not fit in grid %s because it only %f%% of " - # "the swath is used" % - # (grid_name, fraction_in * 100)) - # raise RuntimeError("Data does not fit in grid %s" % (grid_name,)) - # else: - # LOG.debug("Data fits in grid %s and uses %f%% of the swath", - # grid_name, fraction_in * 100) - - return np.stack([cols, rows], axis=0) - - def precompute(self, cache_dir=None, swath_usage=0, **kwargs): - """Generate row and column arrays and store it for later use.""" - if self.cache: - # this resampler should be used for one SwathDefinition - # no need to recompute ll2cr output again - return None - - if kwargs.get('mask') is not None: - LOG.warning("'mask' parameter has no affect during EWA " - "resampling") - - del kwargs - source_geo_def = self.source_geo_def - target_geo_def = self.target_geo_def - - if cache_dir: - LOG.warning("'cache_dir' is not used by EWA resampling") - - # Satpy/PyResample don't support dynamic grids out of the box yet - lons, lats = source_geo_def.get_lonlats() - if isinstance(lons, xr.DataArray): - # get dask arrays - lons = lons.data - lats = lats.data - # we are remapping to a static unchanging grid/area with all of - # its parameters specified - chunks = (2,) + lons.chunks - res = da.map_blocks(self._call_ll2cr, lons, lats, - target_geo_def, swath_usage, - dtype=lons.dtype, chunks=chunks, new_axis=[0]) - cols = res[0] - rows = res[1] - - # save the dask arrays in the class instance cache - # the on-disk cache will store the numpy arrays - self.cache = { - "rows": rows, - "cols": cols, - } - - return None - - def _call_fornav(self, cols, rows, target_geo_def, data, - grid_coverage=0, **kwargs): - """Wrap fornav() to run as a dask delayed.""" - num_valid_points, res = fornav(cols, rows, target_geo_def, - data, **kwargs) - - if isinstance(data, tuple): - # convert 'res' from tuple of arrays to one array - res = np.stack(res) - num_valid_points = sum(num_valid_points) - - grid_covered_ratio = num_valid_points / float(res.size) - grid_covered = grid_covered_ratio > grid_coverage - if not grid_covered: - msg = "EWA resampling only found %f%% of the grid covered " \ - "(need %f%%)" % (grid_covered_ratio * 100, - grid_coverage * 100) - raise RuntimeError(msg) - LOG.debug("EWA resampling found %f%% of the grid covered" % - (grid_covered_ratio * 100)) - - return res - - def compute(self, data, cache_id=None, fill_value=0, weight_count=10000, - weight_min=0.01, weight_distance_max=1.0, - 
weight_delta_max=1.0, weight_sum_min=-1.0, - maximum_weight_mode=False, grid_coverage=0, **kwargs): - """Resample the data according to the precomputed X/Y coordinates.""" - rows = self.cache["rows"] - cols = self.cache["cols"] - - # if the data is scan based then check its metadata or the passed - # kwargs otherwise assume the entire input swath is one large - # "scanline" - rows_per_scan = kwargs.get('rows_per_scan', - data.attrs.get("rows_per_scan", - data.shape[0])) - - if data.ndim == 3 and 'bands' in data.dims: - data_in = tuple(data.sel(bands=band).data - for band in data['bands']) - elif data.ndim == 2: - data_in = data.data - else: - raise ValueError("Unsupported data shape for EWA resampling.") - - res = dask.delayed(self._call_fornav)( - cols, rows, self.target_geo_def, data_in, - grid_coverage=grid_coverage, - rows_per_scan=rows_per_scan, weight_count=weight_count, - weight_min=weight_min, weight_distance_max=weight_distance_max, - weight_delta_max=weight_delta_max, weight_sum_min=weight_sum_min, - maximum_weight_mode=maximum_weight_mode) - if isinstance(data_in, tuple): - new_shape = (len(data_in),) + self.target_geo_def.shape - else: - new_shape = self.target_geo_def.shape - data_arr = da.from_delayed(res, new_shape, data.dtype) - # from delayed creates one large chunk, break it up a bit if we can - data_arr = data_arr.rechunk([CHUNK_SIZE] * data_arr.ndim) - if data.ndim == 3 and data.dims[0] == 'bands': - dims = ('bands', 'y', 'x') - elif data.ndim == 2: - dims = ('y', 'x') - else: - dims = data.dims - - res = xr.DataArray(data_arr, dims=dims, attrs=data.attrs.copy()) - return update_resampled_coords(data, res, self.target_geo_def) - - -class BilinearResampler(BaseResampler): +class BilinearResampler(PRBaseResampler): """Resample using bilinear interpolation. This resampler implements on-disk caching when the `cache_dir` argument @@ -978,7 +670,7 @@ def _repeat_by_factor(data, block_info=None): return out_data -class NativeResampler(BaseResampler): +class NativeResampler(PRBaseResampler): """Expand or reduce input datasets to be the same shape. 
If data is higher resolution (more pixels) than the destination area @@ -1139,7 +831,7 @@ def _get_arg_to_pass_for_skipna_handling(**kwargs): return kwargs -class BucketResamplerBase(BaseResampler): +class BucketResamplerBase(PRBaseResampler): """Base class for bucket resampling which implements averaging.""" def __init__(self, source_geo_def, target_geo_def): @@ -1353,17 +1045,9 @@ def compute(self, data, fill_value=np.nan, categories=None, **kwargs): "bucket_sum": BucketSum, "bucket_count": BucketCount, "bucket_fraction": BucketFraction, + "ewa": DaskEWAResampler, + "ewa_legacy": LegacyDaskEWAResampler, } -if DaskEWAResampler is not None: - RESAMPLERS['ewa'] = DaskEWAResampler - RESAMPLERS['ewa_legacy'] = LegacyDaskEWAResampler -else: - RESAMPLERS['ewa'] = _LegacySatpyEWAResampler - - -# deepcode ignore PythonSameEvalBinaryExpressiontrue: PRBaseResampler is None only on import errors -if PRBaseResampler is None: - PRBaseResampler = BaseResampler # TODO: move this to pyresample @@ -1373,7 +1057,7 @@ def prepare_resampler(source_area, destination_area, resampler=None, **resample_ LOG.info("Using default KDTree resampler") resampler = 'kd_tree' - if isinstance(resampler, (BaseResampler, PRBaseResampler)): + if isinstance(resampler, PRBaseResampler): raise ValueError("Trying to create a resampler when one already " "exists.") if isinstance(resampler, str): @@ -1403,7 +1087,7 @@ def prepare_resampler(source_area, destination_area, resampler=None, **resample_ def resample(source_area, data, destination_area, resampler=None, **kwargs): """Do the resampling.""" - if not isinstance(resampler, (BaseResampler, PRBaseResampler)): + if not isinstance(resampler, PRBaseResampler): # we don't use the first argument (cache key) _, resampler_instance = prepare_resampler(source_area, destination_area, From 59c8b94a7495589911dba96649936afe322a07ac Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 23 Oct 2023 15:05:42 -0500 Subject: [PATCH 321/702] Remove long deprecated numpy resampling cache helper --- satpy/resample.py | 31 ------------------------------- 1 file changed, 31 deletions(-) diff --git a/satpy/resample.py b/satpy/resample.py index d6c90b5bcf..d011b20aa2 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -436,41 +436,10 @@ def _apply_cached_index(self, val, idx_name, persist=False): setattr(self.resampler, idx_name, val) return val - def _check_numpy_cache(self, cache_dir, mask=None, - **kwargs): - """Check if there's Numpy cache file and convert it to zarr.""" - if cache_dir is None: - return - fname_np = self._create_cache_filename(cache_dir, - prefix='resample_lut-', - mask=mask, fmt='.npz', - **kwargs) - fname_zarr = self._create_cache_filename(cache_dir, prefix='nn_lut-', - mask=mask, fmt='.zarr', - **kwargs) - LOG.debug("Check if %s exists", fname_np) - if os.path.exists(fname_np) and not os.path.exists(fname_zarr): - import warnings - warnings.warn( - "Using Numpy files as resampling cache is deprecated.", - stacklevel=3 - ) - LOG.warning("Converting resampling LUT from .npz to .zarr") - zarr_out = xr.Dataset() - with np.load(fname_np, 'r') as fid: - for idx_name, coord in NN_COORDINATES.items(): - zarr_out[idx_name] = (coord, fid[idx_name]) - - # Write indices to Zarr file - zarr_out.to_zarr(fname_zarr) - LOG.debug("Resampling LUT saved to %s", fname_zarr) - def load_neighbour_info(self, cache_dir, mask=None, **kwargs): """Read index arrays from either the in-memory or disk cache.""" mask_name = getattr(mask, 'name', None) cached = {} - self._check_numpy_cache(cache_dir, 
mask=mask_name, **kwargs) - for idx_name in NN_COORDINATES: if mask_name in self._index_caches: cached[idx_name] = self._apply_cached_index( From f273695efc19ffeb07e5a24165912087171036df Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 23 Oct 2023 20:21:59 +0000 Subject: [PATCH 322/702] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.6.0 → v1.6.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.6.0...v1.6.1) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 07c9509616..5bf64d25da 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,7 +20,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.6.0' # Use the sha / tag you want to point at + rev: 'v1.6.1' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: From b9189dd10ca16dfb4b2d26265607be2c4a7bb12d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 23 Oct 2023 15:26:18 -0500 Subject: [PATCH 323/702] Remove tests for removed and deprecated functionality --- satpy/tests/test_resample.py | 40 +----------------------------------- 1 file changed, 1 insertion(+), 39 deletions(-) diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index aa9063b95c..f96b98ecc0 100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -137,13 +137,12 @@ def test_type_preserve(self): class TestKDTreeResampler(unittest.TestCase): """Test the kd-tree resampler.""" - @mock.patch('satpy.resample.KDTreeResampler._check_numpy_cache') @mock.patch('satpy.resample.xr.Dataset') @mock.patch('satpy.resample.zarr.open') @mock.patch('satpy.resample.KDTreeResampler._create_cache_filename') @mock.patch('pyresample.kd_tree.XArrayResamplerNN') def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, - xr_dset, cnc): + xr_dset): """Test the kd resampler.""" from satpy.resample import KDTreeResampler data, source_area, swath_data, source_swath, target_area = get_test_data() @@ -157,7 +156,6 @@ def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, # swath definitions should not be cached self.assertFalse(len(mock_dset.to_zarr.mock_calls), 0) resampler.resampler.reset_mock() - cnc.assert_called_once() resampler = KDTreeResampler(source_area, target_area) resampler.precompute() @@ -216,42 +214,6 @@ def astype(self, dtype): resampler.compute(data, fill_value=fill_value) resampler.resampler.get_sample_from_neighbour_info.assert_called_with(data, fill_value) - @mock.patch('satpy.resample.np.load') - @mock.patch('satpy.resample.xr.Dataset') - def test_check_numpy_cache(self, xr_Dataset, np_load): - """Test that cache stored in .npz is converted to zarr.""" - from satpy.resample import KDTreeResampler - - data, source_area, swath_data, source_swath, target_area = get_test_data() - resampler = KDTreeResampler(source_area, target_area) - - zarr_out = mock.MagicMock() - xr_Dataset.return_value = zarr_out - - try: - the_dir = tempfile.mkdtemp() - kwargs = {} - np_path = resampler._create_cache_filename(the_dir, - prefix='resample_lut-', - fmt='.npz', - mask=None, - **kwargs) - zarr_path = resampler._create_cache_filename(the_dir, - prefix='nn_lut-', - fmt='.zarr', - mask=None, - **kwargs) - 
resampler._check_numpy_cache(the_dir) - np_load.assert_not_called() - zarr_out.to_zarr.assert_not_called() - with open(np_path, 'w') as fid: - fid.write("42") - resampler._check_numpy_cache(the_dir) - np_load.assert_called_once_with(np_path, 'r') - zarr_out.to_zarr.assert_called_once_with(zarr_path) - finally: - shutil.rmtree(the_dir) - @unittest.skipIf(LegacyDaskEWAResampler is not None, "Deprecated EWA resampler is now in pyresample. " From e6261e751cc642e4e4e334a1a1235e0668bd299a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 23 Oct 2023 15:50:17 -0500 Subject: [PATCH 324/702] Remove more tests for removed code --- satpy/tests/test_resample.py | 114 ----------------------------------- 1 file changed, 114 deletions(-) diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index f96b98ecc0..ca9dd409cd 100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -28,11 +28,6 @@ import xarray as xr from pyproj import CRS -try: - from pyresample.ewa import LegacyDaskEWAResampler -except ImportError: - LegacyDaskEWAResampler = None - from satpy.resample import NativeResampler @@ -215,115 +210,6 @@ def astype(self, dtype): resampler.resampler.get_sample_from_neighbour_info.assert_called_with(data, fill_value) -@unittest.skipIf(LegacyDaskEWAResampler is not None, - "Deprecated EWA resampler is now in pyresample. " - "No need to test in Satpy.") -class TestEWAResampler(unittest.TestCase): - """Test EWA resampler class.""" - - @mock.patch('satpy.resample.fornav') - @mock.patch('satpy.resample.ll2cr') - @mock.patch('satpy.resample.SwathDefinition.get_lonlats') - def test_2d_ewa(self, get_lonlats, ll2cr, fornav): - """Test EWA with a 2D dataset.""" - import numpy as np - import xarray as xr - - from satpy.resample import resample_dataset - ll2cr.return_value = (100, - np.zeros((10, 10), dtype=np.float32), - np.zeros((10, 10), dtype=np.float32)) - fornav.return_value = (100 * 200, - np.zeros((200, 100), dtype=np.float32)) - _, _, swath_data, source_swath, target_area = get_test_data() - get_lonlats.return_value = (source_swath.lons, source_swath.lats) - swath_data.data = swath_data.data.astype(np.float32) - num_chunks = len(source_swath.lons.chunks[0]) * len(source_swath.lons.chunks[1]) - - new_data = resample_dataset(swath_data, target_area, resampler='ewa') - self.assertTupleEqual(new_data.shape, (200, 100)) - self.assertEqual(new_data.dtype, np.float32) - self.assertEqual(new_data.attrs['test'], 'test') - self.assertIs(new_data.attrs['area'], target_area) - # make sure we can actually compute everything - new_data.compute() - lonlat_calls = get_lonlats.call_count - ll2cr_calls = ll2cr.call_count - - # resample a different dataset and make sure cache is used - data = xr.DataArray( - swath_data.data, - dims=('y', 'x'), attrs={'area': source_swath, 'test': 'test2', - 'name': 'test2'}) - new_data = resample_dataset(data, target_area, resampler='ewa') - new_data.compute() - # ll2cr will be called once more because of the computation - self.assertEqual(ll2cr.call_count, ll2cr_calls + num_chunks) - # but we should already have taken the lonlats from the SwathDefinition - self.assertEqual(get_lonlats.call_count, lonlat_calls) - self.assertIn('y', new_data.coords) - self.assertIn('x', new_data.coords) - self.assertIn('crs', new_data.coords) - self.assertIsInstance(new_data.coords['crs'].item(), CRS) - self.assertIn('lambert', new_data.coords['crs'].item().coordinate_operation.method_name.lower()) - self.assertEqual(new_data.coords['y'].attrs['units'], 'meter') - 
self.assertEqual(new_data.coords['x'].attrs['units'], 'meter') - self.assertEqual(target_area.crs, new_data.coords['crs'].item()) - - @mock.patch('satpy.resample.fornav') - @mock.patch('satpy.resample.ll2cr') - @mock.patch('satpy.resample.SwathDefinition.get_lonlats') - def test_3d_ewa(self, get_lonlats, ll2cr, fornav): - """Test EWA with a 3D dataset.""" - import numpy as np - import xarray as xr - - from satpy.resample import resample_dataset - _, _, swath_data, source_swath, target_area = get_test_data( - input_shape=(3, 200, 100), input_dims=('bands', 'y', 'x')) - swath_data.data = swath_data.data.astype(np.float32) - ll2cr.return_value = (100, - np.zeros((10, 10), dtype=np.float32), - np.zeros((10, 10), dtype=np.float32)) - fornav.return_value = ([100 * 200] * 3, - [np.zeros((200, 100), dtype=np.float32)] * 3) - get_lonlats.return_value = (source_swath.lons, source_swath.lats) - num_chunks = len(source_swath.lons.chunks[0]) * len(source_swath.lons.chunks[1]) - - new_data = resample_dataset(swath_data, target_area, resampler='ewa') - self.assertTupleEqual(new_data.shape, (3, 200, 100)) - self.assertEqual(new_data.dtype, np.float32) - self.assertEqual(new_data.attrs['test'], 'test') - self.assertIs(new_data.attrs['area'], target_area) - # make sure we can actually compute everything - new_data.compute() - lonlat_calls = get_lonlats.call_count - ll2cr_calls = ll2cr.call_count - - # resample a different dataset and make sure cache is used - swath_data = xr.DataArray( - swath_data.data, - dims=('bands', 'y', 'x'), coords={'bands': ['R', 'G', 'B']}, - attrs={'area': source_swath, 'test': 'test'}) - new_data = resample_dataset(swath_data, target_area, resampler='ewa') - new_data.compute() - # ll2cr will be called once more because of the computation - self.assertEqual(ll2cr.call_count, ll2cr_calls + num_chunks) - # but we should already have taken the lonlats from the SwathDefinition - self.assertEqual(get_lonlats.call_count, lonlat_calls) - self.assertIn('y', new_data.coords) - self.assertIn('x', new_data.coords) - self.assertIn('bands', new_data.coords) - self.assertIn('crs', new_data.coords) - self.assertIsInstance(new_data.coords['crs'].item(), CRS) - self.assertIn('lambert', new_data.coords['crs'].item().coordinate_operation.method_name.lower()) - self.assertEqual(new_data.coords['y'].attrs['units'], 'meter') - self.assertEqual(new_data.coords['x'].attrs['units'], 'meter') - np.testing.assert_equal(new_data.coords['bands'].values, - ['R', 'G', 'B']) - self.assertEqual(target_area.crs, new_data.coords['crs'].item()) - - class TestNativeResampler: """Tests for the 'native' resampling method.""" From 3fc285570ea5356fb773089482fc43c3aa5bd3be Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 24 Oct 2023 13:40:57 +0200 Subject: [PATCH 325/702] Make bandit happy --- pyproject.toml | 3 +- satpy/demo/_google_cloud_platform.py | 6 ++-- satpy/demo/abi_l1b.py | 32 ++++++++++--------- satpy/demo/fci.py | 2 +- satpy/demo/utils.py | 2 +- satpy/readers/xmlformat.py | 8 ++--- utils/fetch_avhrr_calcoeffs.py | 48 ++++++++++++++-------------- 7 files changed, 51 insertions(+), 50 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index e3a3a2efbe..d2059e68af 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,8 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ -select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"] +# select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"] +select = [] ignore = 
["B905"] # only available from python 3.10 line-length = 120 diff --git a/satpy/demo/_google_cloud_platform.py b/satpy/demo/_google_cloud_platform.py index c1b7016388..970fd9cfa3 100644 --- a/satpy/demo/_google_cloud_platform.py +++ b/satpy/demo/_google_cloud_platform.py @@ -32,7 +32,7 @@ def is_google_cloud_instance(): """Check if we are on a GCP virtual machine.""" try: - return urlopen('http://metadata.google.internal').headers.get('Metadata-Flavor') == 'Google' + return urlopen("http://metadata.google.internal").headers.get("Metadata-Flavor") == "Google" # nosec except URLError: return False @@ -68,7 +68,7 @@ def get_bucket_files(glob_pattern, base_dir, force=False, pattern_slice=None): if isinstance(glob_pattern, str): glob_pattern = [glob_pattern] - fs = gcsfs.GCSFileSystem(token='anon') + fs = gcsfs.GCSFileSystem(token="anon") # nosec filenames = [] for gp in glob_pattern: # handle multiple glob patterns being treated as one pattern @@ -98,5 +98,5 @@ def _download_gcs_files(globbed_files, fs, base_dir, force): LOG.info("Found existing: {}".format(ondisk_pathname)) continue LOG.info("Downloading: {}".format(ondisk_pathname)) - fs.get('gs://' + fn, ondisk_pathname) + fs.get("gs://" + fn, ondisk_pathname) return filenames diff --git a/satpy/demo/abi_l1b.py b/satpy/demo/abi_l1b.py index e223238767..8583c2580b 100644 --- a/satpy/demo/abi_l1b.py +++ b/satpy/demo/abi_l1b.py @@ -36,19 +36,20 @@ def get_us_midlatitude_cyclone_abi(base_dir=None, method=None, force=False): Total size: ~110MB """ - base_dir = base_dir or config.get('demo_data_dir', '.') + base_dir = base_dir or config.get("demo_data_dir", ".") if method is None: - method = 'gcsfs' - if method not in ['gcsfs']: + method = "gcsfs" + if method not in ["gcsfs"]: raise NotImplementedError("Demo data download method '{}' not " "implemented yet.".format(method)) from ._google_cloud_platform import get_bucket_files - patterns = ['gs://gcp-public-data-goes-16/ABI-L1b-RadC/2019/073/00/*s20190730002*.nc'] - subdir = os.path.join(base_dir, 'abi_l1b', '20190314_us_midlatitude_cyclone') + patterns = ["gs://gcp-public-data-goes-16/ABI-L1b-RadC/2019/073/00/*s20190730002*.nc"] + subdir = os.path.join(base_dir, "abi_l1b", "20190314_us_midlatitude_cyclone") os.makedirs(subdir, exist_ok=True) filenames = get_bucket_files(patterns, subdir, force=force) - assert len(filenames) == 16, "Not all files could be downloaded" + if len(filenames) != 16: + raise RuntimeError("Not all files could be downloaded") return filenames @@ -76,12 +77,12 @@ def get_hurricane_florence_abi(base_dir=None, method=None, force=False, Total size (240 frames, all channels): ~3.5GB """ - base_dir = base_dir or config.get('demo_data_dir', '.') + base_dir = base_dir or config.get("demo_data_dir", ".") if channels is None: channels = range(1, 17) if method is None: - method = 'gcsfs' - if method not in ['gcsfs']: + method = "gcsfs" + if method not in ["gcsfs"]: raise NotImplementedError("Demo data download method '{}' not " "implemented yet.".format(method)) if isinstance(num_frames, (int, float)): @@ -96,16 +97,17 @@ def get_hurricane_florence_abi(base_dir=None, method=None, force=False, # patterns += ['gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/1[3456]/' # '*C{:02d}*s20182541[3456]*.nc'.format(channel)] patterns += [( - 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/13/*RadM1*C{:02d}*s201825413*.nc'.format(channel), - 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/14/*RadM1*C{:02d}*s201825414*.nc'.format(channel), - 
'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/15/*RadM1*C{:02d}*s201825415*.nc'.format(channel), - 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/16/*RadM1*C{:02d}*s201825416*.nc'.format(channel), + "gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/13/*RadM1*C{:02d}*s201825413*.nc".format(channel), + "gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/14/*RadM1*C{:02d}*s201825414*.nc".format(channel), + "gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/15/*RadM1*C{:02d}*s201825415*.nc".format(channel), + "gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/16/*RadM1*C{:02d}*s201825416*.nc".format(channel), )] - subdir = os.path.join(base_dir, 'abi_l1b', '20180911_hurricane_florence_abi_l1b') + subdir = os.path.join(base_dir, "abi_l1b", "20180911_hurricane_florence_abi_l1b") os.makedirs(subdir, exist_ok=True) filenames = get_bucket_files(patterns, subdir, force=force, pattern_slice=frame_slice) actual_slice = frame_slice.indices(240) # 240 max frames num_frames = int((actual_slice[1] - actual_slice[0]) / actual_slice[2]) - assert len(filenames) == len(channels) * num_frames, "Not all files could be downloaded" + if len(filenames) != len(channels) * num_frames: + raise RuntimeError("Not all files could be downloaded") return filenames diff --git a/satpy/demo/fci.py b/satpy/demo/fci.py index 7c4160b203..7a2abe5d20 100644 --- a/satpy/demo/fci.py +++ b/satpy/demo/fci.py @@ -53,5 +53,5 @@ def _unpack_tarfile_to(filename, subdir): """Unpack content of tarfile in filename to subdir.""" with tarfile.open(filename, mode="r:gz") as tf: contents = tf.getnames() - tf.extractall(path=subdir) + tf.extractall(path=subdir) # nosec return contents diff --git a/satpy/demo/utils.py b/satpy/demo/utils.py index 0fd1d1d1a7..63ccbd007f 100644 --- a/satpy/demo/utils.py +++ b/satpy/demo/utils.py @@ -22,7 +22,7 @@ def download_url(source, target): """Download a url in stream mode.""" - with requests.get(source, stream=True) as r: + with requests.get(source, stream=True, timeout=10) as r: r.raise_for_status() with open(target, "wb") as f: for chunk in r.iter_content(chunk_size=8192): diff --git a/satpy/readers/xmlformat.py b/satpy/readers/xmlformat.py index 0c46a3595e..969c30113a 100644 --- a/satpy/readers/xmlformat.py +++ b/satpy/readers/xmlformat.py @@ -19,9 +19,8 @@ from __future__ import annotations -from xml.etree.ElementTree import ElementTree - import numpy as np +from defusedxml.ElementTree import parse VARIABLES: dict[str, str] = {} @@ -141,8 +140,7 @@ def to_scales(val): def parse_format(xml_file): """Parse the xml file to create types, scaling factor types, and scales.""" - tree = ElementTree() - tree.parse(xml_file) + tree = parse(xml_file) for param in tree.find("parameters"): VARIABLES[param.get("name")] = param.get("value") @@ -204,5 +202,5 @@ def apply_scales(self, array): return _apply_scales(array, *self.translator[array.dtype]) -if __name__ == '__main__': +if __name__ == "__main__": pass diff --git a/utils/fetch_avhrr_calcoeffs.py b/utils/fetch_avhrr_calcoeffs.py index 7bc49ba8db..f73975df95 100644 --- a/utils/fetch_avhrr_calcoeffs.py +++ b/utils/fetch_avhrr_calcoeffs.py @@ -53,32 +53,32 @@ def get_page(url): """Retrieve the given page.""" - return urllib2.urlopen(url).read() + return urllib2.urlopen(url).read() # nosec def get_coeffs(page): """Parse coefficients from the page.""" coeffs = {} - coeffs['datetime'] = [] - coeffs['slope1'] = [] - coeffs['intercept1'] = [] - coeffs['slope2'] = [] - coeffs['intercept2'] = [] + coeffs["datetime"] = [] + coeffs["slope1"] = [] + 
coeffs["intercept1"] = [] + coeffs["slope2"] = [] + coeffs["intercept2"] = [] slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \ None, None, None, None date_idx = 0 - for row in page.lower().split('\n'): + for row in page.lower().split("\n"): row = row.split() if len(row) == 0: continue - if row[0] == 'update': + if row[0] == "update": # Get the column indices from the header line - slope1_idx = row.index('slope_lo') - intercept1_idx = row.index('int_lo') - slope2_idx = row.index('slope_hi') - intercept2_idx = row.index('int_hi') + slope1_idx = row.index("slope_lo") + intercept1_idx = row.index("int_lo") + slope2_idx = row.index("slope_hi") + intercept2_idx = row.index("int_hi") continue if slope1_idx is None: @@ -94,11 +94,11 @@ def get_coeffs(page): except ValueError: continue - coeffs['datetime'].append([dat.year, dat.month, dat.day]) - coeffs['slope1'].append(float(row[slope1_idx])) - coeffs['intercept1'].append(float(row[intercept1_idx])) - coeffs['slope2'].append(float(row[slope2_idx])) - coeffs['intercept2'].append(float(row[intercept2_idx])) + coeffs["datetime"].append([dat.year, dat.month, dat.day]) + coeffs["slope1"].append(float(row[slope1_idx])) + coeffs["intercept1"].append(float(row[intercept1_idx])) + coeffs["slope2"].append(float(row[slope2_idx])) + coeffs["intercept2"].append(float(row[intercept2_idx])) return coeffs @@ -119,19 +119,19 @@ def get_all_coeffs(): return coeffs -def save_coeffs(coeffs, out_dir=''): +def save_coeffs(coeffs, out_dir=""): """Save calibration coefficients to HDF5 files.""" for platform in coeffs.keys(): fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform) - fid = h5py.File(fname, 'w') + fid = h5py.File(fname, "w") for chan in coeffs[platform].keys(): fid.create_group(chan) - fid[chan]['datetime'] = coeffs[platform][chan]['datetime'] - fid[chan]['slope1'] = coeffs[platform][chan]['slope1'] - fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1'] - fid[chan]['slope2'] = coeffs[platform][chan]['slope2'] - fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2'] + fid[chan]["datetime"] = coeffs[platform][chan]["datetime"] + fid[chan]["slope1"] = coeffs[platform][chan]["slope1"] + fid[chan]["intercept1"] = coeffs[platform][chan]["intercept1"] + fid[chan]["slope2"] = coeffs[platform][chan]["slope2"] + fid[chan]["intercept2"] = coeffs[platform][chan]["intercept2"] fid.close() print("Calibration coefficients saved for %s" % platform) From 4f78a7e3ae83b0c6de7e54f69f4912d496e904f5 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 24 Oct 2023 13:45:46 +0200 Subject: [PATCH 326/702] Turn single quotes into double quotes --- .pre-commit-config.yaml | 1 - benchmarks/abi_l1b_benchmarks.py | 2 +- benchmarks/ahi_hsd_benchmarks.py | 4 +- benchmarks/seviri_hrit_benchmarks.py | 2 +- benchmarks/utils.py | 2 +- benchmarks/viirs_sdr_benchmarks.py | 8 +- doc/source/conf.py | 104 +- doc/source/doi_role.py | 20 +- pyproject.toml | 2 +- satpy/__init__.py | 2 +- satpy/_config.py | 54 +- satpy/_scene_converters.py | 2 +- satpy/aux_download.py | 50 +- satpy/composites/__init__.py | 298 ++-- satpy/composites/cloud_products.py | 16 +- satpy/composites/config_loader.py | 52 +- satpy/composites/glm.py | 8 +- satpy/composites/viirs.py | 8 +- satpy/dataset/anc_vars.py | 10 +- satpy/dataset/data_dict.py | 16 +- satpy/dataset/dataid.py | 116 +- satpy/dataset/metadata.py | 4 +- satpy/demo/ahi_hsd.py | 6 +- satpy/dependency_tree.py | 12 +- satpy/enhancements/__init__.py | 50 +- satpy/enhancements/mimic.py | 14 +- satpy/enhancements/viirs.py | 6 
+- satpy/modifiers/_crefl.py | 2 +- satpy/modifiers/_crefl_utils.py | 4 +- satpy/modifiers/angles.py | 8 +- satpy/modifiers/atmosphere.py | 30 +- satpy/modifiers/geometry.py | 4 +- satpy/modifiers/spectral.py | 20 +- satpy/multiscene/_blend_funcs.py | 14 +- satpy/multiscene/_multiscene.py | 28 +- satpy/node.py | 10 +- satpy/plugin_base.py | 2 +- satpy/readers/__init__.py | 32 +- satpy/readers/_geos_area.py | 72 +- satpy/readers/aapp_l1b.py | 142 +- satpy/readers/aapp_mhs_amsub_l1c.py | 54 +- satpy/readers/abi_base.py | 144 +- satpy/readers/abi_l1b.py | 84 +- satpy/readers/abi_l2_nc.py | 50 +- satpy/readers/acspo.py | 74 +- satpy/readers/agri_l1.py | 36 +- satpy/readers/ahi_hsd.py | 220 +-- satpy/readers/ahi_l1b_gridded_bin.py | 86 +- satpy/readers/ami_l1b.py | 132 +- satpy/readers/amsr2_l1b.py | 22 +- satpy/readers/amsr2_l2.py | 8 +- satpy/readers/amsr2_l2_gaasp.py | 90 +- satpy/readers/ascat_l2_soilmoisture_bufr.py | 46 +- satpy/readers/atms_l1b_nc.py | 10 +- satpy/readers/atms_sdr_hdf5.py | 20 +- satpy/readers/avhrr_l1b_gaclac.py | 104 +- satpy/readers/caliop_l2_cloud.py | 18 +- satpy/readers/clavrx.py | 192 +-- satpy/readers/cmsaf_claas2.py | 2 +- satpy/readers/electrol_hrit.py | 172 +-- satpy/readers/epic_l1b_h5.py | 40 +- satpy/readers/eps_l1b.py | 60 +- satpy/readers/eum_base.py | 36 +- satpy/readers/fci_l1c_nc.py | 238 ++-- satpy/readers/fci_l2_nc.py | 138 +- satpy/readers/file_handlers.py | 44 +- satpy/readers/fy4_base.py | 130 +- satpy/readers/generic_image.py | 40 +- satpy/readers/geocat.py | 126 +- satpy/readers/ghi_l1.py | 76 +- satpy/readers/ghrsst_l2.py | 28 +- satpy/readers/ghrsst_l3c_sst.py | 48 +- satpy/readers/glm_l2.py | 72 +- satpy/readers/gms/gms5_vissr_format.py | 572 ++++---- satpy/readers/goes_imager_hrit.py | 252 ++-- satpy/readers/goes_imager_nc.py | 876 ++++++------ satpy/readers/gpm_imerg.py | 42 +- satpy/readers/grib.py | 112 +- satpy/readers/hdf4_utils.py | 8 +- satpy/readers/hdf5_utils.py | 12 +- satpy/readers/hdfeos_base.py | 84 +- satpy/readers/hrit_base.py | 144 +- satpy/readers/hrit_jma.py | 182 +-- satpy/readers/hrpt.py | 88 +- satpy/readers/hsaf_grib.py | 72 +- satpy/readers/hsaf_h5.py | 42 +- satpy/readers/hy2_scat_l2b_h5.py | 96 +- satpy/readers/iasi_l2.py | 90 +- satpy/readers/iasi_l2_so2_bufr.py | 46 +- satpy/readers/ici_l1b_nc.py | 116 +- satpy/readers/insat3d_img_l1b_h5.py | 34 +- satpy/readers/li_base_nc.py | 152 +- satpy/readers/li_l2_nc.py | 12 +- satpy/readers/maia.py | 72 +- satpy/readers/meris_nc_sen3.py | 34 +- satpy/readers/mersi_l1b.py | 70 +- satpy/readers/mimic_TPW2_nc.py | 56 +- satpy/readers/mirs.py | 104 +- satpy/readers/modis_l1b.py | 56 +- satpy/readers/modis_l2.py | 48 +- satpy/readers/msi_safe.py | 90 +- satpy/readers/msu_gsa_l1b.py | 34 +- satpy/readers/mviri_l1b_fiduceo_nc.py | 238 ++-- satpy/readers/mws_l1b.py | 132 +- satpy/readers/netcdf_utils.py | 18 +- satpy/readers/nucaps.py | 122 +- satpy/readers/nwcsaf_msg2013_hdf5.py | 52 +- satpy/readers/nwcsaf_nc.py | 226 +-- satpy/readers/oceancolorcci_l3_nc.py | 64 +- satpy/readers/olci_nc.py | 148 +- satpy/readers/omps_edr.py | 56 +- satpy/readers/safe_sar_l2_ocn.py | 64 +- satpy/readers/sar_c_safe.py | 126 +- satpy/readers/satpy_cf_nc.py | 44 +- satpy/readers/scatsat1_l2b.py | 34 +- satpy/readers/scmi.py | 158 +-- satpy/readers/seadas_l2.py | 6 +- satpy/readers/seviri_base.py | 462 +++---- satpy/readers/seviri_l1b_hrit.py | 368 ++--- satpy/readers/seviri_l1b_icare.py | 130 +- satpy/readers/seviri_l1b_native.py | 386 +++--- satpy/readers/seviri_l1b_native_hdr.py | 1226 
++++++++--------- satpy/readers/seviri_l1b_nc.py | 224 +-- satpy/readers/seviri_l2_bufr.py | 90 +- satpy/readers/seviri_l2_grib.py | 80 +- satpy/readers/slstr_l1b.py | 216 +-- satpy/readers/smos_l2_wind.py | 62 +- satpy/readers/tropomi_l2.py | 76 +- satpy/readers/utils.py | 44 +- satpy/readers/vaisala_gld360.py | 24 +- satpy/readers/vii_base_nc.py | 82 +- satpy/readers/vii_l1b_nc.py | 26 +- satpy/readers/vii_l2_nc.py | 2 +- satpy/readers/viirs_atms_sdr_base.py | 124 +- satpy/readers/viirs_compact.py | 118 +- satpy/readers/viirs_edr.py | 62 +- satpy/readers/viirs_edr_active_fires.py | 40 +- satpy/readers/viirs_edr_flood.py | 48 +- satpy/readers/viirs_l1b.py | 132 +- satpy/readers/viirs_sdr.py | 86 +- satpy/readers/viirs_vgac_l1c_nc.py | 12 +- satpy/readers/virr_l1b.py | 70 +- satpy/readers/yaml_reader.py | 270 ++-- satpy/resample.py | 210 +-- satpy/scene.py | 96 +- satpy/tests/compositor_tests/test_abi.py | 30 +- satpy/tests/compositor_tests/test_agri.py | 26 +- satpy/tests/compositor_tests/test_ahi.py | 2 +- satpy/tests/compositor_tests/test_glm.py | 26 +- satpy/tests/compositor_tests/test_sar.py | 48 +- satpy/tests/compositor_tests/test_spectral.py | 50 +- satpy/tests/compositor_tests/test_viirs.py | 88 +- satpy/tests/conftest.py | 2 +- satpy/tests/enhancement_tests/test_abi.py | 2 +- .../enhancement_tests/test_enhancements.py | 210 +-- satpy/tests/enhancement_tests/test_viirs.py | 8 +- satpy/tests/features/steps/steps-load.py | 46 +- .../steps/steps-real-load-process-write.py | 24 +- satpy/tests/features/steps/steps-save.py | 8 +- satpy/tests/modifier_tests/test_angles.py | 6 +- satpy/tests/modifier_tests/test_crefl.py | 312 ++--- satpy/tests/modifier_tests/test_parallax.py | 2 +- satpy/tests/multiscene_tests/test_blend.py | 200 +-- satpy/tests/multiscene_tests/test_misc.py | 40 +- .../multiscene_tests/test_save_animation.py | 206 +-- satpy/tests/multiscene_tests/test_utils.py | 62 +- satpy/tests/reader_tests/_li_test_utils.py | 296 ++-- .../reader_tests/gms/test_gms5_vissr_l1b.py | 2 +- .../gms/test_gms5_vissr_navigation.py | 24 +- .../modis_tests/_modis_fixtures.py | 262 ++-- .../modis_tests/test_modis_l1b.py | 56 +- .../reader_tests/modis_tests/test_modis_l2.py | 62 +- satpy/tests/reader_tests/test_aapp_l1b.py | 172 +-- .../reader_tests/test_aapp_mhs_amsub_l1c.py | 86 +- satpy/tests/reader_tests/test_abi_l1b.py | 246 ++-- satpy/tests/reader_tests/test_abi_l2_nc.py | 238 ++-- satpy/tests/reader_tests/test_acspo.py | 100 +- satpy/tests/reader_tests/test_agri_l1.py | 204 +-- satpy/tests/reader_tests/test_ahi_hrit.py | 200 +-- satpy/tests/reader_tests/test_ahi_hsd.py | 420 +++--- .../reader_tests/test_ahi_l1b_gridded_bin.py | 160 +-- satpy/tests/reader_tests/test_ami_l1b.py | 246 ++-- satpy/tests/reader_tests/test_amsr2_l1b.py | 148 +- satpy/tests/reader_tests/test_amsr2_l2.py | 52 +- .../tests/reader_tests/test_amsr2_l2_gaasp.py | 202 +-- .../test_ascat_l2_soilmoisture_bufr.py | 116 +- .../tests/reader_tests/test_atms_sdr_hdf5.py | 84 +- .../tests/reader_tests/test_avhrr_l0_hrpt.py | 24 +- .../reader_tests/test_avhrr_l1b_gaclac.py | 346 ++--- satpy/tests/reader_tests/test_clavrx.py | 286 ++-- satpy/tests/reader_tests/test_clavrx_nc.py | 124 +- satpy/tests/reader_tests/test_cmsaf_claas.py | 4 +- .../tests/reader_tests/test_electrol_hrit.py | 148 +- satpy/tests/reader_tests/test_epic_l1b_h5.py | 64 +- satpy/tests/reader_tests/test_eps_l1b.py | 130 +- satpy/tests/reader_tests/test_eum_base.py | 116 +- satpy/tests/reader_tests/test_fci_l1c_nc.py | 386 +++--- 
satpy/tests/reader_tests/test_fci_l2_nc.py | 350 ++--- satpy/tests/reader_tests/test_fy4_base.py | 12 +- .../tests/reader_tests/test_generic_image.py | 208 +-- satpy/tests/reader_tests/test_geocat.py | 158 +-- satpy/tests/reader_tests/test_geos_area.py | 126 +- satpy/tests/reader_tests/test_ghi_l1.py | 190 +-- satpy/tests/reader_tests/test_ghrsst_l2.py | 58 +- satpy/tests/reader_tests/test_glm_l2.py | 198 +-- .../reader_tests/test_goes_imager_hrit.py | 146 +- .../reader_tests/test_goes_imager_nc_eum.py | 64 +- .../reader_tests/test_goes_imager_nc_noaa.py | 228 +-- satpy/tests/reader_tests/test_gpm_imerg.py | 56 +- satpy/tests/reader_tests/test_grib.py | 90 +- satpy/tests/reader_tests/test_hdf4_utils.py | 50 +- satpy/tests/reader_tests/test_hdf5_utils.py | 96 +- satpy/tests/reader_tests/test_hdfeos_base.py | 230 ++-- satpy/tests/reader_tests/test_hrit_base.py | 128 +- satpy/tests/reader_tests/test_hsaf_grib.py | 46 +- satpy/tests/reader_tests/test_hsaf_h5.py | 18 +- .../reader_tests/test_hy2_scat_l2b_h5.py | 402 +++--- satpy/tests/reader_tests/test_iasi_l2.py | 228 +-- .../reader_tests/test_iasi_l2_so2_bufr.py | 158 +-- satpy/tests/reader_tests/test_ici_l1b_nc.py | 232 ++-- .../reader_tests/test_insat3d_img_l1b_h5.py | 4 +- satpy/tests/reader_tests/test_li_l2_nc.py | 286 ++-- satpy/tests/reader_tests/test_meris_nc.py | 118 +- satpy/tests/reader_tests/test_mersi_l1b.py | 568 ++++---- .../reader_tests/test_mimic_TPW2_lowres.py | 104 +- .../tests/reader_tests/test_mimic_TPW2_nc.py | 64 +- satpy/tests/reader_tests/test_mirs.py | 176 +-- satpy/tests/reader_tests/test_msi_safe.py | 2 +- satpy/tests/reader_tests/test_msu_gsa_l1b.py | 92 +- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 318 ++--- satpy/tests/reader_tests/test_mws_l1b_nc.py | 154 +-- satpy/tests/reader_tests/test_netcdf_utils.py | 140 +- satpy/tests/reader_tests/test_nucaps.py | 338 ++--- satpy/tests/reader_tests/test_nwcsaf_msg.py | 18 +- satpy/tests/reader_tests/test_nwcsaf_nc.py | 148 +- .../reader_tests/test_oceancolorcci_l3_nc.py | 80 +- satpy/tests/reader_tests/test_olci_nc.py | 174 +-- satpy/tests/reader_tests/test_omps_edr.py | 252 ++-- .../reader_tests/test_safe_sar_l2_ocn.py | 42 +- satpy/tests/reader_tests/test_sar_c_safe.py | 32 +- satpy/tests/reader_tests/test_satpy_cf_nc.py | 346 ++--- satpy/tests/reader_tests/test_scmi.py | 176 +-- satpy/tests/reader_tests/test_seadas_l2.py | 34 +- satpy/tests/reader_tests/test_seviri_base.py | 64 +- .../test_seviri_l1b_calibration.py | 152 +- .../reader_tests/test_seviri_l1b_hrit.py | 268 ++-- .../test_seviri_l1b_hrit_setup.py | 194 +-- .../reader_tests/test_seviri_l1b_icare.py | 136 +- .../reader_tests/test_seviri_l1b_native.py | 1114 +++++++-------- .../tests/reader_tests/test_seviri_l1b_nc.py | 304 ++-- .../tests/reader_tests/test_seviri_l2_bufr.py | 144 +- .../tests/reader_tests/test_seviri_l2_grib.py | 100 +- satpy/tests/reader_tests/test_slstr_l1b.py | 212 +-- satpy/tests/reader_tests/test_smos_l2_wind.py | 112 +- satpy/tests/reader_tests/test_tropomi_l2.py | 120 +- satpy/tests/reader_tests/test_utils.py | 216 +-- .../tests/reader_tests/test_vaisala_gld360.py | 20 +- satpy/tests/reader_tests/test_vii_base_nc.py | 228 +-- satpy/tests/reader_tests/test_vii_l1b_nc.py | 66 +- satpy/tests/reader_tests/test_vii_l2_nc.py | 32 +- satpy/tests/reader_tests/test_vii_wv_nc.py | 32 +- .../reader_tests/test_viirs_atms_utils.py | 16 +- .../tests/reader_tests/test_viirs_compact.py | 18 +- .../test_viirs_edr_active_fires.py | 186 +-- .../reader_tests/test_viirs_edr_flood.py | 58 +- 
satpy/tests/reader_tests/test_viirs_l1b.py | 292 ++-- satpy/tests/reader_tests/test_viirs_sdr.py | 616 ++++----- .../reader_tests/test_viirs_vgac_l1c_nc.py | 22 +- satpy/tests/reader_tests/test_virr_l1b.py | 162 +-- satpy/tests/scene_tests/test_conversions.py | 50 +- satpy/tests/scene_tests/test_data_access.py | 194 +-- satpy/tests/scene_tests/test_init.py | 116 +- satpy/tests/scene_tests/test_load.py | 338 ++--- satpy/tests/scene_tests/test_resampling.py | 372 ++--- satpy/tests/scene_tests/test_saving.py | 52 +- satpy/tests/test_composites.py | 874 ++++++------ satpy/tests/test_config.py | 52 +- satpy/tests/test_data_download.py | 66 +- satpy/tests/test_dataset.py | 516 +++---- satpy/tests/test_demo.py | 72 +- satpy/tests/test_dependency_tree.py | 68 +- satpy/tests/test_file_handlers.py | 148 +- satpy/tests/test_modifiers.py | 324 ++--- satpy/tests/test_node.py | 2 +- satpy/tests/test_readers.py | 408 +++--- satpy/tests/test_regressions.py | 194 +-- satpy/tests/test_resample.py | 390 +++--- satpy/tests/test_utils.py | 60 +- satpy/tests/test_writers.py | 298 ++-- satpy/tests/test_yaml_reader.py | 918 ++++++------ satpy/tests/utils.py | 78 +- satpy/tests/writer_tests/test_awips_tiled.py | 228 +-- satpy/tests/writer_tests/test_cf.py | 1060 +++++++------- satpy/tests/writer_tests/test_geotiff.py | 54 +- satpy/tests/writer_tests/test_mitiff.py | 946 ++++++------- satpy/tests/writer_tests/test_ninjogeotiff.py | 2 +- satpy/tests/writer_tests/test_ninjotiff.py | 54 +- satpy/tests/writer_tests/test_simple_image.py | 6 +- satpy/tests/writer_tests/test_utils.py | 10 +- satpy/utils.py | 80 +- satpy/writers/__init__.py | 132 +- satpy/writers/awips_tiled.py | 382 ++--- satpy/writers/cf/coords_attrs.py | 24 +- satpy/writers/cf_writer.py | 216 +-- satpy/writers/geotiff.py | 8 +- satpy/writers/mitiff.py | 394 +++--- satpy/writers/utils.py | 2 +- setup.py | 138 +- utils/convert_to_ninjotiff.py | 38 +- utils/coord2area_def.py | 12 +- 310 files changed, 20763 insertions(+), 20764 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b5b21a52fa..376f37b95e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,7 +6,6 @@ repos: rev: 'v0.0.247' hooks: - id: ruff - args: [--fix, --exit-non-zero-on-fix] - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.4.0 hooks: diff --git a/benchmarks/abi_l1b_benchmarks.py b/benchmarks/abi_l1b_benchmarks.py index b52cb46abd..936e0dc514 100644 --- a/benchmarks/abi_l1b_benchmarks.py +++ b/benchmarks/abi_l1b_benchmarks.py @@ -43,7 +43,7 @@ def setup_cache(self): if len(get_filenames(self.subdir)) != 16: raise RuntimeError("Existing data files do not match the expected number of files.") download_rsr() - download_luts(aerosol_type='rayleigh_only') + download_luts(aerosol_type="rayleigh_only") def setup(self): """Set up the benchmarks.""" diff --git a/benchmarks/ahi_hsd_benchmarks.py b/benchmarks/ahi_hsd_benchmarks.py index 9b78ae8ac5..361934168a 100644 --- a/benchmarks/ahi_hsd_benchmarks.py +++ b/benchmarks/ahi_hsd_benchmarks.py @@ -33,7 +33,7 @@ class HimawariHSD(GeoBenchmarks): timeout = 600 data_files: list[str] = [] subdir = os.path.join("ahi_hsd", "20210417_0500_typhoon_surigae") - reader = 'ahi_hsd' + reader = "ahi_hsd" def setup_cache(self): """Fetch the data files.""" @@ -43,7 +43,7 @@ def setup_cache(self): except ImportError: assert len(get_filenames(self.subdir)) == 4 # nosec download_rsr() - download_luts(aerosol_type='rayleigh_only') + download_luts(aerosol_type="rayleigh_only") def setup(self): """Set up the 
benchmarks.""" diff --git a/benchmarks/seviri_hrit_benchmarks.py b/benchmarks/seviri_hrit_benchmarks.py index 177d929adf..9851dbdac9 100644 --- a/benchmarks/seviri_hrit_benchmarks.py +++ b/benchmarks/seviri_hrit_benchmarks.py @@ -43,7 +43,7 @@ def setup_cache(self): except ImportError: assert len(get_filenames(self.subdir)) == 114 # nosec download_rsr() - download_luts(aerosol_type='rayleigh_only') + download_luts(aerosol_type="rayleigh_only") def setup(self): """Set up the benchmarks.""" diff --git a/benchmarks/utils.py b/benchmarks/utils.py index 54338d4eac..67b88025b9 100644 --- a/benchmarks/utils.py +++ b/benchmarks/utils.py @@ -64,7 +64,7 @@ def save_composite_as_geotiff(self, composite, resampler="native", area=None, filenames=None): """Save a composite to disk as geotiff.""" lscn = self.load_and_resample(composite, resampler, area, filenames) - lscn.save_dataset(composite, filename='test.tif', tiled=True) + lscn.save_dataset(composite, filename="test.tif", tiled=True) def compute_channel(self, channel, filenames=None): """Load and compute one channel.""" diff --git a/benchmarks/viirs_sdr_benchmarks.py b/benchmarks/viirs_sdr_benchmarks.py index 940c2d524b..68db5c6682 100644 --- a/benchmarks/viirs_sdr_benchmarks.py +++ b/benchmarks/viirs_sdr_benchmarks.py @@ -42,7 +42,7 @@ def setup_cache(self): except ImportError: assert len(self.get_filenames()) == 6 * 3 # nosec download_rsr() - download_luts(aerosol_type='rayleigh_only') + download_luts(aerosol_type="rayleigh_only") def setup(self, name): """Set up the benchmarks.""" @@ -58,14 +58,14 @@ def get_filenames(self): def load(self, composite): """Load one composite.""" from satpy import Scene - scn = Scene(filenames=self.data_files, reader='viirs_sdr') + scn = Scene(filenames=self.data_files, reader="viirs_sdr") scn.load([composite]) return scn def load_and_native_resample(self, composite): """Load and native resample a composite.""" scn = self.load(composite) - lscn = scn.resample(resampler='native') + lscn = scn.resample(resampler="native") return lscn @@ -119,4 +119,4 @@ def compute_composite(self, name): def save_composite_as_geotiff(self, name): """Save a composite to disk as geotiff.""" lscn = self.load_and_native_resample(name) - lscn.save_dataset(name, filename='test.tif', tiled=True) + lscn.save_dataset(name, filename="test.tif", tiled=True) diff --git a/doc/source/conf.py b/doc/source/conf.py index 4af8d63b4a..3bef218f89 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -23,7 +23,7 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.append(os.path.abspath('../../')) +sys.path.append(os.path.abspath("../../")) sys.path.append(os.path.abspath(os.path.dirname(__file__))) from reader_table import generate_reader_table # noqa: E402 @@ -33,10 +33,10 @@ # built documents. # # get version using setuptools-scm -release = get_distribution('satpy').version +release = get_distribution("satpy").version # The full version, including alpha/beta/rc tags. 
# for example take major/minor -version = '.'.join(release.split('.')[:2]) +version = ".".join(release.split(".")[:2]) class Mock(object): # noqa @@ -53,8 +53,8 @@ def __call__(self, *args, **kwargs): @classmethod def __getattr__(cls, name): """Mock common module attributes used in documentation.""" - if name in ('__file__', '__path__'): - return '/dev/null' + if name in ("__file__", "__path__"): + return "/dev/null" elif name[0] == name[0].upper(): mockType = type(name, (), {}) mockType.__module__ = __name__ @@ -66,15 +66,15 @@ def __getattr__(cls, name): # https://github.com/sphinx-doc/sphinx/issues/3920 -MOCK_MODULES = ['h5py'] +MOCK_MODULES = ["h5py"] for mod_name in MOCK_MODULES: sys.modules[mod_name] = Mock() # type: ignore -autodoc_mock_imports = ['cf', 'glymur', 'h5netcdf', 'imageio', 'mipp', 'netCDF4', - 'pygac', 'pygrib', 'pyhdf', 'pyninjotiff', - 'pyorbital', 'pyspectral', 'rasterio', 'trollimage', - 'zarr'] -autoclass_content = 'both' # append class __init__ docstring to the class docstring +autodoc_mock_imports = ["cf", "glymur", "h5netcdf", "imageio", "mipp", "netCDF4", + "pygac", "pygrib", "pyhdf", "pyninjotiff", + "pyorbital", "pyspectral", "rasterio", "trollimage", + "zarr"] +autoclass_content = "both" # append class __init__ docstring to the class docstring # auto generate reader table from reader config files with open("reader_table.rst", mode="w") as f: @@ -84,19 +84,19 @@ def __getattr__(cls, name): # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', - 'sphinx.ext.doctest', 'sphinx.ext.napoleon', 'sphinx.ext.autosummary', 'doi_role', - 'sphinx.ext.viewcode', 'sphinxcontrib.apidoc', - 'sphinx.ext.mathjax'] +extensions = ["sphinx.ext.autodoc", "sphinx.ext.intersphinx", "sphinx.ext.todo", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.autosummary", "doi_role", + "sphinx.ext.viewcode", "sphinxcontrib.apidoc", + "sphinx.ext.mathjax"] # API docs apidoc_module_dir = "../../satpy" apidoc_output_dir = "api" apidoc_excluded_paths = [ - 'readers/caliop_l2_cloud.py', - 'readers/ghrsst_l3c_sst.py', - 'readers/li_l2.py', - 'readers/scatsat1_l2b.py', + "readers/caliop_l2_cloud.py", + "readers/ghrsst_l3c_sst.py", + "readers/li_l2.py", + "readers/scatsat1_l2b.py", ] apidoc_separate_modules = True apidoc_extra_args = [ @@ -104,20 +104,20 @@ def __getattr__(cls, name): ] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'Satpy' -copyright = u'2009-{}, The PyTroll Team'.format(datetime.utcnow().strftime("%Y")) +project = u"Satpy" +copyright = u"2009-{}, The PyTroll Team".format(datetime.utcnow().strftime("%Y")) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -151,7 +151,7 @@ def __getattr__(cls, name): # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. 
# modindex_common_prefix = [] @@ -161,7 +161,7 @@ def __getattr__(cls, name): # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. -html_theme = 'sphinx_rtd_theme' +html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -190,16 +190,16 @@ def __getattr__(cls, name): # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] html_css_files = [ - 'theme_overrides.css', # override wide tables in RTD theme - 'https://cdn.datatables.net/1.10.23/css/jquery.dataTables.min.css', + "theme_overrides.css", # override wide tables in RTD theme + "https://cdn.datatables.net/1.10.23/css/jquery.dataTables.min.css", ] html_js_files = [ - 'https://cdn.datatables.net/1.10.23/js/jquery.dataTables.min.js', - 'main.js', + "https://cdn.datatables.net/1.10.23/js/jquery.dataTables.min.js", + "main.js", ] @@ -239,7 +239,7 @@ def __getattr__(cls, name): # html_file_suffix = '' # Output file base name for HTML help builder. -htmlhelp_basename = 'NWCSAFMSGPPdoc' +htmlhelp_basename = "NWCSAFMSGPPdoc" # -- Options for LaTeX output -------------------------------------------------- @@ -253,8 +253,8 @@ def __getattr__(cls, name): # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'satpy.tex', 'Satpy Documentation', - 'Satpy Developers', 'manual'), + ("index", "satpy.tex", "Satpy Documentation", + "Satpy Developers", "manual"), ] # The name of an image file (relative to this directory) to place at the top of @@ -277,22 +277,22 @@ def __getattr__(cls, name): # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - 'dask': ('https://docs.dask.org/en/latest', None), - 'geoviews': ('http://geoviews.org', None), - 'jobqueue': ('https://jobqueue.dask.org/en/latest', None), - 'numpy': ('https://numpy.org/doc/stable', None), - 'pydecorate': ('https://pydecorate.readthedocs.io/en/stable', None), - 'pyorbital': ('https://pyorbital.readthedocs.io/en/stable', None), - 'pyproj': ('https://pyproj4.github.io/pyproj/dev', None), - 'pyresample': ('https://pyresample.readthedocs.io/en/stable', None), - 'pytest': ('https://docs.pytest.org/en/stable/', None), - 'python': ('https://docs.python.org/3', None), - 'scipy': ('http://scipy.github.io/devdocs', None), - 'trollimage': ('https://trollimage.readthedocs.io/en/stable', None), - 'trollsift': ('https://trollsift.readthedocs.io/en/stable', None), - 'xarray': ('https://xarray.pydata.org/en/stable', None), - 'rasterio': ('https://rasterio.readthedocs.io/en/latest', None), - 'donfig': ('https://donfig.readthedocs.io/en/latest', None), - 'pooch': ('https://www.fatiando.org/pooch/latest/', None), - 'fsspec': ('https://filesystem-spec.readthedocs.io/en/latest/', None), + "dask": ("https://docs.dask.org/en/latest", None), + "geoviews": ("http://geoviews.org", None), + "jobqueue": ("https://jobqueue.dask.org/en/latest", None), + "numpy": ("https://numpy.org/doc/stable", None), + "pydecorate": ("https://pydecorate.readthedocs.io/en/stable", None), + "pyorbital": ("https://pyorbital.readthedocs.io/en/stable", None), + "pyproj": ("https://pyproj4.github.io/pyproj/dev", None), + "pyresample": ("https://pyresample.readthedocs.io/en/stable", None), + "pytest": ("https://docs.pytest.org/en/stable/", None), + "python": ("https://docs.python.org/3", None), + "scipy": ("http://scipy.github.io/devdocs", None), + "trollimage": ("https://trollimage.readthedocs.io/en/stable", None), + "trollsift": ("https://trollsift.readthedocs.io/en/stable", None), + "xarray": ("https://xarray.pydata.org/en/stable", None), + "rasterio": ("https://rasterio.readthedocs.io/en/latest", None), + "donfig": ("https://donfig.readthedocs.io/en/latest", None), + "pooch": ("https://www.fatiando.org/pooch/latest/", None), + "fsspec": ("https://filesystem-spec.readthedocs.io/en/latest/", None), } diff --git a/doc/source/doi_role.py b/doc/source/doi_role.py index b7c64a14ac..115e7895c6 100644 --- a/doc/source/doi_role.py +++ b/doc/source/doi_role.py @@ -26,9 +26,9 @@ def doi_role(typ, rawtext, text, lineno, inliner, options=None, content=None): content = [] text = utils.unescape(text) has_explicit_title, title, part = split_explicit_title(text) - full_url = 'https://doi.org/' + part + full_url = "https://doi.org/" + part if not has_explicit_title: - title = 'DOI:' + part + title = "DOI:" + part pnode = nodes.reference(title, title, internal=False, refuri=full_url) return [pnode], [] @@ -40,20 +40,20 @@ def arxiv_role(typ, rawtext, text, lineno, inliner, options=None, content=None): content = [] text = utils.unescape(text) has_explicit_title, title, part = split_explicit_title(text) - full_url = 'https://arxiv.org/abs/' + part + full_url = "https://arxiv.org/abs/" + part if not has_explicit_title: - title = 'arXiv:' + part + title = "arXiv:" + part pnode = nodes.reference(title, title, internal=False, refuri=full_url) return [pnode], [] def setup_link_role(app): - app.add_role('doi', doi_role, override=True) - app.add_role('DOI', doi_role, override=True) - app.add_role('arXiv', arxiv_role, override=True) - app.add_role('arxiv', arxiv_role, override=True) + app.add_role("doi", doi_role, 
override=True) + app.add_role("DOI", doi_role, override=True) + app.add_role("arXiv", arxiv_role, override=True) + app.add_role("arxiv", arxiv_role, override=True) def setup(app): - app.connect('builder-inited', setup_link_role) - return {'version': '0.1', 'parallel_read_safe': True} + app.connect("builder-inited", setup_link_role) + return {"version": "0.1", "parallel_read_safe": True} diff --git a/pyproject.toml b/pyproject.toml index d2059e68af..01b0272e89 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ # select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"] -select = [] +select = ["Q"] ignore = ["B905"] # only available from python 3.10 line-length = 120 diff --git a/satpy/__init__.py b/satpy/__init__.py index 5392e0d9dd..d90f65d892 100644 --- a/satpy/__init__.py +++ b/satpy/__init__.py @@ -35,4 +35,4 @@ from satpy.utils import get_logger # noqa from satpy.writers import available_writers # noqa -log = get_logger('satpy') +log = get_logger("satpy") diff --git a/satpy/_config.py b/satpy/_config.py index 4abc00aba2..bcbd909aae 100644 --- a/satpy/_config.py +++ b/satpy/_config.py @@ -49,21 +49,21 @@ def impr_files(module_name: str) -> Path: BASE_PATH = os.path.dirname(os.path.realpath(__file__)) # FIXME: Use package_resources? -PACKAGE_CONFIG_PATH = os.path.join(BASE_PATH, 'etc') +PACKAGE_CONFIG_PATH = os.path.join(BASE_PATH, "etc") -_satpy_dirs = appdirs.AppDirs(appname='satpy', appauthor='pytroll') +_satpy_dirs = appdirs.AppDirs(appname="satpy", appauthor="pytroll") _CONFIG_DEFAULTS = { - 'tmp_dir': tempfile.gettempdir(), - 'cache_dir': _satpy_dirs.user_cache_dir, - 'cache_lonlats': False, - 'cache_sensor_angles': False, - 'config_path': [], - 'data_dir': _satpy_dirs.user_data_dir, - 'demo_data_dir': '.', - 'download_aux': True, - 'sensor_angles_position_preference': 'actual', - 'readers': { - 'clip_negative_radiances': False, + "tmp_dir": tempfile.gettempdir(), + "cache_dir": _satpy_dirs.user_cache_dir, + "cache_lonlats": False, + "cache_sensor_angles": False, + "config_path": [], + "data_dir": _satpy_dirs.user_data_dir, + "demo_data_dir": ".", + "download_aux": True, + "sensor_angles_position_preference": "actual", + "readers": { + "clip_negative_radiances": False, }, } @@ -79,17 +79,17 @@ def impr_files(module_name: str) -> Path: # 5. ~/.satpy/satpy.yaml # 6. $SATPY_CONFIG_PATH/satpy.yaml if present (colon separated) _CONFIG_PATHS = [ - os.path.join(PACKAGE_CONFIG_PATH, 'satpy.yaml'), - os.getenv('SATPY_ROOT_CONFIG', os.path.join('/etc', 'satpy', 'satpy.yaml')), - os.path.join(sys.prefix, 'etc', 'satpy', 'satpy.yaml'), - os.path.join(_satpy_dirs.user_config_dir, 'satpy.yaml'), - os.path.join(os.path.expanduser('~'), '.satpy', 'satpy.yaml'), + os.path.join(PACKAGE_CONFIG_PATH, "satpy.yaml"), + os.getenv("SATPY_ROOT_CONFIG", os.path.join("/etc", "satpy", "satpy.yaml")), + os.path.join(sys.prefix, "etc", "satpy", "satpy.yaml"), + os.path.join(_satpy_dirs.user_config_dir, "satpy.yaml"), + os.path.join(os.path.expanduser("~"), ".satpy", "satpy.yaml"), ] # The above files can also be directories. If directories all files # with `.yaml`., `.yml`, or `.json` extensions will be used. 
-_ppp_config_dir = os.getenv('PPP_CONFIG_DIR', None) -_satpy_config_path = os.getenv('SATPY_CONFIG_PATH', None) +_ppp_config_dir = os.getenv("PPP_CONFIG_DIR", None) +_satpy_config_path = os.getenv("SATPY_CONFIG_PATH", None) if _ppp_config_dir is not None and _satpy_config_path is None: LOG.warning("'PPP_CONFIG_DIR' is deprecated. Please use 'SATPY_CONFIG_PATH' instead.") @@ -105,22 +105,22 @@ def impr_files(module_name: str) -> Path: # i.e. last-applied/highest priority to first-applied/lowest priority _satpy_config_path_list = _satpy_config_path.split(os.pathsep) - os.environ['SATPY_CONFIG_PATH'] = repr(_satpy_config_path_list) + os.environ["SATPY_CONFIG_PATH"] = repr(_satpy_config_path_list) for config_dir in _satpy_config_path_list: - _CONFIG_PATHS.append(os.path.join(config_dir, 'satpy.yaml')) + _CONFIG_PATHS.append(os.path.join(config_dir, "satpy.yaml")) -_ancpath = os.getenv('SATPY_ANCPATH', None) -_data_dir = os.getenv('SATPY_DATA_DIR', None) +_ancpath = os.getenv("SATPY_ANCPATH", None) +_data_dir = os.getenv("SATPY_DATA_DIR", None) if _ancpath is not None and _data_dir is None: LOG.warning("'SATPY_ANCPATH' is deprecated. Please use 'SATPY_DATA_DIR' instead.") - os.environ['SATPY_DATA_DIR'] = _ancpath + os.environ["SATPY_DATA_DIR"] = _ancpath config = Config("satpy", defaults=[_CONFIG_DEFAULTS], paths=_CONFIG_PATHS) def get_config_path_safe(): """Get 'config_path' and check for proper 'list' type.""" - config_path = config.get('config_path') + config_path = config.get("config_path") if not isinstance(config_path, list): raise ValueError("Satpy config option 'config_path' must be a " "list, not '{}'".format(type(config_path))) @@ -136,7 +136,7 @@ def get_entry_points_config_dirs(group_name: str, include_config_path: bool = Tr if not dirs or dirs[-1] != new_dir: dirs.append(new_dir) if include_config_path: - dirs.extend(config.get('config_path')[::-1]) + dirs.extend(config.get("config_path")[::-1]) return dirs diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py index 25fe728b9f..0aa903d2f8 100644 --- a/satpy/_scene_converters.py +++ b/satpy/_scene_converters.py @@ -45,7 +45,7 @@ def to_xarray(scn, include_lonlats=True, epoch=None, include_orig_name=True, - numeric_name_prefix='CHANNEL_'): + numeric_name_prefix="CHANNEL_"): """Merge all xr.DataArray(s) of a satpy.Scene to a CF-compliant xarray object. If all Scene DataArrays are on the same area, it returns an xr.Dataset. 
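For orientation, the `to_xarray` converter whose signature is touched in the hunk above is normally fed a loaded `Scene`. A minimal usage sketch follows, assuming only the signature shown in this patch; the file path, reader name and channel are hypothetical placeholders, not part of the change.

    # Minimal sketch of the Scene-to-xarray conversion edited above.
    # The file path, reader and channel name are hypothetical placeholders.
    from satpy import Scene
    from satpy._scene_converters import to_xarray

    scn = Scene(filenames=["/path/to/abi_l1b_file.nc"], reader="abi_l1b")
    scn.load(["C01"])

    # Returns an xr.Dataset when all loaded DataArrays share the same area;
    # purely numeric dataset names get the "CHANNEL_" prefix by default.
    ds = to_xarray(scn, include_lonlats=True, numeric_name_prefix="CHANNEL_")
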
diff --git a/satpy/aux_download.py b/satpy/aux_download.py index 82095737f0..5d9f0630e3 100644 --- a/satpy/aux_download.py +++ b/satpy/aux_download.py @@ -68,14 +68,14 @@ def _generate_filename(filename, component_type): return None path = filename if component_type: - path = '/'.join([component_type, path]) + path = "/".join([component_type, path]) return path def _retrieve_offline(data_dir, cache_key): - logger.debug('Downloading auxiliary files is turned off, will check ' - 'local files.') - local_file = os.path.join(data_dir, *cache_key.split('/')) + logger.debug("Downloading auxiliary files is turned off, will check " + "local files.") + local_file = os.path.join(data_dir, *cache_key.split("/")) if not os.path.isfile(local_file): raise RuntimeError("Satpy 'download_aux' setting is False meaning " "no new files will be downloaded and the local " @@ -85,7 +85,7 @@ def _retrieve_offline(data_dir, cache_key): def _should_download(cache_key): """Check if we're running tests and can download this file.""" - return not RUNNING_TESTS or 'README' in cache_key + return not RUNNING_TESTS or "README" in cache_key def retrieve(cache_key, pooch_kwargs=None): @@ -107,8 +107,8 @@ def retrieve(cache_key, pooch_kwargs=None): """ pooch_kwargs = pooch_kwargs or {} - path = satpy.config.get('data_dir') - if not satpy.config.get('download_aux'): + path = satpy.config.get("data_dir") + if not satpy.config.get("download_aux"): return _retrieve_offline(path, cache_key) if not _should_download(cache_key): raise RuntimeError("Auxiliary data download is not allowed during " @@ -123,7 +123,7 @@ def retrieve(cache_key, pooch_kwargs=None): def _retrieve_all_with_pooch(pooch_kwargs): if pooch_kwargs is None: pooch_kwargs = {} - path = satpy.config.get('data_dir') + path = satpy.config.get("data_dir") pooch_obj = pooch.create(path, path, registry=_FILE_REGISTRY, urls=_FILE_URLS) for fname in _FILE_REGISTRY: @@ -153,7 +153,7 @@ def retrieve_all(readers=None, writers=None, composite_sensors=None, ``fetch``. """ - if not satpy.config.get('download_aux'): + if not satpy.config.get("download_aux"): raise RuntimeError("Satpy 'download_aux' setting is False so no files " "will be downloaded.") @@ -305,11 +305,11 @@ def __init__(self, *args, **kwargs): """ DATA_FILE_COMPONENTS = { - 'reader': 'readers', - 'writer': 'writers', - 'composit': 'composites', - 'modifi': 'modifiers', - 'corr': 'modifiers', + "reader": "readers", + "writer": "writers", + "composit": "composites", + "modifi": "modifiers", + "corr": "modifiers", } @property @@ -318,7 +318,7 @@ def _data_file_component_type(self): for cls_name_sub, comp_type in self.DATA_FILE_COMPONENTS.items(): if cls_name_sub in cls_name: return comp_type - return 'other' + return "other" def register_data_files(self, data_files=None): """Register a series of files that may be downloaded later. 
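As a usage sketch for the auxiliary-download helpers whose string literals are converted in this file, assuming the `register_file` and `retrieve` signatures referenced in the surrounding hunks; the URL, filename and hash below are hypothetical placeholders.

    # Hypothetical registration/retrieval round trip with satpy.aux_download;
    # the URL, filename and known_hash value are placeholders.
    from satpy.aux_download import register_file, retrieve

    cache_key = register_file(
        "https://example.com/aux/some_lut.nc",  # placeholder URL
        "some_lut.nc",
        component_type="modifiers",  # stored under <data_dir>/modifiers/
        known_hash=None,
    )

    # Downloads into satpy's configured data_dir, or, when the 'download_aux'
    # setting is False, only checks for an existing local copy.
    local_path = retrieve(cache_key)
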
@@ -330,8 +330,8 @@ def register_data_files(self, data_files=None): """ comp_type = self._data_file_component_type if data_files is None: - df_parent = getattr(self, 'info', self.config) - data_files = df_parent.get('data_files', []) + df_parent = getattr(self, "info", self.config) + data_files = df_parent.get("data_files", []) cache_keys = [] for data_file_entry in data_files: cache_key = self._register_data_file(data_file_entry, comp_type) @@ -340,9 +340,9 @@ def register_data_files(self, data_files=None): @staticmethod def _register_data_file(data_file_entry, comp_type): - url = data_file_entry['url'] - filename = data_file_entry.get('filename', os.path.basename(url)) - known_hash = data_file_entry.get('known_hash') + url = data_file_entry["url"] + filename = data_file_entry.get("filename", os.path.basename(url)) + known_hash = data_file_entry.get("known_hash") return register_file(url, filename, component_type=comp_type, known_hash=known_hash) @@ -351,20 +351,20 @@ def retrieve_all_cmd(argv=None): """Call 'retrieve_all' function from console script 'satpy_retrieve_all'.""" import argparse parser = argparse.ArgumentParser(description="Download auxiliary data files used by Satpy.") - parser.add_argument('--data-dir', + parser.add_argument("--data-dir", help="Override 'SATPY_DATA_DIR' for destination of " "downloaded files. This does NOT change the " "directory Satpy will look at when searching " "for files outside of this script.") - parser.add_argument('--composite-sensors', nargs="*", + parser.add_argument("--composite-sensors", nargs="*", help="Limit loaded composites for the specified " "sensors. If specified with no arguments, " "no composite files will be downloaded.") - parser.add_argument('--readers', nargs="*", + parser.add_argument("--readers", nargs="*", help="Limit searching to these readers. If specified " "with no arguments, no reader files will be " "downloaded.") - parser.add_argument('--writers', nargs="*", + parser.add_argument("--writers", nargs="*", help="Limit searching to these writers. If specified " "with no arguments, no writer files will be " "downloaded.") @@ -373,7 +373,7 @@ def retrieve_all_cmd(argv=None): logging.basicConfig(level=logging.INFO) if args.data_dir is None: - args.data_dir = satpy.config.get('data_dir') + args.data_dir = satpy.config.get("data_dir") with satpy.config.set(data_dir=args.data_dir): retrieve_all(readers=args.readers, writers=args.writers, diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index da4d1a9e5c..f6b1b13150 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -34,12 +34,12 @@ LOG = logging.getLogger(__name__) -NEGLIGIBLE_COORDS = ['time'] +NEGLIGIBLE_COORDS = ["time"] """Keywords identifying non-dimensional coordinates to be ignored during composite generation.""" -MASKING_COMPOSITOR_METHODS = ['less', 'less_equal', 'equal', 'greater_equal', - 'greater', 'not_equal', 'isnan', 'isfinite', - 'isneginf', 'isposinf'] +MASKING_COMPOSITOR_METHODS = ["less", "less_equal", "equal", "greater_equal", + "greater", "not_equal", "isnan", "isfinite", + "isneginf", "isposinf"] class IncompatibleAreas(Exception): @@ -55,8 +55,8 @@ def check_times(projectables): times = [] for proj in projectables: try: - if proj['time'].size and proj['time'][0] != 0: - times.append(proj['time'][0].values) + if proj["time"].size and proj["time"][0] != 0: + times.append(proj["time"][0].values) else: break # right? 
except KeyError: @@ -64,13 +64,13 @@ def check_times(projectables): break except IndexError: # time is a scalar - if proj['time'].values != 0: - times.append(proj['time'].values) + if proj["time"].values != 0: + times.append(proj["time"].values) else: break else: # Is there a more gracious way to handle this ? - if np.max(times) - np.min(times) > np.timedelta64(1, 's'): + if np.max(times) - np.min(times) > np.timedelta64(1, "s"): raise IncompatibleTimes mid_time = (np.max(times) - np.min(times)) / 2 + np.min(times) return mid_time @@ -79,9 +79,9 @@ def check_times(projectables): def sub_arrays(proj1, proj2): """Substract two DataArrays and combine their attrs.""" attrs = combine_metadata(proj1.attrs, proj2.attrs) - if (attrs.get('area') is None - and proj1.attrs.get('area') is not None - and proj2.attrs.get('area') is not None): + if (attrs.get("area") is None + and proj1.attrs.get("area") is not None + and proj2.attrs.get("area") is not None): raise IncompatibleAreas res = proj1 - proj2 res.attrs = attrs @@ -114,9 +114,9 @@ def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwar def id(self): """Return the DataID of the object.""" try: - return self.attrs['_satpy_id'] + return self.attrs["_satpy_id"] except KeyError: - id_keys = self.attrs.get('_satpy_id_keys', minimal_default_keys_config) + id_keys = self.attrs.get("_satpy_id_keys", minimal_default_keys_config) return DataID(id_keys, **self.attrs) def __call__(self, datasets, optional_datasets=None, **info): @@ -135,15 +135,15 @@ def __repr__(self): def apply_modifier_info(self, origin, destination): """Apply the modifier info from *origin* to *destination*.""" - o = getattr(origin, 'attrs', origin) - d = getattr(destination, 'attrs', destination) + o = getattr(origin, "attrs", origin) + d = getattr(destination, "attrs", destination) try: - dataset_keys = self.attrs['_satpy_id'].id_keys.keys() + dataset_keys = self.attrs["_satpy_id"].id_keys.keys() except KeyError: - dataset_keys = ['name', 'modifiers'] + dataset_keys = ["name", "modifiers"] for k in dataset_keys: - if k == 'modifiers' and k in self.attrs: + if k == "modifiers" and k in self.attrs: d[k] = self.attrs[k] elif d.get(k) is None: if self.attrs.get(k) is not None: @@ -225,16 +225,16 @@ def check_geolocation(self, data_arrays): if len(data_arrays) == 1: return - if 'x' in data_arrays[0].dims and \ - not all(x.sizes['x'] == data_arrays[0].sizes['x'] + if "x" in data_arrays[0].dims and \ + not all(x.sizes["x"] == data_arrays[0].sizes["x"] for x in data_arrays[1:]): raise IncompatibleAreas("X dimension has different sizes") - if 'y' in data_arrays[0].dims and \ - not all(x.sizes['y'] == data_arrays[0].sizes['y'] + if "y" in data_arrays[0].dims and \ + not all(x.sizes["y"] == data_arrays[0].sizes["y"] for x in data_arrays[1:]): raise IncompatibleAreas("Y dimension has different sizes") - areas = [ds.attrs.get('area') for ds in data_arrays] + areas = [ds.attrs.get("area") for ds in data_arrays] if all(a is None for a in areas): return if any(a is None for a in areas): @@ -242,7 +242,7 @@ def check_geolocation(self, data_arrays): if not all(areas[0] == x for x in areas[1:]): LOG.debug("Not all areas are the same in " - "'{}'".format(self.attrs['name'])) + "'{}'".format(self.attrs["name"])) raise IncompatibleAreas("Areas are different") @@ -255,7 +255,7 @@ def __call__(self, projectables, nonprojectables=None, **attrs): raise ValueError("Expected 2 datasets, got %d" % (len(projectables),)) projectables = self.match_data_arrays(projectables) info = 
combine_metadata(*projectables) - info['name'] = self.attrs['name'] + info["name"] = self.attrs["name"] info.update(self.attrs) # attrs from YAML/__init__ info.update(attrs) # overwriting of DataID properties @@ -273,7 +273,7 @@ def __call__(self, projectables, nonprojectables=None, **info): raise ValueError("Expected 2 datasets, got %d" % (len(projectables),)) projectables = self.match_data_arrays(projectables) info = combine_metadata(*projectables) - info['name'] = self.attrs['name'] + info["name"] = self.attrs["name"] proj = projectables[0] / projectables[1] proj.attrs = info @@ -289,7 +289,7 @@ def __call__(self, projectables, nonprojectables=None, **info): raise ValueError("Expected 2 datasets, got %d" % (len(projectables),)) projectables = self.match_data_arrays(projectables) info = combine_metadata(*projectables) - info['name'] = self.attrs['name'] + info["name"] = self.attrs["name"] proj = projectables[0] + projectables[1] proj.attrs = info @@ -316,10 +316,10 @@ def __call__(self, projectables, nonprojectables=None, **attrs): data = projectables[0] new_attrs = data.attrs.copy() self._update_missing_metadata(new_attrs, attrs) - resolution = new_attrs.get('resolution', None) + resolution = new_attrs.get("resolution", None) new_attrs.update(self.attrs) if resolution is not None: - new_attrs['resolution'] = resolution + new_attrs["resolution"] = resolution return xr.DataArray(data=data.data, attrs=new_attrs, dims=data.dims, coords=data.coords) @@ -349,8 +349,8 @@ def __init__(self, name, lut=None, **kwargs): def _update_attrs(self, new_attrs): """Modify name and add LUT.""" - new_attrs['name'] = self.attrs['name'] - new_attrs['composite_lut'] = list(self.lut) + new_attrs["name"] = self.attrs["name"] + new_attrs["composite_lut"] = list(self.lut) @staticmethod def _getitem(block, lut): @@ -373,7 +373,7 @@ def __call__(self, projectables, **kwargs): class GenericCompositor(CompositeBase): """Basic colored composite builder.""" - modes = {1: 'L', 2: 'LA', 3: 'RGB', 4: 'RGBA'} + modes = {1: "L", 2: "LA", 3: "RGB", 4: "RGBA"} def __init__(self, name, common_channel_mask=True, **kwargs): """Collect custom configuration values. 
@@ -389,18 +389,18 @@ def __init__(self, name, common_channel_mask=True, **kwargs): @classmethod def infer_mode(cls, data_arr): """Guess at the mode for a particular DataArray.""" - if 'mode' in data_arr.attrs: - return data_arr.attrs['mode'] - if 'bands' not in data_arr.dims: + if "mode" in data_arr.attrs: + return data_arr.attrs["mode"] + if "bands" not in data_arr.dims: return cls.modes[1] - if 'bands' in data_arr.coords and isinstance(data_arr.coords['bands'][0].item(), str): - return ''.join(data_arr.coords['bands'].values) - return cls.modes[data_arr.sizes['bands']] + if "bands" in data_arr.coords and isinstance(data_arr.coords["bands"][0].item(), str): + return "".join(data_arr.coords["bands"].values) + return cls.modes[data_arr.sizes["bands"]] def _concat_datasets(self, projectables, mode): try: - data = xr.concat(projectables, 'bands', coords='minimal') - data['bands'] = list(mode) + data = xr.concat(projectables, "bands", coords="minimal") + data["bands"] = list(mode) except ValueError as e: LOG.debug("Original exception for incompatible areas: {}".format(str(e))) raise IncompatibleAreas @@ -424,15 +424,15 @@ def _get_sensors(self, projectables): def __call__(self, projectables, nonprojectables=None, **attrs): """Build the composite.""" - if 'deprecation_warning' in self.attrs: + if "deprecation_warning" in self.attrs: warnings.warn( - self.attrs['deprecation_warning'], + self.attrs["deprecation_warning"], UserWarning, stacklevel=2 ) - self.attrs.pop('deprecation_warning', None) + self.attrs.pop("deprecation_warning", None) num = len(projectables) - mode = attrs.get('mode') + mode = attrs.get("mode") if mode is None: # num may not be in `self.modes` so only check if we need to mode = self.modes[num] @@ -440,8 +440,8 @@ def __call__(self, projectables, nonprojectables=None, **attrs): projectables = self.match_data_arrays(projectables) data = self._concat_datasets(projectables, mode) # Skip masking if user wants it or a specific alpha channel is given. 
- if self.common_channel_mask and mode[-1] != 'A': - data = data.where(data.notnull().all(dim='bands')) + if self.common_channel_mask and mode[-1] != "A": + data = data.where(data.notnull().all(dim="bands")) else: data = projectables[0] @@ -450,23 +450,23 @@ def __call__(self, projectables, nonprojectables=None, **attrs): # time coordinate value if len(projectables) > 1: time = check_times(projectables) - if time is not None and 'time' in data.dims: - data['time'] = [time] + if time is not None and "time" in data.dims: + data["time"] = [time] new_attrs = combine_metadata(*projectables) # remove metadata that shouldn't make sense in a composite new_attrs["wavelength"] = None new_attrs.pop("units", None) - new_attrs.pop('calibration', None) - new_attrs.pop('modifiers', None) + new_attrs.pop("calibration", None) + new_attrs.pop("modifiers", None) new_attrs.update({key: val for (key, val) in attrs.items() if val is not None}) - resolution = new_attrs.get('resolution', None) + resolution = new_attrs.get("resolution", None) new_attrs.update(self.attrs) if resolution is not None: - new_attrs['resolution'] = resolution + new_attrs["resolution"] = resolution new_attrs["sensor"] = self._get_sensors(projectables) new_attrs["mode"] = mode @@ -505,8 +505,8 @@ def __call__(self, projectables, nonprojectables=None, **info): filled_projectable = projectables[0] for next_projectable in projectables[1:]: filled_projectable = filled_projectable.fillna(next_projectable) - if 'optional_datasets' in info.keys(): - for next_projectable in info['optional_datasets']: + if "optional_datasets" in info.keys(): + for next_projectable in info["optional_datasets"]: filled_projectable = filled_projectable.fillna(next_projectable) return super().__call__([filled_projectable], **info) @@ -604,16 +604,16 @@ def _create_composite_from_channels(self, channels, template): mask = self._get_mask_from_data(template) channels = [self._create_masked_dataarray_like(channel, template, mask) for channel in channels] res = super(ColormapCompositor, self).__call__(channels, **template.attrs) - res.attrs['_FillValue'] = np.nan + res.attrs["_FillValue"] = np.nan return res @staticmethod def _get_mask_from_data(data): - fill_value = data.attrs.get('_FillValue', np.nan) + fill_value = data.attrs.get("_FillValue", np.nan) if np.isnan(fill_value): mask = data.notnull() else: - mask = data != data.attrs['_FillValue'] + mask = data != data.attrs["_FillValue"] return mask @staticmethod @@ -770,8 +770,8 @@ def _get_data_for_combined_product(self, day_data, night_data): # L/RGB -> RGB/RGB # LA/RGB -> RGBA/RGBA # RGB/RGBA -> RGBA/RGBA - day_data = add_bands(day_data, night_data['bands']) - night_data = add_bands(night_data, day_data['bands']) + day_data = add_bands(day_data, night_data["bands"]) + night_data = add_bands(night_data, day_data["bands"]) # Get merged metadata attrs = combine_metadata(day_data, night_data) @@ -801,7 +801,7 @@ def _weight_data(self, day_data, night_data, weights, attrs): night_band = _get_single_band_data(night_data, b) # For day-only and night-only products only the alpha channel is weighted # If there's no alpha band, weight the actual data - if b == 'A' or "only" not in self.day_night or not self.include_alpha: + if b == "A" or "only" not in self.day_night or not self.include_alpha: day_band = day_band * weights night_band = night_band * (1 - weights) band = day_band + night_band @@ -812,9 +812,9 @@ def _weight_data(self, day_data, night_data, weights, attrs): def _get_band_names(day_data, night_data): try: - bands = 
day_data['bands'] + bands = day_data["bands"] except TypeError: - bands = night_data['bands'] + bands = night_data["bands"] return bands @@ -850,18 +850,18 @@ def add_alpha_bands(data): Add an alpha band to L or RGB composite as prerequisites for the following band matching to make the masked-out area transparent. """ - if 'A' not in data['bands'].data: - new_data = [data.sel(bands=band) for band in data['bands'].data] + if "A" not in data["bands"].data: + new_data = [data.sel(bands=band) for band in data["bands"].data] # Create alpha band based on a copy of the first "real" band alpha = new_data[0].copy() - alpha.data = da.ones((data.sizes['y'], - data.sizes['x']), + alpha.data = da.ones((data.sizes["y"], + data.sizes["x"]), chunks=new_data[0].chunks) # Rename band to indicate it's alpha - alpha['bands'] = 'A' + alpha["bands"] = "A" new_data.append(alpha) - new_data = xr.concat(new_data, dim='bands') - new_data.attrs['mode'] = data.attrs['mode'] + 'A' + new_data = xr.concat(new_data, dim="bands") + new_data.attrs["mode"] = data.attrs["mode"] + "A" data = new_data return data @@ -875,17 +875,17 @@ def enhance2dataset(dset, convert_p=False): data = _get_data_from_enhanced_image(dset, convert_p) data.attrs = attrs # remove 'mode' if it is specified since it may have been updated - data.attrs.pop('mode', None) + data.attrs.pop("mode", None) # update mode since it may have changed (colorized/palettize) - data.attrs['mode'] = GenericCompositor.infer_mode(data) + data.attrs["mode"] = GenericCompositor.infer_mode(data) return data def _get_data_from_enhanced_image(dset, convert_p): img = get_enhanced_image(dset) - if convert_p and img.mode == 'P': + if convert_p and img.mode == "P": img = _apply_palette_to_image(img) - if img.mode != 'P': + if img.mode != "P": data = img.data.clip(0.0, 1.0) else: data = img.data @@ -894,9 +894,9 @@ def _get_data_from_enhanced_image(dset, convert_p): def _apply_palette_to_image(img): if len(img.palette[0]) == 3: - img = img.convert('RGB') + img = img.convert("RGB") elif len(img.palette[0]) == 4: - img = img.convert('RGBA') + img = img.convert("RGBA") return img @@ -904,36 +904,36 @@ def add_bands(data, bands): """Add bands so that they match *bands*.""" # Add R, G and B bands, remove L band bands = bands.compute() - if 'P' in data['bands'].data or 'P' in bands.data: - raise NotImplementedError('Cannot mix datasets of mode P with other datasets at the moment.') - if 'L' in data['bands'].data and 'R' in bands.data: - lum = data.sel(bands='L') + if "P" in data["bands"].data or "P" in bands.data: + raise NotImplementedError("Cannot mix datasets of mode P with other datasets at the moment.") + if "L" in data["bands"].data and "R" in bands.data: + lum = data.sel(bands="L") # Keep 'A' if it was present - if 'A' in data['bands']: - alpha = data.sel(bands='A') + if "A" in data["bands"]: + alpha = data.sel(bands="A") new_data = (lum, lum, lum, alpha) - new_bands = ['R', 'G', 'B', 'A'] - mode = 'RGBA' + new_bands = ["R", "G", "B", "A"] + mode = "RGBA" else: new_data = (lum, lum, lum) - new_bands = ['R', 'G', 'B'] - mode = 'RGB' - data = xr.concat(new_data, dim='bands', coords={'bands': new_bands}) - data['bands'] = new_bands - data.attrs['mode'] = mode + new_bands = ["R", "G", "B"] + mode = "RGB" + data = xr.concat(new_data, dim="bands", coords={"bands": new_bands}) + data["bands"] = new_bands + data.attrs["mode"] = mode # Add alpha band - if 'A' not in data['bands'].data and 'A' in bands.data: - new_data = [data.sel(bands=band) for band in data['bands'].data] + if "A" not 
in data["bands"].data and "A" in bands.data: + new_data = [data.sel(bands=band) for band in data["bands"].data] # Create alpha band based on a copy of the first "real" band alpha = new_data[0].copy() - alpha.data = da.ones((data.sizes['y'], - data.sizes['x']), + alpha.data = da.ones((data.sizes["y"], + data.sizes["x"]), chunks=new_data[0].chunks) # Rename band to indicate it's alpha - alpha['bands'] = 'A' + alpha["bands"] = "A" new_data.append(alpha) - new_data = xr.concat(new_data, dim='bands') - new_data.attrs['mode'] = data.attrs['mode'] + 'A' + new_data = xr.concat(new_data, dim="bands") + new_data.attrs["mode"] = data.attrs["mode"] + "A" data = new_data return data @@ -1061,11 +1061,11 @@ def __init__(self, *args, **kwargs): """Instanciate the ration sharpener.""" self.high_resolution_color = kwargs.pop("high_resolution_band", "red") self.neutral_resolution_color = kwargs.pop("neutral_resolution_band", None) - if self.high_resolution_color not in ['red', 'green', 'blue', None]: + if self.high_resolution_color not in ["red", "green", "blue", None]: raise ValueError("RatioSharpenedRGB.high_resolution_band must " "be one of ['red', 'green', 'blue', None]. Not " "'{}'".format(self.high_resolution_color)) - if self.neutral_resolution_color not in ['red', 'green', 'blue', None]: + if self.neutral_resolution_color not in ["red", "green", "blue", None]: raise ValueError("RatioSharpenedRGB.neutral_resolution_band must " "be one of ['red', 'green', 'blue', None]. Not " "'{}'".format(self.neutral_resolution_color)) @@ -1081,8 +1081,8 @@ def __call__(self, datasets, optional_datasets=None, **info): if not all(x.shape == datasets[0].shape for x in datasets[1:]) or \ (optional_datasets and optional_datasets[0].shape != datasets[0].shape): - raise IncompatibleAreas('RatioSharpening requires datasets of ' - 'the same size. Must resample first.') + raise IncompatibleAreas("RatioSharpening requires datasets of " + "the same size. 
Must resample first.") optional_datasets = tuple() if optional_datasets is None else optional_datasets datasets = self.match_data_arrays(datasets + optional_datasets) @@ -1100,19 +1100,19 @@ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets) if optional_datasets and self.high_resolution_color is not None: LOG.debug("Sharpening image with high resolution {} band".format(self.high_resolution_color)) high_res = datasets[3] - if 'rows_per_scan' in high_res.attrs: - new_attrs.setdefault('rows_per_scan', high_res.attrs['rows_per_scan']) - new_attrs.setdefault('resolution', high_res.attrs['resolution']) + if "rows_per_scan" in high_res.attrs: + new_attrs.setdefault("rows_per_scan", high_res.attrs["rows_per_scan"]) + new_attrs.setdefault("resolution", high_res.attrs["resolution"]) else: LOG.debug("No sharpening band specified for ratio sharpening") high_res = None - bands = {'red': low_res_red, 'green': low_res_green, 'blue': low_res_blue} + bands = {"red": low_res_red, "green": low_res_green, "blue": low_res_blue} if high_res is not None: self._sharpen_bands_with_high_res(bands, high_res) - return bands['red'], bands['green'], bands['blue'], new_attrs + return bands["red"], bands["green"], bands["blue"], new_attrs def _sharpen_bands_with_high_res(self, bands, high_res): ratio = da.map_blocks( @@ -1170,9 +1170,9 @@ def _mean4(data, offset=(0, 0), block_id=None): rows2 = rows + row_offset + row_after cols2 = cols + col_offset + col_after - av_data = np.pad(data, pad, 'edge') + av_data = np.pad(data, pad, "edge") new_shape = (int(rows2 / 2.), 2, int(cols2 / 2.), 2) - with np.errstate(invalid='ignore'): + with np.errstate(invalid="ignore"): data_mean = np.nanmean(av_data.reshape(new_shape), axis=(1, 3)) data_mean = np.repeat(np.repeat(data_mean, 2, axis=0), 2, axis=1) data_mean = data_mean[row_offset:row_offset + rows, col_offset:col_offset + cols] @@ -1199,7 +1199,7 @@ class SelfSharpenedRGB(RatioSharpenedRGB): def four_element_average_dask(d): """Average every 4 elements (2x2) in a 2D array.""" try: - offset = d.attrs['area'].crop_offset + offset = d.attrs["area"].crop_offset except (KeyError, AttributeError): offset = (0, 0) @@ -1208,16 +1208,16 @@ def four_element_average_dask(d): def __call__(self, datasets, optional_datasets=None, **attrs): """Generate the composite.""" - colors = ['red', 'green', 'blue'] + colors = ["red", "green", "blue"] if self.high_resolution_color not in colors: raise ValueError("SelfSharpenedRGB requires at least one high resolution band, not " "'{}'".format(self.high_resolution_color)) high_res = datasets[colors.index(self.high_resolution_color)] high_mean = self.four_element_average_dask(high_res) - red = high_mean if self.high_resolution_color == 'red' else datasets[0] - green = high_mean if self.high_resolution_color == 'green' else datasets[1] - blue = high_mean if self.high_resolution_color == 'blue' else datasets[2] + red = high_mean if self.high_resolution_color == "red" else datasets[0] + green = high_mean if self.high_resolution_color == "green" else datasets[1] + blue = high_mean if self.high_resolution_color == "blue" else datasets[2] return super(SelfSharpenedRGB, self).__call__((red, green, blue), optional_datasets=(high_res,), **attrs) @@ -1273,7 +1273,7 @@ def __call__(self, projectables, *args, **kwargs): # Get the enhanced version of the RGB composite to be sharpened rgb_img = enhance2dataset(projectables[1]) # Ignore alpha band when applying luminance - rgb_img = rgb_img.where(rgb_img.bands == 'A', rgb_img * luminance) + 
rgb_img = rgb_img.where(rgb_img.bands == "A", rgb_img * luminance) return super(SandwichCompositor, self).__call__(rgb_img, *args, **kwargs) @@ -1378,7 +1378,7 @@ def __init__(self, name, filename=None, url=None, known_hash=None, area=None, @staticmethod def _check_relative_filename(filename): - data_dir = satpy.config.get('data_dir') + data_dir = satpy.config.get("data_dir") path = os.path.join(data_dir, filename) return path if os.path.exists(path) else filename @@ -1406,9 +1406,9 @@ def register_data_files(self, data_files): if os.path.isabs(self._cache_filename): return [None] return super().register_data_files([{ - 'url': self._url, - 'known_hash': self._known_hash, - 'filename': self._cache_filename, + "url": self._url, + "known_hash": self._known_hash, + "filename": self._cache_filename, }]) def _retrieve_data_file(self): @@ -1421,29 +1421,29 @@ def __call__(self, *args, **kwargs): """Call the compositor.""" from satpy import Scene local_file = self._retrieve_data_file() - scn = Scene(reader='generic_image', filenames=[local_file]) - scn.load(['image']) - img = scn['image'] + scn = Scene(reader="generic_image", filenames=[local_file]) + scn.load(["image"]) + img = scn["image"] # use compositor parameters as extra metadata # most important: set 'name' of the image img.attrs.update(self.attrs) # Check for proper area definition. Non-georeferenced images # do not have `area` in the attributes - if 'area' not in img.attrs: + if "area" not in img.attrs: if self.area is None: raise AttributeError("Area definition needs to be configured") - img.attrs['area'] = self.area - img.attrs['sensor'] = None - img.attrs['mode'] = ''.join(img.bands.data) - img.attrs.pop('modifiers', None) - img.attrs.pop('calibration', None) + img.attrs["area"] = self.area + img.attrs["sensor"] = None + img.attrs["mode"] = "".join(img.bands.data) + img.attrs.pop("modifiers", None) + img.attrs.pop("calibration", None) # Add start time if not present in the filename - if 'start_time' not in img.attrs or not img.attrs['start_time']: + if "start_time" not in img.attrs or not img.attrs["start_time"]: import datetime as dt - img.attrs['start_time'] = dt.datetime.utcnow() - if 'end_time' not in img.attrs or not img.attrs['end_time']: + img.attrs["start_time"] = dt.datetime.utcnow() + if "end_time" not in img.attrs or not img.attrs["end_time"]: import datetime as dt - img.attrs['end_time'] = dt.datetime.utcnow() + img.attrs["end_time"] = dt.datetime.utcnow() return img @@ -1461,8 +1461,8 @@ def __call__(self, projectables, *args, **kwargs): # L/RGB -> RGB/RGB # LA/RGB -> RGBA/RGBA # RGB/RGBA -> RGBA/RGBA - foreground = add_bands(foreground, background['bands']) - background = add_bands(background, foreground['bands']) + foreground = add_bands(foreground, background["bands"]) + background = add_bands(background, foreground["bands"]) attrs = self._combine_metadata_with_mode_and_sensor(foreground, background) data = self._get_merged_image_data(foreground, background) @@ -1479,18 +1479,18 @@ def _combine_metadata_with_mode_and_sensor(self, # 'mode' is no longer valid after we've remove the 'A' # let the base class __call__ determine mode attrs.pop("mode", None) - if attrs.get('sensor') is None: + if attrs.get("sensor") is None: # sensor can be a set - attrs['sensor'] = self._get_sensors([foreground, background]) + attrs["sensor"] = self._get_sensors([foreground, background]) return attrs @staticmethod def _get_merged_image_data(foreground: xr.DataArray, background: xr.DataArray ) -> list[xr.DataArray]: - if 'A' in 
foreground.attrs['mode']: + if "A" in foreground.attrs["mode"]: # Use alpha channel as weight and blend the two composites - alpha = foreground.sel(bands='A') + alpha = foreground.sel(bands="A") data = [] # NOTE: there's no alpha band in the output image, it will # be added by the data writer @@ -1503,7 +1503,7 @@ def _get_merged_image_data(foreground: xr.DataArray, else: data_arr = xr.where(foreground.isnull(), background, foreground) # Split to separate bands so the mode is correct - data = [data_arr.sel(bands=b) for b in data_arr['bands']] + data = [data_arr.sel(bands=b) for b in data_arr["bands"]] return data @@ -1577,9 +1577,9 @@ def __init__(self, name, transparency=None, conditions=None, mode="LA", "MaskingCompositor, use 'conditions' instead.") self.conditions = [] for key, transp in transparency.items(): - self.conditions.append({'method': 'equal', - 'value': key, - 'transparency': transp}) + self.conditions.append({"method": "equal", + "value": key, + "transparency": transp}) LOG.info("Converted 'transparency' to 'conditions': %s", str(self.conditions)) else: @@ -1643,8 +1643,8 @@ def _select_data_bands(self, data_in): From input data, select the bands that need to have masking applied. """ - if 'bands' in data_in.dims: - return [data_in.sel(bands=b) for b in data_in['bands'] if b != 'A'] + if "bands" in data_in.dims: + return [data_in.sel(bands=b) for b in data_in["bands"] if b != "A"] if self.mode == "RGBA": return [data_in, data_in, data_in] return [data_in] @@ -1656,16 +1656,16 @@ def _get_alpha_bands(self, data, mask_in, alpha_attrs): """ # Create alpha band mask_data = mask_in.data - alpha = da.ones((data[0].sizes['y'], - data[0].sizes['x']), + alpha = da.ones((data[0].sizes["y"], + data[0].sizes["x"]), chunks=data[0].chunks) for condition in self.conditions: - method = condition['method'] - value = condition.get('value', None) + method = condition["method"] + value = condition.get("value", None) if isinstance(value, str): value = _get_flag_value(mask_in, value) - transparency = condition['transparency'] + transparency = condition["transparency"] mask = self._get_mask(method, value, mask_data) if transparency == 100.0: @@ -1684,8 +1684,8 @@ def _get_flag_value(mask, val): NWC SAF GEO/PPS softwares. 
""" - flag_meanings = mask.attrs['flag_meanings'] - flag_values = mask.attrs['flag_values'] + flag_meanings = mask.attrs["flag_meanings"] + flag_values = mask.attrs["flag_values"] if isinstance(flag_meanings, str): flag_meanings = flag_meanings.split() diff --git a/satpy/composites/cloud_products.py b/satpy/composites/cloud_products.py index a05be8ad17..4dbc2e489f 100644 --- a/satpy/composites/cloud_products.py +++ b/satpy/composites/cloud_products.py @@ -31,7 +31,7 @@ def __call__(self, projectables, **info): raise ValueError("Expected 2 datasets, got %d" % (len(projectables), )) data, status = projectables - valid = status != status.attrs['_FillValue'] + valid = status != status.attrs["_FillValue"] status_cloud_free = status % 2 == 1 # bit 0 is set cloud_free = np.logical_and(valid, status_cloud_free) if "bad_optical_conditions" in status.attrs.get("flag_meanings", "") and data.name == "cmic_cre": @@ -43,7 +43,7 @@ def __call__(self, projectables, **info): # Keep cloudfree or valid product data = data.where(np.logical_or(cloud_free, data != data.attrs["scaled_FillValue"]), np.nan) res = SingleBandCompositor.__call__(self, [data], **data.attrs) - res.attrs['_FillValue'] = np.nan + res.attrs["_FillValue"] = np.nan return res @@ -56,15 +56,15 @@ def __call__(self, projectables, **info): raise ValueError("Expected 2 datasets, got %d" % (len(projectables), )) data, cma = projectables - valid_cma = cma != cma.attrs['_FillValue'] - valid_prod = data != data.attrs['_FillValue'] + valid_cma = cma != cma.attrs["_FillValue"] + valid_prod = data != data.attrs["_FillValue"] valid_prod = np.logical_and(valid_prod, np.logical_not(np.isnan(data))) # Update valid_cma and not valid_prod means: keep not valid cma or valid prod data = data.where(np.logical_or(np.logical_not(valid_cma), valid_prod), data.attrs["scaled_FillValue"]) data = data.where(np.logical_or(valid_prod, valid_cma), np.nan) res = SingleBandCompositor.__call__(self, [data], **data.attrs) - res.attrs['_FillValue'] = np.nan + res.attrs["_FillValue"] = np.nan return res @@ -95,15 +95,15 @@ def __call__(self, projectables, *args, **kwargs): scalef1 = 1.0 / maxs1 - 1 / 255.0 p1data = (light*scalef1).where(light != 0) - p1data = p1data.where(light != light.attrs['_FillValue']) + p1data = p1data.where(light != light.attrs["_FillValue"]) p1data.attrs = light.attrs data = moderate*scalef2 p2data = data.where(moderate != 0) - p2data = p2data.where(moderate != moderate.attrs['_FillValue']) + p2data = p2data.where(moderate != moderate.attrs["_FillValue"]) p2data.attrs = moderate.attrs data = intense*scalef3 p3data = data.where(intense != 0) - p3data = p3data.where(intense != intense.attrs['_FillValue']) + p3data = p3data.where(intense != intense.attrs["_FillValue"]) p3data.attrs = intense.attrs res = super(PrecipCloudsRGB, self).__call__((p3data, p2data, p1data), diff --git a/satpy/composites/config_loader.py b/satpy/composites/config_loader.py index e5af45355b..bffbee8a13 100644 --- a/satpy/composites/config_loader.py +++ b/satpy/composites/config_loader.py @@ -38,10 +38,10 @@ def _convert_dep_info_to_data_query(dep_info): key_item = dep_info.copy() - key_item.pop('prerequisites', None) - key_item.pop('optional_prerequisites', None) - if 'modifiers' in key_item: - key_item['modifiers'] = tuple(key_item['modifiers']) + key_item.pop("prerequisites", None) + key_item.pop("optional_prerequisites", None) + if "modifiers" in key_item: + key_item["modifiers"] = tuple(key_item["modifiers"]) key = DataQuery.from_dict(key_item) return key @@ -64,14 +64,14 @@ 
def _create_comp_from_info(self, composite_info, loader): def _handle_inline_comp_dep(self, dep_info, dep_num, parent_name): # Create an unique temporary name for the composite - sub_comp_name = '_' + parent_name + '_dep_{}'.format(dep_num) - dep_info['name'] = sub_comp_name + sub_comp_name = "_" + parent_name + "_dep_{}".format(dep_num) + dep_info["name"] = sub_comp_name self._load_config_composite(dep_info) @staticmethod def _get_compositor_loader_from_config(composite_name, composite_info): try: - loader = composite_info.pop('compositor') + loader = composite_info.pop("compositor") except KeyError: raise ValueError("'compositor' key missing or empty for '{}'. Option keys = {}".format( composite_name, str(composite_info.keys()))) @@ -79,22 +79,22 @@ def _get_compositor_loader_from_config(composite_name, composite_info): def _process_composite_deps(self, composite_info): dep_num = -1 - for prereq_type in ['prerequisites', 'optional_prerequisites']: + for prereq_type in ["prerequisites", "optional_prerequisites"]: prereqs = [] for dep_info in composite_info.get(prereq_type, []): dep_num += 1 if not isinstance(dep_info, dict): prereqs.append(dep_info) continue - elif 'compositor' in dep_info: + elif "compositor" in dep_info: self._handle_inline_comp_dep( - dep_info, dep_num, composite_info['name']) + dep_info, dep_num, composite_info["name"]) prereq_key = _convert_dep_info_to_data_query(dep_info) prereqs.append(prereq_key) composite_info[prereq_type] = prereqs def _load_config_composite(self, composite_info): - composite_name = composite_info['name'] + composite_name = composite_info["name"] loader = self._get_compositor_loader_from_config(composite_name, composite_info) self._process_composite_deps(composite_info) key, comp = self._create_comp_from_info(composite_info, loader) @@ -102,7 +102,7 @@ def _load_config_composite(self, composite_info): def _load_config_composites(self, configured_composites): for composite_name, composite_info in configured_composites.items(): - composite_info['name'] = composite_name + composite_info["name"] = composite_name self._load_config_composite(composite_info) def parse_config(self, configured_composites, composite_configs): @@ -128,9 +128,9 @@ def __init__(self, loaded_modifiers, sensor_id_keys): @staticmethod def _get_modifier_loader_from_config(modifier_name, modifier_info): try: - loader = modifier_info.pop('modifier', None) + loader = modifier_info.pop("modifier", None) if loader is None: - loader = modifier_info.pop('compositor') + loader = modifier_info.pop("compositor") warnings.warn( "Modifier '{}' uses deprecated 'compositor' " "key to point to Python class, replace " @@ -143,7 +143,7 @@ def _get_modifier_loader_from_config(modifier_name, modifier_info): return loader def _process_modifier_deps(self, modifier_info): - for prereq_type in ['prerequisites', 'optional_prerequisites']: + for prereq_type in ["prerequisites", "optional_prerequisites"]: prereqs = [] for dep_info in modifier_info.get(prereq_type, []): if not isinstance(dep_info, dict): @@ -154,14 +154,14 @@ def _process_modifier_deps(self, modifier_info): modifier_info[prereq_type] = prereqs def _load_config_modifier(self, modifier_info): - modifier_name = modifier_info['name'] + modifier_name = modifier_info["name"] loader = self._get_modifier_loader_from_config(modifier_name, modifier_info) self._process_modifier_deps(modifier_info) self.loaded_modifiers[modifier_name] = (loader, modifier_info) def _load_config_modifiers(self, configured_modifiers): for modifier_name, modifier_info in 
configured_modifiers.items(): - modifier_info['name'] = modifier_name + modifier_info["name"] = modifier_name self._load_config_modifier(modifier_info) def parse_config(self, configured_modifiers, composite_configs): @@ -179,10 +179,10 @@ def _load_config(composite_configs): conf = {} for composite_config in composite_configs: - with open(composite_config, 'r', encoding='utf-8') as conf_file: + with open(composite_config, "r", encoding="utf-8") as conf_file: conf = recursive_dict_update(conf, yaml.load(conf_file, Loader=UnsafeLoader)) try: - sensor_name = conf['sensor_name'] + sensor_name = conf["sensor_name"] except KeyError: logger.debug('No "sensor_name" tag found in %s, skipping.', composite_configs) @@ -192,7 +192,7 @@ def _load_config(composite_configs): sensor_modifiers = {} dep_id_keys = None - sensor_deps = sensor_name.split('/')[:-1] + sensor_deps = sensor_name.split("/")[:-1] if sensor_deps: # get dependent for sensor_dep in sensor_deps: @@ -203,18 +203,18 @@ def _load_config(composite_configs): id_keys = _get_sensor_id_keys(conf, dep_id_keys) mod_config_helper = _ModifierConfigHelper(sensor_modifiers, id_keys) - configured_modifiers = conf.get('modifiers', {}) + configured_modifiers = conf.get("modifiers", {}) mod_config_helper.parse_config(configured_modifiers, composite_configs) comp_config_helper = _CompositeConfigHelper(sensor_compositors, id_keys) - configured_composites = conf.get('composites', {}) + configured_composites = conf.get("composites", {}) comp_config_helper.parse_config(configured_composites, composite_configs) return sensor_compositors, sensor_modifiers, id_keys def _get_sensor_id_keys(conf, parent_id_keys): try: - id_keys = conf['composite_identification_keys'] + id_keys = conf["composite_identification_keys"] except KeyError: id_keys = parent_id_keys if not id_keys: @@ -270,7 +270,7 @@ def load_compositor_configs_for_sensor(sensor_name: str) -> tuple[dict[str, dict """ config_filename = sensor_name + ".yaml" logger.debug("Looking for composites config file %s", config_filename) - paths = get_entry_points_config_dirs('satpy.composites') + paths = get_entry_points_config_dirs("satpy.composites") composite_configs = config_search_paths( os.path.join("composites", config_filename), search_dirs=paths, check_exists=True) @@ -310,12 +310,12 @@ def load_compositor_configs_for_sensors(sensor_names: Iterable[str]) -> tuple[di def all_composite_sensors(): """Get all sensor names from available composite configs.""" - paths = get_entry_points_config_dirs('satpy.composites') + paths = get_entry_points_config_dirs("satpy.composites") composite_configs = glob_config( os.path.join("composites", "*.yaml"), search_dirs=paths) yaml_names = set([os.path.splitext(os.path.basename(fn))[0] for fn in composite_configs]) - non_sensor_yamls = ('visir',) + non_sensor_yamls = ("visir",) sensor_names = [x for x in yaml_names if x not in non_sensor_yamls] return sensor_names diff --git a/satpy/composites/glm.py b/satpy/composites/glm.py index 48fe6b922c..e9b6ef275e 100644 --- a/satpy/composites/glm.py +++ b/satpy/composites/glm.py @@ -74,7 +74,7 @@ def __init__(self, name, min_highlight=0.0, max_highlight=10.0, def _get_enhanced_background_data(background_layer): img = get_enhanced_image(background_layer) img.data = img.data.clip(0.0, 1.0) - img = img.convert('RGBA') + img = img.convert("RGBA") return img.data def _get_highlight_factor(self, highlight_data): @@ -93,10 +93,10 @@ def _apply_highlight_effect(self, background_data, factor): def _update_attrs(self, new_data, 
background_layer, highlight_layer): new_data.attrs = background_layer.attrs.copy() - new_data.attrs['units'] = 1 + new_data.attrs["units"] = 1 new_sensors = self._get_sensors((highlight_layer, background_layer)) new_data.attrs.update({ - 'sensor': new_sensors, + "sensor": new_sensors, }) def __call__(self, projectables, optional_datasets=None, **attrs): @@ -107,7 +107,7 @@ def __call__(self, projectables, optional_datasets=None, **attrs): # Adjust the colors of background by highlight layer factor = self._get_highlight_factor(highlight_product) new_channels = self._apply_highlight_effect(background_data, factor) - new_data = xr.concat(new_channels, dim='bands') + new_data = xr.concat(new_channels, dim="bands") self._update_attrs(new_data, background_layer, highlight_product) return super(HighlightCompositor, self).__call__((new_data,), **attrs) diff --git a/satpy/composites/viirs.py b/satpy/composites/viirs.py index 1dd0523053..5df2d482af 100644 --- a/satpy/composites/viirs.py +++ b/satpy/composites/viirs.py @@ -90,8 +90,8 @@ def _run_dnb_normalization(self, dnb_data, sza_data): """ # convert dask arrays to DataArray objects - dnb_data = xr.DataArray(dnb_data, dims=('y', 'x')) - sza_data = xr.DataArray(sza_data, dims=('y', 'x')) + dnb_data = xr.DataArray(dnb_data, dims=("y", "x")) + sza_data = xr.DataArray(sza_data, dims=("y", "x")) good_mask = ~(dnb_data.isnull() | sza_data.isnull()) output_dataset = dnb_data.where(good_mask) @@ -904,8 +904,8 @@ def __call__(self, datasets, **info): dnb_data += 2.6e-10 dnb_data *= gtot - mda['name'] = self.attrs['name'] - mda['standard_name'] = 'ncc_radiance' + mda["name"] = self.attrs["name"] + mda["standard_name"] = "ncc_radiance" dnb_data.attrs = mda return dnb_data diff --git a/satpy/dataset/anc_vars.py b/satpy/dataset/anc_vars.py index 071a21d786..90b2d7bd3c 100644 --- a/satpy/dataset/anc_vars.py +++ b/satpy/dataset/anc_vars.py @@ -27,7 +27,7 @@ def dataset_walker(datasets): """ for dataset in datasets: yield dataset, None - for anc_ds in dataset.attrs.get('ancillary_variables', []): + for anc_ds in dataset.attrs.get("ancillary_variables", []): try: anc_ds.attrs yield anc_ds, dataset @@ -40,12 +40,12 @@ def replace_anc(dataset, parent_dataset): if parent_dataset is None: return id_keys = parent_dataset.attrs.get( - '_satpy_id_keys', + "_satpy_id_keys", dataset.attrs.get( - '_satpy_id_keys', + "_satpy_id_keys", default_id_keys_config)) current_dataid = DataID(id_keys, **dataset.attrs) - for idx, ds in enumerate(parent_dataset.attrs['ancillary_variables']): + for idx, ds in enumerate(parent_dataset.attrs["ancillary_variables"]): if current_dataid == DataID(id_keys, **ds.attrs): - parent_dataset.attrs['ancillary_variables'][idx] = dataset + parent_dataset.attrs["ancillary_variables"][idx] = dataset return diff --git a/satpy/dataset/data_dict.py b/satpy/dataset/data_dict.py index eb9d8b9662..790d688b24 100644 --- a/satpy/dataset/data_dict.py +++ b/satpy/dataset/data_dict.py @@ -133,9 +133,9 @@ def keys(self, names=False, wavelengths=False): # sort keys so things are a little more deterministic (.keys() is not) keys = sorted(super(DatasetDict, self).keys()) if names: - return (k.get('name') for k in keys) + return (k.get("name") for k in keys) elif wavelengths: - return (k.get('wavelength') for k in keys) + return (k.get("wavelength") for k in keys) else: return keys @@ -181,7 +181,7 @@ def get(self, key, default=None): def __setitem__(self, key, value): """Support assigning 'Dataset' objects or dictionaries of metadata.""" - if hasattr(value, 'attrs'): + 
if hasattr(value, "attrs"): # xarray.DataArray objects value_info = value.attrs else: @@ -198,7 +198,7 @@ def __setitem__(self, key, value): if isinstance(value_info, dict): value_info.update(new_info) if isinstance(key, DataID): - value_info['_satpy_id'] = key + value_info["_satpy_id"] = key return super(DatasetDict, self).__setitem__(key, value) @@ -215,21 +215,21 @@ def _create_dataid_key(self, key, value_info): else: new_name = value_info.get("name") # this is a new key and it's not a full DataID tuple - if new_name is None and value_info.get('wavelength') is None: + if new_name is None and value_info.get("wavelength") is None: raise ValueError("One of 'name' or 'wavelength' attrs " "values should be set.") id_keys = self._create_id_keys_from_dict(value_info) - value_info['name'] = new_name + value_info["name"] = new_name key = DataID(id_keys, **value_info) return key def _create_id_keys_from_dict(self, value_info_dict): """Create id_keys from dict.""" try: - id_keys = value_info_dict['_satpy_id'].id_keys + id_keys = value_info_dict["_satpy_id"].id_keys except KeyError: try: - id_keys = value_info_dict['_satpy_id_keys'] + id_keys = value_info_dict["_satpy_id_keys"] except KeyError: id_keys = minimal_default_keys_config return id_keys diff --git a/satpy/dataset/dataid.py b/satpy/dataset/dataid.py index ded6cec146..f52c6494b7 100644 --- a/satpy/dataset/dataid.py +++ b/satpy/dataset/dataid.py @@ -34,10 +34,10 @@ def get_keys_from_config(common_id_keys, config): for key, val in common_id_keys.items(): if key in config: id_keys[key] = val - elif val is not None and (val.get('required') is True or val.get('default') is not None): + elif val is not None and (val.get("required") is True or val.get("default") is not None): id_keys[key] = val if not id_keys: - raise ValueError('Metadata does not contain enough information to create a DataID.') + raise ValueError("Metadata does not contain enough information to create a DataID.") return id_keys @@ -57,7 +57,7 @@ def convert(cls, value): try: return cls[value] except KeyError: - raise ValueError('{} invalid value for {}'.format(value, cls)) + raise ValueError("{} invalid value for {}".format(value, cls)) @classmethod def _unpickle(cls, enum_name, enum_members, enum_member): @@ -88,10 +88,10 @@ def __hash__(self): def __repr__(self): """Represent the values.""" - return '<' + str(self) + '>' + return "<" + str(self) + ">" -wlklass = namedtuple("WavelengthRange", "min central max unit", defaults=('µm',)) # type: ignore +wlklass = namedtuple("WavelengthRange", "min central max unit", defaults=("µm",)) # type: ignore class WavelengthRange(wlklass): @@ -196,7 +196,7 @@ def _read_cf_from_string_export(cls, blob): from trollsift import Parser parser = Parser(pattern) res_dict = parser.parse(blob) - res_dict.pop('unit2') + res_dict.pop("unit2") obj = cls(**res_dict) return obj @@ -239,45 +239,45 @@ def __hash__(self): #: Default ID keys DataArrays. 
-default_id_keys_config = {'name': { - 'required': True, +default_id_keys_config = {"name": { + "required": True, }, - 'wavelength': { - 'type': WavelengthRange, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': { - 'transitive': False, + "resolution": { + "transitive": False, }, - 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'counts' + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "counts" ], - 'transitive': True, + "transitive": True, }, - 'modifiers': { - 'default': ModifierTuple(), - 'type': ModifierTuple, + "modifiers": { + "default": ModifierTuple(), + "type": ModifierTuple, }, } #: Default ID keys for coordinate DataArrays. -default_co_keys_config = {'name': { - 'required': True, +default_co_keys_config = {"name": { + "required": True, }, - 'resolution': { - 'transitive': True, + "resolution": { + "transitive": True, } } #: Minimal ID keys for DataArrays, for example composites. -minimal_default_keys_config = {'name': { - 'required': True, +minimal_default_keys_config = {"name": { + "required": True, }, - 'resolution': { - 'transitive': True, + "resolution": { + "transitive": True, } } @@ -312,11 +312,11 @@ def fix_id_keys(id_keys): for key, val in id_keys.items(): if not val: continue - if 'enum' in val and 'type' in val: - raise ValueError('Cannot have both type and enum for the same id key.') + if "enum" in val and "type" in val: + raise ValueError("Cannot have both type and enum for the same id key.") new_val = copy(val) - if 'enum' in val: - new_val['type'] = ValueList(key, ' '.join(new_val.pop('enum'))) + if "enum" in val: + new_val["type"] = ValueList(key, " ".join(new_val.pop("enum"))) new_id_keys[key] = new_val return new_id_keys @@ -328,12 +328,12 @@ def convert_dict(self, keyvals): for key, val in self._id_keys.items(): if val is None: val = {} - if key in keyvals or val.get('default') is not None or val.get('required'): - curated_val = keyvals.get(key, val.get('default')) - if 'required' in val and curated_val is None: - raise ValueError('Required field {} missing.'.format(key)) - if 'type' in val: - curated[key] = val['type'].convert(curated_val) + if key in keyvals or val.get("default") is not None or val.get("required"): + curated_val = keyvals.get(key, val.get("default")) + if "required" in val and curated_val is None: + raise ValueError("Required field {} missing.".format(key)) + if "type" in val: + curated[key] = val["type"].convert(curated_val) elif curated_val is not None: curated[key] = curated_val @@ -355,17 +355,17 @@ def from_dict(self, keyvals): @classmethod def from_dataarray(cls, array, default_keys=minimal_default_keys_config): """Get the DataID using the dataarray attributes.""" - if '_satpy_id' in array.attrs: - return array.attrs['_satpy_id'] + if "_satpy_id" in array.attrs: + return array.attrs["_satpy_id"] return cls.new_id_from_dataarray(array, default_keys) @classmethod def new_id_from_dataarray(cls, array, default_keys=minimal_default_keys_config): """Create a new DataID from a dataarray's attributes.""" try: - id_keys = array.attrs['_satpy_id'].id_keys + id_keys = array.attrs["_satpy_id"].id_keys except KeyError: - id_keys = array.attrs.get('_satpy_id_keys', default_keys) + id_keys = array.attrs.get("_satpy_id_keys", default_keys) return cls(id_keys, **array.attrs) @property @@ -380,7 +380,7 @@ def create_filter_query_without_required_fields(self, query): except AttributeError: new_query = query.copy() for key, val in self._id_keys.items(): - if 
val and (val.get('transitive') is not True): + if val and (val.get("transitive") is not True): new_query.pop(key, None) return DataQuery.from_dict(new_query) @@ -430,7 +430,7 @@ def __hash__(self): def _immutable(self, *args, **kws) -> NoReturn: """Raise and error.""" - raise TypeError('Cannot change a DataID') + raise TypeError("Cannot change a DataID") def __lt__(self, other): """Check lesser than.""" @@ -468,7 +468,7 @@ def _find_modifiers_key(self): def create_less_modified_query(self): """Create a query with one less modifier.""" new_dict = self.to_dict() - new_dict['modifiers'] = tuple(new_dict['modifiers'][:-1]) + new_dict["modifiers"] = tuple(new_dict["modifiers"][:-1]) return DataQuery.from_dict(new_dict) def is_modified(self): @@ -535,7 +535,7 @@ def __hash__(self): fields = [] values = [] for field, value in sorted(self._dict.items()): - if value != '*': + if value != "*": fields.append(field) if isinstance(value, (list, set)): value = tuple(value) @@ -567,7 +567,7 @@ def to_dict(self, trim=True): def _to_trimmed_dict(self): return {key: val for key, val in self._dict.items() - if val != '*'} + if val != "*"} def __repr__(self): """Represent the query.""" @@ -594,7 +594,7 @@ def _shares_required_keys(self, dataid): """Check if dataid shares required keys with the current query.""" for key, val in dataid._id_keys.items(): try: - if val.get('required', False): + if val.get("required", False): if key in self._fields: return True except AttributeError: @@ -603,7 +603,7 @@ def _shares_required_keys(self, dataid): def _match_query_value(self, key, id_val): val = self._dict[key] - if val == '*': + if val == "*": return True if isinstance(id_val, tuple) and isinstance(val, (tuple, list)): return tuple(val) == id_val @@ -663,8 +663,8 @@ def sort_dataids(self, dataids): for key in keys: if distance == np.inf: break - val = self._dict.get(key, '*') - if val == '*': + val = self._dict.get(key, "*") + if val == "*": distance = self._add_absolute_distance(dataid, key, distance) else: try: @@ -710,12 +710,12 @@ def _add_distance_from_query(dataid_val, requested_val, distance): def create_less_modified_query(self): """Create a query with one less modifier.""" new_dict = self.to_dict() - new_dict['modifiers'] = tuple(new_dict['modifiers'][:-1]) + new_dict["modifiers"] = tuple(new_dict["modifiers"][:-1]) return DataQuery.from_dict(new_dict) def is_modified(self): """Check if this is modified.""" - return bool(self._dict.get('modifiers')) + return bool(self._dict.get("modifiers")) def create_filtered_query(dataset_key, filter_query): @@ -734,7 +734,7 @@ def create_filtered_query(dataset_key, filter_query): def _update_dict_with_filter_query(ds_dict, filter_query): if filter_query is not None: for key, value in filter_query.items(): - if value != '*': + if value != "*": ds_dict.setdefault(key, value) @@ -743,9 +743,9 @@ def _create_id_dict_from_any_key(dataset_key): ds_dict = dataset_key.to_dict() except AttributeError: if isinstance(dataset_key, str): - ds_dict = {'name': dataset_key} + ds_dict = {"name": dataset_key} elif isinstance(dataset_key, numbers.Number): - ds_dict = {'wavelength': dataset_key} + ds_dict = {"wavelength": dataset_key} else: raise TypeError("Don't know how to interpret a dataset_key of type {}".format(type(dataset_key))) return ds_dict diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py index 4ba3cde1a1..46f6f622b8 100644 --- a/satpy/dataset/metadata.py +++ b/satpy/dataset/metadata.py @@ -79,7 +79,7 @@ def _combine_shared_info(shared_keys, info_dicts, 
average_times): shared_info = {} for key in shared_keys: values = [info[key] for info in info_dicts] - if 'time' in key and isinstance(values[0], datetime) and average_times: + if "time" in key and isinstance(values[0], datetime) and average_times: shared_info[key] = average_datetimes(values) elif _are_values_combinable(values): shared_info[key] = values[0] @@ -146,7 +146,7 @@ def _all_arrays_equal(arrays): If the arrays are lazy, just check if they have the same identity. """ - if hasattr(arrays[0], 'compute'): + if hasattr(arrays[0], "compute"): return _all_identical(arrays) return _all_values_equal(arrays) diff --git a/satpy/demo/ahi_hsd.py b/satpy/demo/ahi_hsd.py index 784d90719f..5731b23f01 100644 --- a/satpy/demo/ahi_hsd.py +++ b/satpy/demo/ahi_hsd.py @@ -29,7 +29,7 @@ def download_typhoon_surigae_ahi(base_dir=None, This scene shows the Typhoon Surigae. """ import s3fs - base_dir = base_dir or config.get('demo_data_dir', '.') + base_dir = base_dir or config.get("demo_data_dir", ".") channel_resolution = {1: 10, 2: 10, 3: 5, @@ -40,7 +40,7 @@ def download_typhoon_surigae_ahi(base_dir=None, for segment in segments: data_files.append(f"HS_H08_20210417_0500_B{channel:02d}_FLDK_R{resolution:02d}_S{segment:02d}10.DAT.bz2") - subdir = os.path.join(base_dir, 'ahi_hsd', '20210417_0500_typhoon_surigae') + subdir = os.path.join(base_dir, "ahi_hsd", "20210417_0500_typhoon_surigae") os.makedirs(subdir, exist_ok=True) fs = s3fs.S3FileSystem(anon=True) @@ -50,7 +50,7 @@ def download_typhoon_surigae_ahi(base_dir=None, result.append(destination_filename) if os.path.exists(destination_filename): continue - to_get = 'noaa-himawari8/AHI-L1b-FLDK/2021/04/17/0500/' + filename + to_get = "noaa-himawari8/AHI-L1b-FLDK/2021/04/17/0500/" + filename fs.get_file(to_get, destination_filename) return result diff --git a/satpy/dependency_tree.py b/satpy/dependency_tree.py index 331483cabc..d99fb536eb 100644 --- a/satpy/dependency_tree.py +++ b/satpy/dependency_tree.py @@ -431,7 +431,7 @@ def _find_compositor(self, dataset_key, query): compositor = self.get_modifier(dataset_key) except KeyError: raise KeyError("Can't find anything called {}".format(str(dataset_key))) - compositor.attrs['prerequisites'] = [implicit_dependency_node] + list(compositor.attrs['prerequisites']) + compositor.attrs["prerequisites"] = [implicit_dependency_node] + list(compositor.attrs["prerequisites"]) else: try: compositor = self.get_compositor(dataset_key) @@ -446,14 +446,14 @@ def _find_compositor(self, dataset_key, query): # Get the prerequisites LOG.trace("Looking for composite prerequisites for: {}".format(dataset_key)) prereqs = [create_filtered_query(prereq, prerequisite_filter) if not isinstance(prereq, Node) else prereq - for prereq in compositor.attrs['prerequisites']] + for prereq in compositor.attrs["prerequisites"]] prereqs = self._create_required_subtrees(root, prereqs, query=query) root.add_required_nodes(prereqs) # Get the optionals LOG.trace("Looking for optional prerequisites for: {}".format(dataset_key)) optionals = [create_filtered_query(prereq, prerequisite_filter) if not isinstance(prereq, Node) else prereq - for prereq in compositor.attrs['optional_prerequisites']] + for prereq in compositor.attrs["optional_prerequisites"]] optionals = self._create_optional_subtrees(root, optionals, query=query) root.add_optional_nodes(optionals) @@ -501,7 +501,7 @@ def get_compositor(self, key): def get_modifier(self, comp_id): """Get a modifer.""" # create a DataID for the compositor we are generating - modifier = 
comp_id['modifiers'][-1] + modifier = comp_id["modifiers"][-1] for sensor_name in sorted(self.modifiers): modifiers = self.modifiers[sensor_name] compositors = self.compositors[sensor_name] @@ -511,7 +511,7 @@ def get_modifier(self, comp_id): mloader, moptions = modifiers[modifier] moptions = moptions.copy() moptions.update(comp_id.to_dict()) - moptions['sensor'] = sensor_name + moptions["sensor"] = sensor_name compositors[comp_id] = mloader(_satpy_id=comp_id, **moptions) return compositors[comp_id] @@ -544,7 +544,7 @@ def _create_optional_subtrees(self, parent, prereqs, query=None): for prereq, unknowns in unknown_datasets.items(): u_str = ", ".join([str(x) for x in unknowns]) - LOG.debug('Skipping optional %s: Unknown dataset %s', + LOG.debug("Skipping optional %s: Unknown dataset %s", str(prereq), u_str) return prereq_nodes diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py index 6f6a66654d..b74cc2c8bd 100644 --- a/satpy/enhancements/__init__.py +++ b/satpy/enhancements/__init__.py @@ -57,8 +57,8 @@ def exclude_alpha(func): @wraps(func) def wrapper(data, **kwargs): - bands = data.coords['bands'].values - exclude = ['A'] if 'A' in bands else [] + bands = data.coords["bands"].values + exclude = ["A"] if "A" in bands else [] band_data = data.sel(bands=[b for b in bands if b not in exclude]) band_data = func(band_data, **kwargs) @@ -67,7 +67,7 @@ def wrapper(data, **kwargs): attrs.update(band_data.attrs) # combine the new data with the excluded data new_data = xr.concat([band_data, data.sel(bands=exclude)], - dim='bands') + dim="bands") data.data = new_data.sel(bands=bands).data data.attrs = attrs return data @@ -92,12 +92,12 @@ def my_enhancement_function(data): def wrapper(data, **kwargs): attrs = data.attrs data_arrs = [] - for idx, band in enumerate(data.coords['bands'].values): + for idx, band in enumerate(data.coords["bands"].values): band_data = func(data.sel(bands=[band]), index=idx, **kwargs) data_arrs.append(band_data) # we assume that the func can add attrs attrs.update(band_data.attrs) - data.data = xr.concat(data_arrs, dim='bands').data + data.data = xr.concat(data_arrs, dim="bands").data data.attrs = attrs return data @@ -248,9 +248,9 @@ def reinhard_to_srgb(img, saturation=1.25, white=100, **kwargs): white /= 100 # extract color components - r = rgb.sel(bands='R').data - g = rgb.sel(bands='G').data - b = rgb.sel(bands='B').data + r = rgb.sel(bands="R").data + g = rgb.sel(bands="G").data + b = rgb.sel(bands="B").data # saturate luma = _compute_luminance_from_rgb(r, g, b) @@ -280,7 +280,7 @@ def _srgb_gamma(arr): def lookup(img, **kwargs): """Assign values to channels based on a table.""" - luts = np.array(kwargs['luts'], dtype=np.float32) / 255.0 + luts = np.array(kwargs["luts"], dtype=np.float32) / 255.0 return _lookup_table(img.data, luts=luts) @@ -352,7 +352,7 @@ def _merge_colormaps(kwargs, img=None): from trollimage.colormap import Colormap full_cmap = None - palette = kwargs['palettes'] + palette = kwargs["palettes"] if isinstance(palette, Colormap): full_cmap = palette else: @@ -457,11 +457,11 @@ def create_colormap(palette, img=None): information. 
""" - fname = palette.get('filename', None) - colors = palette.get('colors', None) + fname = palette.get("filename", None) + colors = palette.get("colors", None) dataset = palette.get("dataset", None) # are colors between 0-255 or 0-1 - color_scale = palette.get('color_scale', 255) + color_scale = palette.get("color_scale", 255) if fname: if not os.path.exists(fname): fname = get_config_path(fname) @@ -477,9 +477,9 @@ def create_colormap(palette, img=None): if palette.get("reverse", False): cmap.reverse() - if 'min_value' in palette and 'max_value' in palette: + if "min_value" in palette and "max_value" in palette: cmap.set_range(palette["min_value"], palette["max_value"]) - elif 'min_value' in palette or 'max_value' in palette: + elif "min_value" in palette or "max_value" in palette: raise ValueError("Both 'min_value' and 'max_value' must be specified (or neither)") return cmap @@ -498,12 +498,12 @@ def _create_colormap_from_dataset(img, dataset, color_scale): def three_d_effect(img, **kwargs): """Create 3D effect using convolution.""" - w = kwargs.get('weight', 1) + w = kwargs.get("weight", 1) LOG.debug("Applying 3D effect with weight %.2f", w) kernel = np.array([[-w, 0, w], [-w, 1, w], [-w, 0, w]]) - mode = kwargs.get('convolve_mode', 'same') + mode = kwargs.get("convolve_mode", "same") return _three_d_effect(img.data, kernel=kernel, mode=mode) @@ -582,7 +582,7 @@ def jma_true_color_reproduction(img): https://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html """ _jma_true_color_reproduction(img.data, - platform=img.data.attrs['platform_name']) + platform=img.data.attrs["platform_name"]) @exclude_alpha @@ -597,29 +597,29 @@ def _jma_true_color_reproduction(img_data, platform=None): """ # Conversion matrix dictionaries specifying sensor and platform. 
- ccm_dict = {'himawari-8': np.array([[1.1629, 0.1539, -0.2175], + ccm_dict = {"himawari-8": np.array([[1.1629, 0.1539, -0.2175], [-0.0252, 0.8725, 0.1300], [-0.0204, -0.1100, 1.0633]]), - 'himawari-9': np.array([[1.1619, 0.1542, -0.2168], + "himawari-9": np.array([[1.1619, 0.1542, -0.2168], [-0.0271, 0.8749, 0.1295], [-0.0202, -0.1103, 1.0634]]), - 'goes-16': np.array([[1.1425, 0.1819, -0.2250], + "goes-16": np.array([[1.1425, 0.1819, -0.2250], [-0.0951, 0.9363, 0.1360], [-0.0113, -0.1179, 1.0621]]), - 'goes-17': np.array([[1.1437, 0.1818, -0.2262], + "goes-17": np.array([[1.1437, 0.1818, -0.2262], [-0.0952, 0.9354, 0.1371], [-0.0113, -0.1178, 1.0620]]), - 'goes-18': np.array([[1.1629, 0.1539, -0.2175], + "goes-18": np.array([[1.1629, 0.1539, -0.2175], [-0.0252, 0.8725, 0.1300], [-0.0204, -0.1100, 1.0633]]), - 'mtg-i1': np.array([[0.9007, 0.2086, -0.0100], + "mtg-i1": np.array([[0.9007, 0.2086, -0.0100], [-0.0475, 1.0662, -0.0414], [-0.0123, -0.1342, 1.0794]]), - 'geo-kompsat-2a': np.array([[1.1661, 0.1489, -0.2157], + "geo-kompsat-2a": np.array([[1.1661, 0.1489, -0.2157], [-0.0255, 0.8745, 0.1282], [-0.0205, -0.1103, 1.0637]]), } diff --git a/satpy/enhancements/mimic.py b/satpy/enhancements/mimic.py index c2b028f6e8..3a72d5b247 100644 --- a/satpy/enhancements/mimic.py +++ b/satpy/enhancements/mimic.py @@ -296,11 +296,11 @@ def nrl_colors(img, **kwargs): [74.72527472527473, [218, 131, 185]], [75.0, [220, 135, 189]], ]} - kwargs['palettes'].update(nrl_tpw_colors) - palette = kwargs['palettes'] - palette['colors'] = tuple(map(tuple, palette['colors'])) + kwargs["palettes"].update(nrl_tpw_colors) + palette = kwargs["palettes"] + palette["colors"] = tuple(map(tuple, palette["colors"])) - cm = Colormap(*palette['colors']) + cm = Colormap(*palette["colors"]) img.palettize(cm) @@ -310,8 +310,8 @@ def total_precipitable_water(img, **kwargs): This modifies the image's data so the correct colors can be applied to it, and then palettizes the image. """ - palette = kwargs['palettes'] - palette['colors'] = tuple(map(tuple, palette['colors'])) + palette = kwargs["palettes"] + palette["colors"] = tuple(map(tuple, palette["colors"])) - cm = Colormap(*palette['colors']) + cm = Colormap(*palette["colors"]) img.palettize(cm) diff --git a/satpy/enhancements/viirs.py b/satpy/enhancements/viirs.py index 627fc80220..3ed5e2dd5f 100644 --- a/satpy/enhancements/viirs.py +++ b/satpy/enhancements/viirs.py @@ -27,11 +27,11 @@ def water_detection(img, **kwargs): This modifies the image's data so the correct colors can be applied to it, and then palettizes the image. 
""" - palette = kwargs['palettes'] - palette['colors'] = tuple(map(tuple, palette['colors'])) + palette = kwargs["palettes"] + palette["colors"] = tuple(map(tuple, palette["colors"])) _water_detection(img.data) - cm = Colormap(*palette['colors']) + cm = Colormap(*palette["colors"]) img.palettize(cm) diff --git a/satpy/modifiers/_crefl.py b/satpy/modifiers/_crefl.py index 3d34ab9d93..bc42228f26 100644 --- a/satpy/modifiers/_crefl.py +++ b/satpy/modifiers/_crefl.py @@ -70,7 +70,7 @@ def _get_registered_dem_cache_key(self): if not self.url: return reg_files = self.register_data_files([{ - 'url': self.url, 'known_hash': self.known_hash} + "url": self.url, "known_hash": self.known_hash} ]) return reg_files[0] diff --git a/satpy/modifiers/_crefl_utils.py b/satpy/modifiers/_crefl_utils.py index c8d6920056..e83d43b5e2 100644 --- a/satpy/modifiers/_crefl_utils.py +++ b/satpy/modifiers/_crefl_utils.py @@ -282,7 +282,7 @@ def run_crefl(refl, :param avg_elevation: average elevation (usually pre-calculated and stored in CMGDEM.hdf) """ - runner_cls = _runner_class_for_sensor(refl.attrs['sensor']) + runner_cls = _runner_class_for_sensor(refl.attrs["sensor"]) runner = runner_cls(refl) corr_refl = runner(sensor_azimuth, sensor_zenith, solar_azimuth, solar_zenith, avg_elevation) return corr_refl @@ -326,7 +326,7 @@ def _height_from_avg_elevation(self, avg_elevation: Optional[np.ndarray]) -> da. height = 0. else: LOG.debug("Using average elevation information provided to CREFL") - lon, lat = self._refl.attrs['area'].get_lonlats(chunks=self._refl.chunks) + lon, lat = self._refl.attrs["area"].get_lonlats(chunks=self._refl.chunks) height = da.map_blocks(_space_mask_height, lon, lat, avg_elevation, chunks=lon.chunks, dtype=avg_elevation.dtype) return height diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 28adb60028..8adf32f3d0 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -257,7 +257,7 @@ def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES): arg = arg.isoformat(" ") hashable_args.append(arg) arg_hash = hashlib.sha1() # nosec - arg_hash.update(json.dumps(tuple(hashable_args)).encode('utf8')) + arg_hash.update(json.dumps(tuple(hashable_args)).encode("utf8")) return arg_hash.hexdigest() @@ -320,7 +320,7 @@ def _chunks_are_irregular(chunks_tuple: tuple) -> bool: def _geo_dask_to_data_array(arr: da.Array) -> xr.DataArray: - return xr.DataArray(arr, dims=('y', 'x')) + return xr.DataArray(arr, dims=("y", "x")) def compute_relative_azimuth(sat_azi: xr.DataArray, sun_azi: xr.DataArray) -> xr.DataArray: @@ -447,7 +447,7 @@ def _get_sun_azimuth_ndarray(lons: np.ndarray, lats: np.ndarray, start_time: dat def _get_sensor_angles(data_arr: xr.DataArray) -> tuple[xr.DataArray, xr.DataArray]: - preference = satpy.config.get('sensor_angles_position_preference', 'actual') + preference = satpy.config.get("sensor_angles_position_preference", "actual") sat_lon, sat_lat, sat_alt = get_satpos(data_arr, preference=preference) area_def = data_arr.attrs["area"] chunks = _geo_chunks_from_data_arr(data_arr) @@ -531,7 +531,7 @@ def _sunzen_corr_cos_ndarray(data: np.ndarray, # gradually fall off for larger zenith angle grad_factor = (np.arccos(cos_zen) - limit_rad) / (max_sza_rad - limit_rad) # invert the factor so maximum correction is done at `limit` and falls off later - with np.errstate(invalid='ignore'): # we expect space pixels to be invalid + with np.errstate(invalid="ignore"): # we expect space pixels to be invalid grad_factor = 1. 
- np.log(grad_factor + 1) / np.log(2) # make sure we don't make anything negative grad_factor = grad_factor.clip(0.) diff --git a/satpy/modifiers/atmosphere.py b/satpy/modifiers/atmosphere.py index e804982330..1c6225f42a 100644 --- a/satpy/modifiers/atmosphere.py +++ b/satpy/modifiers/atmosphere.py @@ -93,28 +93,28 @@ def __call__(self, projectables, optional_datasets=None, **info): ssadiff = compute_relative_azimuth(sata, suna) del sata, suna - atmosphere = self.attrs.get('atmosphere', 'us-standard') - aerosol_type = self.attrs.get('aerosol_type', 'marine_clean_aerosol') - reduce_lim_low = abs(self.attrs.get('reduce_lim_low', 70)) - reduce_lim_high = abs(self.attrs.get('reduce_lim_high', 105)) - reduce_strength = np.clip(self.attrs.get('reduce_strength', 0), 0, 1) + atmosphere = self.attrs.get("atmosphere", "us-standard") + aerosol_type = self.attrs.get("aerosol_type", "marine_clean_aerosol") + reduce_lim_low = abs(self.attrs.get("reduce_lim_low", 70)) + reduce_lim_high = abs(self.attrs.get("reduce_lim_high", 105)) + reduce_strength = np.clip(self.attrs.get("reduce_strength", 0), 0, 1) logger.info("Removing Rayleigh scattering with atmosphere '%s' and " "aerosol type '%s' for '%s'", - atmosphere, aerosol_type, vis.attrs['name']) - corrector = Rayleigh(vis.attrs['platform_name'], vis.attrs['sensor'], + atmosphere, aerosol_type, vis.attrs["name"]) + corrector = Rayleigh(vis.attrs["platform_name"], vis.attrs["sensor"], atmosphere=atmosphere, aerosol_type=aerosol_type) try: refl_cor_band = corrector.get_reflectance(sunz, satz, ssadiff, - vis.attrs['name'], + vis.attrs["name"], red.data) except (KeyError, IOError): - logger.warning("Could not get the reflectance correction using band name: %s", vis.attrs['name']) + logger.warning("Could not get the reflectance correction using band name: %s", vis.attrs["name"]) logger.warning("Will try use the wavelength, however, this may be ambiguous!") refl_cor_band = corrector.get_reflectance(sunz, satz, ssadiff, - vis.attrs['wavelength'][1], + vis.attrs["wavelength"][1], red.data) if reduce_strength > 0: @@ -155,13 +155,13 @@ def __call__(self, projectables, optional_datasets=None, **info): satz = get_satellite_zenith_angle(band) satz = satz.data # get dask array underneath - logger.info('Correction for limb cooling') - corrector = AtmosphericalCorrection(band.attrs['platform_name'], - band.attrs['sensor']) + logger.info("Correction for limb cooling") + corrector = AtmosphericalCorrection(band.attrs["platform_name"], + band.attrs["sensor"]) atm_corr = da.map_blocks(_call_mapped_correction, satz, band.data, corrector=corrector, - band_name=band.attrs['name'], + band_name=band.attrs["name"], meta=np.array((), dtype=band.dtype)) proj = xr.DataArray(atm_corr, attrs=band.attrs, dims=band.dims, coords=band.coords) @@ -187,7 +187,7 @@ class CO2Corrector(ModifierBase): def __call__(self, projectables, optional_datasets=None, **info): """Apply correction.""" ir_039, ir_108, ir_134 = projectables - logger.info('Applying CO2 correction') + logger.info("Applying CO2 correction") dt_co2 = (ir_108 - ir_134) / 4.0 rcorr = ir_108 ** 4 - (ir_108 - dt_co2) ** 4 t4_co2corr = (ir_039 ** 4 + rcorr).clip(0.0) ** 0.25 diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py index ecd83f80e5..a43a530c0e 100644 --- a/satpy/modifiers/geometry.py +++ b/satpy/modifiers/geometry.py @@ -47,14 +47,14 @@ def __init__(self, max_sza=95.0, **kwargs): def __call__(self, projectables, **info): """Generate the composite.""" - projectables = 
self.match_data_arrays(list(projectables) + list(info.get('optional_datasets', []))) + projectables = self.match_data_arrays(list(projectables) + list(info.get("optional_datasets", []))) vis = projectables[0] if vis.attrs.get("sunz_corrected"): logger.debug("Sun zenith correction already applied") return vis logger.debug("Applying sun zen correction") - if not info.get('optional_datasets'): + if not info.get("optional_datasets"): # we were not given SZA, generate cos(SZA) logger.debug("Computing sun zenith angles.") from .angles import get_cos_sza diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index 6fbf695972..e3ea3214b8 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -77,19 +77,19 @@ def _get_reflectance_as_dataarray(self, projectables, optional_datasets): da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) da_sun_zenith = self._get_sun_zenith_from_provided_data(projectables, optional_datasets) - logger.info('Getting reflective part of %s', _nir.attrs['name']) + logger.info("Getting reflective part of %s", _nir.attrs["name"]) reflectance = self._get_reflectance_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, _nir.attrs) proj = self._create_modified_dataarray(reflectance, base_dataarray=_nir) - proj.attrs['units'] = '%' + proj.attrs["units"] = "%" return proj @staticmethod def _get_tb13_4_from_optionals(optional_datasets): tb13_4 = None for dataset in optional_datasets: - wavelengths = dataset.attrs.get('wavelength', [100., 0, 0]) - if (dataset.attrs.get('units') == 'K' and + wavelengths = dataset.attrs.get("wavelength", [100., 0, 0]) + if (dataset.attrs.get("units") == "K" and wavelengths[0] <= 13.4 <= wavelengths[2]): tb13_4 = dataset.data return tb13_4 @@ -108,14 +108,14 @@ def _get_sun_zenith_from_provided_data(projectables, optional_datasets): raise ImportError("Module pyorbital.astronomy needed to compute sun zenith angles.") _nir = projectables[0] lons, lats = _nir.attrs["area"].get_lonlats(chunks=_nir.data.chunks) - sun_zenith = sun_zenith_angle(_nir.attrs['start_time'], lons, lats) + sun_zenith = sun_zenith_angle(_nir.attrs["start_time"], lons, lats) return sun_zenith def _create_modified_dataarray(self, reflectance, base_dataarray): proj = xr.DataArray(reflectance, dims=base_dataarray.dims, coords=base_dataarray.coords, attrs=base_dataarray.attrs.copy()) - proj.attrs['sun_zenith_threshold'] = self.sun_zenith_threshold - proj.attrs['sun_zenith_masking_limit'] = self.masking_limit + proj.attrs["sun_zenith_threshold"] = self.sun_zenith_threshold + proj.attrs["sun_zenith_masking_limit"] = self.masking_limit self.apply_modifier_info(base_dataarray, proj) return proj @@ -130,7 +130,7 @@ def _init_reflectance_calculator(self, metadata): logger.info("Couldn't load pyspectral") raise ImportError("No module named pyspectral.near_infrared_reflectance") - reflectance_3x_calculator = Calculator(metadata['platform_name'], metadata['sensor'], metadata['name'], + reflectance_3x_calculator = Calculator(metadata["platform_name"], metadata["sensor"], metadata["name"], sunz_threshold=self.sun_zenith_threshold, masking_limit=self.masking_limit) return reflectance_3x_calculator @@ -169,11 +169,11 @@ def _get_emissivity_as_dataarray(self, projectables, optional_datasets): da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) da_sun_zenith = self._get_sun_zenith_from_provided_data(projectables, optional_datasets) - logger.info('Getting emissive part of %s', _nir.attrs['name']) + logger.info("Getting emissive part of %s", 
_nir.attrs["name"]) emissivity = self._get_emissivity_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, _nir.attrs) proj = self._create_modified_dataarray(emissivity, base_dataarray=_nir) - proj.attrs['units'] = 'K' + proj.attrs["units"] = "K" return proj def _get_emissivity_as_dask(self, da_nir, da_tb11, da_tb13_4, da_sun_zenith, metadata): diff --git a/satpy/multiscene/_blend_funcs.py b/satpy/multiscene/_blend_funcs.py index 0210cef5cc..e77a7765fb 100644 --- a/satpy/multiscene/_blend_funcs.py +++ b/satpy/multiscene/_blend_funcs.py @@ -14,7 +14,7 @@ def stack( data_arrays: Sequence[xr.DataArray], weights: Optional[Sequence[xr.DataArray]] = None, combine_times: bool = True, - blend_type: str = 'select_with_weights' + blend_type: str = "select_with_weights" ) -> xr.DataArray: """Combine a series of datasets in different ways. @@ -143,7 +143,7 @@ def _stack_no_weights( def _combine_stacked_attrs(collected_attrs: Sequence[Mapping], combine_times: bool) -> dict: attrs = combine_metadata(*collected_attrs) - if combine_times and ('start_time' in attrs or 'end_time' in attrs): + if combine_times and ("start_time" in attrs or "end_time" in attrs): new_start, new_end = _get_combined_start_end_times(collected_attrs) if new_start: attrs["start_time"] = new_start @@ -157,10 +157,10 @@ def _get_combined_start_end_times(metadata_objects: Iterable[Mapping]) -> tuple[ start_time = None end_time = None for md_obj in metadata_objects: - if "start_time" in md_obj and (start_time is None or md_obj['start_time'] < start_time): - start_time = md_obj['start_time'] - if "end_time" in md_obj and (end_time is None or md_obj['end_time'] > end_time): - end_time = md_obj['end_time'] + if "start_time" in md_obj and (start_time is None or md_obj["start_time"] < start_time): + start_time = md_obj["start_time"] + if "end_time" in md_obj and (end_time is None or md_obj["end_time"] > end_time): + end_time = md_obj["end_time"] return start_time, end_time @@ -168,7 +168,7 @@ def timeseries(datasets): """Expand dataset with and concatenate by time dimension.""" expanded_ds = [] for ds in datasets: - if 'time' not in ds.dims: + if "time" not in ds.dims: tmp = ds.expand_dims("time") tmp.coords["time"] = pd.DatetimeIndex([ds.attrs["start_time"]]) else: diff --git a/satpy/multiscene/_multiscene.py b/satpy/multiscene/_multiscene.py index d803758b88..11c4a4888f 100644 --- a/satpy/multiscene/_multiscene.py +++ b/satpy/multiscene/_multiscene.py @@ -87,12 +87,12 @@ def _duplicate_dataset_with_group_alias(self, group_id, group_members): alias_id=group_id, ) elif len(member_ids) > 1: - raise ValueError('Cannot add multiple datasets from a scene ' - 'to the same group') + raise ValueError("Cannot add multiple datasets from a scene " + "to the same group") def _get_dataset_id_of_group_members_in_scene(self, group_members): return [ - self.scene[member].attrs['_satpy_id'] + self.scene[member].attrs["_satpy_id"] for member in group_members if member in self.scene ] @@ -281,7 +281,7 @@ def _all_same_area(self, dataset_ids): ds = scn.get(ds_id) if ds is None: continue - all_areas.append(ds.attrs.get('area')) + all_areas.append(ds.attrs.get("area")) all_areas = [area for area in all_areas if area is not None] return all(all_areas[0] == area for area in all_areas[1:]) @@ -314,15 +314,15 @@ def _generate_scene_func(self, gen, func_name, create_new_scene, *args, **kwargs def load(self, *args, **kwargs): """Load the required datasets from the multiple scenes.""" - self._generate_scene_func(self._scenes, 'load', False, *args, **kwargs) + 
self._generate_scene_func(self._scenes, "load", False, *args, **kwargs) def crop(self, *args, **kwargs): """Crop the multiscene and return a new cropped multiscene.""" - return self._generate_scene_func(self._scenes, 'crop', True, *args, **kwargs) + return self._generate_scene_func(self._scenes, "crop", True, *args, **kwargs) def resample(self, destination=None, **kwargs): """Resample the multiscene.""" - return self._generate_scene_func(self._scenes, 'resample', True, destination=destination, **kwargs) + return self._generate_scene_func(self._scenes, "resample", True, destination=destination, **kwargs) def blend( self, @@ -447,7 +447,7 @@ def save_datasets(self, client=True, batch_size=1, **kwargs): Note ``compute`` can not be provided. """ - if 'compute' in kwargs: + if "compute" in kwargs: raise ValueError("The 'compute' keyword argument can not be provided.") client = self._get_client(client=client) @@ -466,15 +466,15 @@ def _get_animation_info(self, all_datasets, filename, fill_value=None): first_img = get_enhanced_image(first_dataset) first_img_data = first_img.finalize(fill_value=fill_value)[0] shape = tuple(first_img_data.sizes.get(dim_name) - for dim_name in ('y', 'x', 'bands')) - if fill_value is None and filename.endswith('gif'): + for dim_name in ("y", "x", "bands")) + if fill_value is None and filename.endswith("gif"): log.warning("Forcing fill value to '0' for GIF Luminance images") fill_value = 0 shape = shape[:2] attrs = first_dataset.attrs.copy() - if 'end_time' in last_dataset.attrs: - attrs['end_time'] = last_dataset.attrs['end_time'] + if "end_time" in last_dataset.attrs: + attrs["end_time"] = last_dataset.attrs["end_time"] this_fn = filename.format(**attrs) return this_fn, shape, fill_value @@ -508,7 +508,7 @@ def _get_single_frame(self, ds, enh_args, fill_value): # assume all other shapes are (y, x) # we need arrays grouped by pixel so # transpose if needed - data = data.transpose('y', 'x', 'bands') + data = data.transpose("y", "x", "bands") return data def _get_animation_frames(self, all_datasets, shape, fill_value=None, @@ -603,7 +603,7 @@ def _get_writers_and_frames( first_scene = self.first_scene scenes = iter(self._scene_gen) info_scenes = [first_scene] - if 'end_time' in filename: + if "end_time" in filename: # if we need the last scene to generate the filename # then compute all the scenes so we can figure it out log.debug("Generating scenes to compute end_time for filename") diff --git a/satpy/node.py b/satpy/node.py index f1cf401057..191ec0bbcf 100644 --- a/satpy/node.py +++ b/satpy/node.py @@ -121,8 +121,8 @@ def display(self, previous=0, include_data=False): """Display the node.""" no_data = " (No Data)" if self.data is None else "" return ( - (" +" * previous) + str(self.name) + no_data + '\n' + - ''.join([child.display(previous + 1) for child in self.children])) + (" +" * previous) + str(self.name) + no_data + "\n" + + "".join([child.display(previous + 1) for child in self.children])) def leaves(self, unique=True): """Get the leaves of the tree starting at this root.""" @@ -204,12 +204,12 @@ class ReaderNode(Node): def __init__(self, unique_id, reader_name): """Set up the node.""" - super().__init__(unique_id, data={'reader_name': reader_name}) + super().__init__(unique_id, data={"reader_name": reader_name}) def _copy_name_and_data(self, node_cache): - return ReaderNode(self.name, self.data['reader_name']) + return ReaderNode(self.name, self.data["reader_name"]) @property def reader_name(self): """Get the name of the reader.""" - return 
self.data['reader_name'] + return self.data["reader_name"] diff --git a/satpy/plugin_base.py b/satpy/plugin_base.py index ee19341796..286b5fc335 100644 --- a/satpy/plugin_base.py +++ b/satpy/plugin_base.py @@ -60,5 +60,5 @@ def __init__(self, default_config_filename=None, config_files=None, **kwargs): def load_yaml_config(self, conf): """Load a YAML configuration file and recursively update the overall configuration.""" - with open(conf, 'r', encoding='utf-8') as fd: + with open(conf, "r", encoding="utf-8") as fd: self.config = recursive_dict_update(self.config, yaml.load(fd, Loader=UnsafeLoader)) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 2b1bbc37ba..81ebf2393b 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -37,7 +37,7 @@ # Old Name -> New Name -PENDING_OLD_READER_NAMES = {'fci_l1c_fdhsi': 'fci_l1c_nc', 'viirs_l2_cloud_mask_nc': 'viirs_edr'} +PENDING_OLD_READER_NAMES = {"fci_l1c_fdhsi": "fci_l1c_nc", "viirs_l2_cloud_mask_nc": "viirs_edr"} OLD_READER_NAMES: dict[str, str] = {} @@ -171,7 +171,7 @@ def _get_file_keys_for_reader_files(reader_files, group_keys=None): file_keys = {} for (reader_name, (reader_instance, files_to_sort)) in reader_files.items(): if group_keys is None: - group_keys = reader_instance.info.get('group_keys', ('start_time',)) + group_keys = reader_instance.info.get("group_keys", ("start_time",)) file_keys[reader_name] = [] # make a copy because filename_items_for_filetype will modify inplace files_to_sort = set(files_to_sort) @@ -301,7 +301,7 @@ def _get_keys_with_empty_values(grp): def read_reader_config(config_files, loader=UnsafeLoader): """Read the reader `config_files` and return the extracted reader metadata.""" reader_config = load_yaml_reader_configs(*config_files, loader=loader) - return reader_config['reader'] + return reader_config["reader"] def load_reader(reader_configs, **reader_kwargs): @@ -324,16 +324,16 @@ def configs_for_reader(reader=None): reader = get_valid_reader_names(reader) # given a config filename or reader name - config_files = [r if r.endswith('.yaml') else r + '.yaml' for r in reader] + config_files = [r if r.endswith(".yaml") else r + ".yaml" for r in reader] else: - paths = get_entry_points_config_dirs('satpy.readers') - reader_configs = glob_config(os.path.join('readers', '*.yaml'), search_dirs=paths) + paths = get_entry_points_config_dirs("satpy.readers") + reader_configs = glob_config(os.path.join("readers", "*.yaml"), search_dirs=paths) config_files = set(reader_configs) for config_file in config_files: config_basename = os.path.basename(config_file) reader_name = os.path.splitext(config_basename)[0] - paths = get_entry_points_config_dirs('satpy.readers') + paths = get_entry_points_config_dirs("satpy.readers") reader_configs = config_search_paths( os.path.join("readers", config_basename), search_dirs=paths, check_exists=True) @@ -393,9 +393,9 @@ def available_readers(as_dict=False, yaml_loader=UnsafeLoader): LOG.debug("Could not import reader config from: %s", reader_configs) LOG.debug("Error loading YAML", exc_info=True) continue - readers.append(reader_info if as_dict else reader_info['name']) + readers.append(reader_info if as_dict else reader_info["name"]) if as_dict: - readers = sorted(readers, key=lambda reader_info: reader_info['name']) + readers = sorted(readers, key=lambda reader_info: reader_info["name"]) else: readers = sorted(readers) return readers @@ -467,13 +467,13 @@ def find_files_and_readers(start_time=None, end_time=None, base_dir=None, """ reader_files = {} 
reader_kwargs = reader_kwargs or {} - filter_parameters = filter_parameters or reader_kwargs.get('filter_parameters', {}) + filter_parameters = filter_parameters or reader_kwargs.get("filter_parameters", {}) sensor_supported = False if start_time or end_time: - filter_parameters['start_time'] = start_time - filter_parameters['end_time'] = end_time - reader_kwargs['filter_parameters'] = filter_parameters + filter_parameters["start_time"] = start_time + filter_parameters["end_time"] = end_time + reader_kwargs["filter_parameters"] = filter_parameters for reader_configs in configs_for_reader(reader): (reader_instance, loadables, this_sensor_supported) = _get_loadables_for_reader_config( @@ -509,7 +509,7 @@ def _get_loadables_for_reader_config(base_dir, reader, sensor, reader_configs, try: reader_instance = load_reader(reader_configs, **reader_kwargs) except (KeyError, IOError, yaml.YAMLError) as err: - LOG.info('Cannot use %s', str(reader_configs)) + LOG.info("Cannot use %s", str(reader_configs)) LOG.debug(str(err)) if reader and (isinstance(reader, str) or len(reader) == 1): # if it is a single reader then give a more usable error @@ -563,7 +563,7 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None): reader_configs, **reader_kwargs[None if reader is None else reader[idx]]) except (KeyError, IOError, yaml.YAMLError) as err: - LOG.info('Cannot use %s', str(reader_configs)) + LOG.info("Cannot use %s", str(reader_configs)) LOG.debug(str(err)) continue @@ -644,7 +644,7 @@ def _get_reader_kwargs(reader, reader_kwargs): reader_kwargs_without_filter = {} for (k, v) in reader_kwargs.items(): reader_kwargs_without_filter[k] = v.copy() - reader_kwargs_without_filter[k].pop('filter_parameters', None) + reader_kwargs_without_filter[k].pop("filter_parameters", None) return (reader_kwargs, reader_kwargs_without_filter) diff --git a/satpy/readers/_geos_area.py b/satpy/readers/_geos_area.py index 0775e51381..e777d78ca7 100644 --- a/satpy/readers/_geos_area.py +++ b/satpy/readers/_geos_area.py @@ -79,7 +79,7 @@ def get_area_extent(pdict): # count starts at 1 cols = 1 - 0.5 - if pdict['scandir'] == 'S2N': + if pdict["scandir"] == "S2N": lines = 0.5 - 1 scanmult = -1 else: @@ -88,22 +88,22 @@ def get_area_extent(pdict): # Lower left x, y scanning angles in degrees ll_x, ll_y = get_xy_from_linecol(lines * scanmult, cols, - (pdict['loff'], pdict['coff']), - (pdict['lfac'], pdict['cfac'])) + (pdict["loff"], pdict["coff"]), + (pdict["lfac"], pdict["cfac"])) - cols += pdict['ncols'] - lines += pdict['nlines'] + cols += pdict["ncols"] + lines += pdict["nlines"] # Upper right x, y scanning angles in degrees ur_x, ur_y = get_xy_from_linecol(lines * scanmult, cols, - (pdict['loff'], pdict['coff']), - (pdict['lfac'], pdict['cfac'])) - if pdict['scandir'] == 'S2N': + (pdict["loff"], pdict["coff"]), + (pdict["lfac"], pdict["cfac"])) + if pdict["scandir"] == "S2N": ll_y *= -1 ur_y *= -1 # Convert degrees to radians and create area extent - aex = make_ext(ll_x=ll_x, ur_x=ur_x, ll_y=ll_y, ur_y=ur_y, h=pdict['h']) + aex = make_ext(ll_x=ll_x, ur_x=ur_x, ll_y=ll_y, ur_y=ur_y, h=pdict["h"]) return aex @@ -132,20 +132,20 @@ def get_area_definition(pdict, a_ext): The AreaDefinition `proj_id` attribute is being deprecated. 
""" - proj_dict = {'a': float(pdict['a']), - 'b': float(pdict['b']), - 'lon_0': float(pdict['ssp_lon']), - 'h': float(pdict['h']), - 'proj': 'geos', - 'units': 'm'} + proj_dict = {"a": float(pdict["a"]), + "b": float(pdict["b"]), + "lon_0": float(pdict["ssp_lon"]), + "h": float(pdict["h"]), + "proj": "geos", + "units": "m"} a_def = geometry.AreaDefinition( - pdict['a_name'], - pdict['a_desc'], - pdict['p_id'], + pdict["a_name"], + pdict["a_desc"], + pdict["p_id"], proj_dict, - int(pdict['ncols']), - int(pdict['nlines']), + int(pdict["ncols"]), + int(pdict["nlines"]), a_ext) return a_def @@ -189,21 +189,21 @@ def get_geos_area_naming(input_dict): """ area_naming_dict = {} - resolution_strings = get_resolution_and_unit_strings(input_dict['resolution']) + resolution_strings = get_resolution_and_unit_strings(input_dict["resolution"]) - area_naming_dict['area_id'] = '{}_{}_{}_{}{}'.format(input_dict['platform_name'].lower(), - input_dict['instrument_name'].lower(), - input_dict['service_name'].lower(), - resolution_strings['value'], - resolution_strings['unit'] + area_naming_dict["area_id"] = "{}_{}_{}_{}{}".format(input_dict["platform_name"].lower(), + input_dict["instrument_name"].lower(), + input_dict["service_name"].lower(), + resolution_strings["value"], + resolution_strings["unit"] ) - area_naming_dict['description'] = '{} {} {} area definition ' \ - 'with {} {} resolution'.format(input_dict['platform_name'].upper(), - input_dict['instrument_name'].upper(), - input_dict['service_desc'], - resolution_strings['value'], - resolution_strings['unit'] + area_naming_dict["description"] = "{} {} {} area definition " \ + "with {} {} resolution".format(input_dict["platform_name"].upper(), + input_dict["instrument_name"].upper(), + input_dict["service_desc"], + resolution_strings["value"], + resolution_strings["unit"] ) return area_naming_dict @@ -222,8 +222,8 @@ def get_resolution_and_unit_strings(resolution): Dictionary with `value` and `unit` keys, values are strings. 
""" if resolution >= 1000: - return {'value': '{:.0f}'.format(resolution*1e-3), - 'unit': 'km'} + return {"value": "{:.0f}".format(resolution*1e-3), + "unit": "km"} - return {'value': '{:.0f}'.format(resolution), - 'unit': 'm'} + return {"value": "{:.0f}".format(resolution), + "unit": "m"} diff --git a/satpy/readers/aapp_l1b.py b/satpy/readers/aapp_l1b.py index ea3877e48a..e502a9da64 100644 --- a/satpy/readers/aapp_l1b.py +++ b/satpy/readers/aapp_l1b.py @@ -54,24 +54,24 @@ def get_aapp_chunks(shape): AVHRR_CHANNEL_NAMES = ["1", "2", "3a", "3b", "4", "5"] -AVHRR_ANGLE_NAMES = ['sensor_zenith_angle', - 'solar_zenith_angle', - 'sun_sensor_azimuth_difference_angle'] +AVHRR_ANGLE_NAMES = ["sensor_zenith_angle", + "solar_zenith_angle", + "sun_sensor_azimuth_difference_angle"] -AVHRR_PLATFORM_IDS2NAMES = {4: 'NOAA-15', - 2: 'NOAA-16', - 6: 'NOAA-17', - 7: 'NOAA-18', - 8: 'NOAA-19', - 11: 'Metop-B', - 12: 'Metop-A', - 13: 'Metop-C', - 14: 'Metop simulator'} +AVHRR_PLATFORM_IDS2NAMES = {4: "NOAA-15", + 2: "NOAA-16", + 6: "NOAA-17", + 7: "NOAA-18", + 8: "NOAA-19", + 11: "Metop-B", + 12: "Metop-A", + 13: "Metop-C", + 14: "Metop simulator"} def create_xarray(arr): """Create an `xarray.DataArray`.""" - res = xr.DataArray(arr, dims=['y', 'x']) + res = xr.DataArray(arr, dims=["y", "x"]) return res @@ -102,30 +102,30 @@ def _set_filedata_layout(self): @property def start_time(self): """Get the time of the first observation.""" - return datetime(self._data['scnlinyr'][0], 1, 1) + timedelta( - days=int(self._data['scnlindy'][0]) - 1, - milliseconds=int(self._data['scnlintime'][0])) + return datetime(self._data["scnlinyr"][0], 1, 1) + timedelta( + days=int(self._data["scnlindy"][0]) - 1, + milliseconds=int(self._data["scnlintime"][0])) @property def end_time(self): """Get the time of the final observation.""" - return datetime(self._data['scnlinyr'][-1], 1, 1) + timedelta( - days=int(self._data['scnlindy'][-1]) - 1, - milliseconds=int(self._data['scnlintime'][-1])) + return datetime(self._data["scnlinyr"][-1], 1, 1) + timedelta( + days=int(self._data["scnlindy"][-1]) - 1, + milliseconds=int(self._data["scnlintime"][-1])) def _update_dataset_attributes(self, dataset, key, info): - dataset.attrs.update({'platform_name': self.platform_name, - 'sensor': self.sensor}) + dataset.attrs.update({"platform_name": self.platform_name, + "sensor": self.sensor}) dataset.attrs.update(key.to_dict()) - for meta_key in ('standard_name', 'units'): + for meta_key in ("standard_name", "units"): if meta_key in info: dataset.attrs.setdefault(meta_key, info[meta_key]) def _get_platform_name(self, platform_names_lookup): """Get the platform name from the file header.""" - self.platform_name = platform_names_lookup.get(self._header['satid'][0], None) + self.platform_name = platform_names_lookup.get(self._header["satid"][0], None) if self.platform_name is None: - raise ValueError("Unsupported platform ID: %d" % self.header['satid']) + raise ValueError("Unsupported platform ID: %d" % self.header["satid"]) def read(self): """Read the data.""" @@ -143,17 +143,17 @@ def _calibrate_active_channel_data(self, key): def get_dataset(self, key, info): """Get a dataset from the file.""" - if key['name'] in self._channel_names: + if key["name"] in self._channel_names: dataset = self._calibrate_active_channel_data(key) if dataset is None: return None - elif key['name'] in ['longitude', 'latitude']: - dataset = self.navigate(key['name']) + elif key["name"] in ["longitude", "latitude"]: + dataset = self.navigate(key["name"]) dataset.attrs = info - 
elif key['name'] in self._angle_names: - dataset = self.get_angles(key['name']) + elif key["name"] in self._angle_names: + dataset = self.get_angles(key["name"]) else: - raise ValueError("Not a supported dataset: %s", key['name']) + raise ValueError("Not a supported dataset: %s", key["name"]) self._update_dataset_attributes(dataset, key, info) return dataset @@ -168,7 +168,7 @@ def __init__(self, filename, filename_info, filetype_info): filetype_info) self.channels = {i: None for i in AVHRR_CHANNEL_NAMES} - self.units = {i: 'counts' for i in AVHRR_CHANNEL_NAMES} + self.units = {i: "counts" for i in AVHRR_CHANNEL_NAMES} self._is3b = None self._is3a = None @@ -181,7 +181,7 @@ def __init__(self, filename, filename_info, filetype_info): self.active_channels = self._get_active_channels() self._get_platform_name(AVHRR_PLATFORM_IDS2NAMES) - self.sensor = 'avhrr-3' + self.sensor = "avhrr-3" self._get_all_interpolated_angles = functools.lru_cache(maxsize=10)( self._get_all_interpolated_angles_uncached @@ -202,25 +202,25 @@ def _get_active_channels(self): def _calibrate_active_channel_data(self, key): """Calibrate active channel data only.""" - if self.active_channels[key['name']]: + if self.active_channels[key["name"]]: return self.calibrate(key) return None def _get_channel_binary_status_from_header(self): - status = self._header['inststat1'].item() - change_line = self._header['statchrecnb'] + status = self._header["inststat1"].item() + change_line = self._header["statchrecnb"] if change_line > 0: - status |= self._header['inststat2'].item() + status |= self._header["inststat2"].item() return status @staticmethod def _convert_binary_channel_status_to_activation_dict(status): - bits_channels = ((13, '1'), - (12, '2'), - (11, '3a'), - (10, '3b'), - (9, '4'), - (8, '5')) + bits_channels = ((13, "1"), + (12, "2"), + (11, "3a"), + (10, "3b"), + (9, "4"), + (8, "5")) activated = dict() for bit, channel_name in bits_channels: activated[channel_name] = bool(status >> bit & 1) @@ -229,8 +229,8 @@ def _convert_binary_channel_status_to_activation_dict(status): def available_datasets(self, configured_datasets=None): """Get the available datasets.""" for _, mda in configured_datasets: - if mda['name'] in self._channel_names: - yield self.active_channels[mda['name']], mda + if mda["name"] in self._channel_names: + yield self.active_channels[mda["name"]], mda else: yield True, mda @@ -285,9 +285,9 @@ def _create_40km_interpolator(lines, *arrays_40km, geolocation=False): def navigate(self, coordinate_id): """Get the longitudes and latitudes of the scene.""" lons, lats = self._get_all_interpolated_coordinates() - if coordinate_id == 'longitude': + if coordinate_id == "longitude": return create_xarray(lons) - if coordinate_id == 'latitude': + if coordinate_id == "latitude": return create_xarray(lats) raise KeyError("Coordinate {} unknown.".format(coordinate_id)) @@ -309,49 +309,49 @@ def calibrate(self, if calib_coeffs is None: calib_coeffs = {} - units = {'reflectance': '%', - 'brightness_temperature': 'K', - 'counts': '', - 'radiance': 'W*m-2*sr-1*cm ?'} + units = {"reflectance": "%", + "brightness_temperature": "K", + "counts": "", + "radiance": "W*m-2*sr-1*cm ?"} - if dataset_id['name'] in ("3a", "3b") and self._is3b is None: + if dataset_id["name"] in ("3a", "3b") and self._is3b is None: # Is it 3a or 3b: line_chunks = get_aapp_chunks((self._data.shape[0], 2048))[0] - self._is3a = da.bitwise_and(da.from_array(self._data['scnlinbit'], + self._is3a = da.bitwise_and(da.from_array(self._data["scnlinbit"], 
chunks=line_chunks), 3) == 0 - self._is3b = da.bitwise_and(da.from_array(self._data['scnlinbit'], + self._is3b = da.bitwise_and(da.from_array(self._data["scnlinbit"], chunks=line_chunks), 3) == 1 try: - vis_idx = ['1', '2', '3a'].index(dataset_id['name']) + vis_idx = ["1", "2", "3a"].index(dataset_id["name"]) ir_idx = None except ValueError: vis_idx = None - ir_idx = ['3b', '4', '5'].index(dataset_id['name']) + ir_idx = ["3b", "4", "5"].index(dataset_id["name"]) mask = True if vis_idx is not None: - coeffs = calib_coeffs.get('ch' + dataset_id['name']) - if dataset_id['name'] == '3a': + coeffs = calib_coeffs.get("ch" + dataset_id["name"]) + if dataset_id["name"] == "3a": mask = self._is3a[:, None] ds = create_xarray( _vis_calibrate(self._data, vis_idx, - dataset_id['calibration'], + dataset_id["calibration"], pre_launch_coeffs, coeffs, mask=mask)) else: - if dataset_id['name'] == '3b': + if dataset_id["name"] == "3b": mask = self._is3b[:, None] ds = create_xarray( _ir_calibrate(self._header, self._data, ir_idx, - dataset_id['calibration'], + dataset_id["calibration"], mask=mask)) - ds.attrs['units'] = units[dataset_id['calibration']] + ds.attrs["units"] = units[dataset_id["calibration"]] ds.attrs.update(dataset_id._asdict()) return ds @@ -545,8 +545,8 @@ def _vis_calibrate(data, """ # Calibration count to albedo, the calibration is performed separately for # two value ranges. - if calib_type not in ['counts', 'radiance', 'reflectance']: - raise ValueError('Calibration ' + calib_type + ' unknown!') + if calib_type not in ["counts", "radiance", "reflectance"]: + raise ValueError("Calibration " + calib_type + " unknown!") channel_data = data["hrpt"][:, :, chn] chunks = get_aapp_chunks(channel_data.shape) @@ -554,12 +554,12 @@ def _vis_calibrate(data, channel = da.from_array(channel_data, chunks=chunks) mask &= channel != 0 - if calib_type == 'counts': + if calib_type == "counts": return channel channel = channel.astype(CHANNEL_DTYPE) - if calib_type == 'radiance': + if calib_type == "radiance": logger.info("Radiances are not yet supported for " + "the VIS/NIR channels!") @@ -630,9 +630,9 @@ def _ir_calibrate(header, data, irchn, calib_type, mask=True): mask &= count != 0 count = count.astype(CHANNEL_DTYPE) - k1_ = da.from_array(data['calir'][:, irchn, 0, 0], chunks=line_chunks) / 1.0e9 - k2_ = da.from_array(data['calir'][:, irchn, 0, 1], chunks=line_chunks) / 1.0e6 - k3_ = da.from_array(data['calir'][:, irchn, 0, 2], chunks=line_chunks) / 1.0e6 + k1_ = da.from_array(data["calir"][:, irchn, 0, 0], chunks=line_chunks) / 1.0e9 + k2_ = da.from_array(data["calir"][:, irchn, 0, 1], chunks=line_chunks) / 1.0e6 + k3_ = da.from_array(data["calir"][:, irchn, 0, 2], chunks=line_chunks) / 1.0e6 # Count to radiance conversion: rad = k1_[:, None] * count * count + k2_[:, None] * count + k3_[:, None] @@ -645,14 +645,14 @@ def _ir_calibrate(header, data, irchn, calib_type, mask=True): return da.where(mask, rad, np.nan) # Central wavenumber: - cwnum = header['radtempcnv'][0, irchn, 0] + cwnum = header["radtempcnv"][0, irchn, 0] if irchn == 0: cwnum = cwnum / 1.0e2 else: cwnum = cwnum / 1.0e3 - bandcor_2 = header['radtempcnv'][0, irchn, 1] / 1e5 - bandcor_3 = header['radtempcnv'][0, irchn, 2] / 1e6 + bandcor_2 = header["radtempcnv"][0, irchn, 1] / 1e5 + bandcor_3 = header["radtempcnv"][0, irchn, 2] / 1e6 ir_const_1 = 1.1910659e-5 ir_const_2 = 1.438833 diff --git a/satpy/readers/aapp_mhs_amsub_l1c.py b/satpy/readers/aapp_mhs_amsub_l1c.py index 39216431f4..18c054dd3b 100644 --- 
a/satpy/readers/aapp_mhs_amsub_l1c.py +++ b/satpy/readers/aapp_mhs_amsub_l1c.py @@ -36,21 +36,21 @@ CHUNK_SIZE = get_legacy_chunk_size() LINE_CHUNK = CHUNK_SIZE ** 2 // 90 -MHS_AMSUB_CHANNEL_NAMES = ['1', '2', '3', '4', '5'] -MHS_AMSUB_ANGLE_NAMES = ['sensor_zenith_angle', 'sensor_azimuth_angle', - 'solar_zenith_angle', 'solar_azimuth_difference_angle'] +MHS_AMSUB_CHANNEL_NAMES = ["1", "2", "3", "4", "5"] +MHS_AMSUB_ANGLE_NAMES = ["sensor_zenith_angle", "sensor_azimuth_angle", + "solar_zenith_angle", "solar_azimuth_difference_angle"] -MHS_AMSUB_PLATFORM_IDS2NAMES = {15: 'NOAA-15', - 16: 'NOAA-16', - 17: 'NOAA-17', - 18: 'NOAA-18', - 19: 'NOAA-19', - 1: 'Metop-B', - 2: 'Metop-A', - 3: 'Metop-C', - 4: 'Metop simulator'} +MHS_AMSUB_PLATFORM_IDS2NAMES = {15: "NOAA-15", + 16: "NOAA-16", + 17: "NOAA-17", + 18: "NOAA-18", + 19: "NOAA-19", + 1: "Metop-B", + 2: "Metop-A", + 3: "Metop-C", + 4: "Metop simulator"} -MHS_AMSUB_PLATFORMS = ['Metop-A', 'Metop-B', 'Metop-C', 'NOAA-18', 'NOAA-19'] +MHS_AMSUB_PLATFORMS = ["Metop-A", "Metop-B", "Metop-C", "NOAA-18", "NOAA-19"] class MHS_AMSUB_AAPPL1CFile(AAPPL1BaseFileHandler): @@ -61,7 +61,7 @@ def __init__(self, filename, filename_info, filetype_info): super().__init__(filename, filename_info, filetype_info) self.channels = {i: None for i in MHS_AMSUB_CHANNEL_NAMES} - self.units = {i: 'brightness_temperature' for i in MHS_AMSUB_CHANNEL_NAMES} + self.units = {i: "brightness_temperature" for i in MHS_AMSUB_CHANNEL_NAMES} self._channel_names = MHS_AMSUB_CHANNEL_NAMES self._angle_names = MHS_AMSUB_ANGLE_NAMES @@ -80,10 +80,10 @@ def _set_filedata_layout(self): def _get_sensorname(self): """Get the sensor name from the header.""" - if self._header['instrument'][0] == 11: - self.sensor = 'amsub' - elif self._header['instrument'][0] == 12: - self.sensor = 'mhs' + if self._header["instrument"][0] == 11: + self.sensor = "amsub" + elif self._header["instrument"][0] == 12: + self.sensor = "mhs" else: raise IOError("Sensor neither MHS nor AMSU-B!") @@ -101,9 +101,9 @@ def get_angles(self, angle_id): def navigate(self, coordinate_id): """Get the longitudes and latitudes of the scene.""" lons, lats = self._get_coordinates_in_degrees() - if coordinate_id == 'longitude': + if coordinate_id == "longitude": return create_xarray(lons) - if coordinate_id == 'latitude': + if coordinate_id == "latitude": return create_xarray(lats) raise KeyError("Coordinate {} unknown.".format(coordinate_id)) @@ -119,17 +119,17 @@ def _calibrate_active_channel_data(self, key): def calibrate(self, dataset_id): """Calibrate the data.""" - units = {'brightness_temperature': 'K'} + units = {"brightness_temperature": "K"} mask = True - idx = ['1', '2', '3', '4', '5'].index(dataset_id['name']) + idx = ["1", "2", "3", "4", "5"].index(dataset_id["name"]) ds = create_xarray( _calibrate(self._data, idx, - dataset_id['calibration'], + dataset_id["calibration"], mask=mask)) - ds.attrs['units'] = units[dataset_id['calibration']] + ds.attrs["units"] = units[dataset_id["calibration"]] ds.attrs.update(dataset_id._asdict()) return ds @@ -143,13 +143,13 @@ def _calibrate(data, *calib_type* in brightness_temperature. 
""" - if calib_type not in ['brightness_temperature']: - raise ValueError('Calibration ' + calib_type + ' unknown!') + if calib_type not in ["brightness_temperature"]: + raise ValueError("Calibration " + calib_type + " unknown!") channel = da.from_array(data["btemps"][:, :, chn] / 100., chunks=(LINE_CHUNK, 90)) mask &= channel != 0 - if calib_type == 'counts': + if calib_type == "counts": return channel channel = channel.astype(np.float_) diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 4a6bf069c1..0b80045767 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -34,13 +34,13 @@ CHUNK_SIZE = get_legacy_chunk_size() PLATFORM_NAMES = { - 'g16': 'GOES-16', - 'g17': 'GOES-17', - 'g18': 'GOES-18', - 'g19': 'GOES-19', - 'goes16': 'GOES-16', - 'goes17': 'GOES-17', - 'goes18': 'GOES-18', + "g16": "GOES-16", + "g17": "GOES-17", + "g18": "GOES-18", + "g19": "GOES-19", + "goes16": "GOES-16", + "goes17": "GOES-17", + "goes18": "GOES-18", } @@ -51,11 +51,11 @@ def __init__(self, filename, filename_info, filetype_info): """Open the NetCDF file with xarray and prepare the Dataset for reading.""" super(NC_ABI_BASE, self).__init__(filename, filename_info, filetype_info) - platform_shortname = filename_info['platform_shortname'] + platform_shortname = filename_info["platform_shortname"] self.platform_name = PLATFORM_NAMES.get(platform_shortname.lower()) - self.nlines = self.nc['y'].size - self.ncols = self.nc['x'].size + self.nlines = self.nc["y"].size + self.ncols = self.nc["x"].size self.coords = {} @@ -67,28 +67,28 @@ def nc(self): nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, - chunks={'x': CHUNK_SIZE, 'y': CHUNK_SIZE}, ) + chunks={"x": CHUNK_SIZE, "y": CHUNK_SIZE}, ) except ValueError: nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, - chunks={'lon': CHUNK_SIZE, 'lat': CHUNK_SIZE}, ) + chunks={"lon": CHUNK_SIZE, "lat": CHUNK_SIZE}, ) nc = self._rename_dims(nc) return nc @staticmethod def _rename_dims(nc): - if 't' in nc.dims or 't' in nc.coords: - nc = nc.rename({'t': 'time'}) - if 'goes_lat_lon_projection' in nc: + if "t" in nc.dims or "t" in nc.coords: + nc = nc.rename({"t": "time"}) + if "goes_lat_lon_projection" in nc: with suppress(ValueError): - nc = nc.rename({'lon': 'x', 'lat': 'y'}) + nc = nc.rename({"lon": "x", "lat": "y"}) return nc @property def sensor(self): """Get sensor name for current file handler.""" - return 'abi' + return "abi" def __getitem__(self, item): """Wrap `self.nc[item]` for better floating point precision. @@ -110,21 +110,21 @@ def __getitem__(self, item): def _adjust_data(self, data, item): """Adjust data with typing, scaling and filling.""" - factor = data.attrs.get('scale_factor', 1) - offset = data.attrs.get('add_offset', 0) - fill = data.attrs.get('_FillValue') - unsigned = data.attrs.get('_Unsigned', None) + factor = data.attrs.get("scale_factor", 1) + offset = data.attrs.get("add_offset", 0) + fill = data.attrs.get("_FillValue") + unsigned = data.attrs.get("_Unsigned", None) def is_int(val): - return np.issubdtype(val.dtype, np.integer) if hasattr(val, 'dtype') else isinstance(val, int) + return np.issubdtype(val.dtype, np.integer) if hasattr(val, "dtype") else isinstance(val, int) # Ref. 
GOESR PUG-L1B-vol3, section 5.0.2 Unsigned Integer Processing - if unsigned is not None and unsigned.lower() == 'true': + if unsigned is not None and unsigned.lower() == "true": # cast the data from int to uint - data = data.astype('u%s' % data.dtype.itemsize) + data = data.astype("u%s" % data.dtype.itemsize) if fill is not None: - fill = fill.astype('u%s' % fill.dtype.itemsize) + fill = fill.astype("u%s" % fill.dtype.itemsize) if fill is not None: # Some backends (h5netcdf) may return attributes as shape (1,) # arrays rather than shape () scalars, which according to the netcdf @@ -138,7 +138,7 @@ def is_int(val): else: new_fill = np.nan data = data.where(data != fill, new_fill) - if factor != 1 and item in ('x', 'y'): + if factor != 1 and item in ("x", "y"): # be more precise with x/y coordinates # see get_area_def for more information data = data * np.round(float(factor), 6) + np.round(float(offset), 6) @@ -157,7 +157,7 @@ def _adjust_coords(self, data, item): # 'time' dimension causes issues in other processing # 'x_image' and 'y_image' are confusing to some users and unnecessary # 'x' and 'y' will be overwritten by base class AreaDefinition - for coord_name in ('x_image', 'y_image', 'time', 'x', 'y'): + for coord_name in ("x_image", "y_image", "time", "x", "y"): if coord_name in data.coords: data = data.drop_vars(coord_name) if item in data.coords: @@ -175,44 +175,44 @@ def get_dataset(self, key, info): def get_area_def(self, key): """Get the area definition of the data at hand.""" - if 'goes_imager_projection' in self.nc: + if "goes_imager_projection" in self.nc: return self._get_areadef_fixedgrid(key) - if 'goes_lat_lon_projection' in self.nc: + if "goes_lat_lon_projection" in self.nc: return self._get_areadef_latlon(key) - raise ValueError('Unsupported projection found in the dataset') + raise ValueError("Unsupported projection found in the dataset") def _get_areadef_latlon(self, key): """Get the area definition of the data at hand.""" projection = self.nc["goes_lat_lon_projection"] - a = projection.attrs['semi_major_axis'] - b = projection.attrs['semi_minor_axis'] - fi = projection.attrs['inverse_flattening'] - pm = projection.attrs['longitude_of_prime_meridian'] + a = projection.attrs["semi_major_axis"] + b = projection.attrs["semi_minor_axis"] + fi = projection.attrs["inverse_flattening"] + pm = projection.attrs["longitude_of_prime_meridian"] proj_ext = self.nc["geospatial_lat_lon_extent"] - w_lon = proj_ext.attrs['geospatial_westbound_longitude'] - e_lon = proj_ext.attrs['geospatial_eastbound_longitude'] - n_lat = proj_ext.attrs['geospatial_northbound_latitude'] - s_lat = proj_ext.attrs['geospatial_southbound_latitude'] + w_lon = proj_ext.attrs["geospatial_westbound_longitude"] + e_lon = proj_ext.attrs["geospatial_eastbound_longitude"] + n_lat = proj_ext.attrs["geospatial_northbound_latitude"] + s_lat = proj_ext.attrs["geospatial_southbound_latitude"] - lat_0 = proj_ext.attrs['geospatial_lat_center'] - lon_0 = proj_ext.attrs['geospatial_lon_center'] + lat_0 = proj_ext.attrs["geospatial_lat_center"] + lon_0 = proj_ext.attrs["geospatial_lon_center"] area_extent = (w_lon, s_lat, e_lon, n_lat) - proj_dict = {'proj': 'latlong', - 'lon_0': float(lon_0), - 'lat_0': float(lat_0), - 'a': float(a), - 'b': float(b), - 'fi': float(fi), - 'pm': float(pm)} + proj_dict = {"proj": "latlong", + "lon_0": float(lon_0), + "lat_0": float(lat_0), + "a": float(a), + "b": float(b), + "fi": float(fi), + "pm": float(pm)} ll_area_def = geometry.AreaDefinition( - self.nc.attrs.get('orbital_slot', 
'abi_geos'), - self.nc.attrs.get('spatial_resolution', 'ABI file area'), - 'abi_latlon', + self.nc.attrs.get("orbital_slot", "abi_geos"), + self.nc.attrs.get("spatial_resolution", "ABI file area"), + "abi_latlon", proj_dict, self.ncols, self.nlines, @@ -231,17 +231,17 @@ def _get_areadef_fixedgrid(self, key): """ projection = self.nc["goes_imager_projection"] - a = projection.attrs['semi_major_axis'] - b = projection.attrs['semi_minor_axis'] - h = projection.attrs['perspective_point_height'] + a = projection.attrs["semi_major_axis"] + b = projection.attrs["semi_minor_axis"] + h = projection.attrs["perspective_point_height"] - lon_0 = projection.attrs['longitude_of_projection_origin'] - sweep_axis = projection.attrs['sweep_angle_axis'][0] + lon_0 = projection.attrs["longitude_of_projection_origin"] + sweep_axis = projection.attrs["sweep_angle_axis"][0] # compute x and y extents in m h = np.float64(h) - x = self['x'] - y = self['y'] + x = self["x"] + y = self["y"] x_l = x[0].values x_r = x[-1].values y_l = y[-1].values @@ -251,18 +251,18 @@ def _get_areadef_fixedgrid(self, key): area_extent = (x_l - x_half, y_l - y_half, x_r + x_half, y_u + y_half) area_extent = tuple(np.round(h * val, 6) for val in area_extent) - proj_dict = {'proj': 'geos', - 'lon_0': float(lon_0), - 'a': float(a), - 'b': float(b), - 'h': h, - 'units': 'm', - 'sweep': sweep_axis} + proj_dict = {"proj": "geos", + "lon_0": float(lon_0), + "a": float(a), + "b": float(b), + "h": h, + "units": "m", + "sweep": sweep_axis} fg_area_def = geometry.AreaDefinition( - self.nc.attrs.get('orbital_slot', 'abi_geos'), - self.nc.attrs.get('spatial_resolution', 'ABI file area'), - 'abi_fixed_grid', + self.nc.attrs.get("orbital_slot", "abi_geos"), + self.nc.attrs.get("spatial_resolution", "ABI file area"), + "abi_fixed_grid", proj_dict, self.ncols, self.nlines, @@ -273,19 +273,19 @@ def _get_areadef_fixedgrid(self, key): @property def start_time(self): """Start time of the current file's observations.""" - return datetime.strptime(self.nc.attrs['time_coverage_start'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """End time of the current file's observations.""" - return datetime.strptime(self.nc.attrs['time_coverage_end'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%S.%fZ") def spatial_resolution_to_number(self): """Convert the 'spatial_resolution' global attribute to meters.""" - res = self.nc.attrs['spatial_resolution'].split(' ')[0] - if res.endswith('km'): + res = self.nc.attrs["spatial_resolution"].split(" ")[0] + if res.endswith("km"): res = int(float(res[:-2]) * 1000) - elif res.endswith('m'): + elif res.endswith("m"): res = int(res[:-1]) else: raise ValueError("Unexpected 'spatial_resolution' attribute '{}'".format(res)) diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py index dafdc8a373..3a22397cde 100644 --- a/satpy/readers/abi_l1b.py +++ b/satpy/readers/abi_l1b.py @@ -44,70 +44,70 @@ def __init__(self, filename, filename_info, filetype_info, clip_negative_radianc def get_dataset(self, key, info): """Load a dataset.""" - logger.debug('Reading in get_dataset %s.', key['name']) + logger.debug("Reading in get_dataset %s.", key["name"]) # For raw cal, don't apply scale and offset, return raw file counts - if key['calibration'] == 'counts': - radiances = self.nc['Rad'].copy() + if key["calibration"] == "counts": + radiances = self.nc["Rad"].copy() else: - 
radiances = self['Rad'] + radiances = self["Rad"] # mapping of calibration types to calibration functions cal_dictionary = { - 'reflectance': self._vis_calibrate, - 'brightness_temperature': self._ir_calibrate, - 'radiance': self._rad_calibrate, - 'counts': self._raw_calibrate, + "reflectance": self._vis_calibrate, + "brightness_temperature": self._ir_calibrate, + "radiance": self._rad_calibrate, + "counts": self._raw_calibrate, } try: - func = cal_dictionary[key['calibration']] + func = cal_dictionary[key["calibration"]] res = func(radiances) except KeyError: - raise ValueError("Unknown calibration '{}'".format(key['calibration'])) + raise ValueError("Unknown calibration '{}'".format(key["calibration"])) # convert to satpy standard units - if res.attrs['units'] == '1' and key['calibration'] != 'counts': + if res.attrs["units"] == "1" and key["calibration"] != "counts": res *= 100 - res.attrs['units'] = '%' + res.attrs["units"] = "%" self._adjust_attrs(res, key) return res def _adjust_attrs(self, data, key): - data.attrs.update({'platform_name': self.platform_name, - 'sensor': self.sensor}) + data.attrs.update({"platform_name": self.platform_name, + "sensor": self.sensor}) # Add orbital parameters projection = self.nc["goes_imager_projection"] - data.attrs['orbital_parameters'] = { - 'projection_longitude': float(projection.attrs['longitude_of_projection_origin']), - 'projection_latitude': float(projection.attrs['latitude_of_projection_origin']), - 'projection_altitude': float(projection.attrs['perspective_point_height']), - 'satellite_nominal_latitude': float(self['nominal_satellite_subpoint_lat']), - 'satellite_nominal_longitude': float(self['nominal_satellite_subpoint_lon']), - 'satellite_nominal_altitude': float(self['nominal_satellite_height']) * 1000., - 'yaw_flip': bool(self['yaw_flip_flag']), + data.attrs["orbital_parameters"] = { + "projection_longitude": float(projection.attrs["longitude_of_projection_origin"]), + "projection_latitude": float(projection.attrs["latitude_of_projection_origin"]), + "projection_altitude": float(projection.attrs["perspective_point_height"]), + "satellite_nominal_latitude": float(self["nominal_satellite_subpoint_lat"]), + "satellite_nominal_longitude": float(self["nominal_satellite_subpoint_lon"]), + "satellite_nominal_altitude": float(self["nominal_satellite_height"]) * 1000., + "yaw_flip": bool(self["yaw_flip_flag"]), } data.attrs.update(key.to_dict()) # remove attributes that could be confusing later # if calibration type is raw counts, we leave them in - if key['calibration'] != 'counts': - data.attrs.pop('_FillValue', None) - data.attrs.pop('scale_factor', None) - data.attrs.pop('add_offset', None) - data.attrs.pop('_Unsigned', None) - data.attrs.pop('ancillary_variables', None) # Can't currently load DQF + if key["calibration"] != "counts": + data.attrs.pop("_FillValue", None) + data.attrs.pop("scale_factor", None) + data.attrs.pop("add_offset", None) + data.attrs.pop("_Unsigned", None) + data.attrs.pop("ancillary_variables", None) # Can't currently load DQF # although we could compute these, we'd have to update in calibration - data.attrs.pop('valid_range', None) + data.attrs.pop("valid_range", None) # add in information from the filename that may be useful to the user - for attr in ('observation_type', 'scene_abbr', 'scan_mode', 'platform_shortname', 'suffix'): + for attr in ("observation_type", "scene_abbr", "scan_mode", "platform_shortname", "suffix"): if attr in self.filename_info: data.attrs[attr] = self.filename_info[attr] # copy global 
attributes to metadata - for attr in ('scene_id', 'orbital_slot', 'instrument_ID', 'production_site', 'timeline_ID'): + for attr in ("scene_id", "orbital_slot", "instrument_ID", "production_site", "timeline_ID"): data.attrs[attr] = self.nc.attrs.get(attr) # only include these if they are present - for attr in ('fusion_args',): + for attr in ("fusion_args",): if attr in self.nc.attrs: data.attrs[attr] = self.nc.attrs[attr] @@ -128,23 +128,23 @@ def _raw_calibrate(self, data): """ res = data res.attrs = data.attrs - res.attrs['units'] = '1' - res.attrs['long_name'] = 'Raw Counts' - res.attrs['standard_name'] = 'counts' + res.attrs["units"] = "1" + res.attrs["long_name"] = "Raw Counts" + res.attrs["standard_name"] = "counts" return res def _vis_calibrate(self, data): """Calibrate visible channels to reflectance.""" - solar_irradiance = self['esun'] + solar_irradiance = self["esun"] esd = self["earth_sun_distance_anomaly_in_AU"].astype(float) factor = np.pi * esd * esd / solar_irradiance res = data * factor res.attrs = data.attrs - res.attrs['units'] = '1' - res.attrs['long_name'] = 'Bidirectional Reflectance' - res.attrs['standard_name'] = 'toa_bidirectional_reflectance' + res.attrs["units"] = "1" + res.attrs["long_name"] = "Bidirectional Reflectance" + res.attrs["standard_name"] = "toa_bidirectional_reflectance" return res def _get_minimum_radiance(self, data): @@ -170,7 +170,7 @@ def _ir_calibrate(self, data): res = (fk2 / np.log(fk1 / data + 1) - bc1) / bc2 res.attrs = data.attrs - res.attrs['units'] = 'K' - res.attrs['long_name'] = 'Brightness Temperature' - res.attrs['standard_name'] = 'toa_brightness_temperature' + res.attrs["units"] = "K" + res.attrs["long_name"] = "Brightness Temperature" + res.attrs["standard_name"] = "toa_brightness_temperature" return res diff --git a/satpy/readers/abi_l2_nc.py b/satpy/readers/abi_l2_nc.py index a152790197..ad87286f32 100644 --- a/satpy/readers/abi_l2_nc.py +++ b/satpy/readers/abi_l2_nc.py @@ -33,10 +33,10 @@ class NC_ABI_L2(NC_ABI_BASE): def get_dataset(self, key, info): """Load a dataset.""" - var = info['file_key'] - if self.filetype_info['file_type'] == 'abi_l2_mcmip': + var = info["file_key"] + if self.filetype_info["file_type"] == "abi_l2_mcmip": var += "_" + key["name"] - LOG.debug('Reading in get_dataset %s.', var) + LOG.debug("Reading in get_dataset %s.", var) variable = self[var] variable.attrs.update(key.to_dict()) self._update_data_arr_with_filename_attrs(variable) @@ -44,32 +44,32 @@ def get_dataset(self, key, info): return variable def _update_data_arr_with_filename_attrs(self, variable): - _units = variable.attrs['units'] if 'units' in variable.attrs else None + _units = variable.attrs["units"] if "units" in variable.attrs else None variable.attrs.update({ - 'platform_name': self.platform_name, - 'sensor': self.sensor, - 'units': _units, - 'orbital_parameters': { - 'satellite_nominal_latitude': float(self.nc['nominal_satellite_subpoint_lat']), - 'satellite_nominal_longitude': float(self.nc['nominal_satellite_subpoint_lon']), - 'satellite_nominal_altitude': float(self.nc['nominal_satellite_height']) * 1000., + "platform_name": self.platform_name, + "sensor": self.sensor, + "units": _units, + "orbital_parameters": { + "satellite_nominal_latitude": float(self.nc["nominal_satellite_subpoint_lat"]), + "satellite_nominal_longitude": float(self.nc["nominal_satellite_subpoint_lon"]), + "satellite_nominal_altitude": float(self.nc["nominal_satellite_height"]) * 1000., }, }) - if 'flag_meanings' in variable.attrs: - 
variable.attrs['flag_meanings'] = variable.attrs['flag_meanings'].split(' ') + if "flag_meanings" in variable.attrs: + variable.attrs["flag_meanings"] = variable.attrs["flag_meanings"].split(" ") # add in information from the filename that may be useful to the user - for attr in ('scene_abbr', 'scan_mode', 'platform_shortname'): + for attr in ("scene_abbr", "scan_mode", "platform_shortname"): variable.attrs[attr] = self.filename_info.get(attr) # add in information hardcoded in the filetype YAML - for attr in ('observation_type',): + for attr in ("observation_type",): if attr in self.filetype_info: variable.attrs[attr] = self.filetype_info[attr] # copy global attributes to metadata - for attr in ('scene_id', 'orbital_slot', 'instrument_ID', 'production_site', 'timeline_ID'): + for attr in ("scene_id", "orbital_slot", "instrument_ID", "production_site", "timeline_ID"): variable.attrs[attr] = self.nc.attrs.get(attr) @staticmethod @@ -77,13 +77,13 @@ def _remove_problem_attrs(variable): # remove attributes that could be confusing later if not np.issubdtype(variable.dtype, np.integer): # integer fields keep the _FillValue - variable.attrs.pop('_FillValue', None) - variable.attrs.pop('scale_factor', None) - variable.attrs.pop('add_offset', None) - variable.attrs.pop('valid_range', None) - variable.attrs.pop('_Unsigned', None) - variable.attrs.pop('valid_range', None) - variable.attrs.pop('ancillary_variables', None) # Can't currently load DQF + variable.attrs.pop("_FillValue", None) + variable.attrs.pop("scale_factor", None) + variable.attrs.pop("add_offset", None) + variable.attrs.pop("valid_range", None) + variable.attrs.pop("_Unsigned", None) + variable.attrs.pop("valid_range", None) + variable.attrs.pop("ancillary_variables", None) # Can't currently load DQF def available_datasets(self, configured_datasets=None): """Add resolution to configured datasets.""" @@ -92,12 +92,12 @@ def available_datasets(self, configured_datasets=None): # don't override what they've done if is_avail is not None: yield is_avail, ds_info - matches = self.file_type_matches(ds_info['file_type']) + matches = self.file_type_matches(ds_info["file_type"]) if matches: # we have this dataset resolution = self.spatial_resolution_to_number() new_info = ds_info.copy() - new_info.setdefault('resolution', resolution) + new_info.setdefault("resolution", resolution) yield True, ds_info elif is_avail is None: # we don't know what to do with this diff --git a/satpy/readers/acspo.py b/satpy/readers/acspo.py index 14c8038b63..8a8262af33 100644 --- a/satpy/readers/acspo.py +++ b/satpy/readers/acspo.py @@ -33,9 +33,9 @@ ROWS_PER_SCAN = { - 'modis': 10, - 'viirs': 16, - 'avhrr': None, + "modis": 10, + "viirs": 16, + "avhrr": None, } @@ -45,7 +45,7 @@ class ACSPOFileHandler(NetCDF4FileHandler): @property def platform_name(self): """Get satellite name for this file's data.""" - res = self['/attr/platform'] + res = self["/attr/platform"] if isinstance(res, np.ndarray): return str(res.astype(str)) return res @@ -53,7 +53,7 @@ def platform_name(self): @property def sensor_name(self): """Get instrument name for this file's data.""" - res = self['/attr/sensor'] + res = self["/attr/sensor"] if isinstance(res, np.ndarray): res = str(res.astype(str)) return res.lower() @@ -69,12 +69,12 @@ def get_shape(self, ds_id, ds_info): tuple: (rows, cols) """ - var_path = ds_info.get('file_key', '{}'.format(ds_id['name'])) - if var_path + '/shape' not in self: + var_path = ds_info.get("file_key", "{}".format(ds_id["name"])) + if var_path + "/shape" not in 
self: # loading a scalar value shape = 1 else: - shape = self[var_path + '/shape'] + shape = self[var_path + "/shape"] if len(shape) == 3: if shape[0] != 1: raise ValueError("Not sure how to load 3D Dataset with more than 1 time") @@ -88,49 +88,49 @@ def _parse_datetime(datestr): @property def start_time(self): """Get first observation time of data.""" - return self._parse_datetime(self['/attr/time_coverage_start']) + return self._parse_datetime(self["/attr/time_coverage_start"]) @property def end_time(self): """Get final observation time of data.""" - return self._parse_datetime(self['/attr/time_coverage_end']) + return self._parse_datetime(self["/attr/time_coverage_end"]) def get_metadata(self, dataset_id, ds_info): """Collect various metadata about the specified dataset.""" - var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) + var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) shape = self.get_shape(dataset_id, ds_info) - units = self[var_path + '/attr/units'] - info = getattr(self[var_path], 'attrs', {}) - standard_name = self[var_path + '/attr/standard_name'] - resolution = float(self['/attr/spatial_resolution'].split(' ')[0]) + units = self[var_path + "/attr/units"] + info = getattr(self[var_path], "attrs", {}) + standard_name = self[var_path + "/attr/standard_name"] + resolution = float(self["/attr/spatial_resolution"].split(" ")[0]) rows_per_scan = ROWS_PER_SCAN.get(self.sensor_name) or 0 info.update(dataset_id.to_dict()) info.update({ - 'shape': shape, - 'units': units, - 'platform_name': self.platform_name, - 'sensor': self.sensor_name, - 'standard_name': standard_name, - 'resolution': resolution, - 'rows_per_scan': rows_per_scan, - 'long_name': self.get(var_path + '/attr/long_name'), - 'comment': self.get(var_path + '/attr/comment'), + "shape": shape, + "units": units, + "platform_name": self.platform_name, + "sensor": self.sensor_name, + "standard_name": standard_name, + "resolution": resolution, + "rows_per_scan": rows_per_scan, + "long_name": self.get(var_path + "/attr/long_name"), + "comment": self.get(var_path + "/attr/comment"), }) return info def get_dataset(self, dataset_id, ds_info): """Load data array and metadata from file on disk.""" - var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) + var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) metadata = self.get_metadata(dataset_id, ds_info) - shape = metadata['shape'] - file_shape = self[var_path + '/shape'] - metadata['shape'] = shape + shape = metadata["shape"] + file_shape = self[var_path + "/shape"] + metadata["shape"] = shape - valid_min = self[var_path + '/attr/valid_min'] - valid_max = self[var_path + '/attr/valid_max'] + valid_min = self[var_path + "/attr/valid_min"] + valid_max = self[var_path + "/attr/valid_max"] # no need to check fill value since we are using valid min/max - scale_factor = self.get(var_path + '/attr/scale_factor') - add_offset = self.get(var_path + '/attr/add_offset') + scale_factor = self.get(var_path + "/attr/scale_factor") + add_offset = self.get(var_path + "/attr/add_offset") data = self[var_path] data = data.rename({"ni": "x", "nj": "y"}) @@ -141,15 +141,15 @@ def get_dataset(self, dataset_id, ds_info): if scale_factor is not None: data = data * scale_factor + add_offset - if ds_info.get('cloud_clear', False): + if ds_info.get("cloud_clear", False): # clear-sky if bit 15-16 are 00 - clear_sky_mask = (self['l2p_flags'][0] & 0b1100000000000000) != 0 + clear_sky_mask = (self["l2p_flags"][0] & 0b1100000000000000) != 0 
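The clear-sky test above keeps only pixels whose two most significant l2p_flags bits (bits 15-16) are zero. A minimal, self-contained sketch of that masking, with made-up array names and values rather than real ACSPO file content:

import numpy as np
import xarray as xr

# Illustrative l2p_flags and SST arrays; in the reader these come from the file.
l2p_flags = xr.DataArray(
    np.array([[0, 0b0100000000000000],
              [0b1000000000000000, 0]], dtype=np.uint16),
    dims=("nj", "ni"))
sst = xr.DataArray(np.array([[290.0, 291.0],
                             [292.0, 293.0]]), dims=("nj", "ni"))

cloudy = (l2p_flags & 0b1100000000000000) != 0  # bits 15-16 set -> not clear sky
sst_clear = sst.where(~cloudy)                  # cloudy pixels become NaN
print(sst_clear.values)                         # [[290. nan] [nan 293.]]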
clear_sky_mask = clear_sky_mask.rename({"ni": "x", "nj": "y"}) data = data.where(~clear_sky_mask) data.attrs.update(metadata) # Remove these attributes since they are no longer valid and can cause invalid value filling. - data.attrs.pop('_FillValue', None) - data.attrs.pop('valid_max', None) - data.attrs.pop('valid_min', None) + data.attrs.pop("_FillValue", None) + data.attrs.pop("valid_max", None) + data.attrs.pop("valid_min", None) return data diff --git a/satpy/readers/agri_l1.py b/satpy/readers/agri_l1.py index 9612d016cd..381880cd5c 100644 --- a/satpy/readers/agri_l1.py +++ b/satpy/readers/agri_l1.py @@ -36,21 +36,21 @@ class HDF_AGRI_L1(FY4Base): def __init__(self, filename, filename_info, filetype_info): """Init filehandler.""" super(HDF_AGRI_L1, self).__init__(filename, filename_info, filetype_info) - self.sensor = 'AGRI' + self.sensor = "AGRI" def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" - ds_name = dataset_id['name'] - logger.debug('Reading in get_dataset %s.', ds_name) - file_key = ds_info.get('file_key', ds_name) - if self.PLATFORM_ID == 'FY-4B': + ds_name = dataset_id["name"] + logger.debug("Reading in get_dataset %s.", ds_name) + file_key = ds_info.get("file_key", ds_name) + if self.PLATFORM_ID == "FY-4B": if self.CHANS_ID in file_key: - file_key = f'Data/{file_key}' + file_key = f"Data/{file_key}" elif self.SUN_ID in file_key or self.SAT_ID in file_key: - file_key = f'Navigation/{file_key}' + file_key = f"Navigation/{file_key}" data = self.get(file_key) if data.ndim >= 2: - data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'}) + data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) data = self.calibrate(data, ds_info, ds_name, file_key) self.adjust_attrs(data, ds_info) @@ -59,15 +59,15 @@ def get_dataset(self, dataset_id, ds_info): def adjust_attrs(self, data, ds_info): """Adjust the attrs of the data.""" - satname = self.PLATFORM_NAMES.get(self['/attr/Satellite Name'], self['/attr/Satellite Name']) - data.attrs.update({'platform_name': satname, - 'sensor': self['/attr/Sensor Identification Code'].lower(), - 'orbital_parameters': { - 'satellite_nominal_latitude': self['/attr/NOMCenterLat'].item(), - 'satellite_nominal_longitude': self['/attr/NOMCenterLon'].item(), - 'satellite_nominal_altitude': self['/attr/NOMSatHeight'].item()}}) + satname = self.PLATFORM_NAMES.get(self["/attr/Satellite Name"], self["/attr/Satellite Name"]) + data.attrs.update({"platform_name": satname, + "sensor": self["/attr/Sensor Identification Code"].lower(), + "orbital_parameters": { + "satellite_nominal_latitude": self["/attr/NOMCenterLat"].item(), + "satellite_nominal_longitude": self["/attr/NOMCenterLon"].item(), + "satellite_nominal_altitude": self["/attr/NOMSatHeight"].item()}}) data.attrs.update(ds_info) # remove attributes that could be confusing later - data.attrs.pop('FillValue', None) - data.attrs.pop('Intercept', None) - data.attrs.pop('Slope', None) + data.attrs.pop("FillValue", None) + data.attrs.pop("Intercept", None) + data.attrs.pop("Slope", None) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index e06f7ebc50..681885dd51 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -84,7 +84,7 @@ "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16") -logger = logging.getLogger('ahi_hsd') +logger = logging.getLogger("ahi_hsd") # Basic information block: _BASIC_INFO_TYPE = np.dtype([("hblock_number", "u1"), @@ -350,14 +350,14 @@ class AHIHSDFileHandler(BaseFileHandler): """ def __init__(self, filename, 
filename_info, filetype_info, - mask_space=True, calib_mode='update', + mask_space=True, calib_mode="update", user_calibration=None, round_actual_position=True): """Initialize the reader.""" super(AHIHSDFileHandler, self).__init__(filename, filename_info, filetype_info) self.is_zipped = False - self._unzipped = unzip_file(self.filename, prefix=str(filename_info['segment']).zfill(2)) + self._unzipped = unzip_file(self.filename, prefix=str(filename_info["segment"]).zfill(2)) # Assume file is not zipped if self._unzipped: # But if it is, set the filename to point to unzipped temp file @@ -365,14 +365,14 @@ def __init__(self, filename, filename_info, filetype_info, self.filename = self._unzipped self.channels = dict([(i, None) for i in AHI_CHANNEL_NAMES]) - self.units = dict([(i, 'counts') for i in AHI_CHANNEL_NAMES]) + self.units = dict([(i, "counts") for i in AHI_CHANNEL_NAMES]) self._data = dict([(i, None) for i in AHI_CHANNEL_NAMES]) self._header = dict([(i, None) for i in AHI_CHANNEL_NAMES]) self.lons = None self.lats = None - self.segment_number = filename_info['segment'] - self.total_segments = filename_info['total_segments'] + self.segment_number = filename_info["segment"] + self.total_segments = filename_info["total_segments"] with open(self.filename) as fd: self.basic_info = np.fromfile(fd, @@ -387,14 +387,14 @@ def __init__(self, filename, filename_info, filetype_info, self.nav_info = np.fromfile(fd, dtype=_NAV_INFO_TYPE, count=1)[0] - self.platform_name = np2str(self.basic_info['satellite']) - self.observation_area = np2str(self.basic_info['observation_area']) - self.sensor = 'ahi' + self.platform_name = np2str(self.basic_info["satellite"]) + self.observation_area = np2str(self.basic_info["observation_area"]) + self.sensor = "ahi" self.mask_space = mask_space - self.band_name = filetype_info['file_type'][4:].upper() - calib_mode_choices = ('NOMINAL', 'UPDATE') + self.band_name = filetype_info["file_type"][4:].upper() + calib_mode_choices = ("NOMINAL", "UPDATE") if calib_mode.upper() not in calib_mode_choices: - raise ValueError('Invalid calibration mode: {}. Choose one of {}'.format( + raise ValueError("Invalid calibration mode: {}. Choose one of {}".format( calib_mode, calib_mode_choices)) self.calib_mode = calib_mode.upper() @@ -419,12 +419,12 @@ def end_time(self): @property def observation_start_time(self): """Get the observation start time.""" - return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info['observation_start_time'])) + return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_start_time"])) @property def observation_end_time(self): """Get the observation end time.""" - return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info['observation_end_time'])) + return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_end_time"])) @property def nominal_start_time(self): @@ -456,7 +456,7 @@ def _modify_observation_time_for_nominal(self, observation_time): 2.5 minutes apart, then the result should be 13:32:30. 
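As a rough standalone illustration of the rounding just described (a sketch that closely mirrors the handler's logic, with a made-up helper name and example values), the nominal time can be rebuilt from the timeline string plus a per-sector offset:

from datetime import datetime

def nominal_observation_time(observation_time, observation_area, timeline):
    """Round an observation start time onto its nominal timeline slot."""
    if observation_area == "FLDK":
        offset = 0
    else:
        # JP/R3 sectors repeat every 150 s, R4/R5 every 30 s
        frequency = {"JP": 150, "R3": 150, "R4": 30, "R5": 30}[observation_area[:2]]
        offset = frequency * (int(observation_area[2:]) - 1)
    # assumes the sector offset does not roll past the end of the hour
    return observation_time.replace(hour=int(timeline[:2]),
                                    minute=int(timeline[2:4]) + offset // 60,
                                    second=offset % 60, microsecond=0)

# JP03 starts two 150 s slots after the 13:30 timeline -> 13:35:00
print(nominal_observation_time(datetime(2023, 1, 1, 13, 34, 57), "JP03", "1330"))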
""" - timeline = "{:04d}".format(self.basic_info['observation_timeline'][0]) + timeline = "{:04d}".format(self.basic_info["observation_timeline"][0]) if not self._is_valid_timeline(timeline): warnings.warn( "Observation timeline is fill value, not rounding observation time.", @@ -464,10 +464,10 @@ def _modify_observation_time_for_nominal(self, observation_time): ) return observation_time - if self.observation_area == 'FLDK': + if self.observation_area == "FLDK": dt = 0 else: - observation_frequency_seconds = {'JP': 150, 'R3': 150, 'R4': 30, 'R5': 30}[self.observation_area[:2]] + observation_frequency_seconds = {"JP": 150, "R3": 150, "R4": 30, "R5": 30}[self.observation_area[:2]] dt = observation_frequency_seconds * (int(self.observation_area[2:]) - 1) return observation_time.replace( @@ -490,25 +490,25 @@ def get_area_def(self, dsid): def _get_area_def(self): pdict = {} - pdict['cfac'] = np.uint32(self.proj_info['CFAC']) - pdict['lfac'] = np.uint32(self.proj_info['LFAC']) - pdict['coff'] = np.float32(self.proj_info['COFF']) - pdict['loff'] = -np.float32(self.proj_info['LOFF']) + 1 - pdict['a'] = float(self.proj_info['earth_equatorial_radius'] * 1000) - pdict['h'] = float(self.proj_info['distance_from_earth_center'] * 1000 - pdict['a']) - pdict['b'] = float(self.proj_info['earth_polar_radius'] * 1000) - pdict['ssp_lon'] = float(self.proj_info['sub_lon']) - pdict['nlines'] = int(self.data_info['number_of_lines']) - pdict['ncols'] = int(self.data_info['number_of_columns']) - pdict['scandir'] = 'N2S' - - pdict['loff'] = pdict['loff'] + (self.segment_number * pdict['nlines']) + pdict["cfac"] = np.uint32(self.proj_info["CFAC"]) + pdict["lfac"] = np.uint32(self.proj_info["LFAC"]) + pdict["coff"] = np.float32(self.proj_info["COFF"]) + pdict["loff"] = -np.float32(self.proj_info["LOFF"]) + 1 + pdict["a"] = float(self.proj_info["earth_equatorial_radius"] * 1000) + pdict["h"] = float(self.proj_info["distance_from_earth_center"] * 1000 - pdict["a"]) + pdict["b"] = float(self.proj_info["earth_polar_radius"] * 1000) + pdict["ssp_lon"] = float(self.proj_info["sub_lon"]) + pdict["nlines"] = int(self.data_info["number_of_lines"]) + pdict["ncols"] = int(self.data_info["number_of_columns"]) + pdict["scandir"] = "N2S" + + pdict["loff"] = pdict["loff"] + (self.segment_number * pdict["nlines"]) aex = get_area_extent(pdict) - pdict['a_name'] = self.observation_area - pdict['a_desc'] = "AHI {} area".format(self.observation_area) - pdict['p_id'] = f'geosh{self.basic_info["satellite"][0].decode()[-1]}' + pdict["a_name"] = self.observation_area + pdict["a_desc"] = "AHI {} area".format(self.observation_area) + pdict["p_id"] = f'geosh{self.basic_info["satellite"][0].decode()[-1]}' return get_area_definition(pdict, aex) @@ -526,112 +526,112 @@ def _read_header(self, fp_): header = {} fpos = 0 - header['block1'] = np.fromfile( + header["block1"] = np.fromfile( fp_, dtype=_BASIC_INFO_TYPE, count=1) - fpos = fpos + int(header['block1']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block1') + fpos = fpos + int(header["block1"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block1") fp_.seek(fpos, 0) header["block2"] = np.fromfile(fp_, dtype=_DATA_INFO_TYPE, count=1) - fpos = fpos + int(header['block2']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block2') + fpos = fpos + int(header["block2"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block2") fp_.seek(fpos, 0) header["block3"] = np.fromfile(fp_, dtype=_PROJ_INFO_TYPE, count=1) - fpos = fpos + int(header['block3']['blocklength']) - self._check_fpos(fp_, 
fpos, 0, 'block3') + fpos = fpos + int(header["block3"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block3") fp_.seek(fpos, 0) header["block4"] = np.fromfile(fp_, dtype=_NAV_INFO_TYPE, count=1) - fpos = fpos + int(header['block4']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block4') + fpos = fpos + int(header["block4"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block4") fp_.seek(fpos, 0) header["block5"] = np.fromfile(fp_, dtype=_CAL_INFO_TYPE, count=1) logger.debug("Band number = " + - str(header["block5"]['band_number'][0])) - logger.debug('Time_interval: %s - %s', + str(header["block5"]["band_number"][0])) + logger.debug("Time_interval: %s - %s", str(self.start_time), str(self.end_time)) - band_number = header["block5"]['band_number'][0] + band_number = header["block5"]["band_number"][0] if band_number < 7: cal = np.fromfile(fp_, dtype=_VISCAL_INFO_TYPE, count=1) else: cal = np.fromfile(fp_, dtype=_IRCAL_INFO_TYPE, count=1) - fpos = fpos + int(header['block5']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block5') + fpos = fpos + int(header["block5"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block5") fp_.seek(fpos, 0) - header['calibration'] = cal + header["calibration"] = cal header["block6"] = np.fromfile( fp_, dtype=_INTER_CALIBRATION_INFO_TYPE, count=1) - fpos = fpos + int(header['block6']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block6') + fpos = fpos + int(header["block6"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block6") fp_.seek(fpos, 0) header["block7"] = np.fromfile( fp_, dtype=_SEGMENT_INFO_TYPE, count=1) - fpos = fpos + int(header['block7']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block7') + fpos = fpos + int(header["block7"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block7") fp_.seek(fpos, 0) header["block8"] = np.fromfile( fp_, dtype=_NAVIGATION_CORRECTION_INFO_TYPE, count=1) # 8 The navigation corrections: - ncorrs = header["block8"]['numof_correction_info_data'][0] + ncorrs = header["block8"]["numof_correction_info_data"][0] corrections = [] for _i in range(ncorrs): corrections.append(np.fromfile(fp_, dtype=_NAVIGATION_CORRECTION_SUBINFO_TYPE, count=1)) - fpos = fpos + int(header['block8']['blocklength']) - self._check_fpos(fp_, fpos, 40, 'block8') + fpos = fpos + int(header["block8"]["blocklength"]) + self._check_fpos(fp_, fpos, 40, "block8") fp_.seek(fpos, 0) - header['navigation_corrections'] = corrections + header["navigation_corrections"] = corrections header["block9"] = np.fromfile(fp_, dtype=_OBSERVATION_TIME_INFO_TYPE, count=1) - numobstimes = header["block9"]['number_of_observation_times'][0] + numobstimes = header["block9"]["number_of_observation_times"][0] lines_and_times = [] for _i in range(numobstimes): lines_and_times.append(np.fromfile(fp_, dtype=_OBSERVATION_LINE_TIME_INFO_TYPE, count=1)) - header['observation_time_information'] = lines_and_times - fpos = fpos + int(header['block9']['blocklength']) - self._check_fpos(fp_, fpos, 40, 'block9') + header["observation_time_information"] = lines_and_times + fpos = fpos + int(header["block9"]["blocklength"]) + self._check_fpos(fp_, fpos, 40, "block9") fp_.seek(fpos, 0) header["block10"] = np.fromfile(fp_, dtype=_ERROR_INFO_TYPE, count=1) num_err_info_data = header["block10"][ - 'number_of_error_info_data'][0] + "number_of_error_info_data"][0] err_info_data = [] for _i in range(num_err_info_data): err_info_data.append(np.fromfile(fp_, dtype=_ERROR_LINE_INFO_TYPE, count=1)) - header['error_information_data'] = err_info_data - fpos = fpos + 
int(header['block10']['blocklength']) - self._check_fpos(fp_, fpos, 40, 'block10') + header["error_information_data"] = err_info_data + fpos = fpos + int(header["block10"]["blocklength"]) + self._check_fpos(fp_, fpos, 40, "block10") fp_.seek(fpos, 0) header["block11"] = np.fromfile(fp_, dtype=_SPARE_TYPE, count=1) - fpos = fpos + int(header['block11']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block11') + fpos = fpos + int(header["block11"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block11") fp_.seek(fpos, 0) return header def _read_data(self, fp_, header): """Read data block.""" - nlines = int(header["block2"]['number_of_lines'][0]) - ncols = int(header["block2"]['number_of_columns'][0]) + nlines = int(header["block2"]["number_of_lines"][0]) + ncols = int(header["block2"]["number_of_columns"][0]) chunks = da.core.normalize_chunks("auto", shape=(nlines, ncols), limit=get_chunk_size_limit(), - dtype='f8', + dtype="f8", previous_chunks=(550, 550)) return da.from_array(np.memmap(self.filename, offset=fp_.tell(), - dtype=' no temperature data = da.where(data == 0, np.float32(np.nan), data) - cwl = self._header['block5']["central_wave_length"][0] * 1e-6 - c__ = self._header['calibration']["speed_of_light"][0] - h__ = self._header['calibration']["planck_constant"][0] - k__ = self._header['calibration']["boltzmann_constant"][0] + cwl = self._header["block5"]["central_wave_length"][0] * 1e-6 + c__ = self._header["calibration"]["speed_of_light"][0] + h__ = self._header["calibration"]["planck_constant"][0] + k__ = self._header["calibration"]["boltzmann_constant"][0] a__ = (h__ * c__) / (k__ * cwl) b__ = ((2 * h__ * c__ ** 2) / (data * 1.0e6 * cwl ** 5)) + 1 Te_ = a__ / da.log(b__) - c0_ = self._header['calibration']["c0_rad2tb_conversion"][0] - c1_ = self._header['calibration']["c1_rad2tb_conversion"][0] - c2_ = self._header['calibration']["c2_rad2tb_conversion"][0] + c0_ = self._header["calibration"]["c0_rad2tb_conversion"][0] + c1_ = self._header["calibration"]["c1_rad2tb_conversion"][0] + c2_ = self._header["calibration"]["c2_rad2tb_conversion"][0] return (c0_ + c1_ * Te_ + c2_ * Te_ ** 2).clip(0) diff --git a/satpy/readers/ahi_l1b_gridded_bin.py b/satpy/readers/ahi_l1b_gridded_bin.py index 0270015950..33289aee11 100644 --- a/satpy/readers/ahi_l1b_gridded_bin.py +++ b/satpy/readers/ahi_l1b_gridded_bin.py @@ -47,32 +47,32 @@ CHUNK_SIZE = get_legacy_chunk_size() # Hardcoded address of the reflectance and BT look-up tables -AHI_REMOTE_LUTS = 'http://www.cr.chiba-u.jp/databases/GEO/H8_9/FD/count2tbb_v102.tgz' +AHI_REMOTE_LUTS = "http://www.cr.chiba-u.jp/databases/GEO/H8_9/FD/count2tbb_v102.tgz" # Full disk image sizes for each spatial resolution -AHI_FULLDISK_SIZES = {0.005: {'x_size': 24000, - 'y_size': 24000}, - 0.01: {'x_size': 12000, - 'y_size': 12000}, - 0.02: {'x_size': 6000, - 'y_size': 6000}} +AHI_FULLDISK_SIZES = {0.005: {"x_size": 24000, + "y_size": 24000}, + 0.01: {"x_size": 12000, + "y_size": 12000}, + 0.02: {"x_size": 6000, + "y_size": 6000}} # Geographic extent of the full disk area in degrees AHI_FULLDISK_EXTENT = [85., -60., 205., 60.] 
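The full-disk grid sizes and lon/lat extent above, together with the per-channel resolutions defined just below, are mutually consistent; a small, purely illustrative check of that relationship (120 degrees of longitude divided by the grid resolution gives the column count):

# Illustrative consistency check of the gridded AHI full-disk geometry.
extent = [85., -60., 205., 60.]           # lon_min, lat_min, lon_max, lat_max
sizes = {0.005: (24000, 24000), 0.01: (12000, 12000), 0.02: (6000, 6000)}

lon_span = extent[2] - extent[0]          # 120 degrees
lat_span = extent[3] - extent[1]          # 120 degrees
for res, (x_size, y_size) in sizes.items():
    assert x_size == round(lon_span / res)
    assert y_size == round(lat_span / res)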
# Resolutions of each channel type -AHI_CHANNEL_RES = {'vis': 0.01, - 'ext': 0.005, - 'sir': 0.02, - 'tir': 0.02} +AHI_CHANNEL_RES = {"vis": 0.01, + "ext": 0.005, + "sir": 0.02, + "tir": 0.02} # List of LUT filenames -AHI_LUT_NAMES = ['ext.01', 'vis.01', 'vis.02', 'vis.03', - 'sir.01', 'sir.02', 'tir.01', 'tir.02', - 'tir.03', 'tir.04', 'tir.05', 'tir.06', - 'tir.07', 'tir.08', 'tir.09', 'tir.10'] +AHI_LUT_NAMES = ["ext.01", "vis.01", "vis.02", "vis.03", + "sir.01", "sir.02", "tir.01", "tir.02", + "tir.03", "tir.04", "tir.05", "tir.06", + "tir.07", "tir.08", "tir.09", "tir.10"] -logger = logging.getLogger('ahi_grid') +logger = logging.getLogger("ahi_grid") class AHIGriddedFileHandler(BaseFileHandler): @@ -99,19 +99,19 @@ def __init__(self, filename, filename_info, filetype_info): # But if it is, set the filename to point to unzipped temp file self.filename = self._unzipped # Get the band name, needed for finding area and dimensions - self.product_name = filetype_info['file_type'] - self.areaname = filename_info['area'] - self.sensor = 'ahi' + self.product_name = filetype_info["file_type"] + self.areaname = filename_info["area"] + self.sensor = "ahi" self.res = AHI_CHANNEL_RES[self.product_name[:3]] - if self.areaname == 'fld': - self.nlines = AHI_FULLDISK_SIZES[self.res]['y_size'] - self.ncols = AHI_FULLDISK_SIZES[self.res]['x_size'] + if self.areaname == "fld": + self.nlines = AHI_FULLDISK_SIZES[self.res]["y_size"] + self.ncols = AHI_FULLDISK_SIZES[self.res]["x_size"] else: raise NotImplementedError("Only full disk data is supported.") # Set up directory path for the LUTs - app_dirs = AppDirs('ahi_gridded_luts', 'satpy', '1.0.2') - self.lut_dir = os.path.expanduser(app_dirs.user_data_dir) + '/' + app_dirs = AppDirs("ahi_gridded_luts", "satpy", "1.0.2") + self.lut_dir = os.path.expanduser(app_dirs.user_data_dir) + "/" self.area = None def __del__(self): @@ -149,7 +149,7 @@ def _download_luts(file_name): # Set up an connection and download with urllib.request.urlopen(AHI_REMOTE_LUTS) as response: # nosec - with open(file_name, 'wb') as out_file: + with open(file_name, "wb") as out_file: shutil.copyfileobj(response, out_file) @staticmethod @@ -174,14 +174,14 @@ def _get_luts(self): logger.info("Download AHI LUTs files and store in directory %s", self.lut_dir) tempdir = config["tmp_dir"] - fname = os.path.join(tempdir, 'tmp.tgz') + fname = os.path.join(tempdir, "tmp.tgz") # Download the LUTs self._download_luts(fname) # The file is tarred, untar and remove the downloaded file self._untar_luts(fname, tempdir) - lut_dl_dir = os.path.join(tempdir, 'count2tbb_v102/') + lut_dl_dir = os.path.join(tempdir, "count2tbb_v102/") # Loop over the LUTs and copy to the correct location for lutfile in AHI_LUT_NAMES: @@ -198,16 +198,16 @@ def get_area_def(self, dsid): This is fixed, but not defined in the file. So we must generate it ourselves with some assumptions. 
""" - if self.areaname == 'fld': + if self.areaname == "fld": area_extent = AHI_FULLDISK_EXTENT else: raise NotImplementedError("Reader only supports full disk data.") - proj_param = 'EPSG:4326' + proj_param = "EPSG:4326" - area = geometry.AreaDefinition('gridded_himawari', - 'A gridded Himawari area', - 'longlat', + area = geometry.AreaDefinition("gridded_himawari", + "A gridded Himawari area", + "longlat", proj_param, self.ncols, self.nlines, @@ -220,9 +220,9 @@ def _read_data(self, fp_): """Read raw binary data from file.""" return da.from_array(np.memmap(self.filename, offset=fp_.tell(), - dtype='>u2', + dtype=">u2", shape=(self.nlines, self.ncols), - mode='r'), + mode="r"), chunks=CHUNK_SIZE) def read_band(self, key, info): @@ -231,26 +231,26 @@ def read_band(self, key, info): res = self._read_data(fp_) # Calibrate - res = self.calibrate(res, key['calibration']) + res = self.calibrate(res, key["calibration"]) # Update metadata new_info = dict( - units=info['units'], - standard_name=info['standard_name'], - wavelength=info['wavelength'], - resolution=info['resolution'], + units=info["units"], + standard_name=info["standard_name"], + wavelength=info["wavelength"], + resolution=info["resolution"], id=key, - name=key['name'], + name=key["name"], sensor=self.sensor, ) - res = xr.DataArray(res, attrs=new_info, dims=['y', 'x']) + res = xr.DataArray(res, attrs=new_info, dims=["y", "x"]) return res def calibrate(self, data, calib): """Calibrate the data.""" - if calib == 'counts': + if calib == "counts": return data - if calib == 'reflectance' or calib == 'brightness_temperature': + if calib == "reflectance" or calib == "brightness_temperature": return self._calibrate(data) raise NotImplementedError("ERROR: Unsupported calibration.", "Only counts, reflectance and ", diff --git a/satpy/readers/ami_l1b.py b/satpy/readers/ami_l1b.py index 9adeaf76f1..db8c8444d8 100644 --- a/satpy/readers/ami_l1b.py +++ b/satpy/readers/ami_l1b.py @@ -36,8 +36,8 @@ CHUNK_SIZE = get_legacy_chunk_size() PLATFORM_NAMES = { - 'GK-2A': 'GEO-KOMPSAT-2A', - 'GK-2B': 'GEO-KOMPSAT-2B', + "GK-2A": "GEO-KOMPSAT-2A", + "GK-2B": "GEO-KOMPSAT-2B", } @@ -90,7 +90,7 @@ class AMIL1bNetCDF(BaseFileHandler): """ def __init__(self, filename, filename_info, filetype_info, - calib_mode='PYSPECTRAL', allow_conditional_pixels=False, + calib_mode="PYSPECTRAL", allow_conditional_pixels=False, user_calibration=None): """Open the NetCDF file with xarray and prepare the Dataset for reading.""" super(AMIL1bNetCDF, self).__init__(filename, filename_info, filetype_info) @@ -98,17 +98,17 @@ def __init__(self, filename, filename_info, filetype_info, self.nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, - chunks={'dim_image_x': CHUNK_SIZE, 'dim_image_y': CHUNK_SIZE}) - self.nc = self.nc.rename({'dim_image_x': 'x', 'dim_image_y': 'y'}) + chunks={"dim_image_x": CHUNK_SIZE, "dim_image_y": CHUNK_SIZE}) + self.nc = self.nc.rename({"dim_image_x": "x", "dim_image_y": "y"}) - platform_shortname = self.nc.attrs['satellite_name'] + platform_shortname = self.nc.attrs["satellite_name"] self.platform_name = PLATFORM_NAMES.get(platform_shortname) - self.sensor = 'ami' - self.band_name = filetype_info['file_type'].upper() + self.sensor = "ami" + self.band_name = filetype_info["file_type"].upper() self.allow_conditional_pixels = allow_conditional_pixels - calib_mode_choices = ('FILE', 'PYSPECTRAL', 'GSICS') + calib_mode_choices = ("FILE", "PYSPECTRAL", "GSICS") if calib_mode.upper() not in calib_mode_choices: - raise ValueError('Invalid calibration 
mode: {}. Choose one of {}'.format( + raise ValueError("Invalid calibration mode: {}. Choose one of {}".format( calib_mode, calib_mode_choices)) self.calib_mode = calib_mode.upper() @@ -118,36 +118,36 @@ def __init__(self, filename, filename_info, filetype_info, def start_time(self): """Get observation start time.""" base = datetime(2000, 1, 1, 12, 0, 0) - return base + timedelta(seconds=self.nc.attrs['observation_start_time']) + return base + timedelta(seconds=self.nc.attrs["observation_start_time"]) @property def end_time(self): """Get observation end time.""" base = datetime(2000, 1, 1, 12, 0, 0) - return base + timedelta(seconds=self.nc.attrs['observation_end_time']) + return base + timedelta(seconds=self.nc.attrs["observation_end_time"]) def get_area_def(self, dsid): """Get area definition for this file.""" pdict = {} - pdict['a'] = self.nc.attrs['earth_equatorial_radius'] - pdict['b'] = self.nc.attrs['earth_polar_radius'] - pdict['h'] = self.nc.attrs['nominal_satellite_height'] - pdict['a'] - pdict['ssp_lon'] = self.nc.attrs['sub_longitude'] * 180 / np.pi # it's in radians? - pdict['ncols'] = self.nc.attrs['number_of_columns'] - pdict['nlines'] = self.nc.attrs['number_of_lines'] - obs_mode = self.nc.attrs['observation_mode'] - resolution = self.nc.attrs['channel_spatial_resolution'] + pdict["a"] = self.nc.attrs["earth_equatorial_radius"] + pdict["b"] = self.nc.attrs["earth_polar_radius"] + pdict["h"] = self.nc.attrs["nominal_satellite_height"] - pdict["a"] + pdict["ssp_lon"] = self.nc.attrs["sub_longitude"] * 180 / np.pi # it's in radians? + pdict["ncols"] = self.nc.attrs["number_of_columns"] + pdict["nlines"] = self.nc.attrs["number_of_lines"] + obs_mode = self.nc.attrs["observation_mode"] + resolution = self.nc.attrs["channel_spatial_resolution"] # Example offset: 11000.5 # the 'get_area_extent' will handle this half pixel for us - pdict['cfac'] = self.nc.attrs['cfac'] - pdict['coff'] = self.nc.attrs['coff'] - pdict['lfac'] = -self.nc.attrs['lfac'] - pdict['loff'] = self.nc.attrs['loff'] - pdict['scandir'] = 'N2S' - pdict['a_name'] = 'ami_geos_{}'.format(obs_mode.lower()) - pdict['a_desc'] = 'AMI {} Area at {} resolution'.format(obs_mode, resolution) - pdict['p_id'] = 'ami_fixed_grid' + pdict["cfac"] = self.nc.attrs["cfac"] + pdict["coff"] = self.nc.attrs["coff"] + pdict["lfac"] = -self.nc.attrs["lfac"] + pdict["loff"] = self.nc.attrs["loff"] + pdict["scandir"] = "N2S" + pdict["a_name"] = "ami_geos_{}".format(obs_mode.lower()) + pdict["a_desc"] = "AMI {} Area at {} resolution".format(obs_mode, resolution) + pdict["p_id"] = "ami_fixed_grid" area_extent = get_area_extent(pdict) fg_area_def = get_area_definition(pdict, area_extent) @@ -155,12 +155,12 @@ def get_area_def(self, dsid): def get_orbital_parameters(self): """Collect orbital parameters for this file.""" - a = float(self.nc.attrs['earth_equatorial_radius']) - b = float(self.nc.attrs['earth_polar_radius']) + a = float(self.nc.attrs["earth_equatorial_radius"]) + b = float(self.nc.attrs["earth_polar_radius"]) # nominal_satellite_height seems to be from the center of the earth - h = float(self.nc.attrs['nominal_satellite_height']) - a - lon_0 = self.nc.attrs['sub_longitude'] * 180 / np.pi # it's in radians? - sc_position = self.nc['sc_position'].attrs['sc_position_center_pixel'] + h = float(self.nc.attrs["nominal_satellite_height"]) - a + lon_0 = self.nc.attrs["sub_longitude"] * 180 / np.pi # it's in radians? 
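The actual satellite position is carried in the file as an ECEF vector and converted to longitude, latitude and altitude with pyproj, as the next few lines show. A self-contained version of that conversion with made-up ellipsoid radii and a roughly geostationary position vector:

import pyproj

a, b = 6378137.0, 6356752.3           # illustrative equatorial/polar radii in metres
sc_position = (42164000.0, 0.0, 0.0)  # example ECEF x, y, z in metres

ecef = pyproj.CRS.from_dict({"proj": "geocent", "a": a, "b": b})
lla = pyproj.CRS.from_dict({"proj": "latlong", "a": a, "b": b})
transformer = pyproj.Transformer.from_crs(ecef, lla)
lon, lat, alt = transformer.transform(*sc_position)
print(lon, lat, alt)                  # about 0.0, 0.0 and ~3.58e7 m altitude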
+ sc_position = self.nc["sc_position"].attrs["sc_position_center_pixel"] # convert ECEF coordinates to lon, lat, alt ecef = pyproj.CRS.from_dict({"proj": "geocent", "a": a, "b": b}) @@ -169,18 +169,18 @@ def get_orbital_parameters(self): sc_position = transformer.transform(sc_position[0], sc_position[1], sc_position[2]) orbital_parameters = { - 'projection_longitude': float(lon_0), - 'projection_latitude': 0.0, - 'projection_altitude': h, - 'satellite_actual_longitude': sc_position[0], - 'satellite_actual_latitude': sc_position[1], - 'satellite_actual_altitude': sc_position[2], # meters + "projection_longitude": float(lon_0), + "projection_latitude": 0.0, + "projection_altitude": h, + "satellite_actual_longitude": sc_position[0], + "satellite_actual_latitude": sc_position[1], + "satellite_actual_altitude": sc_position[2], # meters } return orbital_parameters def get_dataset(self, dataset_id, ds_info): """Load a dataset as a xarray DataArray.""" - file_key = ds_info.get('file_key', dataset_id['name']) + file_key = ds_info.get("file_key", dataset_id["name"]) data = self.nc[file_key] # hold on to attributes for later attrs = data.attrs @@ -195,47 +195,47 @@ def get_dataset(self, dataset_id, ds_info): qf = data & 0b1100000000000000 # mask DQF bits - bits = attrs['number_of_valid_bits_per_pixel'] + bits = attrs["number_of_valid_bits_per_pixel"] data &= 2**bits - 1 # only take "no error" pixels as valid data = data.where(qf == 0) # Calibration values from file, fall back to built-in if unavailable - gain = self.nc.attrs['DN_to_Radiance_Gain'] - offset = self.nc.attrs['DN_to_Radiance_Offset'] + gain = self.nc.attrs["DN_to_Radiance_Gain"] + offset = self.nc.attrs["DN_to_Radiance_Offset"] - if dataset_id['calibration'] in ('radiance', 'reflectance', 'brightness_temperature'): + if dataset_id["calibration"] in ("radiance", "reflectance", "brightness_temperature"): data = gain * data + offset - if self.calib_mode == 'GSICS': + if self.calib_mode == "GSICS": data = self._apply_gsics_rad_correction(data) elif isinstance(self.user_calibration, dict): data = self._apply_user_rad_correction(data) - if dataset_id['calibration'] == 'reflectance': + if dataset_id["calibration"] == "reflectance": # depends on the radiance calibration above - rad_to_alb = self.nc.attrs['Radiance_to_Albedo_c'] - if ds_info.get('units') == '%': + rad_to_alb = self.nc.attrs["Radiance_to_Albedo_c"] + if ds_info.get("units") == "%": rad_to_alb *= 100 data = data * rad_to_alb - elif dataset_id['calibration'] == 'brightness_temperature': + elif dataset_id["calibration"] == "brightness_temperature": data = self._calibrate_ir(dataset_id, data) - elif dataset_id['calibration'] not in ('counts', 'radiance'): - raise ValueError("Unknown calibration: '{}'".format(dataset_id['calibration'])) + elif dataset_id["calibration"] not in ("counts", "radiance"): + raise ValueError("Unknown calibration: '{}'".format(dataset_id["calibration"])) - for attr_name in ('standard_name', 'units'): + for attr_name in ("standard_name", "units"): attrs[attr_name] = ds_info[attr_name] attrs.update(dataset_id.to_dict()) - attrs['orbital_parameters'] = self.get_orbital_parameters() - attrs['platform_name'] = self.platform_name - attrs['sensor'] = self.sensor + attrs["orbital_parameters"] = self.get_orbital_parameters() + attrs["platform_name"] = self.platform_name + attrs["sensor"] = self.sensor data.attrs = attrs return data def _calibrate_ir(self, dataset_id, data): """Calibrate radiance data to BTs using either pyspectral or in-file coefficients.""" - if 
self.calib_mode == 'PYSPECTRAL': + if self.calib_mode == "PYSPECTRAL": # depends on the radiance calibration above # Convert um to m^-1 (SI units for pyspectral) - wn = 1 / (dataset_id['wavelength'][1] / 1e6) + wn = 1 / (dataset_id["wavelength"][1] / 1e6) # Convert cm^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. bt_data = rad2temp(wn, data.data * 1e-5) @@ -248,17 +248,17 @@ def _calibrate_ir(self, dataset_id, data): else: # IR coefficients from the file # Channel specific - c0 = self.nc.attrs['Teff_to_Tbb_c0'] - c1 = self.nc.attrs['Teff_to_Tbb_c1'] - c2 = self.nc.attrs['Teff_to_Tbb_c2'] + c0 = self.nc.attrs["Teff_to_Tbb_c0"] + c1 = self.nc.attrs["Teff_to_Tbb_c1"] + c2 = self.nc.attrs["Teff_to_Tbb_c2"] # These should be fixed, but load anyway - cval = self.nc.attrs['light_speed'] - kval = self.nc.attrs['Boltzmann_constant_k'] - hval = self.nc.attrs['Plank_constant_h'] + cval = self.nc.attrs["light_speed"] + kval = self.nc.attrs["Boltzmann_constant_k"] + hval = self.nc.attrs["Plank_constant_h"] # Compute wavenumber as cm-1 - wn = (10000 / dataset_id['wavelength'][1]) * 100 + wn = (10000 / dataset_id["wavelength"][1]) * 100 # Convert radiance to effective brightness temperature e1 = (2 * hval * cval * cval) * np.power(wn, 3) e2 = (data.data * 1e-5) @@ -271,8 +271,8 @@ def _calibrate_ir(self, dataset_id, data): def _apply_gsics_rad_correction(self, data): """Retrieve GSICS factors from L1 file and apply to radiance.""" - rad_slope = self.nc['gsics_coeff_slope'][0] - rad_offset = self.nc['gsics_coeff_intercept'][0] + rad_slope = self.nc["gsics_coeff_slope"][0] + rad_offset = self.nc["gsics_coeff_intercept"][0] data = apply_rad_correction(data, rad_slope, rad_offset) return data diff --git a/satpy/readers/amsr2_l1b.py b/satpy/readers/amsr2_l1b.py index bd3a35c05d..29778c5f0d 100644 --- a/satpy/readers/amsr2_l1b.py +++ b/satpy/readers/amsr2_l1b.py @@ -25,8 +25,8 @@ class AMSR2L1BFileHandler(HDF5FileHandler): def get_metadata(self, ds_id, ds_info): """Get the metadata.""" - var_path = ds_info['file_key'] - info = getattr(self[var_path], 'attrs', {}) + var_path = ds_info["file_key"] + info = getattr(self[var_path], "attrs", {}) info.update(ds_info) info.update({ "shape": self.get_shape(ds_id, ds_info), @@ -41,23 +41,23 @@ def get_metadata(self, ds_id, ds_info): def get_shape(self, ds_id, ds_info): """Get output shape of specified dataset.""" - var_path = ds_info['file_key'] - shape = self[var_path + '/shape'] - if ((ds_info.get('standard_name') == "longitude" or ds_info.get('standard_name') == "latitude") and - ds_id['resolution'] == 10000): + var_path = ds_info["file_key"] + shape = self[var_path + "/shape"] + if ((ds_info.get("standard_name") == "longitude" or ds_info.get("standard_name") == "latitude") and + ds_id["resolution"] == 10000): return shape[0], int(shape[1] / 2) return shape def get_dataset(self, ds_id, ds_info): """Get output data and metadata of specified dataset.""" - var_path = ds_info['file_key'] - fill_value = ds_info.get('fill_value', 65535) + var_path = ds_info["file_key"] + fill_value = ds_info.get("fill_value", 65535) metadata = self.get_metadata(ds_id, ds_info) data = self[var_path] - if ((ds_info.get('standard_name') == "longitude" or - ds_info.get('standard_name') == "latitude") and - ds_id['resolution'] == 10000): + if ((ds_info.get("standard_name") == "longitude" or + ds_info.get("standard_name") == "latitude") and + ds_id["resolution"] == 10000): # FIXME: Lower frequency channels need CoRegistration parameters applied 
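The branch above selects the 10 km longitude/latitude datasets; the line that follows keeps every second field of view of the 5 km coordinate arrays and applies the stored scale factor. A toy version of that step, with made-up shapes and scale factor:

import numpy as np

lon_counts = np.arange(12, dtype=np.int32).reshape(2, 6)  # (scans, hi-res FOVs), scaled integers
scale_factor = 0.01

lon_10km = lon_counts[:, ::2] * scale_factor              # keep every other FOV, apply SCALE FACTOR
assert lon_10km.shape == (2, 3)
print(lon_10km)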
data = data[:, ::2] * self[var_path + "/attr/SCALE FACTOR"] else: diff --git a/satpy/readers/amsr2_l2.py b/satpy/readers/amsr2_l2.py index f241861c22..0797ad5bbd 100644 --- a/satpy/readers/amsr2_l2.py +++ b/satpy/readers/amsr2_l2.py @@ -25,7 +25,7 @@ class AMSR2L2FileHandler(AMSR2L1BFileHandler): def mask_dataset(self, ds_info, data): """Mask data with the fill value.""" - fill_value = ds_info.get('fill_value', 65535) + fill_value = ds_info.get("fill_value", 65535) return data.where(data != fill_value) def scale_dataset(self, var_path, data): @@ -34,14 +34,14 @@ def scale_dataset(self, var_path, data): def get_dataset(self, ds_id, ds_info): """Get output data and metadata of specified dataset.""" - var_path = ds_info['file_key'] + var_path = ds_info["file_key"] data = self[var_path].squeeze() data = self.mask_dataset(ds_info, data) data = self.scale_dataset(var_path, data) - if ds_info.get('name') == "ssw": - data = data.rename({'dim_0': 'y', 'dim_1': 'x'}) + if ds_info.get("name") == "ssw": + data = data.rename({"dim_0": "y", "dim_1": "x"}) metadata = self.get_metadata(ds_id, ds_info) data.attrs.update(metadata) return data diff --git a/satpy/readers/amsr2_l2_gaasp.py b/satpy/readers/amsr2_l2_gaasp.py index 5f91e2d965..54a3769747 100644 --- a/satpy/readers/amsr2_l2_gaasp.py +++ b/satpy/readers/amsr2_l2_gaasp.py @@ -58,19 +58,19 @@ class GAASPFileHandler(BaseFileHandler): """Generic file handler for GAASP output files.""" y_dims: Tuple[str, ...] = ( - 'Number_of_Scans', + "Number_of_Scans", ) x_dims: Tuple[str, ...] = ( - 'Number_of_hi_rez_FOVs', - 'Number_of_low_rez_FOVs', + "Number_of_hi_rez_FOVs", + "Number_of_low_rez_FOVs", ) time_dims = ( - 'Time_Dimension', + "Time_Dimension", ) is_gridded = False dim_resolutions = { - 'Number_of_hi_rez_FOVs': 5000, - 'Number_of_low_rez_FOVs': 10000, + "Number_of_hi_rez_FOVs": 5000, + "Number_of_low_rez_FOVs": 10000, } @cached_property @@ -84,39 +84,39 @@ def nc(self): chunks=chunks) if len(self.time_dims) == 1: - nc = nc.rename({self.time_dims[0]: 'time'}) + nc = nc.rename({self.time_dims[0]: "time"}) return nc @property def start_time(self): """Get start time of observation.""" try: - return self.filename_info['start_time'] + return self.filename_info["start_time"] except KeyError: - time_str = self.nc.attrs['time_coverage_start'] + time_str = self.nc.attrs["time_coverage_start"] return datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get end time of observation.""" try: - return self.filename_info['end_time'] + return self.filename_info["end_time"] except KeyError: - time_str = self.nc.attrs['time_coverage_end'] + time_str = self.nc.attrs["time_coverage_end"] return datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ") @property def sensor_names(self): """Sensors who have data in this file.""" - return {self.nc.attrs['instrument_name'].lower()} + return {self.nc.attrs["instrument_name"].lower()} @property def platform_name(self): """Name of the platform whose data is stored in this file.""" - return self.nc.attrs['platform_name'] + return self.nc.attrs["platform_name"] def _get_var_name_without_suffix(self, var_name): - var_suffix = self.filetype_info.get('var_suffix', "") + var_suffix = self.filetype_info.get("var_suffix", "") if var_suffix: var_name = var_name[:-len(var_suffix)] return var_name @@ -124,8 +124,8 @@ def _get_var_name_without_suffix(self, var_name): def _scale_data(self, data_arr, attrs): # handle scaling # take special care for integer/category fields - scale_factor = attrs.pop('scale_factor', 
1.) - add_offset = attrs.pop('add_offset', 0.) + scale_factor = attrs.pop("scale_factor", 1.) + add_offset = attrs.pop("add_offset", 0.) scaling_needed = not (scale_factor == 1 and add_offset == 0) if scaling_needed: data_arr = data_arr * scale_factor + add_offset @@ -138,19 +138,19 @@ def _nan_for_dtype(data_arr_dtype): if data_arr_dtype.type == np.float32: return np.float32(np.nan) if np.issubdtype(data_arr_dtype, np.timedelta64): - return np.timedelta64('NaT') + return np.timedelta64("NaT") if np.issubdtype(data_arr_dtype, np.datetime64): - return np.datetime64('NaT') + return np.datetime64("NaT") return np.nan def _fill_data(self, data_arr, attrs): - fill_value = attrs.pop('_FillValue', None) + fill_value = attrs.pop("_FillValue", None) is_int = np.issubdtype(data_arr.dtype, np.integer) - has_flag_comment = 'comment' in attrs + has_flag_comment = "comment" in attrs if is_int and has_flag_comment: # category product fill_out = fill_value - attrs['_FillValue'] = fill_out + attrs["_FillValue"] = fill_out else: fill_out = self._nan_for_dtype(data_arr.dtype) if fill_value is not None: @@ -159,19 +159,19 @@ def _fill_data(self, data_arr, attrs): def get_dataset(self, dataid, ds_info): """Load, scale, and collect metadata for the specified DataID.""" - orig_var_name = self._get_var_name_without_suffix(dataid['name']) + orig_var_name = self._get_var_name_without_suffix(dataid["name"]) data_arr = self.nc[orig_var_name].copy() attrs = data_arr.attrs.copy() data_arr, attrs = self._scale_data(data_arr, attrs) data_arr, attrs = self._fill_data(data_arr, attrs) attrs.update({ - 'platform_name': self.platform_name, - 'sensor': sorted(self.sensor_names)[0], - 'start_time': self.start_time, - 'end_time': self.end_time, + "platform_name": self.platform_name, + "sensor": sorted(self.sensor_names)[0], + "start_time": self.start_time, + "end_time": self.end_time, }) - dim_map = dict(zip(data_arr.dims, ('y', 'x'))) + dim_map = dict(zip(data_arr.dims, ("y", "x"))) # rename dims data_arr = data_arr.rename(**dim_map) # drop coords, the base reader will recreate these @@ -187,27 +187,27 @@ def _available_if_this_file_type(self, configured_datasets): # file handler so let's yield early yield is_avail, ds_info continue - yield self.file_type_matches(ds_info['file_type']), ds_info + yield self.file_type_matches(ds_info["file_type"]), ds_info def _add_lonlat_coords(self, data_arr, ds_info): lat_coord = None lon_coord = None for coord_name in data_arr.coords: - if 'longitude' in coord_name.lower(): + if "longitude" in coord_name.lower(): lon_coord = coord_name - if 'latitude' in coord_name.lower(): + if "latitude" in coord_name.lower(): lat_coord = coord_name - ds_info['coordinates'] = [lon_coord, lat_coord] + ds_info["coordinates"] = [lon_coord, lat_coord] def _get_ds_info_for_data_arr(self, var_name, data_arr): - var_suffix = self.filetype_info.get('var_suffix', "") + var_suffix = self.filetype_info.get("var_suffix", "") ds_info = { - 'file_type': self.filetype_info['file_type'], - 'name': var_name + var_suffix, + "file_type": self.filetype_info["file_type"], + "name": var_name + var_suffix, } x_dim_name = data_arr.dims[1] if x_dim_name in self.dim_resolutions: - ds_info['resolution'] = self.dim_resolutions[x_dim_name] + ds_info["resolution"] = self.dim_resolutions[x_dim_name] if not self.is_gridded and data_arr.coords: self._add_lonlat_coords(data_arr, ds_info) return ds_info @@ -245,13 +245,13 @@ class GAASPGriddedFileHandler(GAASPFileHandler): """GAASP file handler for gridded products like SEAICE.""" y_dims = 
( - 'Number_of_Y_Dimension', + "Number_of_Y_Dimension", ) x_dims = ( - 'Number_of_X_Dimension', + "Number_of_X_Dimension", ) dim_resolutions = { - 'Number_of_X_Dimension': 10000, + "Number_of_X_Dimension": 10000, } is_gridded = True @@ -266,12 +266,12 @@ def _get_extents(data_shape, res): def get_area_def(self, dataid): """Create area definition for equirectangular projected data.""" - var_suffix = self.filetype_info.get('var_suffix', '') - area_name = 'gaasp{}'.format(var_suffix) - orig_var_name = self._get_var_name_without_suffix(dataid['name']) + var_suffix = self.filetype_info.get("var_suffix", "") + area_name = "gaasp{}".format(var_suffix) + orig_var_name = self._get_var_name_without_suffix(dataid["name"]) data_shape = self.nc[orig_var_name].shape - crs = CRS(self.filetype_info['grid_epsg']) - res = dataid['resolution'] + crs = CRS(self.filetype_info["grid_epsg"]) + res = dataid["resolution"] extent = self._get_extents(data_shape, res) area_def = AreaDefinition( area_name, @@ -289,8 +289,8 @@ class GAASPLowResFileHandler(GAASPFileHandler): """GAASP file handler for files that only have low resolution products.""" x_dims = ( - 'Number_of_low_rez_FOVs', + "Number_of_low_rez_FOVs", ) dim_resolutions = { - 'Number_of_low_rez_FOVs': 10000, + "Number_of_low_rez_FOVs": 10000, } diff --git a/satpy/readers/ascat_l2_soilmoisture_bufr.py b/satpy/readers/ascat_l2_soilmoisture_bufr.py index c1a974807d..a5f77fd7eb 100644 --- a/satpy/readers/ascat_l2_soilmoisture_bufr.py +++ b/satpy/readers/ascat_l2_soilmoisture_bufr.py @@ -38,7 +38,7 @@ from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size -logger = logging.getLogger('AscatSoilMoistureBufr') +logger = logging.getLogger("AscatSoilMoistureBufr") CHUNK_SIZE = get_legacy_chunk_size() @@ -53,34 +53,34 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): start_time, end_time = self.get_start_end_date() self.metadata = {} - self.metadata['start_time'] = start_time - self.metadata['end_time'] = end_time + self.metadata["start_time"] = start_time + self.metadata["end_time"] = end_time @property def start_time(self): """Return the start time of data acqusition.""" - return self.metadata['start_time'] + return self.metadata["start_time"] @property def end_time(self): """Return the end time of data acquisition.""" - return self.metadata['end_time'] + return self.metadata["end_time"] @property def platform_name(self): """Return spacecraft name.""" - return self.filename_info['platform'] + return self.filename_info["platform"] def extract_msg_date_extremes(self, bufr, date_min=None, date_max=None): """Extract the minimum and maximum dates from a single bufr message.""" - ec.codes_set(bufr, 'unpack', 1) - size = ec.codes_get(bufr, 'numberOfSubsets') - years = np.resize(ec.codes_get_array(bufr, 'year'), size) - months = np.resize(ec.codes_get_array(bufr, 'month'), size) - days = np.resize(ec.codes_get_array(bufr, 'day'), size) - hours = np.resize(ec.codes_get_array(bufr, 'hour'), size) - minutes = np.resize(ec.codes_get_array(bufr, 'minute'), size) - seconds = np.resize(ec.codes_get_array(bufr, 'second'), size) + ec.codes_set(bufr, "unpack", 1) + size = ec.codes_get(bufr, "numberOfSubsets") + years = np.resize(ec.codes_get_array(bufr, "year"), size) + months = np.resize(ec.codes_get_array(bufr, "month"), size) + days = np.resize(ec.codes_get_array(bufr, "day"), size) + hours = np.resize(ec.codes_get_array(bufr, "hour"), size) + minutes = np.resize(ec.codes_get_array(bufr, "minute"), size) + 
seconds = np.resize(ec.codes_get_array(bufr, "second"), size) for year, month, day, hour, minute, second in zip(years, months, days, hours, minutes, seconds): time_stamp = datetime(year, month, day, hour, minute, second) date_min = time_stamp if not date_min else min(date_min, time_stamp) @@ -89,7 +89,7 @@ def extract_msg_date_extremes(self, bufr, date_min=None, date_max=None): def get_start_end_date(self): """Get the first and last date from the bufr file.""" - with open(self.filename, 'rb') as fh: + with open(self.filename, "rb") as fh: date_min = None date_max = None while True: @@ -103,16 +103,16 @@ def get_start_end_date(self): def get_bufr_data(self, key): """Get BUFR data by key.""" attr = np.array([]) - with open(self.filename, 'rb') as fh: + with open(self.filename, "rb") as fh: while True: # get handle for message bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break - ec.codes_set(bufr, 'unpack', 1) + ec.codes_set(bufr, "unpack", 1) tmp = ec.codes_get_array(bufr, key, float) if len(tmp) == 1: - size = ec.codes_get(bufr, 'numberOfSubsets') + size = ec.codes_get(bufr, "numberOfSubsets") tmp = np.resize(tmp, size) attr = np.append(attr, tmp) ec.codes_release(bufr) @@ -120,12 +120,12 @@ def get_bufr_data(self, key): def get_dataset(self, dataset_id, dataset_info): """Get dataset using the BUFR key in dataset_info.""" - arr = self.get_bufr_data(dataset_info['key']) - if 'fill_value' in dataset_info: - arr[arr == dataset_info['fill_value']] = np.nan + arr = self.get_bufr_data(dataset_info["key"]) + if "fill_value" in dataset_info: + arr[arr == dataset_info["fill_value"]] = np.nan arr = da.from_array(arr, chunks=CHUNK_SIZE) - xarr = xr.DataArray(arr, dims=["y"], name=dataset_info['name']) - xarr.attrs['platform_name'] = self.platform_name + xarr = xr.DataArray(arr, dims=["y"], name=dataset_info["name"]) + xarr.attrs["platform_name"] = self.platform_name xarr.attrs.update(dataset_info) return xarr diff --git a/satpy/readers/atms_l1b_nc.py b/satpy/readers/atms_l1b_nc.py index 1ea61fe92c..95d48b81cd 100644 --- a/satpy/readers/atms_l1b_nc.py +++ b/satpy/readers/atms_l1b_nc.py @@ -28,7 +28,7 @@ logger = logging.getLogger(__name__) -DATE_FMT = '%Y-%m-%dT%H:%M:%SZ' +DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" class AtmsL1bNCFileHandler(NetCDF4FileHandler): @@ -43,12 +43,12 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): @property def start_time(self): """Get observation start time.""" - return datetime.strptime(self['/attr/time_coverage_start'], DATE_FMT) + return datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT) @property def end_time(self): """Get observation end time.""" - return datetime.strptime(self['/attr/time_coverage_end'], DATE_FMT) + return datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT) @property def platform_name(self): @@ -113,8 +113,8 @@ def _select_dataset(self, name): def get_dataset(self, dataset_id, ds_info): """Get dataset.""" - name = dataset_id['name'] - logger.debug(f'Reading in file to get dataset with name {name}.') + name = dataset_id["name"] + logger.debug(f"Reading in file to get dataset with name {name}.") dataset = self._select_dataset(name) dataset = self._merge_attributes(dataset, ds_info) dataset = self._drop_coords(dataset) diff --git a/satpy/readers/atms_sdr_hdf5.py b/satpy/readers/atms_sdr_hdf5.py index 26fd3d58e5..7f2d43bd71 100644 --- a/satpy/readers/atms_sdr_hdf5.py +++ b/satpy/readers/atms_sdr_hdf5.py @@ -46,8 +46,8 @@ LOG = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() 
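The ATMS SDR brightness-temperature arrays are three-dimensional (scan, field of view, channel), so the handler picks a single channel by its position in the channel-name list defined just below. A toy illustration of that indexing, with fake data and shapes chosen only for the example:

import numpy as np

ATMS_CHANNEL_NAMES = [str(i) for i in range(1, 23)]
bt_all_channels = np.zeros((12, 96, 22))       # (scans, fields of view, channels), fake data

ch_index = ATMS_CHANNEL_NAMES.index("16")      # channel "16" -> index 15
bt_ch16 = bt_all_channels[:, :, ch_index]
assert bt_ch16.shape == (12, 96)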
-ATMS_CHANNEL_NAMES = ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', - '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22'] +ATMS_CHANNEL_NAMES = ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", + "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22"] class ATMS_SDR_FileHandler(JPSS_SDR_FileHandler): @@ -55,18 +55,18 @@ class ATMS_SDR_FileHandler(JPSS_SDR_FileHandler): def __init__(self, filename, filename_info, filetype_info, **kwargs): """Initialize file handler.""" - self.datasets = os.path.basename(filename).split('_')[0].split('-') + self.datasets = os.path.basename(filename).split("_")[0].split("-") super().__init__(filename, filename_info, filetype_info, **kwargs) def __getitem__(self, key): """Get item for given key.""" val = self.file_content[key] if isinstance(val, h5py.Dataset): - dset = h5py.File(self.filename, 'r')[key] + dset = h5py.File(self.filename, "r")[key] if dset.ndim == 3: dset_data = da.from_array(dset, chunks=CHUNK_SIZE) attrs = self._attrs_cache.get(key, dset.attrs) - return xr.DataArray(dset_data, dims=['y', 'x', 'z'], attrs=attrs) + return xr.DataArray(dset_data, dims=["y", "x", "z"], attrs=attrs) return super().__getitem__(key) @@ -78,11 +78,11 @@ def _get_atms_channel_index(self, ch_name): return None def _get_scans_per_granule(self, dataset_group): - number_of_granules_path = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules' + number_of_granules_path = "Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules" nb_granules_path = number_of_granules_path.format(dataset_group=DATASET_KEYS[dataset_group]) scans = [] for granule in range(self[nb_granules_path]): - scans_path = 'Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans' + scans_path = "Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans" scans_path = scans_path.format(dataset_group=DATASET_KEYS[dataset_group], granule=granule) scans.append(self[scans_path]) return scans @@ -99,15 +99,15 @@ def get_dataset(self, dataset_id, ds_info): scans actually sensed of course. 
""" - dataset_group = [ds_group for ds_group in ds_info['dataset_groups'] if ds_group in self.datasets] + dataset_group = [ds_group for ds_group in ds_info["dataset_groups"] if ds_group in self.datasets] if not dataset_group: return dataset_group = dataset_group[0] - ds_info['dataset_group'] = dataset_group + ds_info["dataset_group"] = dataset_group var_path = self._generate_file_key(dataset_id, ds_info) - ch_index = self._get_atms_channel_index(ds_info['name']) + ch_index = self._get_atms_channel_index(ds_info["name"]) data = self.concatenate_dataset(dataset_group, var_path, channel_index=ch_index) data = self.mask_fill_values(data, ds_info) diff --git a/satpy/readers/avhrr_l1b_gaclac.py b/satpy/readers/avhrr_l1b_gaclac.py index e520b29b30..c566175b8c 100644 --- a/satpy/readers/avhrr_l1b_gaclac.py +++ b/satpy/readers/avhrr_l1b_gaclac.py @@ -53,8 +53,8 @@ AVHRR3_CHANNEL_NAMES = {"1": 0, "2": 1, "3A": 2, "3B": 3, "4": 4, "5": 5} AVHRR2_CHANNEL_NAMES = {"1": 0, "2": 1, "3": 2, "4": 3, "5": 4} AVHRR_CHANNEL_NAMES = {"1": 0, "2": 1, "3": 2, "4": 3} -ANGLES = ('sensor_zenith_angle', 'sensor_azimuth_angle', 'solar_zenith_angle', - 'solar_azimuth_angle', 'sun_sensor_azimuth_difference_angle') +ANGLES = ("sensor_zenith_angle", "sensor_azimuth_angle", "solar_zenith_angle", + "solar_azimuth_angle", "sun_sensor_azimuth_difference_angle") class GACLACFile(BaseFileHandler): @@ -84,7 +84,7 @@ def __init__(self, filename, filename_info, filetype_info, self.strip_invalid_coords = strip_invalid_coords self.interpolate_coords = interpolate_coords self.reader_kwargs = reader_kwargs - self.creation_site = filename_info.get('creation_site') + self.creation_site = filename_info.get("creation_site") self.reader = None self.calib_channels = None self.counts = None @@ -92,34 +92,34 @@ def __init__(self, filename, filename_info, filetype_info, self.qual_flags = None self.first_valid_lat = None self.last_valid_lat = None - self._start_time = filename_info['start_time'] - self._end_time = datetime.combine(filename_info['start_time'].date(), - filename_info['end_time'].time()) + self._start_time = filename_info["start_time"] + self._end_time = datetime.combine(filename_info["start_time"].date(), + filename_info["end_time"].time()) if self._end_time < self._start_time: self._end_time += timedelta(days=1) - self.platform_id = filename_info['platform_id'] - if self.platform_id in ['NK', 'NL', 'NM', 'NN', 'NP', 'M1', 'M2', - 'M3']: - if filename_info.get('transfer_mode') == 'GHRR': + self.platform_id = filename_info["platform_id"] + if self.platform_id in ["NK", "NL", "NM", "NN", "NP", "M1", "M2", + "M3"]: + if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACKLMReader else: self.reader_class = LACKLMReader self.chn_dict = AVHRR3_CHANNEL_NAMES - self.sensor = 'avhrr-3' - elif self.platform_id in ['NC', 'ND', 'NF', 'NH', 'NJ']: - if filename_info.get('transfer_mode') == 'GHRR': + self.sensor = "avhrr-3" + elif self.platform_id in ["NC", "ND", "NF", "NH", "NJ"]: + if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACPODReader else: self.reader_class = LACPODReader self.chn_dict = AVHRR2_CHANNEL_NAMES - self.sensor = 'avhrr-2' + self.sensor = "avhrr-2" else: - if filename_info.get('transfer_mode') == 'GHRR': + if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACPODReader else: self.reader_class = LACPODReader self.chn_dict = AVHRR_CHANNEL_NAMES - self.sensor = 'avhrr' + self.sensor = "avhrr" self.filename_info = filename_info def read_raw_data(self): @@ -131,43 
+131,43 @@ def read_raw_data(self): **self.reader_kwargs) self.reader.read(self.filename) if np.all(self.reader.mask): - raise ValueError('All data is masked out') + raise ValueError("All data is masked out") def get_dataset(self, key, info): """Get the dataset.""" self.read_raw_data() - if key['name'] in ['latitude', 'longitude']: + if key["name"] in ["latitude", "longitude"]: # Lats/lons are buffered by the reader - if key['name'] == 'latitude': + if key["name"] == "latitude": _, data = self.reader.get_lonlat() else: data, _ = self.reader.get_lonlat() # If coordinate interpolation is disabled, only every eighth # pixel has a lat/lon coordinate - xdim = 'x' if self.interpolate_coords else 'x_every_eighth' + xdim = "x" if self.interpolate_coords else "x_every_eighth" xcoords = None - elif key['name'] in ANGLES: + elif key["name"] in ANGLES: data = self._get_angle(key) - xdim = 'x' if self.interpolate_coords else 'x_every_eighth' + xdim = "x" if self.interpolate_coords else "x_every_eighth" xcoords = None - elif key['name'] == 'qual_flags': + elif key["name"] == "qual_flags": data = self.reader.get_qual_flags() - xdim = 'num_flags' - xcoords = ['Scan line number', - 'Fatal error flag', - 'Insufficient data for calibration', - 'Insufficient data for calibration', - 'Solar contamination of blackbody in channels 3', - 'Solar contamination of blackbody in channels 4', - 'Solar contamination of blackbody in channels 5'] - elif key['name'].upper() in self.chn_dict: + xdim = "num_flags" + xcoords = ["Scan line number", + "Fatal error flag", + "Insufficient data for calibration", + "Insufficient data for calibration", + "Solar contamination of blackbody in channels 3", + "Solar contamination of blackbody in channels 4", + "Solar contamination of blackbody in channels 5"] + elif key["name"].upper() in self.chn_dict: # Read and calibrate channel data data = self._get_channel(key) - xdim = 'x' + xdim = "x" xcoords = None else: - raise ValueError('Unknown dataset: {}'.format(key['name'])) + raise ValueError("Unknown dataset: {}".format(key["name"])) # Update start/end time using the actual scanline timestamps times = self.reader.get_times() @@ -183,7 +183,7 @@ def get_dataset(self, key, info): chunk_cols = data.shape[1] chunk_lines = int((CHUNK_SIZE ** 2) / chunk_cols) res = xr.DataArray(da.from_array(data, chunks=(chunk_lines, chunk_cols)), - dims=['y', xdim], attrs=info) + dims=["y", xdim], attrs=info) if xcoords: res[xdim] = xcoords @@ -191,8 +191,8 @@ def get_dataset(self, key, info): self._update_attrs(res) # Add scanline acquisition times - res['acq_time'] = ('y', times) - res['acq_time'].attrs['long_name'] = 'Mean scanline acquisition time' + res["acq_time"] = ("y", times) + res["acq_time"].attrs["long_name"] = "Mean scanline acquisition time" return res @@ -253,19 +253,19 @@ def _slice(self, data): def _get_channel(self, key): """Get channel and buffer results.""" - name = key['name'] - calibration = key['calibration'] - if calibration == 'counts': + name = key["name"] + calibration = key["calibration"] + if calibration == "counts": if self.counts is None: counts = self.reader.get_counts() self.counts = counts channels = self.counts - elif calibration in ['reflectance', 'brightness_temperature']: + elif calibration in ["reflectance", "brightness_temperature"]: if self.calib_channels is None: self.calib_channels = self.reader.get_calibrated_channels() channels = self.calib_channels else: - raise ValueError('Unknown calibration: {}'.format(calibration)) + raise ValueError("Unknown calibration: 
{}".format(calibration)) return channels[:, :, self.chn_dict[name.upper()]] def _get_qual_flags(self): @@ -278,12 +278,12 @@ def _get_angle(self, key): """Get angles and buffer results.""" if self.angles is None: sat_azi, sat_zenith, sun_azi, sun_zenith, rel_azi = self.reader.get_angles() - self.angles = {'sensor_zenith_angle': sat_zenith, - 'sensor_azimuth_angle': sat_azi, - 'solar_zenith_angle': sun_zenith, - 'solar_azimuth_angle': sun_azi, - 'sun_sensor_azimuth_difference_angle': rel_azi} - return self.angles[key['name']] + self.angles = {"sensor_zenith_angle": sat_zenith, + "sensor_azimuth_angle": sat_azi, + "solar_zenith_angle": sun_zenith, + "solar_azimuth_angle": sun_azi, + "sun_sensor_azimuth_difference_angle": rel_azi} + return self.angles[key["name"]] def _strip_invalid_lat(self): """Strip scanlines with invalid coordinates in the beginning/end of the orbit. @@ -302,11 +302,11 @@ def _update_attrs(self, res): """Update dataset attributes.""" for attr in self.reader.meta_data: res.attrs[attr] = self.reader.meta_data[attr] - res.attrs['platform_name'] = self.reader.spacecraft_name - res.attrs['orbit_number'] = self.filename_info.get('orbit_number', None) - res.attrs['sensor'] = self.sensor + res.attrs["platform_name"] = self.reader.spacecraft_name + res.attrs["orbit_number"] = self.filename_info.get("orbit_number", None) + res.attrs["sensor"] = self.sensor try: - res.attrs['orbital_parameters'] = {'tle': self.reader.get_tle_lines()} + res.attrs["orbital_parameters"] = {"tle": self.reader.get_tle_lines()} except (IndexError, RuntimeError): pass diff --git a/satpy/readers/caliop_l2_cloud.py b/satpy/readers/caliop_l2_cloud.py index 0fc89ae548..54dd100ffc 100644 --- a/satpy/readers/caliop_l2_cloud.py +++ b/satpy/readers/caliop_l2_cloud.py @@ -46,15 +46,15 @@ def __init__(self, filename, filename_info, filetype_info): self.get_filehandle() - self._start_time = filename_info['start_time'] + self._start_time = filename_info["start_time"] - logger.debug('Retrieving end time from metadata array') + logger.debug("Retrieving end time from metadata array") self.get_end_time() def get_end_time(self): """Get observation end time from file metadata.""" mda_dict = self.filehandle.attributes() - core_mda = mda_dict['coremetadata'] + core_mda = mda_dict["coremetadata"] end_time_str = self.parse_metadata_string(core_mda) self._end_time = datetime.strptime(end_time_str, "%Y-%m-%dT%H:%M:%SZ") @@ -76,19 +76,19 @@ def get_filehandle(self): def get_dataset(self, key, info): """Read data from file and return the corresponding projectables.""" - if key['name'] in ['longitude', 'latitude']: - logger.debug('Reading coordinate arrays.') + if key["name"] in ["longitude", "latitude"]: + logger.debug("Reading coordinate arrays.") if self.lons is None or self.lats is None: self.lons, self.lats = self.get_lonlats() - if key['name'] == 'latitude': + if key["name"] == "latitude": proj = Dataset(self.lats, id=key, **info) else: proj = Dataset(self.lons, id=key, **info) else: - data = self.get_sds_variable(key['name']) + data = self.get_sds_variable(key["name"]) proj = Dataset(data, id=key, **info) return proj @@ -101,8 +101,8 @@ def get_sds_variable(self, name): def get_lonlats(self): """Get longitude and latitude arrays from the file.""" - longitudes = self.get_sds_variable('Longitude') - latitudes = self.get_sds_variable('Latitude') + longitudes = self.get_sds_variable("Longitude") + latitudes = self.get_sds_variable("Latitude") return longitudes, latitudes @property diff --git a/satpy/readers/clavrx.py 
b/satpy/readers/clavrx.py index fd8cd552ae..4303456c04 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -36,37 +36,37 @@ CHUNK_SIZE = get_legacy_chunk_size() CF_UNITS = { - 'none': '1', + "none": "1", } SENSORS = { - 'MODIS': 'modis', - 'VIIRS': 'viirs', - 'AVHRR': 'avhrr', - 'AHI': 'ahi', - 'ABI': 'abi', - 'GOES-RU-IMAGER': 'abi', + "MODIS": "modis", + "VIIRS": "viirs", + "AVHRR": "avhrr", + "AHI": "ahi", + "ABI": "abi", + "GOES-RU-IMAGER": "abi", } PLATFORMS = { - 'SNPP': 'npp', - 'HIM8': 'himawari8', - 'HIM9': 'himawari9', - 'H08': 'himawari8', - 'H09': 'himawari9', - 'G16': 'GOES-16', - 'G17': 'GOES-17', - 'G18': 'GOES-18', + "SNPP": "npp", + "HIM8": "himawari8", + "HIM9": "himawari9", + "H08": "himawari8", + "H09": "himawari9", + "G16": "GOES-16", + "G17": "GOES-17", + "G18": "GOES-18", } ROWS_PER_SCAN = { - 'viirs': 16, - 'modis': 10, + "viirs": 16, + "modis": 10, } NADIR_RESOLUTION = { - 'viirs': 742, - 'modis': 1000, - 'avhrr': 1050, - 'ahi': 2000, - 'abi': 2004, + "viirs": 742, + "modis": 1000, + "avhrr": 1050, + "ahi": 2000, + "abi": 2004, } @@ -100,8 +100,8 @@ class _CLAVRxHelper: @staticmethod def _remove_attributes(attrs: dict) -> dict: """Remove attributes that described data before scaling.""" - old_attrs = ['unscaled_missing', 'SCALED_MIN', 'SCALED_MAX', - 'SCALED_MISSING'] + old_attrs = ["unscaled_missing", "SCALED_MIN", "SCALED_MAX", + "SCALED_MISSING"] for attr_key in old_attrs: attrs.pop(attr_key, None) @@ -118,15 +118,15 @@ def _scale_data(data_arr: Union[xr.DataArray, int], scale_factor: float, add_off @staticmethod def _get_data(data, dataset_id: dict) -> xr.DataArray: """Get a dataset.""" - if dataset_id.get('resolution'): - data.attrs['resolution'] = dataset_id['resolution'] + if dataset_id.get("resolution"): + data.attrs["resolution"] = dataset_id["resolution"] attrs = data.attrs.copy() - fill = attrs.get('_FillValue') - factor = attrs.pop('scale_factor', (np.ones(1, dtype=data.dtype))[0]) - offset = attrs.pop('add_offset', (np.zeros(1, dtype=data.dtype))[0]) - valid_range = attrs.get('valid_range', [None]) + fill = attrs.get("_FillValue") + factor = attrs.pop("scale_factor", (np.ones(1, dtype=data.dtype))[0]) + offset = attrs.pop("add_offset", (np.zeros(1, dtype=data.dtype))[0]) + valid_range = attrs.get("valid_range", [None]) if isinstance(valid_range, np.ndarray): attrs["valid_range"] = valid_range.tolist() @@ -135,7 +135,7 @@ def _get_data(data, dataset_id: dict) -> xr.DataArray: data = data.where(data != fill) data = _CLAVRxHelper._scale_data(data, factor, offset) # don't need _FillValue if it has been applied. 
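# The _CLAVRxHelper._get_data unpacking shown here applies a fixed order: mask
# the fill value first, apply scale/offset, then re-express valid_range in
# scaled units so it can still be used for masking. A condensed sketch of that
# order, assuming an xarray DataArray carrying CF-style attributes; names are
# illustrative.
import numpy as np
import xarray as xr

def unpack_cf_variable(data: xr.DataArray) -> xr.DataArray:
    attrs = data.attrs.copy()
    fill = attrs.pop("_FillValue", None)
    factor = attrs.pop("scale_factor", np.float32(1))
    offset = attrs.pop("add_offset", np.float32(0))
    valid_range = attrs.pop("valid_range", None)
    if fill is not None:
        data = data.where(data != fill)      # fill values become NaN before scaling
    data = data * factor + offset            # unscale the stored counts
    if valid_range is not None:
        lo = valid_range[0] * factor + offset
        hi = valid_range[1] * factor + offset
        data = data.where((data >= lo) & (data <= hi))
        attrs["valid_range"] = [lo, hi]
    data.attrs = attrs
    return data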
- attrs.pop('_FillValue', None) + attrs.pop("_FillValue", None) if all(valid_range): valid_min = _CLAVRxHelper._scale_data(valid_range[0], factor, offset) @@ -144,7 +144,7 @@ def _get_data(data, dataset_id: dict) -> xr.DataArray: data = data.where((data >= valid_min) & (data <= valid_max), fill) else: data = data.where((data >= valid_min) & (data <= valid_max)) - attrs['valid_range'] = [valid_min, valid_max] + attrs["valid_range"] = [valid_min, valid_max] data.attrs = _CLAVRxHelper._remove_attributes(attrs) @@ -173,29 +173,29 @@ def _read_pug_fixed_grid(projection_coordinates: netCDF4.Variable, distance_mult lon_0 = projection_coordinates.longitude_of_projection_origin sweep_axis = projection_coordinates.sweep_angle_axis[0] - proj_dict = {'a': float(a) * distance_multiplier, - 'b': float(b) * distance_multiplier, - 'lon_0': float(lon_0), - 'h': float(h) * distance_multiplier, - 'proj': 'geos', - 'units': 'm', - 'sweep': sweep_axis} + proj_dict = {"a": float(a) * distance_multiplier, + "b": float(b) * distance_multiplier, + "lon_0": float(lon_0), + "h": float(h) * distance_multiplier, + "proj": "geos", + "units": "m", + "sweep": sweep_axis} return proj_dict @staticmethod def _find_input_nc(filename: str, l1b_base: str) -> str: dirname = os.path.dirname(filename) - l1b_filename = os.path.join(dirname, l1b_base + '.nc') + l1b_filename = os.path.join(dirname, l1b_base + ".nc") if os.path.exists(l1b_filename): return str(l1b_filename) - glob_pat = os.path.join(dirname, l1b_base + '*R20*.nc') + glob_pat = os.path.join(dirname, l1b_base + "*R20*.nc") LOG.debug("searching for {0}".format(glob_pat)) found_l1b_filenames = list(glob(glob_pat)) if len(found_l1b_filenames) == 0: raise IOError("Could not find navigation donor for {0}" " in same directory as CLAVR-x data".format(l1b_base)) - LOG.debug('Candidate nav donors: {0}'.format(repr(found_l1b_filenames))) + LOG.debug("Candidate nav donors: {0}".format(repr(found_l1b_filenames))) return found_l1b_filenames[0] @staticmethod @@ -231,14 +231,14 @@ def _read_axi_fixed_grid(filename: str, l1b_attr) -> geometry.AreaDefinition: if not proj: raise ValueError(f"Unable to recover projection information for {filename}") - h = float(proj['h']) - x, y = l1b['x'], l1b['y'] + h = float(proj["h"]) + x, y = l1b["x"], l1b["y"] area_extent, ncols, nlines = _CLAVRxHelper._area_extent(x, y, h) area = geometry.AreaDefinition( - 'ahi_geos', + "ahi_geos", "AHI L2 file area", - 'ahi_geos', + "ahi_geos", proj, ncols, nlines, @@ -253,24 +253,24 @@ def get_metadata(sensor: str, platform: str, attrs: dict, ds_info: dict) -> dict attr_info.update(attrs) attr_info.update(ds_info) - flag_meanings = attr_info.get('flag_meanings', None) - if not attr_info.get('SCALED', 1) and not flag_meanings: - attr_info['flag_meanings'] = '' - attr_info.setdefault('flag_values', [None]) - elif not attr_info.get('SCALED', 1) and isinstance(flag_meanings, str): + flag_meanings = attr_info.get("flag_meanings", None) + if not attr_info.get("SCALED", 1) and not flag_meanings: + attr_info["flag_meanings"] = "" + attr_info.setdefault("flag_values", [None]) + elif not attr_info.get("SCALED", 1) and isinstance(flag_meanings, str): attr_info["flag_meanings"] = flag_meanings.split(" ") - u = attr_info.get('units') + u = attr_info.get("units") if u in CF_UNITS: # CF compliance - attr_info['units'] = CF_UNITS[u] + attr_info["units"] = CF_UNITS[u] if u.lower() == "none": - attr_info['units'] = "1" - attr_info['sensor'] = sensor - attr_info['platform_name'] = platform + attr_info["units"] = "1" + 
attr_info["sensor"] = sensor + attr_info["platform_name"] = platform rps = _get_rows_per_scan(sensor) if rps: - attr_info['rows_per_scan'] = rps - attr_info['reader'] = 'clavrx' + attr_info["rows_per_scan"] = rps + attr_info["reader"] = "clavrx" return attr_info @@ -287,16 +287,16 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Get the start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get the end time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) def get_dataset(self, dataset_id, ds_info): """Get a dataset.""" - var_name = ds_info.get('file_key', dataset_id['name']) + var_name = ds_info.get("file_key", dataset_id["name"]) data = self[var_name] data = _CLAVRxHelper._get_data(data, dataset_id) data.attrs = _CLAVRxHelper.get_metadata(self.sensor, self.platform, @@ -308,39 +308,39 @@ def get_nadir_resolution(self, sensor): for k, v in NADIR_RESOLUTION.items(): if sensor.startswith(k): return v - res = self.filename_info.get('resolution') - if res.endswith('m'): + res = self.filename_info.get("resolution") + if res.endswith("m"): return int(res[:-1]) elif res is not None: return int(res) def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" - self.sensor = _get_sensor(self.file_content.get('/attr/sensor')) - self.platform = _get_platform(self.file_content.get('/attr/platform')) + self.sensor = _get_sensor(self.file_content.get("/attr/sensor")) + self.platform = _get_platform(self.file_content.get("/attr/platform")) nadir_resolution = self.get_nadir_resolution(self.sensor) - coordinates = ('longitude', 'latitude') + coordinates = ("longitude", "latitude") handled_variables = set() # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): - this_res = ds_info.get('resolution') - this_coords = ds_info.get('coordinates') + this_res = ds_info.get("resolution") + this_coords = ds_info.get("coordinates") # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info - var_name = ds_info.get('file_key', ds_info['name']) - matches = self.file_type_matches(ds_info['file_type']) + var_name = ds_info.get("file_key", ds_info["name"]) + matches = self.file_type_matches(ds_info["file_type"]) # we can confidently say that we can provide this dataset and can # provide more info if matches and var_name in self and this_res != nadir_resolution: handled_variables.add(var_name) new_info = ds_info.copy() # don't mess up the above yielded - new_info['resolution'] = nadir_resolution + new_info["resolution"] = nadir_resolution if self._is_polar() and this_coords is None: - new_info['coordinates'] = coordinates + new_info["coordinates"] = coordinates yield True, new_info elif is_avail is None: # if we didn't know how to handle this dataset and no one else did @@ -351,31 +351,31 @@ def available_datasets(self, configured_datasets=None): for var_name, val in self.file_content.items(): if isinstance(val, SDS): ds_info = { - 'file_type': self.filetype_info['file_type'], - 'resolution': nadir_resolution, - 'name': var_name, + "file_type": self.filetype_info["file_type"], + "resolution": nadir_resolution, + "name": var_name, } if self._is_polar(): - ds_info['coordinates'] = ['longitude', 'latitude'] + ds_info["coordinates"] = ["longitude", "latitude"] yield True, ds_info def 
get_shape(self, dataset_id, ds_info): """Get the shape.""" - var_name = ds_info.get('file_key', dataset_id['name']) - return self[var_name + '/shape'] + var_name = ds_info.get("file_key", dataset_id["name"]) + return self[var_name + "/shape"] def _is_polar(self): - l1b_att, inst_att = (str(self.file_content.get('/attr/L1B', None)), - str(self.file_content.get('/attr/sensor', None))) + l1b_att, inst_att = (str(self.file_content.get("/attr/L1B", None)), + str(self.file_content.get("/attr/sensor", None))) - return (inst_att != 'AHI' and 'GOES' not in inst_att) or (l1b_att is None) + return (inst_att != "AHI" and "GOES" not in inst_att) or (l1b_att is None) def get_area_def(self, key): """Get the area definition of the data at hand.""" if self._is_polar(): # then it doesn't have a fixed grid return super(CLAVRXHDF4FileHandler, self).get_area_def(key) - l1b_att = str(self.file_content.get('/attr/L1B', None)) + l1b_att = str(self.file_content.get("/attr/L1B", None)) area_def = _CLAVRxHelper._read_axi_fixed_grid(self.filename, l1b_att) return area_def @@ -396,12 +396,12 @@ def __init__(self, filename, filename_info, filetype_info): decode_coords=True, chunks=CHUNK_SIZE) # y,x is used in satpy, bands rather than channel using in xrimage - self.nc = self.nc.rename_dims({'scan_lines_along_track_direction': "y", - 'pixel_elements_along_scan_direction': "x"}) + self.nc = self.nc.rename_dims({"scan_lines_along_track_direction": "y", + "pixel_elements_along_scan_direction": "x"}) self.platform = _get_platform( - self.filename_info.get('platform_shortname', None)) - self.sensor = _get_sensor(self.nc.attrs.get('sensor', None)) + self.filename_info.get("platform_shortname", None)) + self.sensor = _get_sensor(self.nc.attrs.get("sensor", None)) # coordinates need scaling and valid_range (mask_and_scale won't work on valid_range) self.nc.coords["latitude"] = _CLAVRxHelper._get_data(self.nc.coords["latitude"], {"name": "latitude"}) @@ -410,8 +410,8 @@ def __init__(self, filename, filename_info, filetype_info): def _get_ds_info_for_data_arr(self, var_name): ds_info = { - 'file_type': self.filetype_info['file_type'], - 'name': var_name, + "file_type": self.filetype_info["file_type"], + "name": var_name, } return ds_info @@ -451,28 +451,28 @@ def available_datasets(self, configured_datasets=None): # file handler so let's yield early yield is_avail, ds_info continue - if self.file_type_matches(ds_info['file_type']): - handled_vars.add(ds_info['name']) - yield self.file_type_matches(ds_info['file_type']), ds_info + if self.file_type_matches(ds_info["file_type"]): + handled_vars.add(ds_info["name"]) + yield self.file_type_matches(ds_info["file_type"]), ds_info yield from self._available_new_datasets(handled_vars) def _is_polar(self): - l1b_att, inst_att = (str(self.nc.attrs.get('L1B', None)), - str(self.nc.attrs.get('sensor', None))) + l1b_att, inst_att = (str(self.nc.attrs.get("L1B", None)), + str(self.nc.attrs.get("sensor", None))) - return (inst_att != 'AHI' and 'GOES' not in inst_att) or (l1b_att is None) + return (inst_att != "AHI" and "GOES" not in inst_att) or (l1b_att is None) def get_area_def(self, key): """Get the area definition of the data at hand.""" if self._is_polar(): # then it doesn't have a fixed grid return super(CLAVRXNetCDFFileHandler, self).get_area_def(key) - l1b_att = str(self.nc.attrs.get('L1B', None)) + l1b_att = str(self.nc.attrs.get("L1B", None)) return _CLAVRxHelper._read_axi_fixed_grid(self.filename, l1b_att) def get_dataset(self, dataset_id, ds_info): """Get a dataset.""" - var_name = 
ds_info.get('name', dataset_id['name']) + var_name = ds_info.get("name", dataset_id["name"]) data = self[var_name] data = _CLAVRxHelper._get_data(data, dataset_id) data.attrs = _CLAVRxHelper.get_metadata(self.sensor, self.platform, diff --git a/satpy/readers/cmsaf_claas2.py b/satpy/readers/cmsaf_claas2.py index f8f360623e..9bf3ca3deb 100644 --- a/satpy/readers/cmsaf_claas2.py +++ b/satpy/readers/cmsaf_claas2.py @@ -87,7 +87,7 @@ def _get_dsinfo(self, var): def get_dataset(self, dataset_id, info): """Get the dataset.""" - ds = self[dataset_id['name']] + ds = self[dataset_id["name"]] if "time" in ds.dims: return ds.squeeze(["time"]) diff --git a/satpy/readers/electrol_hrit.py b/satpy/readers/electrol_hrit.py index 53e69d42b4..c773850a73 100644 --- a/satpy/readers/electrol_hrit.py +++ b/satpy/readers/electrol_hrit.py @@ -40,34 +40,34 @@ time_cds_short, ) -logger = logging.getLogger('hrit_electrol') +logger = logging.getLogger("hrit_electrol") # goms implementation: -key_header = np.dtype([('key_number', 'u1'), - ('seed', '>f8')]) - -segment_identification = np.dtype([('GP_SC_ID', '>i2'), - ('spectral_channel_id', '>i1'), - ('segment_sequence_number', '>u2'), - ('planned_start_segment_number', '>u2'), - ('planned_end_segment_number', '>u2'), - ('data_field_representation', '>i1')]) - -image_segment_line_quality = np.dtype([('line_number_in_grid', '>i4'), - ('line_mean_acquisition', - [('days', '>u2'), - ('milliseconds', '>u4')]), - ('line_validity', 'u1'), - ('line_radiometric_quality', 'u1'), - ('line_geometric_quality', 'u1')]) +key_header = np.dtype([("key_number", "u1"), + ("seed", ">f8")]) + +segment_identification = np.dtype([("GP_SC_ID", ">i2"), + ("spectral_channel_id", ">i1"), + ("segment_sequence_number", ">u2"), + ("planned_start_segment_number", ">u2"), + ("planned_end_segment_number", ">u2"), + ("data_field_representation", ">i1")]) + +image_segment_line_quality = np.dtype([("line_number_in_grid", ">i4"), + ("line_mean_acquisition", + [("days", ">u2"), + ("milliseconds", ">u4")]), + ("line_validity", "u1"), + ("line_radiometric_quality", "u1"), + ("line_geometric_quality", "u1")]) goms_variable_length_headers = { - image_segment_line_quality: 'image_segment_line_quality'} + image_segment_line_quality: "image_segment_line_quality"} -goms_text_headers = {image_data_function: 'image_data_function', - annotation_header: 'annotation_header', - ancillary_text: 'ancillary_text'} +goms_text_headers = {image_data_function: "image_data_function", + annotation_header: "annotation_header", + ancillary_text: "ancillary_text"} goms_hdr_map = base_hdr_map.copy() goms_hdr_map.update({7: key_header, @@ -76,28 +76,28 @@ }) -orbit_coef = np.dtype([('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('X', '>f8', (8, )), - ('Y', '>f8', (8, )), - ('Z', '>f8', (8, )), - ('VX', '>f8', (8, )), - ('VY', '>f8', (8, )), - ('VZ', '>f8', (8, ))]) +orbit_coef = np.dtype([("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("X", ">f8", (8, )), + ("Y", ">f8", (8, )), + ("Z", ">f8", (8, )), + ("VX", ">f8", (8, )), + ("VY", ">f8", (8, )), + ("VZ", ">f8", (8, ))]) -attitude_coef = np.dtype([('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('XofSpinAxis', '>f8', (8, )), - ('YofSpinAxis', '>f8', (8, )), - ('ZofSpinAxis', '>f8', (8, ))]) +attitude_coef = np.dtype([("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("XofSpinAxis", ">f8", (8, )), + ("YofSpinAxis", ">f8", (8, )), + ("ZofSpinAxis", ">f8", (8, ))]) -cuc_time = np.dtype([('coarse', 'u1', (4, )), - ('fine', 
'u1', (3, ))]) +cuc_time = np.dtype([("coarse", "u1", (4, )), + ("fine", "u1", (3, ))]) -time_cds_expanded = np.dtype([('days', '>u2'), - ('milliseconds', '>u4'), - ('microseconds', '>u2'), - ('nanoseconds', '>u2')]) +time_cds_expanded = np.dtype([("days", ">u2"), + ("milliseconds", ">u4"), + ("microseconds", ">u2"), + ("nanoseconds", ">u2")]) satellite_status = np.dtype([("TagType", " 16777216: lut = lut.astype(np.float64) else: @@ -337,26 +337,26 @@ def _calibrate(self, data): def get_area_def(self, dsid): """Get the area definition of the band.""" pdict = {} - pdict['cfac'] = np.int32(self.mda['cfac']) - pdict['lfac'] = np.int32(self.mda['lfac']) - pdict['coff'] = np.float32(self.mda['coff']) - pdict['loff'] = np.float32(self.mda['loff']) + pdict["cfac"] = np.int32(self.mda["cfac"]) + pdict["lfac"] = np.int32(self.mda["lfac"]) + pdict["coff"] = np.float32(self.mda["coff"]) + pdict["loff"] = np.float32(self.mda["loff"]) - pdict['a'] = 6378169.00 - pdict['b'] = 6356583.80 - pdict['h'] = 35785831.00 - pdict['scandir'] = 'N2S' + pdict["a"] = 6378169.00 + pdict["b"] = 6356583.80 + pdict["h"] = 35785831.00 + pdict["scandir"] = "N2S" - pdict['ssp_lon'] = self.mda['projection_parameters']['SSP_longitude'] + pdict["ssp_lon"] = self.mda["projection_parameters"]["SSP_longitude"] - pdict['nlines'] = int(self.mda['number_of_lines']) - pdict['ncols'] = int(self.mda['number_of_columns']) + pdict["nlines"] = int(self.mda["number_of_lines"]) + pdict["ncols"] = int(self.mda["number_of_columns"]) - pdict['loff'] = pdict['nlines'] - pdict['loff'] + pdict["loff"] = pdict["nlines"] - pdict["loff"] - pdict['a_name'] = 'geosgoms' - pdict['a_desc'] = 'Electro-L/GOMS channel area' - pdict['p_id'] = 'goms' + pdict["a_name"] = "geosgoms" + pdict["a_desc"] = "Electro-L/GOMS channel area" + pdict["p_id"] = "goms" area_extent = get_area_extent(pdict) area = get_area_definition(pdict, area_extent) diff --git a/satpy/readers/epic_l1b_h5.py b/satpy/readers/epic_l1b_h5.py index 55c020ee21..4f6d66ebae 100644 --- a/satpy/readers/epic_l1b_h5.py +++ b/satpy/readers/epic_l1b_h5.py @@ -49,16 +49,16 @@ # Level 1b is given as counts. These factors convert to reflectance. 
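# The Electro-L area definition above is driven by the CGMS HRIT scaling
# parameters: a pixel index maps to a scan angle via
# angle_deg = (index - offset) * 2**16 / factor (cfac/coff for columns,
# lfac/loff for lines), and the geostationary projection coordinate in metres
# is that angle in radians times the satellite height h. A small sketch of that
# relation, with illustrative values only; scan-direction handling is omitted.
import numpy as np

def pixel_to_projection_metres(index, offset, factor, h):
    """Projection coordinate (metres) of a pixel index under CGMS HRIT scaling."""
    angle_deg = (index - offset) * 2.0 ** 16 / factor
    return np.deg2rad(angle_deg) * h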
# Retrieved from: https://asdc.larc.nasa.gov/documents/dscovr/DSCOVR_EPIC_Calibration_Factors_V03.pdf -CALIB_COEFS = {'B317': 1.216e-4, - 'B325': 1.111e-4, - 'B340': 1.975e-5, - 'B388': 2.685e-5, - 'B443': 8.34e-6, - 'B551': 6.66e-6, - 'B680': 9.3e-6, - 'B688': 2.02e-5, - 'B764': 2.36e-5, - 'B780': 1.435e-5} +CALIB_COEFS = {"B317": 1.216e-4, + "B325": 1.111e-4, + "B340": 1.975e-5, + "B388": 2.685e-5, + "B443": 8.34e-6, + "B551": 6.66e-6, + "B680": 9.3e-6, + "B688": 2.02e-5, + "B764": 2.36e-5, + "B780": 1.435e-5} class DscovrEpicL1BH5FileHandler(HDF5FileHandler): @@ -68,19 +68,19 @@ def __init__(self, filename, filename_info, filetype_info): """Init filehandler.""" super(DscovrEpicL1BH5FileHandler, self).__init__(filename, filename_info, filetype_info) - self.sensor = 'epic' - self.platform_name = 'dscovr' + self.sensor = "epic" + self.platform_name = "dscovr" @property def start_time(self): """Get the start time.""" - start_time = datetime.strptime(self.file_content['/attr/begin_time'], '%Y-%m-%d %H:%M:%S') + start_time = datetime.strptime(self.file_content["/attr/begin_time"], "%Y-%m-%d %H:%M:%S") return start_time @property def end_time(self): """Get the end time.""" - end_time = datetime.strptime(self.file_content['/attr/end_time'], '%Y-%m-%d %H:%M:%S') + end_time = datetime.strptime(self.file_content["/attr/end_time"], "%Y-%m-%d %H:%M:%S") return end_time @staticmethod @@ -97,19 +97,19 @@ def calibrate(data, ds_name, calibration=None): def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" - ds_name = dataset_id['name'] + ds_name = dataset_id["name"] - logger.debug('Reading in get_dataset %s.', ds_name) - file_key = ds_info.get('file_key', ds_name) + logger.debug("Reading in get_dataset %s.", ds_name) + file_key = ds_info.get("file_key", ds_name) band = self._mask_infinite(self.get(file_key)) - band = self.calibrate(band, ds_name, calibration=dataset_id.get('calibration')) + band = self.calibrate(band, ds_name, calibration=dataset_id.get("calibration")) band = self._update_metadata(band) return band def _update_metadata(self, band): - band = band.rename({band.dims[0]: 'x', band.dims[1]: 'y'}) - band.attrs.update({'platform_name': self.platform_name, 'sensor': self.sensor}) + band = band.rename({band.dims[0]: "x", band.dims[1]: "y"}) + band.attrs.update({"platform_name": self.platform_name, "sensor": self.sensor}) return band diff --git a/satpy/readers/eps_l1b.py b/satpy/readers/eps_l1b.py index 1cc098a612..23e4ca712d 100644 --- a/satpy/readers/eps_l1b.py +++ b/satpy/readers/eps_l1b.py @@ -90,11 +90,11 @@ def read_records(filename): the_type = form.dtype((rec_class, sub_class)) # the_descr = grh_dtype.descr + the_type.descr except KeyError: - the_type = np.dtype([('unknown', 'V%d' % bare_size)]) + the_type = np.dtype([("unknown", "V%d" % bare_size)]) the_descr = grh_dtype.descr + the_type.descr the_type = np.dtype(the_descr) if the_type.itemsize < expected_size: - padding = [('unknown%d' % cnt, 'V%d' % (expected_size - the_type.itemsize))] + padding = [("unknown%d" % cnt, "V%d" % (expected_size - the_type.itemsize))] cnt += 1 the_descr += padding new_dtype = np.dtype(the_descr) @@ -112,14 +112,14 @@ def read_records(filename): offset = 0 for dtype, count, rec_class in zip(dtypes, counts, classes): fdes.seek(offset) - if rec_class == ('mdr', 2): - record = da.from_array(np.memmap(fdes, mode='r', dtype=dtype, shape=count, offset=offset), + if rec_class == ("mdr", 2): + record = da.from_array(np.memmap(fdes, mode="r", dtype=dtype, shape=count, offset=offset), 
chunks=(max_lines,)) else: record = np.fromfile(fdes, dtype=dtype, count=count) offset += dtype.itemsize * count if rec_class in sections: - logger.debug('Multiple records for ', str(rec_class)) + logger.debug("Multiple records for ", str(rec_class)) sections[rec_class] = np.hstack((sections[rec_class], record)) else: sections[rec_class] = record @@ -130,7 +130,7 @@ def read_records(filename): def create_xarray(arr): """Create xarray with correct dimensions.""" res = arr - res = xr.DataArray(res, dims=['y', 'x']) + res = xr.DataArray(res, dims=["y", "x"]) return res @@ -152,8 +152,8 @@ def __init__(self, filename, filename_info, filetype_info): filename, filename_info, filetype_info) self.area = None - self._start_time = filename_info['start_time'] - self._end_time = filename_info['end_time'] + self._start_time = filename_info["start_time"] + self._end_time = filename_info["end_time"] self.form = None self.scanlines = None self.pixels = None @@ -168,10 +168,10 @@ def __init__(self, filename, filename_info, filetype_info): def _read_all(self): logger.debug("Reading %s", self.filename) self.sections, self.form = read_records(self.filename) - self.scanlines = self['TOTAL_MDR'] - if self.scanlines != len(self.sections[('mdr', 2)]): + self.scanlines = self["TOTAL_MDR"] + if self.scanlines != len(self.sections[("mdr", 2)]): logger.warning("Number of declared records doesn't match number of scanlines in the file.") - self.scanlines = len(self.sections[('mdr', 2)]) + self.scanlines = len(self.sections[("mdr", 2)]) self.pixels = self["EARTH_VIEWS_PER_SCANLINE"] def __getitem__(self, key): @@ -287,24 +287,24 @@ def get_dataset(self, key, info): if self.sections is None: self._read_all() - if key['name'] in ['longitude', 'latitude']: + if key["name"] in ["longitude", "latitude"]: lons, lats = self.get_full_lonlats() - if key['name'] == 'longitude': + if key["name"] == "longitude": dataset = create_xarray(lons) else: dataset = create_xarray(lats) - elif key['name'] in ['solar_zenith_angle', 'solar_azimuth_angle', - 'satellite_zenith_angle', 'satellite_azimuth_angle']: + elif key["name"] in ["solar_zenith_angle", "solar_azimuth_angle", + "satellite_zenith_angle", "satellite_azimuth_angle"]: dataset = self._get_angle_dataarray(key) - elif key['name'] in ["1", "2", "3a", "3A", "3b", "3B", "4", "5"]: + elif key["name"] in ["1", "2", "3a", "3A", "3b", "3B", "4", "5"]: dataset = self._get_calibrated_dataarray(key) else: - logger.info("Can't load channel in eps_l1b: " + str(key['name'])) + logger.info("Can't load channel in eps_l1b: " + str(key["name"])) return - dataset.attrs['platform_name'] = self.platform_name - dataset.attrs['sensor'] = self.sensor_name + dataset.attrs["platform_name"] = self.platform_name + dataset.attrs["sensor"] = self.sensor_name if "calibration" in key: dataset.attrs["units"] = self.units[key["calibration"]] dataset.attrs.update(info) @@ -314,13 +314,13 @@ def get_dataset(self, key, info): def _get_angle_dataarray(self, key): """Get an angle dataarray.""" sun_azi, sun_zen, sat_azi, sat_zen = self.get_full_angles() - if key['name'] == 'solar_zenith_angle': + if key["name"] == "solar_zenith_angle": dataset = create_xarray(sun_zen) - elif key['name'] == 'solar_azimuth_angle': + elif key["name"] == "solar_azimuth_angle": dataset = create_xarray(sun_azi) - if key['name'] == 'satellite_zenith_angle': + if key["name"] == "satellite_zenith_angle": dataset = create_xarray(sat_zen) - elif key['name'] == 'satellite_azimuth_angle': + elif key["name"] == "satellite_azimuth_angle": dataset = 
create_xarray(sat_azi) return dataset @@ -336,26 +336,26 @@ def three_b_mask(self): def _get_calibrated_dataarray(self, key): """Get a calibrated dataarray.""" - if key['calibration'] not in ['reflectance', 'brightness_temperature', 'radiance']: - raise ValueError('calibration type ' + str(key['calibration']) + - ' is not supported!') + if key["calibration"] not in ["reflectance", "brightness_temperature", "radiance"]: + raise ValueError("calibration type " + str(key["calibration"]) + + " is not supported!") mask = None - channel_name = key['name'].upper() + channel_name = key["name"].upper() radiance_indices = {"1": 0, "2": 1, "3A": 2, "3B": 2, "4": 3, "5": 4} array = self["SCENE_RADIANCES"][:, radiance_indices[channel_name], :] if channel_name in ["1", "2", "3A"]: - if key['calibration'] == 'reflectance': + if key["calibration"] == "reflectance": array = radiance_to_refl(array, self[f"CH{channel_name}_SOLAR_FILTERED_IRRADIANCE"]) if channel_name == "3A": mask = self.three_a_mask[:, np.newaxis] if channel_name in ["3B", "4", "5"]: - if key['calibration'] == 'brightness_temperature': + if key["calibration"] == "brightness_temperature": array = radiance_to_bt(array, self[f"CH{channel_name}_CENTRAL_WAVENUMBER"], self[f"CH{channel_name}_CONSTANT1"], @@ -373,7 +373,7 @@ def get_lonlats(self): if self.area is None: lons, lats = self.get_full_lonlats() self.area = SwathDefinition(lons, lats) - self.area.name = '_'.join([self.platform_name, str(self.start_time), + self.area.name = "_".join([self.platform_name, str(self.start_time), str(self.end_time)]) return self.area diff --git a/satpy/readers/eum_base.py b/satpy/readers/eum_base.py index 76abcf035c..cc82ee008d 100644 --- a/satpy/readers/eum_base.py +++ b/satpy/readers/eum_base.py @@ -22,10 +22,10 @@ import numpy as np # 6 bytes, 8 bytes, 10 bytes -time_cds_short = [('Days', '>u2'), ('Milliseconds', '>u4')] -time_cds = time_cds_short + [('Microseconds', '>u2')] -time_cds_expanded = time_cds + [('Nanoseconds', '>u2')] -issue_revision = [('Issue', np.uint16), ('Revision', np.uint16)] +time_cds_short = [("Days", ">u2"), ("Milliseconds", ">u4")] +time_cds = time_cds_short + [("Microseconds", ">u2")] +time_cds_expanded = time_cds + [("Nanoseconds", ">u2")] +issue_revision = [("Issue", np.uint16), ("Revision", np.uint16)] def timecds2datetime(tcds): @@ -33,14 +33,14 @@ def timecds2datetime(tcds): Works both with a dictionary and a numpy record_array. """ - days = int(tcds['Days']) - milliseconds = int(tcds['Milliseconds']) + days = int(tcds["Days"]) + milliseconds = int(tcds["Milliseconds"]) try: - microseconds = int(tcds['Microseconds']) + microseconds = int(tcds["Microseconds"]) except (KeyError, ValueError): microseconds = 0 try: - microseconds += int(tcds['Nanoseconds']) / 1000. + microseconds += int(tcds["Nanoseconds"]) / 1000. 
except (KeyError, ValueError): pass @@ -71,14 +71,14 @@ def recarray2dict(arr): else: if data.size == 1: data = data[0] - if ntype[:2] == '|S': + if ntype[:2] == "|S": # Python2 and Python3 handle strings differently try: data = data.decode() except ValueError: data = None else: - data = data.split(':')[0].strip() + data = data.split(":")[0].strip() res[key] = data else: res[key] = data.squeeze() @@ -88,15 +88,15 @@ def recarray2dict(arr): def get_service_mode(instrument_name, ssp_lon): """Get information about service mode for a given instrument and subsatellite longitude.""" - service_modes = {'seviri': {'0.0': {'service_name': 'fes', 'service_desc': 'Full Earth Scanning service'}, - '9.5': {'service_name': 'rss', 'service_desc': 'Rapid Scanning Service'}, - '41.5': {'service_name': 'iodc', 'service_desc': 'Indian Ocean Data Coverage service'}, - '45.5': {'service_name': 'iodc', 'service_desc': 'Indian Ocean Data Coverage service'} + service_modes = {"seviri": {"0.0": {"service_name": "fes", "service_desc": "Full Earth Scanning service"}, + "9.5": {"service_name": "rss", "service_desc": "Rapid Scanning Service"}, + "41.5": {"service_name": "iodc", "service_desc": "Indian Ocean Data Coverage service"}, + "45.5": {"service_name": "iodc", "service_desc": "Indian Ocean Data Coverage service"} }, - 'fci': {'0.0': {'service_name': 'fdss', 'service_desc': 'Full Disk Scanning Service'}, - '9.5': {'service_name': 'rss', 'service_desc': 'Rapid Scanning Service'}, + "fci": {"0.0": {"service_name": "fdss", "service_desc": "Full Disk Scanning Service"}, + "9.5": {"service_name": "rss", "service_desc": "Rapid Scanning Service"}, }, } - unknown_modes = {'service_name': 'unknown', 'service_desc': 'unknown'} + unknown_modes = {"service_name": "unknown", "service_desc": "unknown"} - return service_modes.get(instrument_name, unknown_modes).get('{:.1f}'.format(ssp_lon), unknown_modes) + return service_modes.get(instrument_name, unknown_modes).get("{:.1f}".format(ssp_lon), unknown_modes) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index 8e28219035..a405c86201 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -131,26 +131,26 @@ # dict containing all available auxiliary data parameters to be read using the index map. 
Keys are the # parameter name and values are the paths to the variable inside the netcdf AUX_DATA = { - 'subsatellite_latitude': 'state/platform/subsatellite_latitude', - 'subsatellite_longitude': 'state/platform/subsatellite_longitude', - 'platform_altitude': 'state/platform/platform_altitude', - 'subsolar_latitude': 'state/celestial/subsolar_latitude', - 'subsolar_longitude': 'state/celestial/subsolar_longitude', - 'earth_sun_distance': 'state/celestial/earth_sun_distance', - 'sun_satellite_distance': 'state/celestial/sun_satellite_distance', - 'time': 'time', - 'swath_number': 'data/swath_number', - 'swath_direction': 'data/swath_direction', + "subsatellite_latitude": "state/platform/subsatellite_latitude", + "subsatellite_longitude": "state/platform/subsatellite_longitude", + "platform_altitude": "state/platform/platform_altitude", + "subsolar_latitude": "state/celestial/subsolar_latitude", + "subsolar_longitude": "state/celestial/subsolar_longitude", + "earth_sun_distance": "state/celestial/earth_sun_distance", + "sun_satellite_distance": "state/celestial/sun_satellite_distance", + "time": "time", + "swath_number": "data/swath_number", + "swath_direction": "data/swath_direction", } -HIGH_RES_GRID_INFO = {'fci_l1c_hrfi': {'grid_type': '500m', - 'grid_width': 22272}, - 'fci_l1c_fdhsi': {'grid_type': '1km', - 'grid_width': 11136}} -LOW_RES_GRID_INFO = {'fci_l1c_hrfi': {'grid_type': '1km', - 'grid_width': 11136}, - 'fci_l1c_fdhsi': {'grid_type': '2km', - 'grid_width': 5568}} +HIGH_RES_GRID_INFO = {"fci_l1c_hrfi": {"grid_type": "500m", + "grid_width": 22272}, + "fci_l1c_fdhsi": {"grid_type": "1km", + "grid_width": 11136}} +LOW_RES_GRID_INFO = {"fci_l1c_hrfi": {"grid_type": "1km", + "grid_width": 11136}, + "fci_l1c_fdhsi": {"grid_type": "2km", + "grid_width": 5568}} def _get_aux_data_name_from_dsname(dsname): @@ -206,9 +206,9 @@ def __init__(self, filename, filename_info, filetype_info): filetype_info, cache_var_size=0, cache_handle=True) - logger.debug('Reading: {}'.format(self.filename)) - logger.debug('Start: {}'.format(self.start_time)) - logger.debug('End: {}'.format(self.end_time)) + logger.debug("Reading: {}".format(self.filename)) + logger.debug("Start: {}".format(self.start_time)) + logger.debug("End: {}".format(self.end_time)) self._cache = {} @@ -218,7 +218,7 @@ def rc_period_min(self): As RSS is not yet implemeted and error will be raised if RSS are to be read """ - if not self.filename_info['coverage'] == 'FD': + if not self.filename_info["coverage"] == "FD": raise NotImplementedError(f"coverage for {self.filename_info['coverage']} not supported by this reader") return 2.5 return 10 @@ -227,7 +227,7 @@ def rc_period_min(self): def nominal_start_time(self): """Get nominal start time.""" rc_date = self.observation_start_time.replace(hour=0, minute=0, second=0, microsecond=0) - return rc_date + timedelta(minutes=(self.filename_info['repeat_cycle_in_day']-1)*self.rc_period_min) + return rc_date + timedelta(minutes=(self.filename_info["repeat_cycle_in_day"]-1)*self.rc_period_min) @property def nominal_end_time(self): @@ -237,12 +237,12 @@ def nominal_end_time(self): @property def observation_start_time(self): """Get observation start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def observation_end_time(self): """Get observation end time.""" - return self.filename_info['end_time'] + return self.filename_info["end_time"] @property def start_time(self): @@ -256,9 +256,9 @@ def end_time(self): def 
get_channel_measured_group_path(self, channel): """Get the channel's measured group path.""" - if self.filetype_info['file_type'] == 'fci_l1c_hrfi': - channel += '_hr' - measured_group_path = 'data/{}/measured'.format(channel) + if self.filetype_info["file_type"] == "fci_l1c_hrfi": + channel += "_hr" + measured_group_path = "data/{}/measured".format(channel) return measured_group_path @@ -273,25 +273,25 @@ def get_segment_position_info(self): Note: in the FCI terminology, a segment is actually called "chunk". To avoid confusion with the dask concept of chunk, and to be consistent with SEVIRI, we opt to use the word segment. """ - vis_06_measured_path = self.get_channel_measured_group_path('vis_06') - ir_105_measured_path = self.get_channel_measured_group_path('ir_105') + vis_06_measured_path = self.get_channel_measured_group_path("vis_06") + ir_105_measured_path = self.get_channel_measured_group_path("ir_105") - file_type = self.filetype_info['file_type'] + file_type = self.filetype_info["file_type"] segment_position_info = { - HIGH_RES_GRID_INFO[file_type]['grid_type']: { - 'start_position_row': self.get_and_cache_npxr(vis_06_measured_path + '/start_position_row').item(), - 'end_position_row': self.get_and_cache_npxr(vis_06_measured_path + '/end_position_row').item(), - 'segment_height': self.get_and_cache_npxr(vis_06_measured_path + '/end_position_row').item() - - self.get_and_cache_npxr(vis_06_measured_path + '/start_position_row').item() + 1, - 'grid_width': HIGH_RES_GRID_INFO[file_type]['grid_width'] + HIGH_RES_GRID_INFO[file_type]["grid_type"]: { + "start_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item(), + "end_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item(), + "segment_height": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item() - + self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item() + 1, + "grid_width": HIGH_RES_GRID_INFO[file_type]["grid_width"] }, - LOW_RES_GRID_INFO[file_type]['grid_type']: { - 'start_position_row': self.get_and_cache_npxr(ir_105_measured_path + '/start_position_row').item(), - 'end_position_row': self.get_and_cache_npxr(ir_105_measured_path + '/end_position_row').item(), - 'segment_height': self.get_and_cache_npxr(ir_105_measured_path + '/end_position_row').item() - - self.get_and_cache_npxr(ir_105_measured_path + '/start_position_row').item() + 1, - 'grid_width': LOW_RES_GRID_INFO[file_type]['grid_width'] + LOW_RES_GRID_INFO[file_type]["grid_type"]: { + "start_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item(), + "end_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item(), + "segment_height": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item() - + self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item() + 1, + "grid_width": LOW_RES_GRID_INFO[file_type]["grid_width"] } } @@ -299,14 +299,14 @@ def get_segment_position_info(self): def get_dataset(self, key, info=None): """Load a dataset.""" - logger.debug('Reading {} from {}'.format(key['name'], self.filename)) - if "pixel_quality" in key['name']: - return self._get_dataset_quality(key['name']) - elif "index_map" in key['name']: - return self._get_dataset_index_map(key['name']) - elif _get_aux_data_name_from_dsname(key['name']) is not None: - return self._get_dataset_aux_data(key['name']) - elif any(lb in key['name'] for lb in {"vis_", "ir_", "nir_", "wv_"}): + 
logger.debug("Reading {} from {}".format(key["name"], self.filename)) + if "pixel_quality" in key["name"]: + return self._get_dataset_quality(key["name"]) + elif "index_map" in key["name"]: + return self._get_dataset_index_map(key["name"]) + elif _get_aux_data_name_from_dsname(key["name"]) is not None: + return self._get_dataset_aux_data(key["name"]) + elif any(lb in key["name"] for lb in {"vis_", "ir_", "nir_", "wv_"}): return self._get_dataset_measurand(key, info=info) else: raise ValueError("Unknown dataset key, not a channel, quality or auxiliary data: " @@ -321,7 +321,7 @@ def _get_dataset_measurand(self, key, info=None): """ # Get the dataset # Get metadata for given dataset - measured = self.get_channel_measured_group_path(key['name']) + measured = self.get_channel_measured_group_path(key["name"]) data = self[measured + "/effective_radiance"] attrs = dict(data.attrs).copy() @@ -332,7 +332,7 @@ def _get_dataset_measurand(self, key, info=None): "FillValue", default_fillvals.get(data.dtype.str[1:], np.nan)) vr = attrs.get("valid_range", [-np.inf, np.inf]) - if key['calibration'] == "counts": + if key["calibration"] == "counts": attrs["_FillValue"] = fv nfv = fv else: @@ -357,7 +357,7 @@ def _get_dataset_measurand(self, key, info=None): # https://github.com/pytroll/satpy/issues/1171. if "pixel_quality" in attrs["ancillary_variables"]: attrs["ancillary_variables"] = attrs["ancillary_variables"].replace( - "pixel_quality", key['name'] + "_pixel_quality") + "pixel_quality", key["name"] + "_pixel_quality") else: raise ValueError( "Unexpected value for attribute ancillary_variables, " @@ -373,20 +373,20 @@ def _get_dataset_measurand(self, key, info=None): self["attr/platform"], self["attr/platform"]) # remove unpacking parameters for calibrated data - if key['calibration'] in ['brightness_temperature', 'reflectance']: + if key["calibration"] in ["brightness_temperature", "reflectance"]: res.attrs.pop("add_offset") res.attrs.pop("warm_add_offset") res.attrs.pop("scale_factor") res.attrs.pop("warm_scale_factor") # remove attributes from original file which don't apply anymore - res.attrs.pop('long_name') + res.attrs.pop("long_name") # Add time_parameter attributes - res.attrs['time_parameters'] = { - 'nominal_start_time': self.nominal_start_time, - 'nominal_end_time': self.nominal_end_time, - 'observation_start_time': self.observation_start_time, - 'observation_end_time': self.observation_end_time, + res.attrs["time_parameters"] = { + "nominal_start_time": self.nominal_start_time, + "nominal_end_time": self.nominal_end_time, + "observation_start_time": self.observation_start_time, + "observation_end_time": self.observation_end_time, } res.attrs.update(self.orbital_param) @@ -395,9 +395,9 @@ def _get_dataset_measurand(self, key, info=None): @cached_property def orbital_param(self): """Compute the orbital parameters for the current segment.""" - actual_subsat_lon = float(np.nanmean(self._get_aux_data_lut_vector('subsatellite_longitude'))) - actual_subsat_lat = float(np.nanmean(self._get_aux_data_lut_vector('subsatellite_latitude'))) - actual_sat_alt = float(np.nanmean(self._get_aux_data_lut_vector('platform_altitude'))) + actual_subsat_lon = float(np.nanmean(self._get_aux_data_lut_vector("subsatellite_longitude"))) + actual_subsat_lat = float(np.nanmean(self._get_aux_data_lut_vector("subsatellite_latitude"))) + actual_sat_alt = float(np.nanmean(self._get_aux_data_lut_vector("platform_altitude"))) nominal_and_proj_subsat_lon = float( 
self.get_and_cache_npxr("data/mtg_geos_projection/attr/longitude_of_projection_origin")) nominal_and_proj_subsat_lat = 0 @@ -405,16 +405,16 @@ def orbital_param(self): self.get_and_cache_npxr("data/mtg_geos_projection/attr/perspective_point_height")) orb_param_dict = { - 'orbital_parameters': { - 'satellite_actual_longitude': actual_subsat_lon, - 'satellite_actual_latitude': actual_subsat_lat, - 'satellite_actual_altitude': actual_sat_alt, - 'satellite_nominal_longitude': nominal_and_proj_subsat_lon, - 'satellite_nominal_latitude': nominal_and_proj_subsat_lat, - 'satellite_nominal_altitude': nominal_and_proj_sat_alt, - 'projection_longitude': nominal_and_proj_subsat_lon, - 'projection_latitude': nominal_and_proj_subsat_lat, - 'projection_altitude': nominal_and_proj_sat_alt, + "orbital_parameters": { + "satellite_actual_longitude": actual_subsat_lon, + "satellite_actual_latitude": actual_subsat_lat, + "satellite_actual_altitude": actual_sat_alt, + "satellite_nominal_longitude": nominal_and_proj_subsat_lon, + "satellite_nominal_latitude": nominal_and_proj_subsat_lat, + "satellite_nominal_altitude": nominal_and_proj_sat_alt, + "projection_longitude": nominal_and_proj_subsat_lon, + "projection_latitude": nominal_and_proj_subsat_lat, + "projection_altitude": nominal_and_proj_sat_alt, }} return orb_param_dict @@ -432,7 +432,7 @@ def _get_dataset_index_map(self, dsname): dv_path = grp_path + "/index_map" data = self[dv_path] - data = data.where(data != data.attrs.get('_FillValue', 65535)) + data = data.where(data != data.attrs.get("_FillValue", 65535)) return data def _get_aux_data_lut_vector(self, aux_data_name): @@ -446,14 +446,14 @@ def _get_aux_data_lut_vector(self, aux_data_name): @staticmethod def _getitem(block, lut): - return lut[block.astype('uint16')] + return lut[block.astype("uint16")] def _get_dataset_aux_data(self, dsname): """Get the auxiliary data arrays using the index map.""" # get index map index_map = self._get_dataset_index_map(_get_channel_name_from_dsname(dsname)) # subtract minimum of index variable (index_offset) - index_map -= np.min(self.get_and_cache_npxr('index')) + index_map -= np.min(self.get_and_cache_npxr("index")) # get lut values from 1-d vector variable lut = self._get_aux_data_lut_vector(_get_aux_data_name_from_dsname(dsname)) @@ -472,14 +472,14 @@ def calc_area_extent(self, key): # if a user requests a pixel quality or index map before the channel data, the # yaml-reader will ask the area extent of the pixel quality/index map field, # which will ultimately end up here - channel_name = _get_channel_name_from_dsname(key['name']) + channel_name = _get_channel_name_from_dsname(key["name"]) # Get metadata for given dataset measured = self.get_channel_measured_group_path(channel_name) # Get start/end line and column of loaded swath. 
nlines, ncols = self[measured + "/effective_radiance/shape"] - logger.debug('Channel {} resolution: {}'.format(channel_name, ncols)) - logger.debug('Row/Cols: {} / {}'.format(nlines, ncols)) + logger.debug("Channel {} resolution: {}".format(channel_name, ncols)) + logger.debug("Row/Cols: {} / {}".format(nlines, ncols)) # Calculate full globe line extent h = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/perspective_point_height")) @@ -489,16 +489,16 @@ def calc_area_extent(self, key): coord_radian = self.get_and_cache_npxr(measured + "/{:s}".format(coord)) # TODO remove this check when old versions of IDPF test data ( 0: - coord_radian.attrs['scale_factor'] *= -1 + if coord == "x" and coord_radian.attrs["scale_factor"] > 0: + coord_radian.attrs["scale_factor"] *= -1 # TODO remove this check when old versions of IDPF test data ( 1.1: - logger.info('The variable state/celestial/earth_sun_distance contains unexpected values' - '(mean value is {} AU). Defaulting to 1 AU for reflectance calculation.' - ''.format(sun_earth_distance)) + logger.info("The variable state/celestial/earth_sun_distance contains unexpected values" + "(mean value is {} AU). Defaulting to 1 AU for reflectance calculation." + "".format(sun_earth_distance)) sun_earth_distance = 1 res = 100 * radiance * np.pi * sun_earth_distance ** 2 / cesi diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index cbb47b2c8c..c387326f89 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -41,18 +41,18 @@ class FciL2CommonFunctions(object): @property def spacecraft_name(self): """Return spacecraft name.""" - return self.nc.attrs['platform'] + return self.nc.attrs["platform"] @property def sensor_name(self): """Return instrument name.""" - return self.nc.attrs['data_source'] + return self.nc.attrs["data_source"] @property def ssp_lon(self): """Return longitude at subsatellite point.""" try: - return float(self.nc['mtg_geos_projection'].attrs['longitude_of_projection_origin']) + return float(self.nc["mtg_geos_projection"].attrs["longitude_of_projection_origin"]) except (KeyError, AttributeError): logger.warning(f"ssp_lon could not be obtained from file content, using default value " f"of {SSP_DEFAULT} degrees east instead") @@ -71,11 +71,11 @@ def _get_global_attributes(self): """ attributes = { - 'filename': self.filename, - 'spacecraft_name': self.spacecraft_name, - 'ssp_lon': self.ssp_lon, - 'sensor': self.sensor_name, - 'platform_name': self.spacecraft_name, + "filename": self.filename, + "spacecraft_name": self.spacecraft_name, + "ssp_lon": self.ssp_lon, + "sensor": self.sensor_name, + "platform_name": self.spacecraft_name, } return attributes @@ -86,10 +86,10 @@ def _set_attributes(self, variable, dataset_info, segmented=False): else: xdim, ydim = "number_of_columns", "number_of_rows" - if dataset_info['file_key'] not in ['product_quality', 'product_completeness', 'product_timeliness']: - variable = variable.rename({ydim: 'y', xdim: 'x'}) + if dataset_info["file_key"] not in ["product_quality", "product_completeness", "product_timeliness"]: + variable = variable.rename({ydim: "y", xdim: "x"}) - variable.attrs.setdefault('units', None) + variable.attrs.setdefault("units", None) variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) @@ -116,7 +116,7 @@ def _mask_data(variable, fill_value): fill_value = [fill_value] for val in fill_value: - variable = variable.where(variable != val).astype('float32') + variable = variable.where(variable != 
val).astype("float32") return variable @@ -139,8 +139,8 @@ def __init__(self, filename, filename_info, filetype_info, with_area_definition= decode_cf=True, mask_and_scale=True, chunks={ - 'number_of_columns': CHUNK_SIZE, - 'number_of_rows': CHUNK_SIZE + "number_of_columns": CHUNK_SIZE, + "number_of_rows": CHUNK_SIZE } ) @@ -148,10 +148,10 @@ def __init__(self, filename, filename_info, filetype_info, with_area_definition= logger.info("Setting `with_area_defintion=False` has no effect on pixel-based products.") # Read metadata which are common to all datasets - self.nlines = self.nc['y'].size - self.ncols = self.nc['x'].size - self._projection = self.nc['mtg_geos_projection'] - self.multi_dims = {'maximum_number_of_layers': 'layer', 'number_of_vis_channels': 'vis_channel_id'} + self.nlines = self.nc["y"].size + self.ncols = self.nc["x"].size + self._projection = self.nc["mtg_geos_projection"] + self.multi_dims = {"maximum_number_of_layers": "layer", "number_of_vis_channels": "vis_channel_id"} def get_area_def(self, key): """Return the area definition.""" @@ -162,9 +162,9 @@ def get_area_def(self, key): def get_dataset(self, dataset_id, dataset_info): """Get dataset using the file_key in dataset_info.""" - var_key = dataset_info['file_key'] - par_name = dataset_info['name'] - logger.debug('Reading in file to get dataset with key %s.', var_key) + var_key = dataset_info["file_key"] + par_name = dataset_info["name"] + logger.debug("Reading in file to get dataset with key %s.", var_key) try: variable = self.nc[var_key] @@ -173,20 +173,20 @@ def get_dataset(self, dataset_id, dataset_info): return None # Compute the area definition - if var_key not in ['product_quality', 'product_completeness', 'product_timeliness']: + if var_key not in ["product_quality", "product_completeness", "product_timeliness"]: self._area_def = self._compute_area_def(dataset_id) if any(dim_id in dataset_info.keys() for dim_id in self.multi_dims.values()): variable = self._slice_dataset(variable, dataset_info, self.multi_dims) - if par_name == 'retrieved_cloud_optical_thickness': + if par_name == "retrieved_cloud_optical_thickness": variable = self.get_total_cot(variable) - if dataset_info['file_type'] == 'nc_fci_test_clm': + if dataset_info["file_type"] == "nc_fci_test_clm": variable = self._decode_clm_test_data(variable, dataset_info) - if 'fill_value' in dataset_info: - variable = self._mask_data(variable, dataset_info['fill_value']) + if "fill_value" in dataset_info: + variable = self._mask_data(variable, dataset_info["fill_value"]) variable = self._set_attributes(variable, dataset_info) @@ -194,9 +194,9 @@ def get_dataset(self, dataset_id, dataset_info): @staticmethod def _decode_clm_test_data(variable, dataset_info): - if dataset_info['file_key'] != 'cloud_mask_cmrt6_test_result': - variable = variable.astype('uint32') - variable.values = (variable.values >> dataset_info['extract_byte'] << 31 >> 31).astype('int8') + if dataset_info["file_key"] != "cloud_mask_cmrt6_test_result": + variable = variable.astype("uint32") + variable.values = (variable.values >> dataset_info["extract_byte"] << 31 >> 31).astype("int8") return variable @@ -210,8 +210,8 @@ def _compute_area_def(self, dataset_id): area_extent = self._get_area_extent() area_naming, proj_dict = self._get_proj_area(dataset_id) area_def = geometry.AreaDefinition( - area_naming['area_id'], - area_naming['description'], + area_naming["area_id"], + area_naming["description"], "", proj_dict, self.ncols, @@ -223,15 +223,15 @@ def _compute_area_def(self, dataset_id): def 
_get_area_extent(self): """Calculate area extent of dataset.""" # Load and convert x/y coordinates to degrees as required by the make_ext function - x = self.nc['x'] - y = self.nc['y'] + x = self.nc["x"] + y = self.nc["y"] x_deg = np.degrees(x) y_deg = np.degrees(y) # Select the extreme points and calcualte area extent (not: these refer to pixel center) ll_x, ur_x = -x_deg.values[0], -x_deg.values[-1] ll_y, ur_y = y_deg.values[-1], y_deg.values[0] - h = float(self._projection.attrs['perspective_point_height']) + h = float(self._projection.attrs["perspective_point_height"]) area_extent_pixel_center = make_ext(ll_x, ur_x, ll_y, ur_y, h) # Shift area extent by half a pixel to get the area extent w.r.t. the dataset/pixel corners @@ -244,30 +244,30 @@ def _get_area_extent(self): def _get_proj_area(self, dataset_id): """Extract projection and area information.""" # Read the projection data from the mtg_geos_projection variable - a = float(self._projection.attrs['semi_major_axis']) - h = float(self._projection.attrs['perspective_point_height']) + a = float(self._projection.attrs["semi_major_axis"]) + h = float(self._projection.attrs["perspective_point_height"]) # Some L2PF test data files have a typo in the keyname for the inverse flattening parameter. Use a default value # as fallback until all L2PF test files are correctly formatted. - rf = float(self._projection.attrs.get('inverse_flattening', 298.257223563)) + rf = float(self._projection.attrs.get("inverse_flattening", 298.257223563)) res = dataset_id["resolution"] - area_naming_input_dict = {'platform_name': 'mtg', - 'instrument_name': 'fci', - 'resolution': res, + area_naming_input_dict = {"platform_name": "mtg", + "instrument_name": "fci", + "resolution": res, } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('fci', self.ssp_lon)}) + **get_service_mode("fci", self.ssp_lon)}) - proj_dict = {'a': a, - 'lon_0': self.ssp_lon, - 'h': h, + proj_dict = {"a": a, + "lon_0": self.ssp_lon, + "h": h, "rf": rf, - 'proj': 'geos', - 'units': 'm', - "sweep": 'y'} + "proj": "geos", + "units": "m", + "sweep": "y"} return area_naming, proj_dict @@ -281,7 +281,7 @@ def get_total_cot(variable): attrs = variable.attrs variable = 10 ** variable variable = variable.fillna(0.) 
- variable = variable.sum(dim='maximum_number_of_layers', keep_attrs=True) + variable = variable.sum(dim="maximum_number_of_layers", keep_attrs=True) variable = variable.where(variable != 0., np.nan) variable = np.log10(variable) variable.attrs = attrs @@ -301,19 +301,19 @@ def __init__(self, filename, filename_info, filetype_info, with_area_definition= decode_cf=True, mask_and_scale=True, chunks={ - 'number_of_FoR_cols': CHUNK_SIZE, - 'number_of_FoR_rows': CHUNK_SIZE + "number_of_FoR_cols": CHUNK_SIZE, + "number_of_FoR_rows": CHUNK_SIZE } ) # Read metadata which are common to all datasets - self.nlines = self.nc['number_of_FoR_rows'].size - self.ncols = self.nc['number_of_FoR_cols'].size + self.nlines = self.nc["number_of_FoR_rows"].size + self.ncols = self.nc["number_of_FoR_cols"].size self.with_adef = with_area_definition self.multi_dims = { - 'number_of_categories': 'category_id', 'number_of_channels': 'channel_id', - 'number_of_vis_channels': 'vis_channel_id', 'number_of_ir_channels': 'ir_channel_id', - 'number_test': 'test_id', + "number_of_categories": "category_id", "number_of_channels": "channel_id", + "number_of_vis_channels": "vis_channel_id", "number_of_ir_channels": "ir_channel_id", + "number_test": "test_id", } def get_area_def(self, key): @@ -325,8 +325,8 @@ def get_area_def(self, key): def get_dataset(self, dataset_id, dataset_info): """Get dataset using the file_key in dataset_info.""" - var_key = dataset_info['file_key'] - logger.debug('Reading in file to get dataset with key %s.', var_key) + var_key = dataset_info["file_key"] + logger.debug("Reading in file to get dataset with key %s.", var_key) try: variable = self.nc[var_key] @@ -337,16 +337,16 @@ def get_dataset(self, dataset_id, dataset_info): if any(dim_id in dataset_info.keys() for dim_id in self.multi_dims.values()): variable = self._slice_dataset(variable, dataset_info, self.multi_dims) - if self.with_adef and var_key not in ['longitude', 'latitude', - 'product_quality', 'product_completeness', 'product_timeliness']: + if self.with_adef and var_key not in ["longitude", "latitude", + "product_quality", "product_completeness", "product_timeliness"]: self._area_def = self._construct_area_def(dataset_id) # coordinates are not relevant when returning data with an AreaDefinition - if 'coordinates' in dataset_info.keys(): - del dataset_info['coordinates'] + if "coordinates" in dataset_info.keys(): + del dataset_info["coordinates"] - if 'fill_value' in dataset_info: - variable = self._mask_data(variable, dataset_info['fill_value']) + if "fill_value" in dataset_info: + variable = self._mask_data(variable, dataset_info["fill_value"]) variable = self._set_attributes(variable, dataset_info, segmented=True) @@ -361,19 +361,19 @@ def _construct_area_def(self, dataset_id): """ res = dataset_id["resolution"] - area_naming_input_dict = {'platform_name': 'mtg', - 'instrument_name': 'fci', - 'resolution': res, + area_naming_input_dict = {"platform_name": "mtg", + "instrument_name": "fci", + "resolution": res, } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('fci', self.ssp_lon)}) + **get_service_mode("fci", self.ssp_lon)}) # Construct area definition from standardized area definition. 
- stand_area_def = get_area_def(area_naming['area_id']) + stand_area_def = get_area_def(area_naming["area_id"]) if (stand_area_def.x_size != self.ncols) | (stand_area_def.y_size != self.nlines): - raise NotImplementedError('Unrecognised AreaDefinition.') + raise NotImplementedError("Unrecognised AreaDefinition.") mod_area_extent = self._modify_area_extent(stand_area_def.area_extent) diff --git a/satpy/readers/file_handlers.py b/satpy/readers/file_handlers.py index 0c47553b0d..3fdeed1edc 100644 --- a/satpy/readers/file_handlers.py +++ b/satpy/readers/file_handlers.py @@ -112,16 +112,16 @@ def combine_info(self, all_infos): """ combined_info = combine_metadata(*all_infos) - new_dict = self._combine(all_infos, min, 'start_time', 'start_orbit') - new_dict.update(self._combine(all_infos, max, 'end_time', 'end_orbit')) + new_dict = self._combine(all_infos, min, "start_time", "start_orbit") + new_dict.update(self._combine(all_infos, max, "end_time", "end_orbit")) new_dict.update(self._combine_orbital_parameters(all_infos)) new_dict.update(self._combine_time_parameters(all_infos)) try: - area = SwathDefinition(lons=np.ma.vstack([info['area'].lons for info in all_infos]), - lats=np.ma.vstack([info['area'].lats for info in all_infos])) - area.name = '_'.join([info['area'].name for info in all_infos]) - combined_info['area'] = area + area = SwathDefinition(lons=np.ma.vstack([info["area"].lons for info in all_infos]), + lats=np.ma.vstack([info["area"].lats for info in all_infos])) + area.name = "_".join([info["area"].name for info in all_infos]) + combined_info["area"] = area except KeyError: pass @@ -129,7 +129,7 @@ def combine_info(self, all_infos): return new_dict def _combine_orbital_parameters(self, all_infos): - orb_params = [info.get('orbital_parameters', {}) for info in all_infos] + orb_params = [info.get("orbital_parameters", {}) for info in all_infos] if not all(orb_params): return {} # Collect all available keys @@ -138,15 +138,15 @@ def _combine_orbital_parameters(self, all_infos): orb_params_comb.update(d) # Average known keys - keys = ['projection_longitude', 'projection_latitude', 'projection_altitude', - 'satellite_nominal_longitude', 'satellite_nominal_latitude', - 'satellite_actual_longitude', 'satellite_actual_latitude', 'satellite_actual_altitude', - 'nadir_longitude', 'nadir_latitude'] + keys = ["projection_longitude", "projection_latitude", "projection_altitude", + "satellite_nominal_longitude", "satellite_nominal_latitude", + "satellite_actual_longitude", "satellite_actual_latitude", "satellite_actual_altitude", + "nadir_longitude", "nadir_latitude"] orb_params_comb.update(self._combine(orb_params, np.mean, *keys)) - return {'orbital_parameters': orb_params_comb} + return {"orbital_parameters": orb_params_comb} def _combine_time_parameters(self, all_infos): - time_params = [info.get('time_parameters', {}) for info in all_infos] + time_params = [info.get("time_parameters", {}) for info in all_infos] if not all(time_params): return {} # Collect all available keys @@ -155,26 +155,26 @@ def _combine_time_parameters(self, all_infos): time_params_comb.update(d) start_keys = ( - 'nominal_start_time', - 'observation_start_time', + "nominal_start_time", + "observation_start_time", ) end_keys = ( - 'nominal_end_time', - 'observation_end_time', + "nominal_end_time", + "observation_end_time", ) time_params_comb.update(self._combine(time_params, min, *start_keys)) time_params_comb.update(self._combine(time_params, max, *end_keys)) - return {'time_parameters': time_params_comb} + return 
{"time_parameters": time_params_comb} @property def start_time(self): """Get start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def sensor_names(self): @@ -197,7 +197,7 @@ def file_type_matches(self, ds_ftype): """ if not isinstance(ds_ftype, (list, tuple)): ds_ftype = [ds_ftype] - if self.filetype_info['file_type'] in ds_ftype: + if self.filetype_info["file_type"] in ds_ftype: return True return None @@ -295,4 +295,4 @@ def available_datasets(self, configured_datasets=None): # file handler so let's yield early yield is_avail, ds_info continue - yield self.file_type_matches(ds_info['file_type']), ds_info + yield self.file_type_matches(ds_info["file_type"]), ds_info diff --git a/satpy/readers/fy4_base.py b/satpy/readers/fy4_base.py index 9b6b364420..144e559858 100644 --- a/satpy/readers/fy4_base.py +++ b/satpy/readers/fy4_base.py @@ -46,7 +46,7 @@ def __init__(self, filename, filename_info, filetype_info): """Init filehandler.""" super(FY4Base, self).__init__(filename, filename_info, filetype_info) - self.sensor = filename_info['instrument'] + self.sensor = filename_info["instrument"] # info of 250m, 500m, 1km, 2km and 4km data self._COFF_list = [21983.5, 10991.5, 5495.5, 2747.5, 1373.5] @@ -55,17 +55,17 @@ def __init__(self, filename, filename_info, filetype_info): self._CFAC_list = [163730199.0, 81865099.0, 40932549.0, 20466274.0, 10233137.0] self._LFAC_list = [163730199.0, 81865099.0, 40932549.0, 20466274.0, 10233137.0] - self.PLATFORM_NAMES = {'FY4A': 'FY-4A', - 'FY4B': 'FY-4B', - 'FY4C': 'FY-4C'} + self.PLATFORM_NAMES = {"FY4A": "FY-4A", + "FY4B": "FY-4B", + "FY4C": "FY-4C"} try: - self.PLATFORM_ID = self.PLATFORM_NAMES[filename_info['platform_id']] + self.PLATFORM_ID = self.PLATFORM_NAMES[filename_info["platform_id"]] except KeyError: raise KeyError(f"Unsupported platform ID: {filename_info['platform_id']}") - self.CHANS_ID = 'NOMChannel' - self.SAT_ID = 'NOMSatellite' - self.SUN_ID = 'NOMSun' + self.CHANS_ID = "NOMChannel" + self.SAT_ID = "NOMSatellite" + self.SUN_ID = "NOMSun" @staticmethod def scale(dn, slope, offset): @@ -112,10 +112,10 @@ def _getitem(block, lut): def reflectance_coeffs(self): """Retrieve the reflectance calibration coefficients from the HDF file.""" # using the corresponding SCALE and OFFSET - if self.PLATFORM_ID == 'FY-4A': - cal_coef = 'CALIBRATION_COEF(SCALE+OFFSET)' - elif self.PLATFORM_ID == 'FY-4B': - cal_coef = 'Calibration/CALIBRATION_COEF(SCALE+OFFSET)' + if self.PLATFORM_ID == "FY-4A": + cal_coef = "CALIBRATION_COEF(SCALE+OFFSET)" + elif self.PLATFORM_ID == "FY-4B": + cal_coef = "Calibration/CALIBRATION_COEF(SCALE+OFFSET)" else: raise KeyError(f"Unsupported platform ID for calibration: {self.PLATFORM_ID}") return self.get(cal_coef).values @@ -123,58 +123,58 @@ def reflectance_coeffs(self): def calibrate(self, data, ds_info, ds_name, file_key): """Calibrate the data.""" # Check if calibration is present, if not assume dataset is an angle - calibration = ds_info.get('calibration') + calibration = ds_info.get("calibration") # Return raw data in case of counts or no calibration - if calibration in ('counts', None): - data.attrs['units'] = ds_info['units'] - ds_info['valid_range'] = data.attrs['valid_range'] - ds_info['fill_value'] = data.attrs['FillValue'].item() - elif calibration == 'reflectance': + if calibration in ("counts", 
None): + data.attrs["units"] = ds_info["units"] + ds_info["valid_range"] = data.attrs["valid_range"] + ds_info["fill_value"] = data.attrs["FillValue"].item() + elif calibration == "reflectance": channel_index = int(file_key[-2:]) - 1 data = self.calibrate_to_reflectance(data, channel_index, ds_info) - elif calibration == 'brightness_temperature': + elif calibration == "brightness_temperature": data = self.calibrate_to_bt(data, ds_info, ds_name) - elif calibration == 'radiance': + elif calibration == "radiance": raise NotImplementedError("Calibration to radiance is not supported.") # Apply range limits, but not for counts or we convert to float! - if calibration != 'counts': - data = data.where((data >= min(data.attrs['valid_range'])) & - (data <= max(data.attrs['valid_range']))) + if calibration != "counts": + data = data.where((data >= min(data.attrs["valid_range"])) & + (data <= max(data.attrs["valid_range"]))) else: - data.attrs['_FillValue'] = data.attrs['FillValue'].item() + data.attrs["_FillValue"] = data.attrs["FillValue"].item() return data def calibrate_to_reflectance(self, data, channel_index, ds_info): """Calibrate to reflectance [%].""" logger.debug("Calibrating to reflectances") # using the corresponding SCALE and OFFSET - if self.sensor != 'AGRI' and self.sensor != 'GHI': - raise ValueError(f'Unsupported sensor type: {self.sensor}') + if self.sensor != "AGRI" and self.sensor != "GHI": + raise ValueError(f"Unsupported sensor type: {self.sensor}") coeffs = self.reflectance_coeffs num_channel = coeffs.shape[0] - if self.sensor == 'AGRI' and num_channel == 1: + if self.sensor == "AGRI" and num_channel == 1: # only channel_2, resolution = 500 m channel_index = 0 - data.data = da.where(data.data == data.attrs['FillValue'].item(), np.nan, data.data) - data.attrs['scale_factor'] = coeffs[channel_index, 0].item() - data.attrs['add_offset'] = coeffs[channel_index, 1].item() - data = self.scale(data, data.attrs['scale_factor'], data.attrs['add_offset']) + data.data = da.where(data.data == data.attrs["FillValue"].item(), np.nan, data.data) + data.attrs["scale_factor"] = coeffs[channel_index, 0].item() + data.attrs["add_offset"] = coeffs[channel_index, 1].item() + data = self.scale(data, data.attrs["scale_factor"], data.attrs["add_offset"]) data *= 100 - ds_info['valid_range'] = (data.attrs['valid_range'] * data.attrs['scale_factor'] + data.attrs['add_offset']) - ds_info['valid_range'] = ds_info['valid_range'] * 100 + ds_info["valid_range"] = (data.attrs["valid_range"] * data.attrs["scale_factor"] + data.attrs["add_offset"]) + ds_info["valid_range"] = ds_info["valid_range"] * 100 return data def calibrate_to_bt(self, data, ds_info, ds_name): """Calibrate to Brightness Temperatures [K].""" logger.debug("Calibrating to brightness_temperature") - if self.sensor not in ['GHI', 'AGRI']: + if self.sensor not in ["GHI", "AGRI"]: raise ValueError("Error, sensor must be GHI or AGRI.") # The key is sometimes prefixes with `Calibration/` so we try both options here - lut_key = ds_info.get('lut_key', ds_name) + lut_key = ds_info.get("lut_key", ds_name) try: lut = self[lut_key] except KeyError: @@ -183,66 +183,66 @@ def calibrate_to_bt(self, data, ds_info, ds_name): # the value of dn is the index of brightness_temperature data = self.apply_lut(data, lut) - ds_info['valid_range'] = lut.attrs['valid_range'] + ds_info["valid_range"] = lut.attrs["valid_range"] return data @property def start_time(self): """Get the start time.""" - start_time = self['/attr/Observing Beginning Date'] + 'T' + 
self['/attr/Observing Beginning Time'] + 'Z' + start_time = self["/attr/Observing Beginning Date"] + "T" + self["/attr/Observing Beginning Time"] + "Z" try: - return datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ") except ValueError: # For some data there is no sub-second component - return datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%SZ') + return datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%SZ") @property def end_time(self): """Get the end time.""" - end_time = self['/attr/Observing Ending Date'] + 'T' + self['/attr/Observing Ending Time'] + 'Z' + end_time = self["/attr/Observing Ending Date"] + "T" + self["/attr/Observing Ending Time"] + "Z" try: - return datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ") except ValueError: # For some data there is no sub-second component - return datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%SZ') + return datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%SZ") def get_area_def(self, key): """Get the area definition.""" # Coordination Group for Meteorological Satellites LRIT/HRIT Global Specification # https://www.cgms-info.org/documents/cgms-lrit-hrit-global-specification-(v2-8-of-30-oct-2013).pdf - res = key['resolution'] + res = key["resolution"] pdict = {} - begin_cols = float(self.file_content['/attr/Begin Pixel Number']) - end_lines = float(self.file_content['/attr/End Line Number']) - pdict['coff'] = self._COFF_list[RESOLUTION_LIST.index(res)] - begin_cols + 1 - pdict['loff'] = -self._LOFF_list[RESOLUTION_LIST.index(res)] + end_lines + 1 + begin_cols = float(self.file_content["/attr/Begin Pixel Number"]) + end_lines = float(self.file_content["/attr/End Line Number"]) + pdict["coff"] = self._COFF_list[RESOLUTION_LIST.index(res)] - begin_cols + 1 + pdict["loff"] = -self._LOFF_list[RESOLUTION_LIST.index(res)] + end_lines + 1 - pdict['cfac'] = self._CFAC_list[RESOLUTION_LIST.index(res)] - pdict['lfac'] = self._LFAC_list[RESOLUTION_LIST.index(res)] + pdict["cfac"] = self._CFAC_list[RESOLUTION_LIST.index(res)] + pdict["lfac"] = self._LFAC_list[RESOLUTION_LIST.index(res)] try: - pdict['a'] = float(self.file_content['/attr/Semimajor axis of ellipsoid']) + pdict["a"] = float(self.file_content["/attr/Semimajor axis of ellipsoid"]) except KeyError: - pdict['a'] = float(self.file_content['/attr/dEA']) - if pdict['a'] < 10000: - pdict['a'] = pdict['a'] * 1E3 # equator radius (m) + pdict["a"] = float(self.file_content["/attr/dEA"]) + if pdict["a"] < 10000: + pdict["a"] = pdict["a"] * 1E3 # equator radius (m) try: - pdict['b'] = float(self.file_content['/attr/Semiminor axis of ellipsoid']) + pdict["b"] = float(self.file_content["/attr/Semiminor axis of ellipsoid"]) except KeyError: - pdict['b'] = pdict['a'] * (1 - 1 / self.file_content['/attr/dObRecFlat']) # polar radius (m) + pdict["b"] = pdict["a"] * (1 - 1 / self.file_content["/attr/dObRecFlat"]) # polar radius (m) - pdict['h'] = self.file_content['/attr/NOMSatHeight'] # the altitude of satellite (m) - if pdict['h'] > 42000000.0: - pdict['h'] = pdict['h'] - pdict['a'] + pdict["h"] = self.file_content["/attr/NOMSatHeight"] # the altitude of satellite (m) + if pdict["h"] > 42000000.0: + pdict["h"] = pdict["h"] - pdict["a"] - pdict['ssp_lon'] = float(self.file_content['/attr/NOMCenterLon']) - pdict['nlines'] = float(self.file_content['/attr/RegLength']) - pdict['ncols'] = float(self.file_content['/attr/RegWidth']) + pdict["ssp_lon"] = float(self.file_content["/attr/NOMCenterLon"]) + 
pdict["nlines"] = float(self.file_content["/attr/RegLength"]) + pdict["ncols"] = float(self.file_content["/attr/RegWidth"]) - pdict['scandir'] = 'N2S' - pdict['a_desc'] = "FY-4 {} area".format(self.filename_info['observation_type']) - pdict['a_name'] = f'{self.filename_info["observation_type"]}_{res}m' - pdict['p_id'] = f'FY-4, {res}m' + pdict["scandir"] = "N2S" + pdict["a_desc"] = "FY-4 {} area".format(self.filename_info["observation_type"]) + pdict["a_name"] = f'{self.filename_info["observation_type"]}_{res}m' + pdict["p_id"] = f"FY-4, {res}m" area_extent = get_area_extent(pdict) area_extent = (area_extent[0], diff --git a/satpy/readers/generic_image.py b/satpy/readers/generic_image.py index 3fb8c6b560..1ba160095f 100644 --- a/satpy/readers/generic_image.py +++ b/satpy/readers/generic_image.py @@ -40,13 +40,13 @@ CHUNK_SIZE = get_legacy_chunk_size() -BANDS = {1: ['L'], - 2: ['L', 'A'], - 3: ['R', 'G', 'B'], - 4: ['R', 'G', 'B', 'A']} +BANDS = {1: ["L"], + 2: ["L", "A"], + 3: ["R", "G", "B"], + 4: ["R", "G", "B", "A"]} -NODATA_HANDLING_FILLVALUE = 'fill_value' -NODATA_HANDLING_NANMASK = 'nan_mask' +NODATA_HANDLING_FILLVALUE = "fill_value" +NODATA_HANDLING_NANMASK = "nan_mask" logger = logging.getLogger(__name__) @@ -60,10 +60,10 @@ def __init__(self, filename, filename_info, filetype_info): filename, filename_info, filetype_info) self.finfo = filename_info try: - self.finfo['end_time'] = self.finfo['start_time'] + self.finfo["end_time"] = self.finfo["start_time"] except KeyError: pass - self.finfo['filename'] = self.filename + self.finfo["filename"] = self.filename self.file_content = {} self.area = None self.dataset_name = None @@ -71,10 +71,10 @@ def __init__(self, filename, filename_info, filetype_info): def read(self): """Read the image.""" - dataset = rasterio.open(self.finfo['filename']) + dataset = rasterio.open(self.finfo["filename"]) # Create area definition - if hasattr(dataset, 'crs') and dataset.crs is not None: + if hasattr(dataset, "crs") and dataset.crs is not None: self.area = utils.get_area_def_from_raster(dataset) data = xr.open_dataset(self.finfo["filename"], engine="rasterio", @@ -90,13 +90,13 @@ def read(self): attrs = data.attrs.copy() # Rename to Satpy convention - data = data.rename({'band': 'bands'}) + data = data.rename({"band": "bands"}) # Rename bands to [R, G, B, A], or a subset of those - data['bands'] = BANDS[data.bands.size] + data["bands"] = BANDS[data.bands.size] data.attrs = attrs - self.dataset_name = 'image' + self.dataset_name = "image" self.file_content[self.dataset_name] = data def get_area_def(self, dsid): @@ -108,16 +108,16 @@ def get_area_def(self, dsid): @property def start_time(self): """Return start time.""" - return self.finfo['start_time'] + return self.finfo["start_time"] @property def end_time(self): """Return end time.""" - return self.finfo['end_time'] + return self.finfo["end_time"] def get_dataset(self, key, info): """Get a dataset from the file.""" - ds_name = self.dataset_name if self.dataset_name else key['name'] + ds_name = self.dataset_name if self.dataset_name else key["name"] logger.debug("Reading '%s.'", ds_name) data = self.file_content[ds_name] @@ -149,8 +149,8 @@ def _mask_image_data(data, info): for i in range(data.shape[0])]) data.data = masked_data data = data.sel(bands=BANDS[data.bands.size - 1]) - elif hasattr(data, 'nodatavals') and data.nodatavals: - data = _handle_nodatavals(data, info.get('nodata_handling', NODATA_HANDLING_FILLVALUE)) + elif hasattr(data, "nodatavals") and data.nodatavals: + data = 
_handle_nodatavals(data, info.get("nodata_handling", NODATA_HANDLING_FILLVALUE)) return data @@ -162,7 +162,7 @@ def _handle_nodatavals(data, nodata_handling): masked_data = da.stack([da.where(data.data[i, :, :] == nodataval, np.nan, data.data[i, :, :]) for i, nodataval in enumerate(data.nodatavals)]) data.data = masked_data - data.attrs['_FillValue'] = np.nan + data.attrs["_FillValue"] = np.nan elif nodata_handling == NODATA_HANDLING_FILLVALUE: # keep data as it is but set _FillValue attribute to provided # nodatavalue (first one as it has to be the same for all bands at least @@ -170,5 +170,5 @@ def _handle_nodatavals(data, nodata_handling): fill_value = data.nodatavals[0] if np.issubdtype(data.dtype, np.integer): fill_value = int(fill_value) - data.attrs['_FillValue'] = fill_value + data.attrs["_FillValue"] = fill_value return data diff --git a/satpy/readers/geocat.py b/satpy/readers/geocat.py index 5086cd899b..185e7d3c13 100644 --- a/satpy/readers/geocat.py +++ b/satpy/readers/geocat.py @@ -44,14 +44,14 @@ CF_UNITS = { - 'none': '1', + "none": "1", } # GEOCAT currently doesn't include projection information in it's files GEO_PROJS = { - 'GOES-16': '+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs', - 'GOES-17': '+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs', - 'HIMAWARI-8': '+proj=geos +over +lon_0=140.7 +h=35785863 +a=6378137 +b=6356752.299581327 +units=m +no_defs', + "GOES-16": "+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs", + "GOES-17": "+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs", + "HIMAWARI-8": "+proj=geos +over +lon_0=140.7 +h=35785863 +a=6378137 +b=6356752.299581327 +units=m +no_defs", } @@ -72,29 +72,29 @@ class GEOCATFileHandler(NetCDF4FileHandler): def __init__(self, filename, filename_info, filetype_info, **kwargs): """Open and perform initial investigation of NetCDF file.""" - kwargs.setdefault('xarray_kwargs', {}).setdefault( - 'engine', "netcdf4") - kwargs.setdefault('xarray_kwargs', {}).setdefault( - 'decode_times', False) + kwargs.setdefault("xarray_kwargs", {}).setdefault( + "engine", "netcdf4") + kwargs.setdefault("xarray_kwargs", {}).setdefault( + "decode_times", False) super(GEOCATFileHandler, self).__init__( filename, filename_info, filetype_info, xarray_kwargs=kwargs["xarray_kwargs"]) sensors = { - 'goes': 'goes_imager', - 'himawari8': 'ahi', - 'goes16': 'abi', # untested - 'goesr': 'abi', # untested + "goes": "goes_imager", + "himawari8": "ahi", + "goes16": "abi", # untested + "goesr": "abi", # untested } platforms: dict[str, str] = { } resolutions = { - 'abi': { + "abi": { 1: 1002.0086577437705, 2: 2004.0173154875411, }, - 'ahi': { + "ahi": { 1: 999.9999820317674, # assumption 2: 1999.999964063535, 4: 3999.99992812707, @@ -121,7 +121,7 @@ def get_platform(self, platform): return platform def _get_proj(self, platform, ref_lon): - if platform == 'GOES-16' and -76. < ref_lon < -74.: + if platform == "GOES-16" and -76. < ref_lon < -74.: # geocat file holds the *actual* subsatellite point, not the # projection (-75.2 actual versus -75 projection) ref_lon = -75. 
@@ -130,33 +130,33 @@ def _get_proj(self, platform, ref_lon): @property def sensor_names(self): """Get sensor names.""" - return [self.get_sensor(self['/attr/Sensor_Name'])] + return [self.get_sensor(self["/attr/Sensor_Name"])] @property def start_time(self): """Get start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def is_geo(self): """Check platform.""" - platform = self.get_platform(self['/attr/Platform_Name']) + platform = self.get_platform(self["/attr/Platform_Name"]) return platform in GEO_PROJS @property def resolution(self): """Get resolution.""" - elem_res = self['/attr/Element_Resolution'] + elem_res = self["/attr/Element_Resolution"] return int(elem_res * 1000) def _calc_area_resolution(self, ds_res): elem_res = round(ds_res / 1000.) # mimic 'Element_Resolution' attribute from above - sensor = self.get_sensor(self['/attr/Sensor_Name']) + sensor = self.get_sensor(self["/attr/Sensor_Name"]) return self.resolutions.get(sensor, {}).get(int(elem_res), elem_res * 1000.) @@ -174,27 +174,27 @@ def available_datasets(self, configured_datasets=None): """ res = self.resolution - coordinates = ('pixel_longitude', 'pixel_latitude') + coordinates = ("pixel_longitude", "pixel_latitude") handled_variables = set() # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): - this_res = ds_info.get('resolution') - this_coords = ds_info.get('coordinates') + this_res = ds_info.get("resolution") + this_coords = ds_info.get("coordinates") # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info - var_name = ds_info.get('file_key', ds_info['name']) - matches = self.file_type_matches(ds_info['file_type']) + var_name = ds_info.get("file_key", ds_info["name"]) + matches = self.file_type_matches(ds_info["file_type"]) # we can confidently say that we can provide this dataset and can # provide more info if matches and var_name in self and this_res != res: handled_variables.add(var_name) new_info = ds_info.copy() # don't mess up the above yielded - new_info['resolution'] = res + new_info["resolution"] = res if not self.is_geo and this_coords is None: - new_info['coordinates'] = coordinates + new_info["coordinates"] = coordinates yield True, new_info elif is_avail is None: # if we didn't know how to handle this dataset and no one else did @@ -207,21 +207,21 @@ def available_datasets(self, configured_datasets=None): continue if isinstance(val, netCDF4.Variable): ds_info = { - 'file_type': self.filetype_info['file_type'], - 'resolution': res, - 'name': var_name, + "file_type": self.filetype_info["file_type"], + "resolution": res, + "name": var_name, } if not self.is_geo: - ds_info['coordinates'] = coordinates + ds_info["coordinates"] = coordinates yield True, ds_info def get_shape(self, dataset_id, ds_info): """Get shape.""" - var_name = ds_info.get('file_key', dataset_id['name']) - return self[var_name + '/shape'] + var_name = ds_info.get("file_key", dataset_id["name"]) + return self[var_name + "/shape"] def _first_good_nav(self, lon_arr, lat_arr): - if hasattr(lon_arr, 'mask'): + if hasattr(lon_arr, "mask"): good_indexes = np.nonzero(~lon_arr.mask) else: # no masked values found in auto maskandscale @@ -247,9 +247,9 @@ def _get_extents(self, proj, res, lon_arr, lat_arr): def _load_nav(self, name): nav = 
self[name] - factor = self[name + '/attr/scale_factor'] - offset = self[name + '/attr/add_offset'] - fill = self[name + '/attr/_FillValue'] + factor = self[name + "/attr/scale_factor"] + offset = self[name + "/attr/add_offset"] + fill = self[name + "/attr/_FillValue"] nav = nav[:] mask = nav == fill nav = np.ma.masked_array(nav * factor + offset, mask=mask) @@ -260,15 +260,15 @@ def get_area_def(self, dsid): if not self.is_geo: raise NotImplementedError("Don't know how to get the Area Definition for this file") - platform = self.get_platform(self['/attr/Platform_Name']) - res = self._calc_area_resolution(dsid['resolution']) - proj = self._get_proj(platform, float(self['/attr/Subsatellite_Longitude'])) - area_name = '{} {} Area at {}m'.format( + platform = self.get_platform(self["/attr/Platform_Name"]) + res = self._calc_area_resolution(dsid["resolution"]) + proj = self._get_proj(platform, float(self["/attr/Subsatellite_Longitude"])) + area_name = "{} {} Area at {}m".format( platform, - self.metadata.get('sector_id', ''), + self.metadata.get("sector_id", ""), int(res)) - lon = self._load_nav('pixel_longitude') - lat = self._load_nav('pixel_latitude') + lon = self._load_nav("pixel_longitude") + lat = self._load_nav("pixel_latitude") extents = self._get_extents(proj, res, lon, lat) area_def = geometry.AreaDefinition( area_name, @@ -283,36 +283,36 @@ def get_area_def(self, dsid): def get_metadata(self, dataset_id, ds_info): """Get metadata.""" - var_name = ds_info.get('file_key', dataset_id['name']) + var_name = ds_info.get("file_key", dataset_id["name"]) shape = self.get_shape(dataset_id, ds_info) - info = getattr(self[var_name], 'attrs', {}) - info['shape'] = shape + info = getattr(self[var_name], "attrs", {}) + info["shape"] = shape info.update(ds_info) - u = info.get('units') + u = info.get("units") if u in CF_UNITS: # CF compliance - info['units'] = CF_UNITS[u] + info["units"] = CF_UNITS[u] - info['sensor'] = self.get_sensor(self['/attr/Sensor_Name']) - info['platform_name'] = self.get_platform(self['/attr/Platform_Name']) - info['resolution'] = dataset_id['resolution'] - if var_name == 'pixel_longitude': - info['standard_name'] = 'longitude' - elif var_name == 'pixel_latitude': - info['standard_name'] = 'latitude' + info["sensor"] = self.get_sensor(self["/attr/Sensor_Name"]) + info["platform_name"] = self.get_platform(self["/attr/Platform_Name"]) + info["resolution"] = dataset_id["resolution"] + if var_name == "pixel_longitude": + info["standard_name"] = "longitude" + elif var_name == "pixel_latitude": + info["standard_name"] = "latitude" return info def get_dataset(self, dataset_id, ds_info): """Get dataset.""" - var_name = ds_info.get('file_key', dataset_id['name']) + var_name = ds_info.get("file_key", dataset_id["name"]) # FUTURE: Metadata retrieval may be separate info = self.get_metadata(dataset_id, ds_info) data = self[var_name] - fill = self[var_name + '/attr/_FillValue'] - factor = self.get(var_name + '/attr/scale_factor') - offset = self.get(var_name + '/attr/add_offset') - valid_range = self.get(var_name + '/attr/valid_range') + fill = self[var_name + "/attr/_FillValue"] + factor = self.get(var_name + "/attr/scale_factor") + offset = self.get(var_name + "/attr/add_offset") + valid_range = self.get(var_name + "/attr/valid_range") data = data.where(data != fill) if valid_range is not None: @@ -321,5 +321,5 @@ def get_dataset(self, dataset_id, ds_info): data = data * factor + offset data.attrs.update(info) - data = data.rename({'lines': 'y', 'elements': 'x'}) + data = 
data.rename({"lines": "y", "elements": "x"}) return data diff --git a/satpy/readers/ghi_l1.py b/satpy/readers/ghi_l1.py index 2e26aeee24..3c085282c7 100644 --- a/satpy/readers/ghi_l1.py +++ b/satpy/readers/ghi_l1.py @@ -38,20 +38,20 @@ class HDF_GHI_L1(FY4Base): def __init__(self, filename, filename_info, filetype_info): """Init filehandler.""" super(HDF_GHI_L1, self).__init__(filename, filename_info, filetype_info) - self.sensor = 'GHI' + self.sensor = "GHI" def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" - ds_name = dataset_id['name'] - logger.debug('Reading in get_dataset %s.', ds_name) - file_key = ds_info.get('file_key', ds_name) + ds_name = dataset_id["name"] + logger.debug("Reading in get_dataset %s.", ds_name) + file_key = ds_info.get("file_key", ds_name) if self.CHANS_ID in file_key: - file_key = f'Data/{file_key}' + file_key = f"Data/{file_key}" elif self.SUN_ID in file_key or self.SAT_ID in file_key: - file_key = f'Navigation/{file_key}' + file_key = f"Navigation/{file_key}" data = self.get(file_key) if data.ndim >= 2: - data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'}) + data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) data = self.calibrate(data, ds_info, ds_name, file_key) @@ -61,58 +61,58 @@ def get_dataset(self, dataset_id, ds_info): def adjust_attrs(self, data, ds_info): """Adjust the attrs of the data.""" - satname = self.PLATFORM_NAMES.get(self['/attr/Satellite Name'], self['/attr/Satellite Name']) - data.attrs.update({'platform_name': satname, - 'sensor': self['/attr/Sensor Identification Code'].lower(), - 'orbital_parameters': { - 'satellite_nominal_latitude': self['/attr/NOMSubSatLat'].item(), - 'satellite_nominal_longitude': self['/attr/NOMSubSatLon'].item(), - 'satellite_nominal_altitude': self['/attr/NOMSatHeight'].item()}}) + satname = self.PLATFORM_NAMES.get(self["/attr/Satellite Name"], self["/attr/Satellite Name"]) + data.attrs.update({"platform_name": satname, + "sensor": self["/attr/Sensor Identification Code"].lower(), + "orbital_parameters": { + "satellite_nominal_latitude": self["/attr/NOMSubSatLat"].item(), + "satellite_nominal_longitude": self["/attr/NOMSubSatLon"].item(), + "satellite_nominal_altitude": self["/attr/NOMSatHeight"].item()}}) data.attrs.update(ds_info) # remove attributes that could be confusing later - data.attrs.pop('FillValue', None) - data.attrs.pop('Intercept', None) - data.attrs.pop('Slope', None) + data.attrs.pop("FillValue", None) + data.attrs.pop("Intercept", None) + data.attrs.pop("Slope", None) def get_area_def(self, key): """Get the area definition.""" # Coordination Group for Meteorological Satellites LRIT/HRIT Global Specification # https://www.cgms-info.org/documents/cgms-lrit-hrit-global-specification-(v2-8-of-30-oct-2013).pdf - res = key['resolution'] + res = key["resolution"] pdict = {} - c_lats = self.file_content['/attr/Corner-Point Latitudes'] - c_lons = self.file_content['/attr/Corner-Point Longitudes'] + c_lats = self.file_content["/attr/Corner-Point Latitudes"] + c_lons = self.file_content["/attr/Corner-Point Longitudes"] p1 = (c_lons[0], c_lats[0]) p2 = (c_lons[1], c_lats[1]) p3 = (c_lons[2], c_lats[2]) p4 = (c_lons[3], c_lats[3]) - pdict['a'] = self.file_content['/attr/Semi_major_axis'] * 1E3 # equator radius (m) - pdict['b'] = self.file_content['/attr/Semi_minor_axis'] * 1E3 # equator radius (m) - pdict['h'] = self.file_content['/attr/NOMSatHeight'] * 1E3 # the altitude of satellite (m) + pdict["a"] = self.file_content["/attr/Semi_major_axis"] * 1E3 # equator radius (m) + 
pdict["b"] = self.file_content["/attr/Semi_minor_axis"] * 1E3 # equator radius (m) + pdict["h"] = self.file_content["/attr/NOMSatHeight"] * 1E3 # the altitude of satellite (m) - pdict['h'] = pdict['h'] - pdict['a'] + pdict["h"] = pdict["h"] - pdict["a"] - pdict['ssp_lon'] = float(self.file_content['/attr/NOMSubSatLon']) - pdict['nlines'] = float(self.file_content['/attr/RegLength']) - pdict['ncols'] = float(self.file_content['/attr/RegWidth']) + pdict["ssp_lon"] = float(self.file_content["/attr/NOMSubSatLon"]) + pdict["nlines"] = float(self.file_content["/attr/RegLength"]) + pdict["ncols"] = float(self.file_content["/attr/RegWidth"]) - pdict['scandir'] = 'S2N' + pdict["scandir"] = "S2N" - pdict['a_desc'] = "FY-4 {} area".format(self.filename_info['observation_type']) - pdict['a_name'] = f'{self.filename_info["observation_type"]}_{res}m' - pdict['p_id'] = f'FY-4, {res}m' + pdict["a_desc"] = "FY-4 {} area".format(self.filename_info["observation_type"]) + pdict["a_name"] = f'{self.filename_info["observation_type"]}_{res}m' + pdict["p_id"] = f"FY-4, {res}m" - proj_dict = {'a': pdict['a'], - 'b': pdict['b'], - 'lon_0': pdict['ssp_lon'], - 'h': pdict['h'], - 'proj': 'geos', - 'units': 'm', - 'sweep': 'y'} + proj_dict = {"a": pdict["a"], + "b": pdict["b"], + "lon_0": pdict["ssp_lon"], + "h": pdict["h"], + "proj": "geos", + "units": "m", + "sweep": "y"} p = Proj(proj_dict) o1 = (p(p1[0], p1[1])) # Upper left diff --git a/satpy/readers/ghrsst_l2.py b/satpy/readers/ghrsst_l2.py index 384bafa289..6c4005623e 100644 --- a/satpy/readers/ghrsst_l2.py +++ b/satpy/readers/ghrsst_l2.py @@ -39,15 +39,15 @@ def __init__(self, filename, filename_info, filetype_info, engine=None): self._engine = engine self._tarfile = None - self.filename_info['start_time'] = datetime.strptime( - self.nc.start_time, '%Y%m%dT%H%M%SZ') - self.filename_info['end_time'] = datetime.strptime( - self.nc.stop_time, '%Y%m%dT%H%M%SZ') + self.filename_info["start_time"] = datetime.strptime( + self.nc.start_time, "%Y%m%dT%H%M%SZ") + self.filename_info["end_time"] = datetime.strptime( + self.nc.stop_time, "%Y%m%dT%H%M%SZ") @cached_property def nc(self): """Get the xarray Dataset for the filename.""" - if os.fspath(self.filename).endswith('tar'): + if os.fspath(self.filename).endswith("tar"): file_obj = self._open_tarfile() else: file_obj = self.filename @@ -56,13 +56,13 @@ def nc(self): decode_cf=True, mask_and_scale=True, engine=self._engine, - chunks={'ni': CHUNK_SIZE, - 'nj': CHUNK_SIZE}) + chunks={"ni": CHUNK_SIZE, + "nj": CHUNK_SIZE}) - return nc.rename({'ni': 'x', 'nj': 'y'}) + return nc.rename({"ni": "x", "nj": "y"}) def _open_tarfile(self): - self._tarfile = tarfile.open(name=self.filename, mode='r') + self._tarfile = tarfile.open(name=self.filename, mode="r") sst_filename = next((name for name in self._tarfile.getnames() if self._is_sst_file(name))) file_obj = self._tarfile.extractfile(sst_filename) @@ -71,27 +71,27 @@ def _open_tarfile(self): @staticmethod def _is_sst_file(name): """Check if file in the tar archive is a valid SST file.""" - return name.endswith('nc') and 'GHRSST-SSTskin' in name + return name.endswith("nc") and "GHRSST-SSTskin" in name def get_dataset(self, key, info): """Get any available dataset.""" - stdname = info.get('standard_name') + stdname = info.get("standard_name") return self.nc[stdname].squeeze() @property def start_time(self): """Get start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" - return 
self.filename_info['end_time'] + return self.filename_info["end_time"] @property def sensor(self): """Get the sensor name.""" - return self.nc.attrs['sensor'].lower() + return self.nc.attrs["sensor"].lower() def __del__(self): """Close the tarfile object.""" diff --git a/satpy/readers/ghrsst_l3c_sst.py b/satpy/readers/ghrsst_l3c_sst.py index d35621d341..ef1dd220a9 100644 --- a/satpy/readers/ghrsst_l3c_sst.py +++ b/satpy/readers/ghrsst_l3c_sst.py @@ -28,16 +28,16 @@ logger = logging.getLogger(__name__) -PLATFORM_NAME = {'NPP': 'Suomi-NPP', } -SENSOR_NAME = {'VIIRS': 'viirs', - 'AVHRR': 'avhrr/3'} +PLATFORM_NAME = {"NPP": "Suomi-NPP", } +SENSOR_NAME = {"VIIRS": "viirs", + "AVHRR": "avhrr/3"} class GHRSST_OSISAFL2(NetCDF4FileHandler): """Reader for the OSISAF SST GHRSST format.""" def _parse_datetime(self, datestr): - return datetime.strptime(datestr, '%Y%m%dT%H%M%SZ') + return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") def get_area_def(self, area_id, area_info): """Override abstract baseclass method.""" @@ -45,21 +45,21 @@ def get_area_def(self, area_id, area_info): def get_dataset(self, dataset_id, ds_info, out=None): """Load a dataset.""" - var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) - dtype = ds_info.get('dtype', np.float32) - if var_path + '/shape' not in self: + var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) + dtype = ds_info.get("dtype", np.float32) + if var_path + "/shape" not in self: # loading a scalar value shape = 1 else: - shape = self[var_path + '/shape'] + shape = self[var_path + "/shape"] if shape[0] == 1: # Remove the time dimenstion from dataset shape = shape[1], shape[2] - file_units = ds_info.get('file_units') + file_units = ds_info.get("file_units") if file_units is None: try: - file_units = self[var_path + '/attr/units'] + file_units = self[var_path + "/attr/units"] # they were almost completely CF compliant... 
if file_units == "none": file_units = "1" @@ -76,19 +76,19 @@ def get_dataset(self, dataset_id, ds_info, out=None): ds_info.update({ "units": ds_info.get("units", file_units), - "platform_name": PLATFORM_NAME.get(self['/attr/platform'], self['/attr/platform']), - "sensor": SENSOR_NAME.get(self['/attr/sensor'], self['/attr/sensor']), + "platform_name": PLATFORM_NAME.get(self["/attr/platform"], self["/attr/platform"]), + "sensor": SENSOR_NAME.get(self["/attr/sensor"], self["/attr/sensor"]), }) ds_info.update(dataset_id.to_dict()) cls = ds_info.pop("container", Dataset) return cls(out, **ds_info) def _scale_and_mask_data(self, out, var_path): - valid_min = self[var_path + '/attr/valid_min'] - valid_max = self[var_path + '/attr/valid_max'] + valid_min = self[var_path + "/attr/valid_min"] + valid_max = self[var_path + "/attr/valid_max"] try: - scale_factor = self[var_path + '/attr/scale_factor'] - scale_offset = self[var_path + '/attr/add_offset'] + scale_factor = self[var_path + "/attr/scale_factor"] + scale_offset = self[var_path + "/attr/add_offset"] except KeyError: scale_factor = scale_offset = None if valid_min is not None and valid_max is not None: @@ -100,16 +100,16 @@ def _scale_and_mask_data(self, out, var_path): def get_lonlats(self, navid, nav_info, lon_out=None, lat_out=None): """Load an area.""" - lon_key = 'lon' - valid_min = self[lon_key + '/attr/valid_min'] - valid_max = self[lon_key + '/attr/valid_max'] + lon_key = "lon" + valid_min = self[lon_key + "/attr/valid_min"] + valid_max = self[lon_key + "/attr/valid_max"] lon_out.data[:] = self[lon_key][::-1] lon_out.mask[:] = (lon_out < valid_min) | (lon_out > valid_max) - lat_key = 'lat' - valid_min = self[lat_key + '/attr/valid_min'] - valid_max = self[lat_key + '/attr/valid_max'] + lat_key = "lat" + valid_min = self[lat_key + "/attr/valid_min"] + valid_max = self[lat_key + "/attr/valid_max"] lat_out.data[:] = self[lat_key][::-1] lat_out.mask[:] = (lat_out < valid_min) | (lat_out > valid_max) @@ -119,9 +119,9 @@ def get_lonlats(self, navid, nav_info, lon_out=None, lat_out=None): def start_time(self): """Get start time.""" # return self.filename_info['start_time'] - return self._parse_datetime(self['/attr/start_time']) + return self._parse_datetime(self["/attr/start_time"]) @property def end_time(self): """Get end time.""" - return self._parse_datetime(self['/attr/stop_time']) + return self._parse_datetime(self["/attr/stop_time"]) diff --git a/satpy/readers/glm_l2.py b/satpy/readers/glm_l2.py index bfb2719b07..ceb11a33bc 100644 --- a/satpy/readers/glm_l2.py +++ b/satpy/readers/glm_l2.py @@ -33,9 +33,9 @@ logger = logging.getLogger(__name__) PLATFORM_NAMES = { - 'G16': 'GOES-16', - 'G17': 'GOES-17', - 'G18': 'GOES-18', + "G16": "GOES-16", + "G17": "GOES-17", + "G18": "GOES-18", } # class NC_GLM_L2_LCFA(BaseFileHandler): — add this with glmtools @@ -47,45 +47,45 @@ class NCGriddedGLML2(NC_ABI_BASE): @property def sensor(self): """Get sensor name for current file handler.""" - return 'glm' + return "glm" @property def start_time(self): """Start time of the current file's observations.""" - return datetime.strptime(self.nc.attrs['time_coverage_start'], '%Y-%m-%dT%H:%M:%SZ') + return datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%SZ") @property def end_time(self): """End time of the current file's observations.""" - return datetime.strptime(self.nc.attrs['time_coverage_end'], '%Y-%m-%dT%H:%M:%SZ') + return datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%SZ") def _is_category_product(self, 
data_arr): # if after autoscaling we still have an integer is_int = np.issubdtype(data_arr.dtype, np.integer) # and it has a fill value - has_fill = '_FillValue' in data_arr.attrs + has_fill = "_FillValue" in data_arr.attrs # or it has flag_meanings - has_meanings = 'flag_meanings' in data_arr.attrs + has_meanings = "flag_meanings" in data_arr.attrs # then it is likely a category product and we should keep the # _FillValue for satpy to use later return is_int and (has_fill or has_meanings) def get_dataset(self, key, info): """Load a dataset.""" - logger.debug('Reading in get_dataset %s.', key['name']) - res = self[key['name']] - res.attrs.update({'platform_name': self.platform_name, - 'sensor': self.sensor}) + logger.debug("Reading in get_dataset %s.", key["name"]) + res = self[key["name"]] + res.attrs.update({"platform_name": self.platform_name, + "sensor": self.sensor}) res.attrs.update(self.filename_info) # Add orbital parameters projection = self.nc["goes_imager_projection"] - res.attrs['orbital_parameters'] = { - 'projection_longitude': float(projection.attrs['longitude_of_projection_origin']), - 'projection_latitude': float(projection.attrs['latitude_of_projection_origin']), - 'projection_altitude': float(projection.attrs['perspective_point_height']), - 'satellite_nominal_latitude': float(self['nominal_satellite_subpoint_lat']), - 'satellite_nominal_longitude': float(self['nominal_satellite_subpoint_lon']), + res.attrs["orbital_parameters"] = { + "projection_longitude": float(projection.attrs["longitude_of_projection_origin"]), + "projection_latitude": float(projection.attrs["latitude_of_projection_origin"]), + "projection_altitude": float(projection.attrs["perspective_point_height"]), + "satellite_nominal_latitude": float(self["nominal_satellite_subpoint_lat"]), + "satellite_nominal_longitude": float(self["nominal_satellite_subpoint_lon"]), # 'satellite_nominal_altitude': float(self['nominal_satellite_height']), } @@ -93,25 +93,25 @@ def get_dataset(self, key, info): # remove attributes that could be confusing later if not self._is_category_product(res): - res.attrs.pop('_FillValue', None) - res.attrs.pop('scale_factor', None) - res.attrs.pop('add_offset', None) - res.attrs.pop('_Unsigned', None) - res.attrs.pop('ancillary_variables', None) # Can't currently load DQF + res.attrs.pop("_FillValue", None) + res.attrs.pop("scale_factor", None) + res.attrs.pop("add_offset", None) + res.attrs.pop("_Unsigned", None) + res.attrs.pop("ancillary_variables", None) # Can't currently load DQF # add in information from the filename that may be useful to the user # for key in ('observation_type', 'scene_abbr', 'scan_mode', 'platform_shortname'): - for attr in ('scene_abbr', 'scan_mode', 'platform_shortname'): + for attr in ("scene_abbr", "scan_mode", "platform_shortname"): res.attrs[attr] = self.filename_info[attr] # copy global attributes to metadata - for attr in ('scene_id', 'orbital_slot', 'instrument_ID', - 'production_site', 'timeline_ID', 'spatial_resolution'): + for attr in ("scene_id", "orbital_slot", "instrument_ID", + "production_site", "timeline_ID", "spatial_resolution"): res.attrs[attr] = self.nc.attrs.get(attr) return res def _is_2d_xy_var(self, data_arr): is_2d = data_arr.ndim == 2 - has_x_dim = 'x' in data_arr.dims - has_y_dim = 'y' in data_arr.dims + has_x_dim = "x" in data_arr.dims + has_y_dim = "y" in data_arr.dims return is_2d and has_x_dim and has_y_dim def available_datasets(self, configured_datasets=None): @@ -127,14 +127,14 @@ def available_datasets(self, 
configured_datasets=None): if is_avail is not None: yield is_avail, ds_info - matches = self.file_type_matches(ds_info['file_type']) - if matches and ds_info.get('resolution') != res: + matches = self.file_type_matches(ds_info["file_type"]) + if matches and ds_info.get("resolution") != res: # we are meant to handle this dataset (file type matches) # and the information we can provide isn't available yet new_info = ds_info.copy() - new_info['resolution'] = res - exists = ds_info['name'] in self.nc - handled_vars.add(ds_info['name']) + new_info["resolution"] = res + exists = ds_info["name"] in self.nc + handled_vars.add(ds_info["name"]) yield exists, new_info elif is_avail is None: # we don't know what to do with this @@ -150,9 +150,9 @@ def available_datasets(self, configured_datasets=None): continue new_info = { - 'name': var_name, - 'resolution': res, - 'file_type': self.filetype_info['file_type'] + "name": var_name, + "resolution": res, + "file_type": self.filetype_info["file_type"] } handled_vars.add(var_name) yield True, new_info diff --git a/satpy/readers/gms/gms5_vissr_format.py b/satpy/readers/gms/gms5_vissr_format.py index a5052097eb..a48fcde77f 100644 --- a/satpy/readers/gms/gms5_vissr_format.py +++ b/satpy/readers/gms/gms5_vissr_format.py @@ -32,329 +32,329 @@ CHANNELS = [("VIS", R4), ("IR1", R4), ("IR2", R4), ("WV", R4)] VISIR_SOLAR = [("VIS", R4), ("IR", R4)] -CONTROL_BLOCK = np.dtype([('control_block_size', I2), - ('head_block_number_of_parameter_block', I2), - ('parameter_block_size', I2), - ('head_block_number_of_image_data', I2), - ('total_block_size_of_image_data', I2), - ('available_block_size_of_image_data', I2), - ('head_valid_line_number', I2), - ('final_valid_line_number', I2), - ('final_data_block_number', I2)]) +CONTROL_BLOCK = np.dtype([("control_block_size", I2), + ("head_block_number_of_parameter_block", I2), + ("parameter_block_size", I2), + ("head_block_number_of_image_data", I2), + ("total_block_size_of_image_data", I2), + ("available_block_size_of_image_data", I2), + ("head_valid_line_number", I2), + ("final_valid_line_number", I2), + ("final_data_block_number", I2)]) -MODE_BLOCK_FRAME_PARAMETERS = [('bit_length', I4), - ('number_of_lines', I4), - ('number_of_pixels', I4), - ('stepping_angle', R4), - ('sampling_angle', R4), - ('lcw_pixel_size', I4), - ('doc_pixel_size', I4), - ('reserved', I4)] +MODE_BLOCK_FRAME_PARAMETERS = [("bit_length", I4), + ("number_of_lines", I4), + ("number_of_pixels", I4), + ("stepping_angle", R4), + ("sampling_angle", R4), + ("lcw_pixel_size", I4), + ("doc_pixel_size", I4), + ("reserved", I4)] -MODE_BLOCK = np.dtype([('satellite_number', I4), - ('satellite_name', '|S12'), - ('observation_time_ad', '|S16'), - ('observation_time_mjd', R8), - ('gms_operation_mode', I4), - ('dpc_operation_mode', I4), - ('vissr_observation_mode', I4), - ('scanner_selection', I4), - ('sensor_selection', I4), - ('sensor_mode', I4), - ('scan_frame_mode', I4), - ('scan_mode', I4), - ('upper_limit_of_scan_number', I4), - ('lower_limit_of_scan_number', I4), - ('equatorial_scan_line_number', I4), - ('spin_rate', R4), - ('vis_frame_parameters', MODE_BLOCK_FRAME_PARAMETERS), - ('ir_frame_parameters', MODE_BLOCK_FRAME_PARAMETERS), - ('satellite_height', R4), - ('earth_radius', R4), - ('ssp_longitude', R4), - ('reserved_1', I4, 9), - ('table_of_sensor_trouble', I4, 14), - ('reserved_2', I4, 36), - ('status_tables_of_data_relative_address_segment', I4, 60)]) +MODE_BLOCK = np.dtype([("satellite_number", I4), + ("satellite_name", "|S12"), + ("observation_time_ad", 
"|S16"), + ("observation_time_mjd", R8), + ("gms_operation_mode", I4), + ("dpc_operation_mode", I4), + ("vissr_observation_mode", I4), + ("scanner_selection", I4), + ("sensor_selection", I4), + ("sensor_mode", I4), + ("scan_frame_mode", I4), + ("scan_mode", I4), + ("upper_limit_of_scan_number", I4), + ("lower_limit_of_scan_number", I4), + ("equatorial_scan_line_number", I4), + ("spin_rate", R4), + ("vis_frame_parameters", MODE_BLOCK_FRAME_PARAMETERS), + ("ir_frame_parameters", MODE_BLOCK_FRAME_PARAMETERS), + ("satellite_height", R4), + ("earth_radius", R4), + ("ssp_longitude", R4), + ("reserved_1", I4, 9), + ("table_of_sensor_trouble", I4, 14), + ("reserved_2", I4, 36), + ("status_tables_of_data_relative_address_segment", I4, 60)]) COORDINATE_CONVERSION_PARAMETERS = np.dtype([ - ('data_segment', I4), - ('data_validity', I4), - ('data_generation_time', TIME), - ('scheduled_observation_time', R8), - ('stepping_angle_along_line', CHANNELS), - ('sampling_angle_along_pixel', CHANNELS), - ('central_line_number_of_vissr_frame', CHANNELS), - ('central_pixel_number_of_vissr_frame', CHANNELS), - ('pixel_difference_of_vissr_center_from_normal_position', CHANNELS), - ('number_of_sensor_elements', CHANNELS), - ('total_number_of_vissr_frame_lines', CHANNELS), - ('total_number_of_vissr_frame_pixels', CHANNELS), - ('vissr_misalignment', R4, (3,)), - ('matrix_of_misalignment', R4, (3, 3)), - ('parameters', [('judgement_of_observation_convergence_time', R4), - ('judgement_of_line_convergence', R4), - ('east_west_angle_of_sun_light_condense_prism', R4), - ('north_south_angle_of_sun_light_condense_prism', R4), - ('pi', R4), - ('pi_divided_by_180', R4), - ('180_divided_by_pi', R4), - ('equatorial_radius', R4), - ('oblateness_of_earth', R4), - ('eccentricity_of_earth_orbit', R4), - ('first_angle_of_vissr_observation_in_sdb', R4), - ('upper_limited_line_of_2nd_prism_for_vis_solar_observation', R4), - ('lower_limited_line_of_1st_prism_for_vis_solar_observation', R4), - ('upper_limited_line_of_3rd_prism_for_vis_solar_observation', R4), - ('lower_limited_line_of_2nd_prism_for_vis_solar_observation', R4)]), - ('solar_stepping_angle_along_line', VISIR_SOLAR), - ('solar_sampling_angle_along_pixel', VISIR_SOLAR), - ('solar_center_line_of_vissr_frame', VISIR_SOLAR), - ('solar_center_pixel_of_vissr_frame', VISIR_SOLAR), - ('solar_pixel_difference_of_vissr_center_from_normal_position', VISIR_SOLAR), - ('solar_number_of_sensor_elements', VISIR_SOLAR), - ('solar_total_number_of_vissr_frame_lines', VISIR_SOLAR), - ('solar_total_number_of_vissr_frame_pixels', VISIR_SOLAR), - ('reserved_1', I4, 19), - ('orbital_parameters', [('epoch_time', R8), - ('semi_major_axis', R8), - ('eccentricity', R8), - ('orbital_inclination', R8), - ('longitude_of_ascending_node', R8), - ('argument_of_perigee', R8), - ('mean_anomaly', R8), - ('longitude_of_ssp', R8), - ('latitude_of_ssp', R8)]), - ('reserved_2', I4, 2), - ('attitude_parameters', [('epoch_time', R8), - ('angle_between_z_axis_and_satellite_spin_axis_at_epoch_time', R8), - ('angle_change_rate_between_spin_axis_and_z_axis', R8), - ('angle_between_spin_axis_and_zy_axis', R8), - ('angle_change_rate_between_spin_axis_and_zt_axis', R8), - ('daily_mean_of_spin_rate', R8)]), - ('reserved_3', I4, 529), - ('correction_of_image_distortion', [('stepping_angle_along_line_of_ir1', R4), - ('stepping_angle_along_line_of_ir2', R4), - ('stepping_angle_along_line_of_wv', R4), - ('stepping_angle_along_line_of_vis', R4), - ('sampling_angle_along_pixel_of_ir1', R4), - ('sampling_angle_along_pixel_of_ir2', 
R4), - ('sampling_angle_along_pixel_of_wv', R4), - ('sampling_angle_along_pixel_of_vis', R4), - ('x_component_vissr_misalignment', R4), - ('y_component_vissr_misalignment', R4)]) + ("data_segment", I4), + ("data_validity", I4), + ("data_generation_time", TIME), + ("scheduled_observation_time", R8), + ("stepping_angle_along_line", CHANNELS), + ("sampling_angle_along_pixel", CHANNELS), + ("central_line_number_of_vissr_frame", CHANNELS), + ("central_pixel_number_of_vissr_frame", CHANNELS), + ("pixel_difference_of_vissr_center_from_normal_position", CHANNELS), + ("number_of_sensor_elements", CHANNELS), + ("total_number_of_vissr_frame_lines", CHANNELS), + ("total_number_of_vissr_frame_pixels", CHANNELS), + ("vissr_misalignment", R4, (3,)), + ("matrix_of_misalignment", R4, (3, 3)), + ("parameters", [("judgement_of_observation_convergence_time", R4), + ("judgement_of_line_convergence", R4), + ("east_west_angle_of_sun_light_condense_prism", R4), + ("north_south_angle_of_sun_light_condense_prism", R4), + ("pi", R4), + ("pi_divided_by_180", R4), + ("180_divided_by_pi", R4), + ("equatorial_radius", R4), + ("oblateness_of_earth", R4), + ("eccentricity_of_earth_orbit", R4), + ("first_angle_of_vissr_observation_in_sdb", R4), + ("upper_limited_line_of_2nd_prism_for_vis_solar_observation", R4), + ("lower_limited_line_of_1st_prism_for_vis_solar_observation", R4), + ("upper_limited_line_of_3rd_prism_for_vis_solar_observation", R4), + ("lower_limited_line_of_2nd_prism_for_vis_solar_observation", R4)]), + ("solar_stepping_angle_along_line", VISIR_SOLAR), + ("solar_sampling_angle_along_pixel", VISIR_SOLAR), + ("solar_center_line_of_vissr_frame", VISIR_SOLAR), + ("solar_center_pixel_of_vissr_frame", VISIR_SOLAR), + ("solar_pixel_difference_of_vissr_center_from_normal_position", VISIR_SOLAR), + ("solar_number_of_sensor_elements", VISIR_SOLAR), + ("solar_total_number_of_vissr_frame_lines", VISIR_SOLAR), + ("solar_total_number_of_vissr_frame_pixels", VISIR_SOLAR), + ("reserved_1", I4, 19), + ("orbital_parameters", [("epoch_time", R8), + ("semi_major_axis", R8), + ("eccentricity", R8), + ("orbital_inclination", R8), + ("longitude_of_ascending_node", R8), + ("argument_of_perigee", R8), + ("mean_anomaly", R8), + ("longitude_of_ssp", R8), + ("latitude_of_ssp", R8)]), + ("reserved_2", I4, 2), + ("attitude_parameters", [("epoch_time", R8), + ("angle_between_z_axis_and_satellite_spin_axis_at_epoch_time", R8), + ("angle_change_rate_between_spin_axis_and_z_axis", R8), + ("angle_between_spin_axis_and_zy_axis", R8), + ("angle_change_rate_between_spin_axis_and_zt_axis", R8), + ("daily_mean_of_spin_rate", R8)]), + ("reserved_3", I4, 529), + ("correction_of_image_distortion", [("stepping_angle_along_line_of_ir1", R4), + ("stepping_angle_along_line_of_ir2", R4), + ("stepping_angle_along_line_of_wv", R4), + ("stepping_angle_along_line_of_vis", R4), + ("sampling_angle_along_pixel_of_ir1", R4), + ("sampling_angle_along_pixel_of_ir2", R4), + ("sampling_angle_along_pixel_of_wv", R4), + ("sampling_angle_along_pixel_of_vis", R4), + ("x_component_vissr_misalignment", R4), + ("y_component_vissr_misalignment", R4)]) ]) -ATTITUDE_PREDICTION_DATA = np.dtype([('prediction_time_mjd', R8), - ('prediction_time_utc', TIME), - ('right_ascension_of_attitude', R8), - ('declination_of_attitude', R8), - ('sun_earth_angle', R8), - ('spin_rate', R8), - ('right_ascension_of_orbital_plane', R8), - ('declination_of_orbital_plane', R8), - ('reserved', R8), - ('eclipse_flag', I4), - ('spin_axis_flag', I4)]) +ATTITUDE_PREDICTION_DATA = 
np.dtype([("prediction_time_mjd", R8), + ("prediction_time_utc", TIME), + ("right_ascension_of_attitude", R8), + ("declination_of_attitude", R8), + ("sun_earth_angle", R8), + ("spin_rate", R8), + ("right_ascension_of_orbital_plane", R8), + ("declination_of_orbital_plane", R8), + ("reserved", R8), + ("eclipse_flag", I4), + ("spin_axis_flag", I4)]) -ATTITUDE_PREDICTION = np.dtype([('data_segment', I4), - ('data_validity', I4), - ('data_generation_time', TIME), - ('start_time', R8), - ('end_time', R8), - ('prediction_interval_time', R8), - ('number_of_prediction', I4), - ('data_size', I4), - ('data', ATTITUDE_PREDICTION_DATA, (33,))]) +ATTITUDE_PREDICTION = np.dtype([("data_segment", I4), + ("data_validity", I4), + ("data_generation_time", TIME), + ("start_time", R8), + ("end_time", R8), + ("prediction_interval_time", R8), + ("number_of_prediction", I4), + ("data_size", I4), + ("data", ATTITUDE_PREDICTION_DATA, (33,))]) -ORBIT_PREDICTION_DATA = [('prediction_time_mjd', R8), - ('prediction_time_utc', TIME), - ('satellite_position_1950', R8, (3,)), - ('satellite_velocity_1950', R8, (3,)), - ('satellite_position_earth_fixed', R8, (3,)), - ('satellite_velocity_earth_fixed', R8, (3,)), - ('greenwich_sidereal_time', R8), - ('sat_sun_vector_1950', [('azimuth', R8), - ('elevation', R8)]), - ('sat_sun_vector_earth_fixed', [('azimuth', R8), - ('elevation', R8)]), - ('conversion_matrix', R8, (3, 3)), - ('moon_directional_vector', R8, (3,)), - ('satellite_position', [('ssp_longitude', R8), - ('ssp_latitude', R8), - ('satellite_height', R8)]), - ('eclipse_period_flag', I4), - ('reserved', I4)] +ORBIT_PREDICTION_DATA = [("prediction_time_mjd", R8), + ("prediction_time_utc", TIME), + ("satellite_position_1950", R8, (3,)), + ("satellite_velocity_1950", R8, (3,)), + ("satellite_position_earth_fixed", R8, (3,)), + ("satellite_velocity_earth_fixed", R8, (3,)), + ("greenwich_sidereal_time", R8), + ("sat_sun_vector_1950", [("azimuth", R8), + ("elevation", R8)]), + ("sat_sun_vector_earth_fixed", [("azimuth", R8), + ("elevation", R8)]), + ("conversion_matrix", R8, (3, 3)), + ("moon_directional_vector", R8, (3,)), + ("satellite_position", [("ssp_longitude", R8), + ("ssp_latitude", R8), + ("satellite_height", R8)]), + ("eclipse_period_flag", I4), + ("reserved", I4)] -ORBIT_PREDICTION = np.dtype([('data_segment', I4), - ('data_validity', I4), - ('data_generation_time', TIME), - ('start_time', R8), - ('end_time', R8), - ('prediction_interval_time', R8), - ('number_of_prediction', I4), - ('data_size', I4), - ('data', ORBIT_PREDICTION_DATA, (9,))]) +ORBIT_PREDICTION = np.dtype([("data_segment", I4), + ("data_validity", I4), + ("data_generation_time", TIME), + ("start_time", R8), + ("end_time", R8), + ("prediction_interval_time", R8), + ("number_of_prediction", I4), + ("data_size", I4), + ("data", ORBIT_PREDICTION_DATA, (9,))]) VIS_CALIBRATION_TABLE = np.dtype([ - ('channel_number', I4), - ('data_validity', I4), - ('updated_time', TIME), - ('table_id', I4), - ('brightness_albedo_conversion_table', R4, (64,)), - ('vis_channel_staircase_brightness_data', R4, (6,)), - ('coefficients_table_of_vis_staircase_regression_curve', R4, (10,)), - ('brightness_table_for_calibration', [('universal_space_brightness', R4), - ('solar_brightness', R4)]), - ('calibration_uses_brightness_correspondence_voltage_chart', [('universal_space_voltage', R4), - ('solar_voltage', R4)]), - ('calibration_coefficients_of_radiation_observation', [('G', R4), ('V0', R4)]), - ('reserved', I4, (9,)) + ("channel_number", I4), + ("data_validity", I4), + 
("updated_time", TIME), + ("table_id", I4), + ("brightness_albedo_conversion_table", R4, (64,)), + ("vis_channel_staircase_brightness_data", R4, (6,)), + ("coefficients_table_of_vis_staircase_regression_curve", R4, (10,)), + ("brightness_table_for_calibration", [("universal_space_brightness", R4), + ("solar_brightness", R4)]), + ("calibration_uses_brightness_correspondence_voltage_chart", [("universal_space_voltage", R4), + ("solar_voltage", R4)]), + ("calibration_coefficients_of_radiation_observation", [("G", R4), ("V0", R4)]), + ("reserved", I4, (9,)) ]) -VIS_CALIBRATION = np.dtype([('data_segment', I4), - ('data_validity', I4), - ('data_generation_time', TIME), - ('sensor_group', I4), - ('vis1_calibration_table', VIS_CALIBRATION_TABLE), - ('vis2_calibration_table', VIS_CALIBRATION_TABLE), - ('vis3_calibration_table', VIS_CALIBRATION_TABLE), - ('reserved', I4, (267,))]) +VIS_CALIBRATION = np.dtype([("data_segment", I4), + ("data_validity", I4), + ("data_generation_time", TIME), + ("sensor_group", I4), + ("vis1_calibration_table", VIS_CALIBRATION_TABLE), + ("vis2_calibration_table", VIS_CALIBRATION_TABLE), + ("vis3_calibration_table", VIS_CALIBRATION_TABLE), + ("reserved", I4, (267,))]) TELEMETRY_DATA = np.dtype([ - ('shutter_temp', R4), - ('redundant_mirror_temp', R4), - ('primary_mirror_temp', R4), - ('baffle_fw_temp', R4), - ('baffle_af_temp', R4), - ('15_volt_auxiliary_power_supply', R4), - ('radiative_cooler_temp_1', R4), - ('radiative_cooler_temp_2', R4), - ('electronics_module_temp', R4), - ('scan_mirror_temp', R4), - ('shutter_cavity_temp', R4), - ('primary_mirror_sealed_temp', R4), - ('redundant_mirror_sealed_temp', R4), - ('shutter_temp_2', R4), - ('reserved', R4, (2,)) + ("shutter_temp", R4), + ("redundant_mirror_temp", R4), + ("primary_mirror_temp", R4), + ("baffle_fw_temp", R4), + ("baffle_af_temp", R4), + ("15_volt_auxiliary_power_supply", R4), + ("radiative_cooler_temp_1", R4), + ("radiative_cooler_temp_2", R4), + ("electronics_module_temp", R4), + ("scan_mirror_temp", R4), + ("shutter_cavity_temp", R4), + ("primary_mirror_sealed_temp", R4), + ("redundant_mirror_sealed_temp", R4), + ("shutter_temp_2", R4), + ("reserved", R4, (2,)) ]) IR_CALIBRATION = np.dtype([ - ('data_segment', I4), - ('data_validity', I4), - ('updated_time', TIME), - ('sensor_group', I4), - ('table_id', I4), - ('reserved_1', I4, (2,)), - ('conversion_table_of_equivalent_black_body_radiation', R4, (256,)), - ('conversion_table_of_equivalent_black_body_temperature', R4, (256,)), - ('staircase_brightness_data', R4, (6,)), - ('coefficients_table_of_staircase_regression_curve', R4, (10,)), - ('brightness_data_for_calibration', [('brightness_of_space', R4), - ('brightness_of_black_body_shutter', R4), - ('reserved', R4)]), - ('voltage_table_for_brightness_of_calibration', [('voltage_of_space', R4), - ('voltage_of_black_body_shutter', R4), - ('reserved', R4)]), - ('calibration_coefficients_of_radiation_observation', [('G', R4), ('V0', R4)]), - ('valid_shutter_temperature', R4), - ('valid_shutter_radiation', R4), - ('telemetry_data_table', TELEMETRY_DATA), - ('flag_of_calid_shutter_temperature_calculation', I4), - ('reserved_2', I4, (109,)) + ("data_segment", I4), + ("data_validity", I4), + ("updated_time", TIME), + ("sensor_group", I4), + ("table_id", I4), + ("reserved_1", I4, (2,)), + ("conversion_table_of_equivalent_black_body_radiation", R4, (256,)), + ("conversion_table_of_equivalent_black_body_temperature", R4, (256,)), + ("staircase_brightness_data", R4, (6,)), + 
("coefficients_table_of_staircase_regression_curve", R4, (10,)), + ("brightness_data_for_calibration", [("brightness_of_space", R4), + ("brightness_of_black_body_shutter", R4), + ("reserved", R4)]), + ("voltage_table_for_brightness_of_calibration", [("voltage_of_space", R4), + ("voltage_of_black_body_shutter", R4), + ("reserved", R4)]), + ("calibration_coefficients_of_radiation_observation", [("G", R4), ("V0", R4)]), + ("valid_shutter_temperature", R4), + ("valid_shutter_radiation", R4), + ("telemetry_data_table", TELEMETRY_DATA), + ("flag_of_calid_shutter_temperature_calculation", I4), + ("reserved_2", I4, (109,)) ]) SIMPLE_COORDINATE_CONVERSION_TABLE = np.dtype([ - ('coordinate_conversion_table', I2, (1250,)), - ('earth_equator_radius', R4), - ('satellite_height', R4), - ('stepping_angle', R4), - ('sampling_angle', R4), - ('ssp_latitude', R4), - ('ssp_longitude', R4), - ('ssp_line_number', R4), - ('ssp_pixel_number', R4), - ('pi', R4), - ('line_correction_ir1_vis', R4), - ('pixel_correction_ir1_vis', R4), - ('line_correction_ir1_ir2', R4), - ('pixel_correction_ir1_ir2', R4), - ('line_correction_ir1_wv', R4), - ('pixel_correction_ir1_wv', R4), - ('reserved', R4, (32,)), + ("coordinate_conversion_table", I2, (1250,)), + ("earth_equator_radius", R4), + ("satellite_height", R4), + ("stepping_angle", R4), + ("sampling_angle", R4), + ("ssp_latitude", R4), + ("ssp_longitude", R4), + ("ssp_line_number", R4), + ("ssp_pixel_number", R4), + ("pi", R4), + ("line_correction_ir1_vis", R4), + ("pixel_correction_ir1_vis", R4), + ("line_correction_ir1_ir2", R4), + ("pixel_correction_ir1_ir2", R4), + ("line_correction_ir1_wv", R4), + ("pixel_correction_ir1_wv", R4), + ("reserved", R4, (32,)), ]) IMAGE_PARAMS = { - 'mode': { - 'dtype': MODE_BLOCK, - 'offset': { + "mode": { + "dtype": MODE_BLOCK, + "offset": { VIS_CHANNEL: 2 * BLOCK_SIZE_VIS, IR_CHANNEL: 2 * BLOCK_SIZE_IR } }, - 'coordinate_conversion': { - 'dtype': COORDINATE_CONVERSION_PARAMETERS, - 'offset': { + "coordinate_conversion": { + "dtype": COORDINATE_CONVERSION_PARAMETERS, + "offset": { VIS_CHANNEL: 2 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 4 * BLOCK_SIZE_IR } }, - 'attitude_prediction': { - 'dtype': ATTITUDE_PREDICTION, - 'offset': { + "attitude_prediction": { + "dtype": ATTITUDE_PREDICTION, + "offset": { VIS_CHANNEL: 2 * BLOCK_SIZE_VIS + 3 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 5 * BLOCK_SIZE_IR }, - 'preserve': 'data' + "preserve": "data" }, - 'orbit_prediction_1': { - 'dtype': ORBIT_PREDICTION, - 'offset': { + "orbit_prediction_1": { + "dtype": ORBIT_PREDICTION, + "offset": { VIS_CHANNEL: 3 * BLOCK_SIZE_VIS, IR_CHANNEL: 6 * BLOCK_SIZE_IR }, - 'preserve': 'data' + "preserve": "data" }, - 'orbit_prediction_2': { - 'dtype': ORBIT_PREDICTION, - 'offset': { + "orbit_prediction_2": { + "dtype": ORBIT_PREDICTION, + "offset": { VIS_CHANNEL: 3 * BLOCK_SIZE_VIS + 1 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 7 * BLOCK_SIZE_IR }, - 'preserve': 'data' + "preserve": "data" }, - 'vis_calibration': { - 'dtype': VIS_CALIBRATION, - 'offset': { + "vis_calibration": { + "dtype": VIS_CALIBRATION, + "offset": { VIS_CHANNEL: 3 * BLOCK_SIZE_VIS + 3 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 9 * BLOCK_SIZE_IR }, - 'preserve': 'data' + "preserve": "data" }, - 'ir1_calibration': { - 'dtype': IR_CALIBRATION, - 'offset': { + "ir1_calibration": { + "dtype": IR_CALIBRATION, + "offset": { VIS_CHANNEL: 4 * BLOCK_SIZE_VIS, IR_CHANNEL: 10 * BLOCK_SIZE_IR }, }, - 'ir2_calibration': { - 'dtype': IR_CALIBRATION, - 'offset': { + "ir2_calibration": { + "dtype": 
IR_CALIBRATION, + "offset": { VIS_CHANNEL: 4 * BLOCK_SIZE_VIS + IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 11 * BLOCK_SIZE_IR }, }, - 'wv_calibration': { - 'dtype': IR_CALIBRATION, - 'offset': { + "wv_calibration": { + "dtype": IR_CALIBRATION, + "offset": { VIS_CHANNEL: 4 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 12 * BLOCK_SIZE_IR }, }, - 'simple_coordinate_conversion_table': { - 'dtype': SIMPLE_COORDINATE_CONVERSION_TABLE, - 'offset': { + "simple_coordinate_conversion_table": { + "dtype": SIMPLE_COORDINATE_CONVERSION_TABLE, + "offset": { VIS_CHANNEL: 5 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 16 * BLOCK_SIZE_IR }, @@ -362,36 +362,36 @@ } LINE_CONTROL_WORD = np.dtype([ - ('data_id', U1, (4, )), - ('line_number', I4), - ('line_name', I4), - ('error_line_flag', I4), - ('error_message', I4), - ('mode_error_flag', I4), - ('scan_time', R8), - ('beta_angle', R4), - ('west_side_earth_edge', I4), - ('east_side_earth_edge', I4), - ('received_time_1', R8), # Typo in format description (I*4) - ('received_time_2', I4), - ('reserved', U1, (8, )) + ("data_id", U1, (4, )), + ("line_number", I4), + ("line_name", I4), + ("error_line_flag", I4), + ("error_message", I4), + ("mode_error_flag", I4), + ("scan_time", R8), + ("beta_angle", R4), + ("west_side_earth_edge", I4), + ("east_side_earth_edge", I4), + ("received_time_1", R8), # Typo in format description (I*4) + ("received_time_2", I4), + ("reserved", U1, (8, )) ]) -IMAGE_DATA_BLOCK_IR = np.dtype([('LCW', LINE_CONTROL_WORD), - ('DOC', U1, (256,)), # Omitted - ('image_data', U1, 3344)]) +IMAGE_DATA_BLOCK_IR = np.dtype([("LCW", LINE_CONTROL_WORD), + ("DOC", U1, (256,)), # Omitted + ("image_data", U1, 3344)]) -IMAGE_DATA_BLOCK_VIS = np.dtype([('LCW', LINE_CONTROL_WORD), - ('DOC', U1, (64,)), # Omitted - ('image_data', U1, (13376,))]) +IMAGE_DATA_BLOCK_VIS = np.dtype([("LCW", LINE_CONTROL_WORD), + ("DOC", U1, (64,)), # Omitted + ("image_data", U1, (13376,))]) IMAGE_DATA = { VIS_CHANNEL: { - 'offset': 6 * BLOCK_SIZE_VIS, - 'dtype': IMAGE_DATA_BLOCK_VIS, + "offset": 6 * BLOCK_SIZE_VIS, + "dtype": IMAGE_DATA_BLOCK_VIS, }, IR_CHANNEL: { - 'offset': 18 * BLOCK_SIZE_IR, - 'dtype': IMAGE_DATA_BLOCK_IR + "offset": 18 * BLOCK_SIZE_IR, + "dtype": IMAGE_DATA_BLOCK_IR } } diff --git a/satpy/readers/goes_imager_hrit.py b/satpy/readers/goes_imager_hrit.py index 05bcc513d7..457d5d809c 100644 --- a/satpy/readers/goes_imager_hrit.py +++ b/satpy/readers/goes_imager_hrit.py @@ -46,7 +46,7 @@ class CalibrationError(Exception): """Dummy error-class.""" -logger = logging.getLogger('hrit_goes') +logger = logging.getLogger("hrit_goes") # Geometric constants [meters] EQUATOR_RADIUS = 6378169.00 @@ -54,30 +54,30 @@ class CalibrationError(Exception): ALTITUDE = 35785831.00 # goes implementation: -key_header = np.dtype([('key_number', 'u1'), - ('seed', '>f8')]) - -segment_identification = np.dtype([('GP_SC_ID', '>i2'), - ('spectral_channel_id', '>i1'), - ('segment_sequence_number', '>u2'), - ('planned_start_segment_number', '>u2'), - ('planned_end_segment_number', '>u2'), - ('data_field_representation', '>i1')]) - -image_segment_line_quality = np.dtype([('line_number_in_grid', '>i4'), - ('line_mean_acquisition', - [('days', '>u2'), - ('milliseconds', '>u4')]), - ('line_validity', 'u1'), - ('line_radiometric_quality', 'u1'), - ('line_geometric_quality', 'u1')]) +key_header = np.dtype([("key_number", "u1"), + ("seed", ">f8")]) + +segment_identification = np.dtype([("GP_SC_ID", ">i2"), + ("spectral_channel_id", ">i1"), + ("segment_sequence_number", ">u2"), + 
("planned_start_segment_number", ">u2"), + ("planned_end_segment_number", ">u2"), + ("data_field_representation", ">i1")]) + +image_segment_line_quality = np.dtype([("line_number_in_grid", ">i4"), + ("line_mean_acquisition", + [("days", ">u2"), + ("milliseconds", ">u4")]), + ("line_validity", "u1"), + ("line_radiometric_quality", "u1"), + ("line_geometric_quality", "u1")]) goms_variable_length_headers = { - image_segment_line_quality: 'image_segment_line_quality'} + image_segment_line_quality: "image_segment_line_quality"} -goms_text_headers = {image_data_function: 'image_data_function', - annotation_header: 'annotation_header', - ancillary_text: 'ancillary_text'} +goms_text_headers = {image_data_function: "image_data_function", + annotation_header: "annotation_header", + ancillary_text: "ancillary_text"} goes_hdr_map = base_hdr_map.copy() goes_hdr_map.update({7: key_header, @@ -86,53 +86,53 @@ class CalibrationError(Exception): }) -orbit_coef = np.dtype([('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('X', '>f8', (8, )), - ('Y', '>f8', (8, )), - ('Z', '>f8', (8, )), - ('VX', '>f8', (8, )), - ('VY', '>f8', (8, )), - ('VZ', '>f8', (8, ))]) +orbit_coef = np.dtype([("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("X", ">f8", (8, )), + ("Y", ">f8", (8, )), + ("Z", ">f8", (8, )), + ("VX", ">f8", (8, )), + ("VY", ">f8", (8, )), + ("VZ", ">f8", (8, ))]) -attitude_coef = np.dtype([('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('XofSpinAxis', '>f8', (8, )), - ('YofSpinAxis', '>f8', (8, )), - ('ZofSpinAxis', '>f8', (8, ))]) +attitude_coef = np.dtype([("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("XofSpinAxis", ">f8", (8, )), + ("YofSpinAxis", ">f8", (8, )), + ("ZofSpinAxis", ">f8", (8, ))]) -cuc_time = np.dtype([('coarse', 'u1', (4, )), - ('fine', 'u1', (3, ))]) +cuc_time = np.dtype([("coarse", "u1", (4, )), + ("fine", "u1", (3, ))]) -sgs_time = np.dtype([('century', 'u1'), - ('year', 'u1'), - ('doy1', 'u1'), - ('doy_hours', 'u1'), - ('hours_mins', 'u1'), - ('mins_secs', 'u1'), - ('secs_msecs', 'u1'), - ('msecs', 'u1')]) +sgs_time = np.dtype([("century", "u1"), + ("year", "u1"), + ("doy1", "u1"), + ("doy_hours", "u1"), + ("hours_mins", "u1"), + ("mins_secs", "u1"), + ("secs_msecs", "u1"), + ("msecs", "u1")]) def make_sgs_time(sgs_time_array): """Make sgs time.""" - year = ((sgs_time_array['century'] >> 4) * 1000 + - (sgs_time_array['century'] & 15) * 100 + - (sgs_time_array['year'] >> 4) * 10 + - (sgs_time_array['year'] & 15)) - doy = ((sgs_time_array['doy1'] >> 4) * 100 + - (sgs_time_array['doy1'] & 15) * 10 + - (sgs_time_array['doy_hours'] >> 4)) - hours = ((sgs_time_array['doy_hours'] & 15) * 10 + - (sgs_time_array['hours_mins'] >> 4)) - mins = ((sgs_time_array['hours_mins'] & 15) * 10 + - (sgs_time_array['mins_secs'] >> 4)) - secs = ((sgs_time_array['mins_secs'] & 15) * 10 + - (sgs_time_array['secs_msecs'] >> 4)) - msecs = ((sgs_time_array['secs_msecs'] & 15) * 100 + - (sgs_time_array['msecs'] >> 4) * 10 + - (sgs_time_array['msecs'] & 15)) + year = ((sgs_time_array["century"] >> 4) * 1000 + + (sgs_time_array["century"] & 15) * 100 + + (sgs_time_array["year"] >> 4) * 10 + + (sgs_time_array["year"] & 15)) + doy = ((sgs_time_array["doy1"] >> 4) * 100 + + (sgs_time_array["doy1"] & 15) * 10 + + (sgs_time_array["doy_hours"] >> 4)) + hours = ((sgs_time_array["doy_hours"] & 15) * 10 + + (sgs_time_array["hours_mins"] >> 4)) + mins = ((sgs_time_array["hours_mins"] & 15) * 10 + + (sgs_time_array["mins_secs"] >> 4)) + secs = 
((sgs_time_array["mins_secs"] & 15) * 10 + + (sgs_time_array["secs_msecs"] >> 4)) + msecs = ((sgs_time_array["secs_msecs"] & 15) * 100 + + (sgs_time_array["msecs"] >> 4) * 10 + + (sgs_time_array["msecs"] & 15)) return (datetime(int(year), 1, 1) + timedelta(days=int(doy - 1), hours=int(hours), @@ -156,7 +156,7 @@ def make_sgs_time(sgs_time_array): ("Cel", "u2'), - ("AbsoluteScanCount", '>u2'), - ("NorthernmostScanLine", '>u2'), - ("WesternmostPixel", '>u2'), - ("EasternmostPixel", '>u2'), - ("NorthernmostFrameLine", '>u2'), - ("SouthernmostFrameLine", '>u2'), - ("0Pixel", '>u2'), - ("0ScanLine", '>u2'), - ("0Scan", '>u2'), - ("SubSatScan", '>u2'), - ("SubSatPixel", '>u2'), + ("RelativeScanCount", ">u2"), + ("AbsoluteScanCount", ">u2"), + ("NorthernmostScanLine", ">u2"), + ("WesternmostPixel", ">u2"), + ("EasternmostPixel", ">u2"), + ("NorthernmostFrameLine", ">u2"), + ("SouthernmostFrameLine", ">u2"), + ("0Pixel", ">u2"), + ("0ScanLine", ">u2"), + ("0Scan", ">u2"), + ("SubSatScan", ">u2"), + ("SubSatPixel", ">u2"), ("SubSatLatitude", gvar_float), ("SubSatLongitude", gvar_float), ("Junk4", "u1", 96), # move to "word" 295 @@ -250,7 +250,7 @@ def __init__(self, filename, filename_info, filetype_info): def read_prologue(self): """Read the prologue metadata.""" with open(self.filename, "rb") as fp_: - fp_.seek(self.mda['total_header_length']) + fp_.seek(self.mda["total_header_length"]) data = np.fromfile(fp_, dtype=prologue, count=1) self.prologue.update(recarray2dict(data)) @@ -258,16 +258,16 @@ def read_prologue(self): def process_prologue(self): """Reprocess prologue to correct types.""" - for key in ['TCurr', 'TCHED', 'TCTRL', 'TLHED', 'TLTRL', 'TIPFS', - 'TINFS', 'TISPC', 'TIECL', 'TIBBC', 'TISTR', 'TLRAN', - 'TIIRT', 'TIVIT', 'TCLMT', 'TIONA']: + for key in ["TCurr", "TCHED", "TCTRL", "TLHED", "TLTRL", "TIPFS", + "TINFS", "TISPC", "TIECL", "TIBBC", "TISTR", "TLRAN", + "TIIRT", "TIVIT", "TCLMT", "TIONA"]: try: self.prologue[key] = make_sgs_time(self.prologue[key]) except ValueError: self.prologue.pop(key, None) logger.debug("Invalid data for %s", key) - for key in ['SubSatLatitude', "SubSatLongitude", "ReferenceLongitude", + for key in ["SubSatLatitude", "SubSatLongitude", "ReferenceLongitude", "ReferenceDistance", "ReferenceLatitude"]: self.prologue[key] = make_gvar_float(self.prologue[key]) @@ -352,7 +352,7 @@ def process_prologue(self): 14: "GOES-14", 15: "GOES-15"} -SENSOR_NAME = 'goes_imager' +SENSOR_NAME = "goes_imager" class HRITGOESFileHandler(HRITFileHandler): @@ -367,12 +367,12 @@ def __init__(self, filename, filename_info, filetype_info, goms_variable_length_headers, goms_text_headers)) self.prologue = prologue.prologue - self.chid = self.mda['spectral_channel_id'] + self.chid = self.mda["spectral_channel_id"] - sublon = self.prologue['SubSatLongitude'] - self.mda['projection_parameters']['SSP_longitude'] = sublon + sublon = self.prologue["SubSatLongitude"] + self.mda["projection_parameters"]["SSP_longitude"] = sublon - satellite_id = self.prologue['SatelliteID'] + satellite_id = self.prologue["SatelliteID"] self.platform_name = SPACECRAFTS[satellite_id] def get_dataset(self, key, info): @@ -380,17 +380,17 @@ def get_dataset(self, key, info): logger.debug("Getting raw data") res = super(HRITGOESFileHandler, self).get_dataset(key, info) - self.mda['calibration_parameters'] = self._get_calibration_params() + self.mda["calibration_parameters"] = self._get_calibration_params() - res = self.calibrate(res, key['calibration']) + res = self.calibrate(res, key["calibration"]) new_attrs = 
info.copy() new_attrs.update(res.attrs) res.attrs = new_attrs - res.attrs['platform_name'] = self.platform_name - res.attrs['sensor'] = SENSOR_NAME - res.attrs['orbital_parameters'] = {'projection_longitude': self.mda['projection_parameters']['SSP_longitude'], - 'projection_latitude': 0.0, - 'projection_altitude': ALTITUDE} + res.attrs["platform_name"] = self.platform_name + res.attrs["sensor"] = SENSOR_NAME + res.attrs["orbital_parameters"] = {"projection_longitude": self.mda["projection_parameters"]["SSP_longitude"], + "projection_latitude": 0.0, + "projection_altitude": ALTITUDE} return res def _get_calibration_params(self): @@ -398,9 +398,9 @@ def _get_calibration_params(self): params = {} idx_table = [] val_table = [] - for elt in self.mda['image_data_function'].split(b'\r\n'): + for elt in self.mda["image_data_function"].split(b"\r\n"): try: - key, val = elt.split(b':=') + key, val = elt.split(b":=") try: idx_table.append(int(key)) val_table.append(float(val)) @@ -408,19 +408,19 @@ def _get_calibration_params(self): params[key] = val except ValueError: pass - params['indices'] = np.array(idx_table) - params['values'] = np.array(val_table, dtype=np.float32) + params["indices"] = np.array(idx_table) + params["values"] = np.array(val_table, dtype=np.float32) return params def calibrate(self, data, calibration): """Calibrate the data.""" logger.debug("Calibration") tic = datetime.now() - if calibration == 'counts': + if calibration == "counts": return data - if calibration == 'reflectance': + if calibration == "reflectance": res = self._calibrate(data) - elif calibration == 'brightness_temperature': + elif calibration == "brightness_temperature": res = self._calibrate(data) else: raise NotImplementedError("Don't know how to calibrate to " + @@ -431,17 +431,17 @@ def calibrate(self, data, calibration): def _calibrate(self, data): """Calibrate *data*.""" - idx = self.mda['calibration_parameters']['indices'] - val = self.mda['calibration_parameters']['values'] + idx = self.mda["calibration_parameters"]["indices"] + val = self.mda["calibration_parameters"]["values"] data.data = da.where(data.data == 0, np.nan, data.data) ddata = data.data.map_blocks(np.interp, idx, val, dtype=val.dtype) res = xr.DataArray(ddata, dims=data.dims, attrs=data.attrs, coords=data.coords) res = res.clip(min=0) - units = {b'percent': '%', b'degree Kelvin': 'K'} - unit = self.mda['calibration_parameters'][b'_UNIT'] - res.attrs['units'] = units.get(unit, unit) + units = {b"percent": "%", b"degree Kelvin": "K"} + unit = self.mda["calibration_parameters"][b"_UNIT"] + res.attrs["units"] = units.get(unit, unit) return res def get_area_def(self, dataset_id): @@ -453,32 +453,32 @@ def get_area_def(self, dataset_id): return area def _get_proj_dict(self, dataset_id): - loff = np.float32(self.mda['loff']) - nlines = np.int32(self.mda['number_of_lines']) + loff = np.float32(self.mda["loff"]) + nlines = np.int32(self.mda["number_of_lines"]) loff = nlines - loff name_dict = get_geos_area_naming({ - 'platform_name': self.platform_name, - 'instrument_name': SENSOR_NAME, + "platform_name": self.platform_name, + "instrument_name": SENSOR_NAME, # Partial scans are padded to full disk - 'service_name': 'FD', - 'service_desc': 'Full Disk', - 'resolution': dataset_id['resolution'] + "service_name": "FD", + "service_desc": "Full Disk", + "resolution": dataset_id["resolution"] }) return { - 'a': EQUATOR_RADIUS, - 'b': POLE_RADIUS, - 'ssp_lon': float(self.prologue['SubSatLongitude']), - 'h': ALTITUDE, - 'proj': 'geos', - 'units': 'm', - 
'a_name': name_dict['area_id'], - 'a_desc': name_dict['description'], - 'p_id': '', - 'nlines': nlines, - 'ncols': np.int32(self.mda['number_of_columns']), - 'cfac': np.int32(self.mda['cfac']), - 'lfac': np.int32(self.mda['lfac']), - 'coff': np.float32(self.mda['coff']), - 'loff': loff, - 'scandir': 'N2S' + "a": EQUATOR_RADIUS, + "b": POLE_RADIUS, + "ssp_lon": float(self.prologue["SubSatLongitude"]), + "h": ALTITUDE, + "proj": "geos", + "units": "m", + "a_name": name_dict["area_id"], + "a_desc": name_dict["description"], + "p_id": "", + "nlines": nlines, + "ncols": np.int32(self.mda["number_of_columns"]), + "cfac": np.int32(self.mda["cfac"]), + "lfac": np.int32(self.mda["lfac"]), + "coff": np.float32(self.mda["coff"]), + "loff": loff, + "scandir": "N2S" } diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index 68932531a8..c343b7f7c5 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -278,287 +278,287 @@ SCALE_13_3 = 5.5297 OFFSET_13_3 = 16.5892 CALIB_COEFS = { - 'GOES-15': {'00_7': {'slope': [5.851966E-1, 5.879772E-1, 5.856793E-1, + "GOES-15": {"00_7": {"slope": [5.851966E-1, 5.879772E-1, 5.856793E-1, 5.854250E-1, 5.866992E-1, 5.836241E-1, 5.846555E-1, 5.843753E-1], - 'offset': [-16.9707, -17.0513, -16.9847, -16.9773, + "offset": [-16.9707, -17.0513, -16.9847, -16.9773, -17.0143, -16.9251, -16.9550, -16.9469], - 'x0': 29, - 'k': 1.88852E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2562.7905, 2562.7905], - 'a': [-1.5693377, -1.5693377], - 'b': [1.0025034, 1.0025034], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_5': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1521.1988, 1521.5277], - 'a': [-3.4706545, -3.4755568], - 'b': [1.0093296, 1.0092838], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [935.89417, 935.78158], - 'a': [-0.36151367, -0.35316361], - 'b': [1.0012715, 1.0012570], - 'btmin': 180.0, - 'btmax': 340.0}, - '13_3': {'scale': SCALE_13_3, - 'offset': OFFSET_13_3, - 'n': [753.72229, 753.93403], - 'a': [-0.21475817, -0.24630068], - 'b': [1.0006485, 1.0007178], - 'btmin': 180.0, - 'btmax': 340.0} + "x0": 29, + "k": 1.88852E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2562.7905, 2562.7905], + "a": [-1.5693377, -1.5693377], + "b": [1.0025034, 1.0025034], + "btmin": 205.0, + "btmax": 340.0}, + "06_5": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1521.1988, 1521.5277], + "a": [-3.4706545, -3.4755568], + "b": [1.0093296, 1.0092838], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [935.89417, 935.78158], + "a": [-0.36151367, -0.35316361], + "b": [1.0012715, 1.0012570], + "btmin": 180.0, + "btmax": 340.0}, + "13_3": {"scale": SCALE_13_3, + "offset": OFFSET_13_3, + "n": [753.72229, 753.93403], + "a": [-0.21475817, -0.24630068], + "b": [1.0006485, 1.0007178], + "btmin": 180.0, + "btmax": 340.0} }, # ITT RevH + STAR Correction - 'GOES-14': {'00_7': {'slope': [5.874693E-1, 5.865367E-1, 5.862807E-1, + "GOES-14": {"00_7": {"slope": [5.874693E-1, 5.865367E-1, 5.862807E-1, 5.864086E-1, 5.857146E-1, 5.852004E-1, 5.860814E-1, 5.841697E-1], - 'offset': [-17.037, -17.010, -17.002, -17.006, + "offset": [-17.037, -17.010, -17.002, -17.006, -16.986, -16.971, -16.996, -16.941], - 'x0': 29, - 'k': 1.88772E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2577.3518, 2577.3518], - 'a': [-1.5297091, -1.5297091], - 'b': [1.0025608, 1.0025608], - 'btmin': 
205.0, - 'btmax': 340.0}, - '06_5': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1519.3488, 1518.5610], - 'a': [-3.4647892, -3.4390527], - 'b': [1.0093656, 1.0094427], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [933.98541, 934.19579], - 'a': [-0.29201763, -0.31824779], - 'b': [1.0012018, 1.0012303], - 'btmin': 180.0, - 'btmax': 340.0}, - '13_3': {'scale': SCALE_13_3, - 'offset': OFFSET_13_3, - 'n': [752.88143, 752.82392], - 'a': [-0.22508805, -0.21700982], - 'b': [1.0006686, 1.0006503], - 'btmin': 180.0, - 'btmax': 340.0} + "x0": 29, + "k": 1.88772E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2577.3518, 2577.3518], + "a": [-1.5297091, -1.5297091], + "b": [1.0025608, 1.0025608], + "btmin": 205.0, + "btmax": 340.0}, + "06_5": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1519.3488, 1518.5610], + "a": [-3.4647892, -3.4390527], + "b": [1.0093656, 1.0094427], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [933.98541, 934.19579], + "a": [-0.29201763, -0.31824779], + "b": [1.0012018, 1.0012303], + "btmin": 180.0, + "btmax": 340.0}, + "13_3": {"scale": SCALE_13_3, + "offset": OFFSET_13_3, + "n": [752.88143, 752.82392], + "a": [-0.22508805, -0.21700982], + "b": [1.0006686, 1.0006503], + "btmin": 180.0, + "btmax": 340.0} }, # ITT RevH + STAR Correction - 'GOES-13': {'00_7': {'slope': [6.120196E-1, 6.118504E-1, 6.096360E-1, + "GOES-13": {"00_7": {"slope": [6.120196E-1, 6.118504E-1, 6.096360E-1, 6.087055E-1, 6.132860E-1, 6.118208E-1, 6.122307E-1, 6.066968E-1], - 'offset': [-17.749, -17.744, -17.769, -17.653, + "offset": [-17.749, -17.744, -17.769, -17.653, -17.785, -17.743, -17.755, -17.594], - 'x0': 29, - 'k': 1.89544E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2561.74, 2561.74], - 'a': [-1.437204, -1.437204], - 'b': [1.002562, 1.002562], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_5': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1522.52, 1521.66], - 'a': [-3.625663, -3.607841], - 'b': [1.010018, 1.010010], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [937.23, 937.27], - 'a': [-0.386043, -0.380113], - 'b': [1.001298, 1.001285], - 'btmin': 180.0, - 'btmax': 340.0}, - '13_3': {'scale': SCALE_13_3, - 'offset': OFFSET_13_3, - 'n': [749.83], - 'a': [-0.134801], - 'b': [1.000482], - 'btmin': 180.0, - 'btmax': 340.0} # Has only one detector on GOES-13 + "x0": 29, + "k": 1.89544E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2561.74, 2561.74], + "a": [-1.437204, -1.437204], + "b": [1.002562, 1.002562], + "btmin": 205.0, + "btmax": 340.0}, + "06_5": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1522.52, 1521.66], + "a": [-3.625663, -3.607841], + "b": [1.010018, 1.010010], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [937.23, 937.27], + "a": [-0.386043, -0.380113], + "b": [1.001298, 1.001285], + "btmin": 180.0, + "btmax": 340.0}, + "13_3": {"scale": SCALE_13_3, + "offset": OFFSET_13_3, + "n": [749.83], + "a": [-0.134801], + "b": [1.000482], + "btmin": 180.0, + "btmax": 340.0} # Has only one detector on GOES-13 }, - 'GOES-12': {'00_7': {'slope': [5.771030E-1, 5.761764E-1, 5.775825E-1, + "GOES-12": {"00_7": {"slope": [5.771030E-1, 5.761764E-1, 5.775825E-1, 5.790699E-1, 5.787051E-1, 5.755969E-1, 5.753973E-1, 5.752099E-1], - 'offset': [-16.736, -16.709, -16.750, -16.793, + "offset": 
[-16.736, -16.709, -16.750, -16.793, -16.782, -16.692, -16.687, -16.681], - 'x0': 29, - 'k': 1.97658E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2562.45, 2562.45], - 'a': [-0.650731, -0.650731], - 'b': [1.001520, 1.001520], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_5': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1536.43, 1536.94], - 'a': [-4.764728, -4.775517], - 'b': [1.012420, 1.012403], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [933.21, 933.21], - 'a': [-0.360331, -0.360331], - 'b': [1.001306, 1.001306], - 'btmin': 180.0, - 'btmax': 340.0}, - '13_3': {'scale': SCALE_13_3, - 'offset': OFFSET_13_3, - 'n': [751.91], - 'a': [-0.253449], - 'b': [1.000743], - 'btmin': 180.0, - 'btmax': 340.0} # Has only one detector on GOES-12 + "x0": 29, + "k": 1.97658E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2562.45, 2562.45], + "a": [-0.650731, -0.650731], + "b": [1.001520, 1.001520], + "btmin": 205.0, + "btmax": 340.0}, + "06_5": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1536.43, 1536.94], + "a": [-4.764728, -4.775517], + "b": [1.012420, 1.012403], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [933.21, 933.21], + "a": [-0.360331, -0.360331], + "b": [1.001306, 1.001306], + "btmin": 180.0, + "btmax": 340.0}, + "13_3": {"scale": SCALE_13_3, + "offset": OFFSET_13_3, + "n": [751.91], + "a": [-0.253449], + "b": [1.000743], + "btmin": 180.0, + "btmax": 340.0} # Has only one detector on GOES-12 }, - 'GOES-11': {'00_7': {'slope': [5.561568E-1, 5.552979E-1, 5.558981E-1, + "GOES-11": {"00_7": {"slope": [5.561568E-1, 5.552979E-1, 5.558981E-1, 5.577627E-1, 5.557238E-1, 5.587978E-1, 5.586530E-1, 5.528971E-1], - 'offset': [-16.129, -16.104, -16.121, -16.175, + "offset": [-16.129, -16.104, -16.121, -16.175, -16.116, -16.205, -16.201, -16.034], - 'x0': 29, - 'k': 2.01524E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2562.07, 2562.07], - 'a': [-0.644790, -0.644790], - 'b': [1.000775, 1.000775], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_8': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1481.53], - 'a': [-0.543401], - 'b': [1.001495], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [931.76, 931.76], - 'a': [-0.306809, -0.306809], - 'b': [1.001274, 1.001274], - 'btmin': 180.0, - 'btmax': 340.0}, - '12_0': {'scale': SCALE_12_0, - 'offset': OFFSET_12_0, - 'n': [833.67, 833.04], - 'a': [-0.333216, -0.315110], - 'b': [1.001000, 1.000967], - 'btmin': 180.0, - 'btmax': 340.0} + "x0": 29, + "k": 2.01524E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2562.07, 2562.07], + "a": [-0.644790, -0.644790], + "b": [1.000775, 1.000775], + "btmin": 205.0, + "btmax": 340.0}, + "06_8": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1481.53], + "a": [-0.543401], + "b": [1.001495], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [931.76, 931.76], + "a": [-0.306809, -0.306809], + "b": [1.001274, 1.001274], + "btmin": 180.0, + "btmax": 340.0}, + "12_0": {"scale": SCALE_12_0, + "offset": OFFSET_12_0, + "n": [833.67, 833.04], + "a": [-0.333216, -0.315110], + "b": [1.001000, 1.000967], + "btmin": 180.0, + "btmax": 340.0} }, - 'GOES-10': {'00_7': {'slope': [5.605602E-1, 5.563529E-1, 5.566574E-1, + "GOES-10": {"00_7": {"slope": [5.605602E-1, 5.563529E-1, 5.566574E-1, 5.582154E-1, 5.583361E-1, 
5.571736E-1, 5.563135E-1, 5.613536E-1], - 'offset': [-16.256, -16.134, -16.143, -16.188, + "offset": [-16.256, -16.134, -16.143, -16.188, -16.192, -16.158, -16.133, -16.279], - 'x0': 29, - 'k': 1.98808E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2552.9845, 2552.9845], - 'a': [-0.60584483, -0.60584483], - 'b': [1.0011017, 1.0011017], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_8': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1486.2212], - 'a': [-0.61653805], - 'b': [1.0014011], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [936.10260, 935.98981], - 'a': [-0.27128884, -0.27064036], - 'b': [1.0009674, 1.0009687], - 'btmin': 180.0, - 'btmax': 340.0}, - '12_0': {'scale': SCALE_12_0, - 'offset': OFFSET_12_0, - 'n': [830.88473, 830.89691], - 'a': [-0.26505411, -0.26056452], - 'b': [1.0009087, 1.0008962], - 'btmin': 180.0, - 'btmax': 340.0} + "x0": 29, + "k": 1.98808E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2552.9845, 2552.9845], + "a": [-0.60584483, -0.60584483], + "b": [1.0011017, 1.0011017], + "btmin": 205.0, + "btmax": 340.0}, + "06_8": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1486.2212], + "a": [-0.61653805], + "b": [1.0014011], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [936.10260, 935.98981], + "a": [-0.27128884, -0.27064036], + "b": [1.0009674, 1.0009687], + "btmin": 180.0, + "btmax": 340.0}, + "12_0": {"scale": SCALE_12_0, + "offset": OFFSET_12_0, + "n": [830.88473, 830.89691], + "a": [-0.26505411, -0.26056452], + "b": [1.0009087, 1.0008962], + "btmin": 180.0, + "btmax": 340.0} }, - 'GOES-9': {'00_7': {'slope': [0.5492361], - 'offset': [-15.928], - 'x0': 29, - 'k': 1.94180E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2555.18, 2555.18], - 'a': [-0.579908, -0.579908], - 'b': [1.000942, 1.000942], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_8': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1481.82], - 'a': [-0.493016], - 'b': [1.001076], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [934.59, 934.28], - 'a': [-0.384798, -0.363703], - 'b': [1.001293, 1.001272], - 'btmin': 180.0, - 'btmax': 340.0}, - '12_0': {'scale': SCALE_12_0, - 'offset': OFFSET_12_0, - 'n': [834.02, 834.09], - 'a': [-0.302995, -0.306838], - 'b': [1.000941, 1.000948], - 'btmin': 180.0, - 'btmax': 340.0} + "GOES-9": {"00_7": {"slope": [0.5492361], + "offset": [-15.928], + "x0": 29, + "k": 1.94180E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2555.18, 2555.18], + "a": [-0.579908, -0.579908], + "b": [1.000942, 1.000942], + "btmin": 205.0, + "btmax": 340.0}, + "06_8": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1481.82], + "a": [-0.493016], + "b": [1.001076], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [934.59, 934.28], + "a": [-0.384798, -0.363703], + "b": [1.001293, 1.001272], + "btmin": 180.0, + "btmax": 340.0}, + "12_0": {"scale": SCALE_12_0, + "offset": OFFSET_12_0, + "n": [834.02, 834.09], + "a": [-0.302995, -0.306838], + "b": [1.000941, 1.000948], + "btmin": 180.0, + "btmax": 340.0} }, - 'GOES-8': {'00_7': {'slope': [0.5501873], - 'offset': [-15.955], - 'x0': 29, - 'k': 1.92979E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2556.71, 2558.62], - 'a': [-0.578526, -0.581853], - 'b': [1.001512, 1.001532], - 'btmin': 205.0, - 'btmax': 
340.0}, - '06_8': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1481.91], - 'a': [-0.593903], - 'b': [1.001418], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [934.30, 935.38], - 'a': [-0.322585, -0.351889], - 'b': [1.001271, 1.001293], - 'btmin': 180.0, - 'btmax': 340.0}, - '12_0': {'scale': SCALE_12_0, - 'offset': OFFSET_12_0, - 'n': [837.06, 837.00], - 'a': [-0.422571, -0.466954], - 'b': [1.001170, 1.001257], - 'btmin': 180.0, - 'btmax': 340.0} + "GOES-8": {"00_7": {"slope": [0.5501873], + "offset": [-15.955], + "x0": 29, + "k": 1.92979E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2556.71, 2558.62], + "a": [-0.578526, -0.581853], + "b": [1.001512, 1.001532], + "btmin": 205.0, + "btmax": 340.0}, + "06_8": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1481.91], + "a": [-0.593903], + "b": [1.001418], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [934.30, 935.38], + "a": [-0.322585, -0.351889], + "b": [1.001271, 1.001293], + "btmin": 180.0, + "btmax": 340.0}, + "12_0": {"scale": SCALE_12_0, + "offset": OFFSET_12_0, + "n": [837.06, 837.00], + "a": [-0.422571, -0.466954], + "b": [1.001170, 1.001257], + "btmin": 180.0, + "btmax": 340.0} } } @@ -569,12 +569,12 @@ SAMPLING_NS_IR = 112E-6 # Sector definitions. TODO: Add remaining sectors (PACUS, CONUS, ...) -FULL_DISC = 'Full Disc' -NORTH_HEMIS_EAST = 'Northern Hemisphere (GOES-East)' -SOUTH_HEMIS_EAST = 'Southern Hemisphere (GOES-East)' -NORTH_HEMIS_WEST = 'Northern Hemisphere (GOES-West)' -SOUTH_HEMIS_WEST = 'Southern Hemisphere (GOES-West)' -UNKNOWN_SECTOR = 'Unknown' +FULL_DISC = "Full Disc" +NORTH_HEMIS_EAST = "Northern Hemisphere (GOES-East)" +SOUTH_HEMIS_EAST = "Southern Hemisphere (GOES-East)" +NORTH_HEMIS_WEST = "Northern Hemisphere (GOES-West)" +SOUTH_HEMIS_WEST = "Southern Hemisphere (GOES-West)" +UNKNOWN_SECTOR = "Unknown" IR_SECTORS = { (2704, 5208): FULL_DISC, @@ -613,14 +613,14 @@ def __init__(self, filename, filename_info, filetype_info, geo_data=None): self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, - chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE}) - self.sensor = 'goes_imager' - self.nlines = self.nc.dims['yc'] - self.ncols = self.nc.dims['xc'] + chunks={"xc": CHUNK_SIZE, "yc": CHUNK_SIZE}) + self.sensor = "goes_imager" + self.nlines = self.nc.dims["yc"] + self.ncols = self.nc.dims["xc"] self.platform_name = self._get_platform_name( - self.nc.attrs['Satellite Sensor']) - self.platform_shortname = self.platform_name.replace('-', '').lower() - self.gvar_channel = int(self.nc['bands'].values) + self.nc.attrs["Satellite Sensor"]) + self.platform_shortname = self.platform_name.replace("-", "").lower() + self.gvar_channel = int(self.nc["bands"].values) self.sector = self._get_sector(channel=self.gvar_channel, nlines=self.nlines, ncols=self.ncols) @@ -652,7 +652,7 @@ def ir_sectors(self): @staticmethod def _get_platform_name(ncattr): """Determine name of the platform.""" - match = re.match(r'G-(\d+)', ncattr) + match = re.match(r"G-(\d+)", ncattr) if match: return SPACECRAFTS.get(int(match.groups()[0])) @@ -681,7 +681,7 @@ def _get_earth_mask(lat): Returns: Mask (1=earth, 0=space) """ - logger.debug('Computing earth mask') + logger.debug("Computing earth mask") return np.fabs(lat) <= 90 @staticmethod @@ -695,7 +695,7 @@ def _get_nadir_pixel(earth_mask, sector): nadir row, nadir column """ if sector == FULL_DISC: - logger.debug('Computing nadir pixel') + 
logger.debug("Computing nadir pixel") # The earth is not centered in the image, compute bounding box # of the earth disc first @@ -711,7 +711,7 @@ def _get_nadir_pixel(earth_mask, sector): def _is_yaw_flip(self, lat): """Determine whether the satellite is yaw-flipped ('upside down').""" - logger.debug('Computing yaw flip flag') + logger.debug("Computing yaw flip flag") # In case of yaw-flip the data and coordinates in the netCDF files are # also flipped. Just check whether the latitude increases or decrases # with the line number. @@ -721,7 +721,7 @@ def _is_yaw_flip(self, lat): def _get_area_def_uniform_sampling(self, lon0, channel): """Get area definition with uniform sampling.""" - logger.debug('Computing area definition') + logger.debug("Computing area definition") if lon0 is not None: est = AreaDefEstimator(self.platform_name, channel) return est.get_area_def_with_uniform_sampling(lon0) @@ -730,7 +730,7 @@ def _get_area_def_uniform_sampling(self, lon0, channel): @property def start_time(self): """Start timestamp of the dataset.""" - dt = self.nc['time'].dt + dt = self.nc["time"].dt return datetime(year=int(dt.year), month=int(dt.month), day=int(dt.day), hour=int(dt.hour), minute=int(dt.minute), second=int(dt.second), microsecond=int(dt.microsecond)) @@ -757,7 +757,7 @@ def resolution(self): Returns: Spatial resolution in kilometers """ - return 1000. * self.nc['lineRes'].values + return 1000. * self.nc["lineRes"].values def get_shape(self, key, info): """Get the shape of the data. @@ -772,7 +772,7 @@ def meta(self): """Derive metadata from the coordinates.""" # Use buffered data if available if self._meta is None: - lat = self.geo_data['lat'] + lat = self.geo_data["lat"] earth_mask = self._get_earth_mask(lat) crow, ccol = self._get_nadir_pixel(earth_mask=earth_mask, sector=self.sector) @@ -780,55 +780,55 @@ def meta(self): yaw_flip = self._is_yaw_flip(lat) del lat - lon = self.geo_data['lon'] + lon = self.geo_data["lon"] lon0 = lon.values[crow, ccol] if crow is not None else None area_def_uni = self._get_area_def_uniform_sampling( lon0=lon0, channel=self.gvar_channel) del lon - self._meta = {'earth_mask': earth_mask, - 'yaw_flip': yaw_flip, - 'lat0': lat0, - 'lon0': lon0, - 'nadir_row': crow, - 'nadir_col': ccol, - 'area_def_uni': area_def_uni} + self._meta = {"earth_mask": earth_mask, + "yaw_flip": yaw_flip, + "lat0": lat0, + "lon0": lon0, + "nadir_row": crow, + "nadir_col": ccol, + "area_def_uni": area_def_uni} return self._meta def _counts2radiance(self, counts, coefs, channel): """Convert raw detector counts to radiance.""" - logger.debug('Converting counts to radiance') + logger.debug("Converting counts to radiance") if is_vis_channel(channel): # Since the scanline-detector assignment is unknown, use the average # coefficients for all scanlines. 
- slope = np.array(coefs['slope']).mean() - offset = np.array(coefs['offset']).mean() + slope = np.array(coefs["slope"]).mean() + offset = np.array(coefs["offset"]).mean() return self._viscounts2radiance(counts=counts, slope=slope, offset=offset) - return self._ircounts2radiance(counts=counts, scale=coefs['scale'], - offset=coefs['offset']) + return self._ircounts2radiance(counts=counts, scale=coefs["scale"], + offset=coefs["offset"]) def _calibrate(self, radiance, coefs, channel, calibration): """Convert radiance to reflectance or brightness temperature.""" if is_vis_channel(channel): - if not calibration == 'reflectance': - raise ValueError('Cannot calibrate VIS channel to ' - '{}'.format(calibration)) - return self._calibrate_vis(radiance=radiance, k=coefs['k']) + if not calibration == "reflectance": + raise ValueError("Cannot calibrate VIS channel to " + "{}".format(calibration)) + return self._calibrate_vis(radiance=radiance, k=coefs["k"]) else: - if not calibration == 'brightness_temperature': - raise ValueError('Cannot calibrate IR channel to ' - '{}'.format(calibration)) + if not calibration == "brightness_temperature": + raise ValueError("Cannot calibrate IR channel to " + "{}".format(calibration)) # Since the scanline-detector assignment is unknown, use the average # coefficients for all scanlines. - mean_coefs = {'a': np.array(coefs['a']).mean(), - 'b': np.array(coefs['b']).mean(), - 'n': np.array(coefs['n']).mean(), - 'btmin': coefs['btmin'], - 'btmax': coefs['btmax']} + mean_coefs = {"a": np.array(coefs["a"]).mean(), + "b": np.array(coefs["b"]).mean(), + "n": np.array(coefs["n"]).mean(), + "btmin": coefs["btmin"], + "btmax": coefs["btmax"]} return self._calibrate_ir(radiance=radiance, coefs=mean_coefs) @staticmethod @@ -866,16 +866,16 @@ def _calibrate_ir(radiance, coefs): Returns: Brightness temperature [K] """ - logger.debug('Calibrating to brightness temperature') + logger.debug("Calibrating to brightness temperature") # Compute brightness temperature using inverse Planck formula - n = coefs['n'] + n = coefs["n"] bteff = C2 * n / np.log(1 + C1 * n ** 3 / radiance.where(radiance > 0)) - bt = xr.DataArray(bteff * coefs['b'] + coefs['a']) + bt = xr.DataArray(bteff * coefs["b"] + coefs["a"]) # Apply BT threshold - return bt.where(np.logical_and(bt >= coefs['btmin'], - bt <= coefs['btmax'])) + return bt.where(np.logical_and(bt >= coefs["btmin"], + bt <= coefs["btmax"])) @staticmethod def _viscounts2radiance(counts, slope, offset): @@ -916,7 +916,7 @@ def _calibrate_vis(radiance, k): Returns: Reflectance [%] """ - logger.debug('Calibrating to reflectance') + logger.debug("Calibrating to reflectance") refl = 100 * k * radiance return refl.clip(min=0) @@ -928,28 +928,28 @@ def _update_metadata(self, data, ds_info): # If the file_type attribute is a list and the data is xarray # the concat of the dataset will not work. As the file_type is # not needed this will be popped here. - if 'file_type' in data.attrs: - data.attrs.pop('file_type') + if "file_type" in data.attrs: + data.attrs.pop("file_type") # Metadata discovered from the file. data.attrs.update( - {'platform_name': self.platform_name, - 'sensor': self.sensor, - 'sector': self.sector, - 'orbital_parameters': {'yaw_flip': self.meta['yaw_flip']}} + {"platform_name": self.platform_name, + "sensor": self.sensor, + "sector": self.sector, + "orbital_parameters": {"yaw_flip": self.meta["yaw_flip"]}} ) - if self.meta['lon0'] is not None: + if self.meta["lon0"] is not None: # Attributes only available for full disc images. 
YAML reader # doesn't like it if satellite_* is present but None data.attrs.update( - {'nadir_row': self.meta['nadir_row'], - 'nadir_col': self.meta['nadir_col'], - 'area_def_uniform_sampling': self.meta['area_def_uni']} + {"nadir_row": self.meta["nadir_row"], + "nadir_col": self.meta["nadir_col"], + "area_def_uniform_sampling": self.meta["area_def_uni"]} ) - data.attrs['orbital_parameters'].update( - {'projection_longitude': self.meta['lon0'], - 'projection_latitude': self.meta['lat0'], - 'projection_altitude': ALTITUDE} + data.attrs["orbital_parameters"].update( + {"projection_longitude": self.meta["lon0"], + "projection_latitude": self.meta["lat0"], + "projection_altitude": ALTITUDE} ) def __del__(self): @@ -977,10 +977,10 @@ def available_datasets(self, configured_datasets=None): if is_avail is not None: yield is_avail, ds_info - matches = self.file_type_matches(ds_info['file_type']) - if matches and ds_info.get('resolution') != res: + matches = self.file_type_matches(ds_info["file_type"]) + if matches and ds_info.get("resolution") != res: new_info = ds_info.copy() - new_info['resolution'] = res + new_info["resolution"] = res yield True, new_info elif is_avail is None: yield is_avail, ds_info @@ -989,10 +989,10 @@ def available_datasets(self, configured_datasets=None): def is_vis_channel(channel): """Determine whether the given channel is a visible channel.""" if isinstance(channel, str): - return channel == '00_7' + return channel == "00_7" if isinstance(channel, int): return channel == 1 - raise ValueError('Invalid channel') + raise ValueError("Invalid channel") class GOESNCFileHandler(GOESNCBaseFileHandler): @@ -1008,25 +1008,25 @@ def __init__(self, filename, filename_info, filetype_info): def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" - logger.debug('Reading dataset {}'.format(key['name'])) + logger.debug("Reading dataset {}".format(key["name"])) # Read data from file and calibrate if necessary - if 'longitude' in key['name']: - data = self.geo_data['lon'] - elif 'latitude' in key['name']: - data = self.geo_data['lat'] + if "longitude" in key["name"]: + data = self.geo_data["lon"] + elif "latitude" in key["name"]: + data = self.geo_data["lat"] else: tic = datetime.now() - data = self.calibrate(self.nc['data'].isel(time=0), - calibration=key['calibration'], - channel=key['name']) - logger.debug('Calibration time: {}'.format(datetime.now() - tic)) + data = self.calibrate(self.nc["data"].isel(time=0), + calibration=key["calibration"], + channel=key["name"]) + logger.debug("Calibration time: {}".format(datetime.now() - tic)) # Mask space pixels - data = data.where(self.meta['earth_mask']) + data = data.where(self.meta["earth_mask"]) # Set proper dimension names - data = data.rename({'xc': 'x', 'yc': 'y'}) + data = data.rename({"xc": "x", "yc": "y"}) # Update metadata self._update_metadata(data, ds_info=info) @@ -1040,19 +1040,19 @@ def calibrate(self, counts, calibration, channel): counts = counts / 32. 
coefs = CALIB_COEFS[self.platform_name][channel] - if calibration == 'counts': + if calibration == "counts": return counts - if calibration in ['radiance', 'reflectance', - 'brightness_temperature']: + if calibration in ["radiance", "reflectance", + "brightness_temperature"]: radiance = self._counts2radiance(counts=counts, coefs=coefs, channel=channel) - if calibration == 'radiance': + if calibration == "radiance": return radiance return self._calibrate(radiance=radiance, coefs=coefs, channel=channel, calibration=calibration) - raise ValueError('Unsupported calibration for channel {}: {}'.format(channel, calibration)) + raise ValueError("Unsupported calibration for channel {}: {}".format(channel, calibration)) class GOESEUMNCFileHandler(GOESNCBaseFileHandler): @@ -1072,20 +1072,20 @@ def __init__(self, filename, filename_info, filetype_info, geo_data): def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" - logger.debug('Reading dataset {}'.format(key['name'])) + logger.debug("Reading dataset {}".format(key["name"])) tic = datetime.now() - data = self.calibrate(self.nc['data'].isel(time=0), - calibration=key['calibration'], - channel=key['name']) - logger.debug('Calibration time: {}'.format(datetime.now() - tic)) + data = self.calibrate(self.nc["data"].isel(time=0), + calibration=key["calibration"], + channel=key["name"]) + logger.debug("Calibration time: {}".format(datetime.now() - tic)) # Mask space pixels - data = data.where(self.meta['earth_mask']) + data = data.where(self.meta["earth_mask"]) # Set proper dimension names - data = data.rename({'xc': 'x', 'yc': 'y'}) - data = data.drop('time') + data = data.rename({"xc": "x", "yc": "y"}) + data = data.drop("time") # Update metadata self._update_metadata(data, ds_info=info) @@ -1098,15 +1098,15 @@ def calibrate(self, data, calibration, channel): is_vis = is_vis_channel(channel) # IR files provide radiances, VIS file provides reflectances - if is_vis and calibration == 'reflectance': + if is_vis and calibration == "reflectance": return data - if not is_vis and calibration == 'radiance': + if not is_vis and calibration == "radiance": return data - if not is_vis and calibration == 'brightness_temperature': + if not is_vis and calibration == "brightness_temperature": return self._calibrate(radiance=data, calibration=calibration, coefs=coefs, channel=channel) - raise ValueError('Unsupported calibration for channel {}: {}' + raise ValueError("Unsupported calibration for channel {}: {}" .format(channel, calibration)) @@ -1120,13 +1120,13 @@ def __init__(self, filename, filename_info, filetype_info): self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, - chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE}) - self.sensor = 'goes_imager' - self.nlines = self.nc.dims['yc'] - self.ncols = self.nc.dims['xc'] + chunks={"xc": CHUNK_SIZE, "yc": CHUNK_SIZE}) + self.sensor = "goes_imager" + self.nlines = self.nc.dims["yc"] + self.ncols = self.nc.dims["xc"] self.platform_name = GOESNCBaseFileHandler._get_platform_name( - self.nc.attrs['Satellite Sensor']) - self.platform_shortname = self.platform_name.replace('-', '').lower() + self.nc.attrs["Satellite Sensor"]) + self.platform_shortname = self.platform_name.replace("-", "").lower() self._meta = None def __getitem__(self, item): @@ -1135,18 +1135,18 @@ def __getitem__(self, item): def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" - logger.debug('Reading dataset {}'.format(key['name'])) + logger.debug("Reading 
dataset {}".format(key["name"])) # Read data from file and calibrate if necessary - if 'longitude' in key['name']: - data = self.nc['lon'] - elif 'latitude' in key['name']: - data = self.nc['lat'] + if "longitude" in key["name"]: + data = self.nc["lon"] + elif "latitude" in key["name"]: + data = self.nc["lat"] else: - raise KeyError("Unknown dataset: {}".format(key['name'])) + raise KeyError("Unknown dataset: {}".format(key["name"])) # Set proper dimension names - data = data.rename({'xc': 'x', 'yc': 'y'}) + data = data.rename({"xc": "x", "yc": "y"}) # Update metadata data.attrs.update(info) @@ -1165,36 +1165,36 @@ class GOESCoefficientReader(object): """Read GOES Imager calibration coefficients from NOAA reference HTMLs.""" gvar_channels = { - 'GOES-8': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, - 'GOES-9': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, - 'GOES-10': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, - 'GOES-11': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, - 'GOES-12': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, - 'GOES-13': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, - 'GOES-14': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, - 'GOES-15': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, + "GOES-8": {"00_7": 1, "03_9": 2, "06_8": 3, "10_7": 4, "12_0": 5}, + "GOES-9": {"00_7": 1, "03_9": 2, "06_8": 3, "10_7": 4, "12_0": 5}, + "GOES-10": {"00_7": 1, "03_9": 2, "06_8": 3, "10_7": 4, "12_0": 5}, + "GOES-11": {"00_7": 1, "03_9": 2, "06_8": 3, "10_7": 4, "12_0": 5}, + "GOES-12": {"00_7": 1, "03_9": 2, "06_5": 3, "10_7": 4, "13_3": 6}, + "GOES-13": {"00_7": 1, "03_9": 2, "06_5": 3, "10_7": 4, "13_3": 6}, + "GOES-14": {"00_7": 1, "03_9": 2, "06_5": 3, "10_7": 4, "13_3": 6}, + "GOES-15": {"00_7": 1, "03_9": 2, "06_5": 3, "10_7": 4, "13_3": 6}, } ir_tables = { - 'GOES-8': '2-1', - 'GOES-9': '2-2', - 'GOES-10': '2-3', - 'GOES-11': '2-4', - 'GOES-12': '2-5a', - 'GOES-13': '2-6', - 'GOES-14': '2-7c', - 'GOES-15': '2-8b' + "GOES-8": "2-1", + "GOES-9": "2-2", + "GOES-10": "2-3", + "GOES-11": "2-4", + "GOES-12": "2-5a", + "GOES-13": "2-6", + "GOES-14": "2-7c", + "GOES-15": "2-8b" } vis_tables = { - 'GOES-8': 'Table 1.', - 'GOES-9': 'Table 1.', - 'GOES-10': 'Table 2.', - 'GOES-11': 'Table 3.', - 'GOES-12': 'Table 4.', - 'GOES-13': 'Table 5.', - 'GOES-14': 'Table 6.', - 'GOES-15': 'Table 7.' + "GOES-8": "Table 1.", + "GOES-9": "Table 1.", + "GOES-10": "Table 2.", + "GOES-11": "Table 3.", + "GOES-12": "Table 4.", + "GOES-13": "Table 5.", + "GOES-14": "Table 6.", + "GOES-15": "Table 7." } def __init__(self, ir_url, vis_url): @@ -1217,13 +1217,13 @@ def _load_url_or_file(self, url): except (MissingSchema, requests.HTTPError): # Not a valid URL, is it a file? 
try: - return open(url, mode='r') + return open(url, mode="r") except IOError: - raise ValueError('Invalid URL or file: {}'.format(url)) + raise ValueError("Invalid URL or file: {}".format(url)) def get_coefs(self, platform, channel): """Get the coefs.""" - if channel == '00_7': + if channel == "00_7": return self._get_vis_coefs(platform=platform) return self._get_ir_coefs(platform=platform, channel=channel) @@ -1236,27 +1236,27 @@ def _get_ir_coefs(self, platform, channel): # Extract scale and offset for conversion counts->radiance from # Table 1-1 (same for all platforms, only depends on the channel) gvar_channel = self.gvar_channels[platform][channel] - table11 = self._get_table(root=self.ir_html, heading='Table 1-1', - heading_type='h3') + table11 = self._get_table(root=self.ir_html, heading="Table 1-1", + heading_type="h3") for row in table11: if int(row[0]) == gvar_channel: - coefs['scale'] = self._float(row[1]) - coefs['offset'] = self._float(row[2]) + coefs["scale"] = self._float(row[1]) + coefs["offset"] = self._float(row[2]) # Extract n,a,b (radiance -> BT) from the coefficient table for the # given platform table = self._get_table(root=self.ir_html, heading=self.ir_tables[platform], - heading_type='h3') - channel_regex = re.compile('^{}(?:/[a,b])?$'.format(gvar_channel)) + heading_type="h3") + channel_regex = re.compile("^{}(?:/[a,b])?$".format(gvar_channel)) for row in table: if channel_regex.match(row[0]): # Extract coefficients. Detector (a) always comes before (b) # in the table so that simply appending preserves the order. - coefs['n'].append(self._float(row[1])) - coefs['a'].append(self._float(row[2])) - coefs['b'].append(self._float(row[3])) + coefs["n"].append(self._float(row[1])) + coefs["a"].append(self._float(row[2])) + coefs["b"].append(self._float(row[3])) return coefs @@ -1266,28 +1266,28 @@ def _get_vis_coefs(self, platform): # Find calibration table table = self._get_table(root=self.vis_html, heading=self.vis_tables[platform], - heading_type='p') + heading_type="p") # Extract values coefs = defaultdict(list) - if platform in ('GOES-8', 'GOES-9'): + if platform in ("GOES-8", "GOES-9"): # GOES 8&9 coefficients are in the same table - col = 1 if platform == 'GOES-8' else 2 - coefs['slope'].append(self._float(table[1][col])) - coefs['x0'] = self._float(table[2][col]) - coefs['offset'].append(self._float(table[3][col])) - coefs['k'] = self._float(table[4][col]) + col = 1 if platform == "GOES-8" else 2 + coefs["slope"].append(self._float(table[1][col])) + coefs["x0"] = self._float(table[2][col]) + coefs["offset"].append(self._float(table[3][col])) + coefs["k"] = self._float(table[4][col]) else: # k and x0 appear in the first row only - coefs['slope'].append(self._float(table[0][1])) - coefs['x0'] = self._float(table[0][2]) - coefs['k'] = self._float(table[0][4]) - coefs['offset'].append(self._float(table[0][3])) + coefs["slope"].append(self._float(table[0][1])) + coefs["x0"] = self._float(table[0][2]) + coefs["k"] = self._float(table[0][4]) + coefs["offset"].append(self._float(table[0][3])) # Remaining rows for row in table[1:]: - coefs['slope'].append(self._float(row[1])) - coefs['offset'].append(self._float(row[2])) + coefs["slope"].append(self._float(row[1])) + coefs["offset"].append(self._float(row[2])) return coefs @@ -1296,7 +1296,7 @@ def _get_table(self, root, heading, heading_type, ): headings = [h for h in root.find_all(heading_type) if heading in h.text] if not headings: - raise ValueError('Cannot find a coefficient table matching text ' + raise 
ValueError("Cannot find a coefficient table matching text " '"{}"'.format(heading)) if len(headings) > 1: raise ValueError('Found multiple headings matching text "{}"' @@ -1305,14 +1305,14 @@ def _get_table(self, root, heading, heading_type, ): # Copy items to a list of lists tab = list() - for row in table.find_all('tr'): - cols = row.find_all('td') + for row in table.find_all("tr"): + cols = row.find_all("td") if cols: tab.append([c.text for c in cols]) return tab def _denoise(self, string): - return string.replace('\n', '').replace(' ', '') + return string.replace("\n", "").replace(" ", "") def _float(self, string): """Convert string to float. @@ -1320,11 +1320,11 @@ def _float(self, string): Take care of numbers in exponential format """ string = self._denoise(string) - exp_match = re.match(r'^[-.\d]+x10-(\d)$', string) + exp_match = re.match(r"^[-.\d]+x10-(\d)$", string) if exp_match: exp = int(exp_match.groups()[0]) fac = 10 ** -exp - string = string.replace('x10-{}'.format(exp), '') + string = string.replace("x10-{}".format(exp), "") else: fac = 1 @@ -1355,10 +1355,10 @@ def test_coefs(ir_url, vis_url): for cname in coefs_expected.keys(): if not np.allclose(coefs[cname], coefs_expected[cname]): raise ValueError( - 'Coefficient {} for {} channel {} does not match the ' - 'reference'.format(cname, platform, channel)) + "Coefficient {} for {} channel {} does not match the " + "reference".format(cname, platform, channel)) - logger.info('Coefficients OK') + logger.info("Coefficients OK") return True @@ -1384,12 +1384,12 @@ def get_area_def_with_uniform_sampling(self, projection_longitude): def _get_projection(self, projection_longitude): return { - 'a': EQUATOR_RADIUS, - 'b': POLE_RADIUS, - 'lon_0': projection_longitude, - 'h': ALTITUDE, - 'proj': 'geos', - 'units': 'm' + "a": EQUATOR_RADIUS, + "b": POLE_RADIUS, + "lon_0": projection_longitude, + "h": ALTITUDE, + "proj": "geos", + "units": "m" } def _get_area_extent_at_max_scan_angle(self, proj_dict): @@ -1398,9 +1398,9 @@ def _get_area_extent_at_max_scan_angle(self, proj_dict): def _get_max_scan_angle(self, proj_dict): dummy_area = pyresample.geometry.AreaDefinition( - area_id='dummy', - proj_id='dummy', - description='dummy', + area_id="dummy", + proj_id="dummy", + description="dummy", projection=proj_dict, width=2, height=2, @@ -1427,8 +1427,8 @@ def _get_uniform_pixel_size(self): def _create_area_def(self, projection, area_extent, shape): width, height = shape return pyresample.geometry.AreaDefinition( - area_id='goes_geos_uniform', - proj_id='goes_geos_uniform', + area_id="goes_geos_uniform", + proj_id="goes_geos_uniform", description=self._get_area_description(), projection=projection, width=width, @@ -1437,6 +1437,6 @@ def _create_area_def(self, projection, area_extent, shape): ) def _get_area_description(self): - return '{} geostationary projection (uniform sampling)'.format( + return "{} geostationary projection (uniform sampling)".format( self.platform_name ) diff --git a/satpy/readers/gpm_imerg.py b/satpy/readers/gpm_imerg.py index 3a68f8a9bb..7bc65ac4c6 100644 --- a/satpy/readers/gpm_imerg.py +++ b/satpy/readers/gpm_imerg.py @@ -49,34 +49,34 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Find the start time from filename info.""" - return datetime(self.finfo['date'].year, - self.finfo['date'].month, - self.finfo['date'].day, - self.finfo['start_time'].hour, - self.finfo['start_time'].minute, - self.finfo['start_time'].second) + return datetime(self.finfo["date"].year, + 
self.finfo["date"].month, + self.finfo["date"].day, + self.finfo["start_time"].hour, + self.finfo["start_time"].minute, + self.finfo["start_time"].second) @property def end_time(self): """Find the end time from filename info.""" - return datetime(self.finfo['date'].year, - self.finfo['date'].month, - self.finfo['date'].day, - self.finfo['end_time'].hour, - self.finfo['end_time'].minute, - self.finfo['end_time'].second) + return datetime(self.finfo["date"].year, + self.finfo["date"].month, + self.finfo["date"].day, + self.finfo["end_time"].hour, + self.finfo["end_time"].minute, + self.finfo["end_time"].second) def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" - file_key = ds_info.get('file_key', dataset_id['name']) - dsname = 'Grid/' + file_key + file_key = ds_info.get("file_key", dataset_id["name"]) + dsname = "Grid/" + file_key data = self.get(dsname) data = data.squeeze().transpose() if data.ndim >= 2: - data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'}) + data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) data.data = da.flip(data.data, axis=0) - fill = data.attrs['_FillValue'] + fill = data.attrs["_FillValue"] data = data.where(data != fill) for key in list(data.attrs.keys()): @@ -89,8 +89,8 @@ def get_dataset(self, dataset_id, ds_info): def get_area_def(self, dsid): """Create area definition from the gridded lat/lon values.""" - lats = self.__getitem__('Grid/lat').values - lons = self.__getitem__('Grid/lon').values + lats = self.__getitem__("Grid/lat").values + lons = self.__getitem__("Grid/lon").values width = lons.shape[0] height = lats.shape[0] @@ -103,8 +103,8 @@ def get_area_def(self, dsid): area_extent = (lower_left_x, lower_left_y, upper_right_x, upper_right_y) description = "IMERG GPM Equirectangular Projection" - area_id = 'imerg' - proj_id = 'equirectangular' - proj_dict = {'proj': 'longlat', 'datum': 'WGS84', 'ellps': 'WGS84', } + area_id = "imerg" + proj_id = "equirectangular" + proj_dict = {"proj": "longlat", "datum": "WGS84", "ellps": "WGS84", } area_def = AreaDefinition(area_id, description, proj_id, proj_dict, width, height, area_extent, ) return area_def diff --git a/satpy/readers/grib.py b/satpy/readers/grib.py index 2d78792f2b..dadccce77a 100644 --- a/satpy/readers/grib.py +++ b/satpy/readers/grib.py @@ -41,7 +41,7 @@ CHUNK_SIZE = get_legacy_chunk_size() CF_UNITS = { - 'none': '1', + "none": "1", } @@ -60,46 +60,46 @@ def __init__(self, filename, filename_info, filetype_info): first_msg = grib_file.message(1) last_msg = grib_file.message(grib_file.messages) start_time = self._convert_datetime( - first_msg, 'validityDate', 'validityTime') + first_msg, "validityDate", "validityTime") end_time = self._convert_datetime( - last_msg, 'validityDate', 'validityTime') + last_msg, "validityDate", "validityTime") self._start_time = start_time self._end_time = end_time - if 'keys' not in filetype_info: + if "keys" not in filetype_info: self._analyze_messages(grib_file) self._idx = None else: - self._create_dataset_ids(filetype_info['keys']) + self._create_dataset_ids(filetype_info["keys"]) self._idx = pygrib.index(self.filename, - *filetype_info['keys'].keys()) + *filetype_info["keys"].keys()) except (RuntimeError, KeyError): raise IOError("Unknown GRIB file format: {}".format(self.filename)) def _analyze_messages(self, grib_file): grib_file.seek(0) for idx, msg in enumerate(grib_file): - msg_id = DataQuery(name=msg['shortName'], - level=msg['level'], + msg_id = DataQuery(name=msg["shortName"], + level=msg["level"], modifiers=tuple()) ds_info 
= { - 'message': idx + 1, - 'name': msg['shortName'], - 'level': msg['level'], - 'file_type': self.filetype_info['file_type'], + "message": idx + 1, + "name": msg["shortName"], + "level": msg["level"], + "file_type": self.filetype_info["file_type"], } self._msg_datasets[msg_id] = ds_info def _create_dataset_ids(self, keys): from itertools import product - ordered_keys = [k for k in keys.keys() if 'id_key' in keys[k]] - for id_vals in product(*[keys[k]['values'] for k in ordered_keys]): - id_keys = [keys[k]['id_key'] for k in ordered_keys] + ordered_keys = [k for k in keys.keys() if "id_key" in keys[k]] + for id_vals in product(*[keys[k]["values"] for k in ordered_keys]): + id_keys = [keys[k]["id_key"] for k in ordered_keys] msg_info = dict(zip(ordered_keys, id_vals)) ds_info = dict(zip(id_keys, id_vals)) msg_id = DataQuery(**ds_info) ds_info = msg_id.to_dict() ds_info.update(msg_info) - ds_info['file_type'] = self.filetype_info['file_type'] + ds_info["file_type"] = self.filetype_info["file_type"] self._msg_datasets[msg_id] = ds_info @staticmethod @@ -137,11 +137,11 @@ def available_datasets(self, configured_datasets=None): def _get_message(self, ds_info): with pygrib.open(self.filename) as grib_file: - if 'message' in ds_info: - msg_num = ds_info['message'] + if "message" in ds_info: + msg_num = ds_info["message"] msg = grib_file.message(msg_num) else: - msg_keys = self.filetype_info['keys'].keys() + msg_keys = self.filetype_info["keys"].keys() msg = self._idx(**{k: ds_info[k] for k in msg_keys})[0] return msg @@ -154,7 +154,7 @@ def _correct_cyl_minmax_xy(proj_params, min_lon, min_lat, max_lon, max_lat): # wrap around # make 180 longitude the prime meridian # assuming we are going from 0 to 360 longitude - proj_params['pm'] = 180 + proj_params["pm"] = 180 proj = Proj(**proj_params) # recompute x/y extents with this new projection min_x, min_y = proj(min_lon, min_lat) @@ -173,9 +173,9 @@ def _get_cyl_minmax_lonlat(lons, lats): return min_lon, min_lat, max_lon, max_lat def _get_cyl_area_info(self, msg, proj_params): - proj_params['proj'] = 'eqc' - lons = msg['distinctLongitudes'] - lats = msg['distinctLatitudes'] + proj_params["proj"] = "eqc" + lons = msg["distinctLongitudes"] + lats = msg["distinctLatitudes"] shape = (lats.shape[0], lons.shape[0]) minmax_lonlat = self._get_cyl_minmax_lonlat(lons, lats) proj_params, minmax_xy = self._correct_cyl_minmax_xy(proj_params, *minmax_lonlat) @@ -208,14 +208,14 @@ def _get_corner_lonlat(proj_params, lons, lats): # if we have longitudes over 180, assume 0-360 if (lons > 180).any(): # make 180 longitude the prime meridian - proj_params['pm'] = 180 + proj_params["pm"] = 180 return proj_params, lons, lats def _get_area_info(self, msg, proj_params): lats, lons = msg.latlons() shape = lats.shape - scans_positively = (msg.valid_key('jScansPositively') and - msg['jScansPositively'] == 1) + scans_positively = (msg.valid_key("jScansPositively") and + msg["jScansPositively"] == 1) proj_params, lons, lats = self._get_corner_lonlat( proj_params, lons, lats) minmax_xy = self._get_corner_xy(proj_params, lons, lats, scans_positively) @@ -225,7 +225,7 @@ def _get_area_info(self, msg, proj_params): @staticmethod def _correct_proj_params_over_prime_meridian(proj_params): # correct for longitudes over 180 - for lon_param in ['lon_0', 'lon_1', 'lon_2']: + for lon_param in ["lon_0", "lon_1", "lon_2"]: if proj_params.get(lon_param, 0) > 180: proj_params[lon_param] -= 360 return proj_params @@ -234,16 +234,16 @@ def _area_def_from_msg(self, msg): proj_params = 
msg.projparams.copy() proj_params = self._correct_proj_params_over_prime_meridian(proj_params) - if proj_params['proj'] in ('cyl', 'eqc'): + if proj_params["proj"] in ("cyl", "eqc"): # eqc projection that goes from 0 to 360 proj_params, shape, extents = self._get_cyl_area_info(msg, proj_params) else: proj_params, shape, extents = self._get_area_info(msg, proj_params) return geometry.AreaDefinition( - 'on-the-fly grib area', - 'on-the-fly grib area', - 'on-the-fly grib area', + "on-the-fly grib area", + "on-the-fly grib area", + "on-the-fly grib area", proj_params, shape[1], shape[0], @@ -264,41 +264,41 @@ def get_area_def(self, dsid): def get_metadata(self, msg, ds_info): """Get metadata.""" - model_time = self._convert_datetime(msg, 'dataDate', - 'dataTime') - start_time = self._convert_datetime(msg, 'validityDate', - 'validityTime') + model_time = self._convert_datetime(msg, "dataDate", + "dataTime") + start_time = self._convert_datetime(msg, "validityDate", + "validityTime") end_time = start_time try: - center_description = msg['centreDescription'] + center_description = msg["centreDescription"] except (RuntimeError, KeyError): center_description = None key_dicts = { - 'shortName': 'shortName', - 'long_name': 'name', - 'pressureUnits': 'pressureUnits', - 'typeOfLevel': 'typeOfLevel', - 'standard_name': 'cfName', - 'units': 'units', - 'modelName': 'modelName', - 'valid_min': 'minimum', - 'valid_max': 'maximum', - 'sensor': 'modelName'} + "shortName": "shortName", + "long_name": "name", + "pressureUnits": "pressureUnits", + "typeOfLevel": "typeOfLevel", + "standard_name": "cfName", + "units": "units", + "modelName": "modelName", + "valid_min": "minimum", + "valid_max": "maximum", + "sensor": "modelName"} ds_info.update({ - 'filename': self.filename, - 'model_time': model_time, - 'centreDescription': center_description, - 'start_time': start_time, - 'end_time': end_time, - 'platform_name': 'unknown'}) + "filename": self.filename, + "model_time": model_time, + "centreDescription": center_description, + "start_time": start_time, + "end_time": end_time, + "platform_name": "unknown"}) for key in key_dicts: if key_dicts[key] in msg.keys(): ds_info[key] = msg[key_dicts[key]] else: - ds_info[key] = 'unknown' + ds_info[key] = "unknown" return ds_info @@ -306,9 +306,9 @@ def get_dataset(self, dataset_id, ds_info): """Read a GRIB message into an xarray DataArray.""" msg = self._get_message(ds_info) ds_info = self.get_metadata(msg, ds_info) - fill = msg['missingValue'] + fill = msg["missingValue"] data = msg.values.astype(np.float32) - if msg.valid_key('jScansPositively') and msg['jScansPositively'] == 1: + if msg.valid_key("jScansPositively") and msg["jScansPositively"] == 1: data = data[::-1] if isinstance(data, np.ma.MaskedArray): @@ -318,4 +318,4 @@ def get_dataset(self, dataset_id, ds_info): data[data == fill] = np.nan data = da.from_array(data, chunks=CHUNK_SIZE) - return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) + return xr.DataArray(data, attrs=ds_info, dims=("y", "x")) diff --git a/satpy/readers/hdf4_utils.py b/satpy/readers/hdf4_utils.py index acc86fd64d..90fbc6cccc 100644 --- a/satpy/readers/hdf4_utils.py +++ b/satpy/readers/hdf4_utils.py @@ -47,9 +47,9 @@ def from_sds(var, *args, **kwargs): """Create a dask array from a SD dataset.""" - var.__dict__['dtype'] = np.dtype(HTYPE_TO_DTYPE[var.info()[3]]) + var.__dict__["dtype"] = np.dtype(HTYPE_TO_DTYPE[var.info()[3]]) shape = var.info()[2] - var.__dict__['shape'] = shape if isinstance(shape, (tuple, list)) else tuple(shape) + 
var.__dict__["shape"] = shape if isinstance(shape, (tuple, list)) else tuple(shape) return da.from_array(var, *args, **kwargs) @@ -61,7 +61,7 @@ def __init__(self, filename, filename_info, filetype_info): super(HDF4FileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = {} file_handle = SD(self.filename, SDC.READ) - self._collect_attrs('', file_handle.attributes()) + self._collect_attrs("", file_handle.attributes()) for k in file_handle.datasets().keys(): self.collect_metadata(k, file_handle.select(k)) del file_handle @@ -94,7 +94,7 @@ def _open_xarray_dataset(self, val, chunks=CHUNK_SIZE): """Read the band in blocks.""" dask_arr = from_sds(val, chunks=chunks) attrs = val.attributes() - return xr.DataArray(dask_arr, dims=('y', 'x'), + return xr.DataArray(dask_arr, dims=("y", "x"), attrs=attrs) def __getitem__(self, key): diff --git a/satpy/readers/hdf5_utils.py b/satpy/readers/hdf5_utils.py index 2a1c8c23bb..428d64e2f1 100644 --- a/satpy/readers/hdf5_utils.py +++ b/satpy/readers/hdf5_utils.py @@ -43,14 +43,14 @@ def __init__(self, filename, filename_info, filetype_info): self._attrs_cache = {} try: - file_handle = h5py.File(self.filename, 'r') + file_handle = h5py.File(self.filename, "r") except IOError: LOG.exception( - 'Failed reading file %s. Possibly corrupted file', self.filename) + "Failed reading file %s. Possibly corrupted file", self.filename) raise file_handle.visititems(self.collect_metadata) - self._collect_attrs('', file_handle.attrs) + self._collect_attrs("", file_handle.attrs) file_handle.close() def _collect_attrs(self, name, attrs): @@ -73,7 +73,7 @@ def _collect_attrs(self, name, attrs): def get_reference(self, name, key): """Get reference.""" - with h5py.File(self.filename, 'r') as hf: + with h5py.File(self.filename, "r") as hf: return self._get_reference(hf, hf[name].attrs[key]) def _get_reference(self, hf, ref): @@ -97,11 +97,11 @@ def __getitem__(self, key): val = self.file_content[key] if isinstance(val, h5py.Dataset): # these datasets are closed and inaccessible when the file is closed, need to reopen - dset = h5py.File(self.filename, 'r')[key] + dset = h5py.File(self.filename, "r")[key] dset_data = da.from_array(dset, chunks=CHUNK_SIZE) attrs = self._attrs_cache.get(key, dset.attrs) if dset.ndim == 2: - return xr.DataArray(dset_data, dims=['y', 'x'], attrs=attrs) + return xr.DataArray(dset_data, dims=["y", "x"], attrs=attrs) return xr.DataArray(dset_data, attrs=attrs) return val diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index f776256e89..91affbade6 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -119,7 +119,7 @@ def _load_all_metadata_attributes(self): @classmethod def read_mda(cls, attribute): """Read the EOS metadata.""" - line_iterator = iter(attribute.split('\n')) + line_iterator = iter(attribute.split("\n")) return cls._read_mda(line_iterator) @classmethod @@ -129,18 +129,18 @@ def _read_mda(cls, lines, element=None): for line in lines: if not line: continue - if line == 'END': + if line == "END": return current_dict key, val = cls._split_line(line, lines) - if key in ['GROUP', 'OBJECT']: + if key in ["GROUP", "OBJECT"]: current_dict[val] = cls._read_mda(lines, val) - elif key in ['END_GROUP', 'END_OBJECT']: + elif key in ["END_GROUP", "END_OBJECT"]: if val != element: raise SyntaxError("Non-matching end-tag") return current_dict - elif key in ['CLASS', 'NUM_VAL']: + elif key in ["CLASS", "NUM_VAL"]: pass else: current_dict[key] = val @@ -149,7 +149,7 @@ def 
_read_mda(cls, lines, element=None): @classmethod def _split_line(cls, line, lines): - key, val = line.split('=') + key, val = line.split("=") key = key.strip() val = val.strip() try: @@ -164,8 +164,8 @@ def metadata_platform_name(self): """Platform name from the internal file metadata.""" try: # Example: 'Terra' or 'Aqua' - return self.metadata['INVENTORYMETADATA']['ASSOCIATEDPLATFORMINSTRUMENTSENSOR'][ - 'ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER']['ASSOCIATEDPLATFORMSHORTNAME']['VALUE'] + return self.metadata["INVENTORYMETADATA"]["ASSOCIATEDPLATFORMINSTRUMENTSENSOR"][ + "ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER"]["ASSOCIATEDPLATFORMSHORTNAME"]["VALUE"] except KeyError: return self._platform_name_from_filename() @@ -181,9 +181,9 @@ def _platform_name_from_filename(self): def start_time(self): """Get the start time of the dataset.""" try: - date = (self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEBEGINNINGDATE']['VALUE'] + ' ' + - self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEBEGINNINGTIME']['VALUE']) - return datetime.strptime(date, '%Y-%m-%d %H:%M:%S.%f') + date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGDATE"]["VALUE"] + " " + + self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGTIME"]["VALUE"]) + return datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") except KeyError: return self._start_time_from_filename() @@ -194,9 +194,9 @@ def _start_time_from_filename(self): def end_time(self): """Get the end time of the dataset.""" try: - date = (self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEENDINGDATE']['VALUE'] + ' ' + - self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEENDINGTIME']['VALUE']) - return datetime.strptime(date, '%Y-%m-%d %H:%M:%S.%f') + date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGDATE"]["VALUE"] + " " + + self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGTIME"]["VALUE"]) + return datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") except KeyError: return self.start_time @@ -216,7 +216,7 @@ def load_dataset(self, dataset_name, is_category=False): dataset = self._read_dataset_in_file(dataset_name) dask_arr = from_sds(dataset, chunks=CHUNK_SIZE) - dims = ('y', 'x') if dask_arr.ndim == 2 else None + dims = ("y", "x") if dask_arr.ndim == 2 else None data = xr.DataArray(dask_arr, dims=dims, attrs=dataset.attributes()) data = self._scale_and_mask_data_array(data, is_category=is_category) @@ -236,8 +236,8 @@ def _scale_and_mask_data_array(self, data, is_category=False): """ good_mask, new_fill = self._get_good_data_mask(data, is_category=is_category) - scale_factor = data.attrs.pop('scale_factor', None) - add_offset = data.attrs.pop('add_offset', None) + scale_factor = data.attrs.pop("scale_factor", None) + add_offset = data.attrs.pop("add_offset", None) # don't scale category products, even though scale_factor may equal 1 # we still need to convert integers to floats if scale_factor is not None and not is_category: @@ -260,15 +260,15 @@ def _get_good_data_mask(self, data_arr, is_category=False): # no need to mask, the fill value is already what it needs to be return None, None new_fill = np.nan - data_arr.attrs.pop('_FillValue', None) + data_arr.attrs.pop("_FillValue", None) good_mask = data_arr != fill_value return good_mask, new_fill def _add_satpy_metadata(self, data_id: DataID, data_arr: xr.DataArray): """Add metadata that is specific to Satpy.""" new_attrs = { - 'platform_name': 'EOS-' + self.metadata_platform_name, - 'sensor': 'modis', + "platform_name": 
"EOS-" + self.metadata_platform_name, + "sensor": "modis", } res = data_id["resolution"] @@ -293,12 +293,12 @@ class HDFEOSGeoReader(HDFEOSBaseFileReader): # list of geographical datasets handled by the georeader # mapping to the default variable name if not specified in YAML DATASET_NAMES = { - 'longitude': 'Longitude', - 'latitude': 'Latitude', - 'satellite_azimuth_angle': ('SensorAzimuth', 'Sensor_Azimuth'), - 'satellite_zenith_angle': ('SensorZenith', 'Sensor_Zenith'), - 'solar_azimuth_angle': ('SolarAzimuth', 'SolarAzimuth'), - 'solar_zenith_angle': ('SolarZenith', 'Solar_Zenith'), + "longitude": "Longitude", + "latitude": "Latitude", + "satellite_azimuth_angle": ("SensorAzimuth", "Sensor_Azimuth"), + "satellite_zenith_angle": ("SensorZenith", "Sensor_Zenith"), + "solar_azimuth_angle": ("SolarAzimuth", "SolarAzimuth"), + "solar_zenith_angle": ("SolarZenith", "Solar_Zenith"), } def __init__(self, filename, filename_info, filetype_info, **kwargs): @@ -325,8 +325,8 @@ def read_geo_resolution(metadata): @staticmethod def _geo_resolution_for_l1b(metadata): - ds = metadata['INVENTORYMETADATA']['COLLECTIONDESCRIPTIONCLASS']['SHORTNAME']['VALUE'] - if ds.endswith('D03') or ds.endswith('HKM') or ds.endswith('QKM'): + ds = metadata["INVENTORYMETADATA"]["COLLECTIONDESCRIPTIONCLASS"]["SHORTNAME"]["VALUE"] + if ds.endswith("D03") or ds.endswith("HKM") or ds.endswith("QKM"): return 1000 # 1km files have 5km geolocation usually return 5000 @@ -336,10 +336,10 @@ def _geo_resolution_for_l2_l1b(metadata): # data files probably have this level 2 files # this does not work for L1B 1KM data files because they are listed # as 1KM data but the geo data inside is at 5km - latitude_dim = metadata['SwathStructure']['SWATH_1']['DimensionMap']['DimensionMap_2']['GeoDimension'] - resolution_regex = re.compile(r'(?P\d+)(km|KM)') + latitude_dim = metadata["SwathStructure"]["SWATH_1"]["DimensionMap"]["DimensionMap_2"]["GeoDimension"] + resolution_regex = re.compile(r"(?P\d+)(km|KM)") resolution_match = resolution_regex.search(latitude_dim) - return int(resolution_match.group('resolution')) * 1000 + return int(resolution_match.group("resolution")) * 1000 @property def geo_resolution(self): @@ -365,7 +365,7 @@ def get_interpolated_dataset(self, name1, name2, resolution, offset=0): result1 = self._load_ds_by_name(name1) result2 = self._load_ds_by_name(name2) - offset try: - sensor_zenith = self._load_ds_by_name('satellite_zenith_angle') + sensor_zenith = self._load_ds_by_name("satellite_zenith_angle") except KeyError: # no sensor zenith angle, do "simple" interpolation sensor_zenith = None @@ -380,11 +380,11 @@ def get_interpolated_dataset(self, name1, name2, resolution, offset=0): def get_dataset(self, dataset_id: DataID, dataset_info: dict) -> xr.DataArray: """Get the geolocation dataset.""" # Name of the dataset as it appears in the HDF EOS file - in_file_dataset_name = dataset_info.get('file_key') + in_file_dataset_name = dataset_info.get("file_key") # Name of the dataset in the YAML file - dataset_name = dataset_id['name'] + dataset_name = dataset_id["name"] # Resolution asked - resolution = dataset_id['resolution'] + resolution = dataset_id["resolution"] if in_file_dataset_name is not None: # if the YAML was configured with a specific name use that data = self.load_dataset(in_file_dataset_name) @@ -401,21 +401,21 @@ def get_dataset(self, dataset_id: DataID, dataset_info: dict) -> xr.DataArray: # The data must be interpolated logger.debug("Loading %s", dataset_name) - if dataset_name in ['longitude', 'latitude']: 
- self.get_interpolated_dataset('longitude', 'latitude', + if dataset_name in ["longitude", "latitude"]: + self.get_interpolated_dataset("longitude", "latitude", resolution) - elif dataset_name in ['satellite_azimuth_angle', 'satellite_zenith_angle']: + elif dataset_name in ["satellite_azimuth_angle", "satellite_zenith_angle"]: # Sensor dataset names differs between L1b and L2 products - self.get_interpolated_dataset('satellite_azimuth_angle', 'satellite_zenith_angle', + self.get_interpolated_dataset("satellite_azimuth_angle", "satellite_zenith_angle", resolution, offset=90) - elif dataset_name in ['solar_azimuth_angle', 'solar_zenith_angle']: + elif dataset_name in ["solar_azimuth_angle", "solar_zenith_angle"]: # Sensor dataset names differs between L1b and L2 products - self.get_interpolated_dataset('solar_azimuth_angle', 'solar_zenith_angle', + self.get_interpolated_dataset("solar_azimuth_angle", "solar_zenith_angle", resolution, offset=90) data = self.cache[dataset_name, resolution] - for key in ('standard_name', 'units'): + for key in ("standard_name", "units"): if key in dataset_info: data.attrs[key] = dataset_info[key] self._add_satpy_metadata(dataset_id, data) diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py index c8b2287653..bf53d84a65 100644 --- a/satpy/readers/hrit_base.py +++ b/satpy/readers/hrit_base.py @@ -48,41 +48,41 @@ from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.seviri_base import dec10216 -logger = logging.getLogger('hrit_base') +logger = logging.getLogger("hrit_base") -common_hdr = np.dtype([('hdr_id', 'u1'), - ('record_length', '>u2')]) +common_hdr = np.dtype([("hdr_id", "u1"), + ("record_length", ">u2")]) -primary_header = np.dtype([('file_type', 'u1'), - ('total_header_length', '>u4'), - ('data_field_length', '>u8')]) +primary_header = np.dtype([("file_type", "u1"), + ("total_header_length", ">u4"), + ("data_field_length", ">u8")]) -image_structure = np.dtype([('number_of_bits_per_pixel', 'u1'), - ('number_of_columns', '>u2'), - ('number_of_lines', '>u2'), - ('compression_flag_for_data', 'u1')]) +image_structure = np.dtype([("number_of_bits_per_pixel", "u1"), + ("number_of_columns", ">u2"), + ("number_of_lines", ">u2"), + ("compression_flag_for_data", "u1")]) -image_navigation = np.dtype([('projection_name', 'S32'), - ('cfac', '>i4'), - ('lfac', '>i4'), - ('coff', '>i4'), - ('loff', '>i4')]) +image_navigation = np.dtype([("projection_name", "S32"), + ("cfac", ">i4"), + ("lfac", ">i4"), + ("coff", ">i4"), + ("loff", ">i4")]) -image_data_function = np.dtype([('function', '|S1')]) +image_data_function = np.dtype([("function", "|S1")]) -annotation_header = np.dtype([('annotation', '|S1')]) +annotation_header = np.dtype([("annotation", "|S1")]) -timestamp_record = np.dtype([('cds_p_field', 'u1'), - ('timestamp', time_cds_short)]) +timestamp_record = np.dtype([("cds_p_field", "u1"), + ("timestamp", time_cds_short)]) -ancillary_text = np.dtype([('ancillary', '|S1')]) +ancillary_text = np.dtype([("ancillary", "|S1")]) -key_header = np.dtype([('key', '|S1')]) +key_header = np.dtype([("key", "|S1")]) -base_text_headers = {image_data_function: 'image_data_function', - annotation_header: 'annotation_header', - ancillary_text: 'ancillary_text', - key_header: 'key_header'} +base_text_headers = {image_data_function: "image_data_function", + annotation_header: "annotation_header", + ancillary_text: "ancillary_text", + key_header: "key_header"} base_hdr_map = {0: primary_header, 1: image_structure, @@ -97,7 +97,7 @@ def 
get_xritdecompress_cmd(): """Find a valid binary for the xRITDecompress command.""" - cmd = os.environ.get('XRIT_DECOMPRESS_PATH', None) + cmd = os.environ.get("XRIT_DECOMPRESS_PATH", None) if not cmd: raise IOError("XRIT_DECOMPRESS_PATH is not defined (complete path to xRITDecompress)") @@ -112,20 +112,20 @@ def get_xritdecompress_cmd(): def get_xritdecompress_outfile(stdout): """Analyse the output of the xRITDecompress command call and return the file.""" - outfile = b'' + outfile = b"" for line in stdout: try: - k, v = [x.strip() for x in line.split(b':', 1)] + k, v = [x.strip() for x in line.split(b":", 1)] except ValueError: break - if k == b'Decompressed file': + if k == b"Decompressed file": outfile = v break return outfile -def decompress(infile, outdir='.'): +def decompress(infile, outdir="."): """Decompress an XRIT data file and return the path to the decompressed file. It expect to find Eumetsat's xRITDecompress through the environment variable @@ -149,7 +149,7 @@ def decompress(infile, outdir='.'): if not outfile: raise IOError("xrit_decompress '%s', failed, no output file is generated" % infile) - return os.path.join(outdir, outfile.decode('utf-8')) + return os.path.join(outdir, outfile.decode("utf-8")) def get_header_id(fp): @@ -175,20 +175,20 @@ def __init__(self, filename, filename_info, filetype_info, hdr_info): self.mda = {} self.hdr_info = hdr_info self._get_hd(self.hdr_info) - self._start_time = filename_info['start_time'] + self._start_time = filename_info["start_time"] self._end_time = self._start_time + timedelta(minutes=15) def _get_hd(self, hdr_info): """Open the file, read and get the basic file header info and set the mda dictionary.""" hdr_map, variable_length_headers, text_headers = hdr_info - with utils.generic_open(self.filename, mode='rb') as fp: + with utils.generic_open(self.filename, mode="rb") as fp: total_header_length = 16 while fp.tell() < total_header_length: hdr_id = get_header_id(fp) - the_type = hdr_map[hdr_id['hdr_id']] + the_type = hdr_map[hdr_id["hdr_id"]] if the_type in variable_length_headers: - field_length = int((hdr_id['record_length'] - 3) / + field_length = int((hdr_id["record_length"] - 3) / the_type.itemsize) current_hdr = get_header_content(fp, the_type, field_length) key = variable_length_headers[the_type] @@ -199,7 +199,7 @@ def _get_hd(self, hdr_info): else: self.mda[key] = current_hdr elif the_type in text_headers: - field_length = int((hdr_id['record_length'] - 3) / + field_length = int((hdr_id["record_length"] - 3) / the_type.itemsize) char = list(the_type.fields.values())[0][0].char new_type = np.dtype(char + str(field_length)) @@ -210,16 +210,16 @@ def _get_hd(self, hdr_info): self.mda.update( dict(zip(current_hdr.dtype.names, current_hdr))) - total_header_length = self.mda['total_header_length'] + total_header_length = self.mda["total_header_length"] - self.mda.setdefault('number_of_bits_per_pixel', 10) + self.mda.setdefault("number_of_bits_per_pixel", 10) - self.mda['projection_parameters'] = {'a': 6378169.00, - 'b': 6356583.80, - 'h': 35785831.00, + self.mda["projection_parameters"] = {"a": 6378169.00, + "b": 6356583.80, + "h": 35785831.00, # FIXME: find a reasonable SSP - 'SSP_longitude': 0.0} - self.mda['orbital_parameters'] = {} + "SSP_longitude": 0.0} + self.mda["orbital_parameters"] = {} @property def observation_start_time(self): @@ -247,7 +247,7 @@ def get_dataset(self, key, info): data = self.read_band(key, info) # Convert to xarray - xdata = xr.DataArray(data, dims=['y', 'x']) + xdata = xr.DataArray(data, 
dims=["y", "x"]) return xdata @@ -282,34 +282,34 @@ def get_area_extent(self, size, offsets, factors, platform_height): def get_area_def(self, dsid): """Get the area definition of the band.""" - cfac = np.int32(self.mda['cfac']) - lfac = np.int32(self.mda['lfac']) - coff = np.float32(self.mda['coff']) - loff = np.float32(self.mda['loff']) - - a = self.mda['projection_parameters']['a'] - b = self.mda['projection_parameters']['b'] - h = self.mda['projection_parameters']['h'] - lon_0 = self.mda['projection_parameters']['SSP_longitude'] - nlines = int(self.mda['number_of_lines']) - ncols = int(self.mda['number_of_columns']) + cfac = np.int32(self.mda["cfac"]) + lfac = np.int32(self.mda["lfac"]) + coff = np.float32(self.mda["coff"]) + loff = np.float32(self.mda["loff"]) + + a = self.mda["projection_parameters"]["a"] + b = self.mda["projection_parameters"]["b"] + h = self.mda["projection_parameters"]["h"] + lon_0 = self.mda["projection_parameters"]["SSP_longitude"] + nlines = int(self.mda["number_of_lines"]) + ncols = int(self.mda["number_of_columns"]) area_extent = self.get_area_extent((nlines, ncols), (loff, coff), (lfac, cfac), h) - proj_dict = {'a': float(a), - 'b': float(b), - 'lon_0': float(lon_0), - 'h': float(h), - 'proj': 'geos', - 'units': 'm'} + proj_dict = {"a": float(a), + "b": float(b), + "lon_0": float(lon_0), + "h": float(h), + "proj": "geos", + "units": "m"} area = geometry.AreaDefinition( - 'some_area_name', + "some_area_name", "On-the-fly area", - 'geosmsg', + "geosmsg", proj_dict, ncols, nlines, @@ -326,14 +326,14 @@ def read_band(self, key, info): dtype=output_dtype) def _get_output_info(self): - bpp = self.mda['number_of_bits_per_pixel'] + bpp = self.mda["number_of_bits_per_pixel"] if bpp in [10, 16]: output_dtype = np.uint16 elif bpp == 8: output_dtype = np.uint8 else: raise ValueError(f"Unexpected number of bits per pixel: {bpp}") - output_shape = (self.mda['number_of_lines'], self.mda['number_of_columns']) + output_shape = (self.mda["number_of_lines"], self.mda["number_of_columns"]) return output_dtype, output_shape @@ -361,12 +361,12 @@ def __init__(self, filename, mda): """Set up the segment.""" self.filename = filename self.mda = mda - self.lines = mda['number_of_lines'] - self.cols = mda['number_of_columns'] - self.bpp = mda['number_of_bits_per_pixel'] - self.compressed = mda['compression_flag_for_data'] == 1 - self.offset = mda['total_header_length'] - self.zipped = os.fspath(filename).endswith('.bz2') + self.lines = mda["number_of_lines"] + self.cols = mda["number_of_columns"] + self.bpp = mda["number_of_bits_per_pixel"] + self.compressed = mda["compression_flag_for_data"] == 1 + self.offset = mda["total_header_length"] + self.zipped = os.fspath(filename).endswith(".bz2") def read_data(self): """Read the data.""" @@ -410,7 +410,7 @@ def _get_input_info(self): total_bits = int(self.lines) * int(self.cols) * int(self.bpp) input_shape = int(np.ceil(total_bits / 8.)) if self.bpp == 16: - input_dtype = '>u2' + input_dtype = ">u2" input_shape //= 2 elif self.bpp in [8, 10]: input_dtype = np.uint8 diff --git a/satpy/readers/hrit_jma.py b/satpy/readers/hrit_jma.py index 4b06a3d707..2a85a95cd4 100644 --- a/satpy/readers/hrit_jma.py +++ b/satpy/readers/hrit_jma.py @@ -123,33 +123,33 @@ ) from satpy.readers.utils import get_geostationary_mask -logger = logging.getLogger('hrit_jma') +logger = logging.getLogger("hrit_jma") # JMA implementation: -key_header = np.dtype([('key_number', 'u4')]) +key_header = np.dtype([("key_number", "u4")]) -segment_identification = 
np.dtype([('image_segm_seq_no', '>u1'), - ('total_no_image_segm', '>u1'), - ('line_no_image_segm', '>u2')]) +segment_identification = np.dtype([("image_segm_seq_no", ">u1"), + ("total_no_image_segm", ">u1"), + ("line_no_image_segm", ">u2")]) -encryption_key_message = np.dtype([('station_number', '>u2')]) +encryption_key_message = np.dtype([("station_number", ">u2")]) -image_compensation_information = np.dtype([('compensation', '|S1')]) +image_compensation_information = np.dtype([("compensation", "|S1")]) -image_observation_time = np.dtype([('times', '|S1')]) +image_observation_time = np.dtype([("times", "|S1")]) -image_quality_information = np.dtype([('quality', '|S1')]) +image_quality_information = np.dtype([("quality", "|S1")]) jma_variable_length_headers: dict = {} -jma_text_headers = {image_data_function: 'image_data_function', - annotation_header: 'annotation_header', - ancillary_text: 'ancillary_text', - image_compensation_information: 'image_compensation_information', - image_observation_time: 'image_observation_time', - image_quality_information: 'image_quality_information'} +jma_text_headers = {image_data_function: "image_data_function", + annotation_header: "annotation_header", + ancillary_text: "ancillary_text", + image_compensation_information: "image_compensation_information", + image_observation_time: "image_observation_time", + image_quality_information: "image_quality_information"} jma_hdr_map = base_hdr_map.copy() jma_hdr_map.update({7: key_header, @@ -161,45 +161,45 @@ }) -cuc_time = np.dtype([('coarse', 'u1', (4, )), - ('fine', 'u1', (3, ))]) +cuc_time = np.dtype([("coarse", "u1", (4, )), + ("fine", "u1", (3, ))]) -time_cds_expanded = np.dtype([('days', '>u2'), - ('milliseconds', '>u4'), - ('microseconds', '>u2'), - ('nanoseconds', '>u2')]) +time_cds_expanded = np.dtype([("days", ">u2"), + ("milliseconds", ">u4"), + ("microseconds", ">u2"), + ("nanoseconds", ">u2")]) FULL_DISK = 1 NORTH_HEMIS = 2 SOUTH_HEMIS = 3 UNKNOWN_AREA = -1 -AREA_NAMES = {FULL_DISK: {'short': 'FLDK', 'long': 'Full Disk'}, - NORTH_HEMIS: {'short': 'NH', 'long': 'Northern Hemisphere'}, - SOUTH_HEMIS: {'short': 'SH', 'long': 'Southern Hemisphere'}, - UNKNOWN_AREA: {'short': 'UNKNOWN', 'long': 'Unknown Area'}} - -MTSAT1R = 'MTSAT-1R' -MTSAT2 = 'MTSAT-2' -HIMAWARI8 = 'Himawari-8' -UNKNOWN_PLATFORM = 'Unknown Platform' +AREA_NAMES = {FULL_DISK: {"short": "FLDK", "long": "Full Disk"}, + NORTH_HEMIS: {"short": "NH", "long": "Northern Hemisphere"}, + SOUTH_HEMIS: {"short": "SH", "long": "Southern Hemisphere"}, + UNKNOWN_AREA: {"short": "UNKNOWN", "long": "Unknown Area"}} + +MTSAT1R = "MTSAT-1R" +MTSAT2 = "MTSAT-2" +HIMAWARI8 = "Himawari-8" +UNKNOWN_PLATFORM = "Unknown Platform" PLATFORMS = { - 'GEOS(140.00)': MTSAT1R, - 'GEOS(140.25)': MTSAT1R, - 'GEOS(140.70)': HIMAWARI8, - 'GEOS(145.00)': MTSAT2, + "GEOS(140.00)": MTSAT1R, + "GEOS(140.25)": MTSAT1R, + "GEOS(140.70)": HIMAWARI8, + "GEOS(145.00)": MTSAT2, } SENSORS = { - MTSAT1R: 'jami', - MTSAT2: 'mtsat2_imager', - HIMAWARI8: 'ahi' + MTSAT1R: "jami", + MTSAT2: "mtsat2_imager", + HIMAWARI8: "ahi" } def mjd2datetime64(mjd): """Convert Modified Julian Day (MJD) to datetime64.""" - epoch = np.datetime64('1858-11-17 00:00') + epoch = np.datetime64("1858-11-17 00:00") day2usec = 24 * 3600 * 1E6 - mjd_usec = (mjd * day2usec).astype(np.int64).astype('timedelta64[us]') + mjd_usec = (mjd * day2usec).astype(np.int64).astype("timedelta64[us]") return epoch + mjd_usec @@ -242,20 +242,20 @@ def __init__(self, filename, filename_info, filetype_info, use_acquisition_time_ 
jma_text_headers)) self._use_acquisition_time_as_start_time = use_acquisition_time_as_start_time - self.mda['segment_sequence_number'] = self.mda['image_segm_seq_no'] - self.mda['planned_end_segment_number'] = self.mda['total_no_image_segm'] - self.mda['planned_start_segment_number'] = 1 + self.mda["segment_sequence_number"] = self.mda["image_segm_seq_no"] + self.mda["planned_end_segment_number"] = self.mda["total_no_image_segm"] + self.mda["planned_start_segment_number"] = 1 - items = self.mda['image_data_function'].decode().split('\r') - if items[0].startswith('$HALFTONE'): + items = self.mda["image_data_function"].decode().split("\r") + if items[0].startswith("$HALFTONE"): self.calibration_table = [] for item in items[1:]: - if item == '': + if item == "": continue - key, value = item.split(':=') - if key.startswith('_UNIT'): - self.mda['unit'] = item.split(':=')[1] - elif key.startswith('_NAME'): + key, value = item.split(":=") + if key.startswith("_UNIT"): + self.mda["unit"] = item.split(":=")[1] + elif key.startswith("_NAME"): pass elif key.isdigit(): key = int(key) @@ -264,12 +264,12 @@ def __init__(self, filename, filename_info, filetype_info, use_acquisition_time_ self.calibration_table = np.array(self.calibration_table) - self.projection_name = self.mda['projection_name'].decode().strip() - sublon = float(self.projection_name.split('(')[1][:-1]) - self.mda['projection_parameters']['SSP_longitude'] = sublon + self.projection_name = self.mda["projection_name"].decode().strip() + sublon = float(self.projection_name.split("(")[1][:-1]) + self.mda["projection_parameters"]["SSP_longitude"] = sublon self.platform = self._get_platform() - self.is_segmented = self.mda['segment_sequence_number'] > 0 - self.area_id = filename_info.get('area', UNKNOWN_AREA) + self.is_segmented = self.mda["segment_sequence_number"] > 0 + self.area_id = filename_info.get("area", UNKNOWN_AREA) if self.area_id not in AREA_NAMES: self.area_id = UNKNOWN_AREA self.area = self._get_area_def() @@ -304,7 +304,7 @@ def _get_platform(self): try: return PLATFORMS[self.projection_name] except KeyError: - logger.error('Unable to determine platform: Unknown projection ' + logger.error("Unable to determine platform: Unknown projection " 'name "{}"'.format(self.projection_name)) return UNKNOWN_PLATFORM @@ -320,8 +320,8 @@ def _check_sensor_platform_consistency(self, sensor): """ ref_sensor = SENSORS.get(self.platform, None) if ref_sensor and not sensor == ref_sensor: - logger.error('Sensor-Platform mismatch: {} is not a payload ' - 'of {}. Did you choose the correct reader?' + logger.error("Sensor-Platform mismatch: {} is not a payload " + "of {}. Did you choose the correct reader?" .format(sensor, self.platform)) def _get_line_offset(self): @@ -335,41 +335,41 @@ def _get_line_offset(self): because this is what get_geostationary_area_extent() expects. 
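    Worked example for the segmented case handled below (hypothetical numbers):
    with a full-disk loff of 1375, 10 segments and 275 lines per segment, the
    last segment (segment_sequence_number = 10) keeps
    loff = 1375 - (10 - 9 - 1) * 275 = 1375, while the first segment
    (segment_sequence_number = 1) gets loff = 1375 - (10 - 0 - 1) * 275 = -1100.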
""" # Get line offset from the file - nlines = int(self.mda['number_of_lines']) - loff = np.float32(self.mda['loff']) + nlines = int(self.mda["number_of_lines"]) + loff = np.float32(self.mda["loff"]) # Adapt it to the current segment if self.is_segmented: # loff in the file specifies the offset of the full disk image # centre (1375/2750 for VIS/IR) - segment_number = self.mda['segment_sequence_number'] - 1 - loff -= (self.mda['total_no_image_segm'] - segment_number - 1) * nlines + segment_number = self.mda["segment_sequence_number"] - 1 + loff -= (self.mda["total_no_image_segm"] - segment_number - 1) * nlines elif self.area_id in (NORTH_HEMIS, SOUTH_HEMIS): # loff in the file specifies the start line of the half disk image # in the full disk image loff = nlines - loff elif self.area_id == UNKNOWN_AREA: - logger.error('Cannot compute line offset for unknown area') + logger.error("Cannot compute line offset for unknown area") return loff def _get_area_def(self): """Get the area definition of the band.""" pdict = { - 'cfac': np.int32(self.mda['cfac']), - 'lfac': np.int32(self.mda['lfac']), - 'coff': np.float32(self.mda['coff']), - 'loff': self._get_line_offset(), - 'ncols': int(self.mda['number_of_columns']), - 'nlines': int(self.mda['number_of_lines']), - 'scandir': 'N2S', - 'a': float(self.mda['projection_parameters']['a']), - 'b': float(self.mda['projection_parameters']['b']), - 'h': float(self.mda['projection_parameters']['h']), - 'ssp_lon': float(self.mda['projection_parameters']['SSP_longitude']), - 'a_name': AREA_NAMES[self.area_id]['short'], - 'a_desc': AREA_NAMES[self.area_id]['long'], - 'p_id': 'geosmsg' + "cfac": np.int32(self.mda["cfac"]), + "lfac": np.int32(self.mda["lfac"]), + "coff": np.float32(self.mda["coff"]), + "loff": self._get_line_offset(), + "ncols": int(self.mda["number_of_columns"]), + "nlines": int(self.mda["number_of_lines"]), + "scandir": "N2S", + "a": float(self.mda["projection_parameters"]["a"]), + "b": float(self.mda["projection_parameters"]["b"]), + "h": float(self.mda["projection_parameters"]["h"]), + "ssp_lon": float(self.mda["projection_parameters"]["SSP_longitude"]), + "a_name": AREA_NAMES[self.area_id]["short"], + "a_desc": AREA_NAMES[self.area_id]["long"], + "p_id": "geosmsg" } area_extent = get_area_extent(pdict) return get_area_definition(pdict, area_extent) @@ -385,22 +385,22 @@ def get_dataset(self, key, info): # Filenames of segmented data is identical for MTSAT-1R, MTSAT-2 # and Himawari-8/9. Make sure we have the correct reader for the data # at hand. 
- self._check_sensor_platform_consistency(info['sensor']) + self._check_sensor_platform_consistency(info["sensor"]) # Calibrate and mask space pixels res = self._mask_space(self.calibrate(res, key["calibration"])) # Add scanline acquisition time - res.coords['acq_time'] = ('y', self.acq_time) - res.coords['acq_time'].attrs['long_name'] = 'Scanline acquisition time' + res.coords["acq_time"] = ("y", self.acq_time) + res.coords["acq_time"].attrs["long_name"] = "Scanline acquisition time" # Update attributes res.attrs.update(info) - res.attrs['platform_name'] = self.platform - res.attrs['orbital_parameters'] = { - 'projection_longitude': float(self.mda['projection_parameters']['SSP_longitude']), - 'projection_latitude': 0., - 'projection_altitude': float(self.mda['projection_parameters']['h'])} + res.attrs["platform_name"] = self.platform + res.attrs["orbital_parameters"] = { + "projection_longitude": float(self.mda["projection_parameters"]["SSP_longitude"]), + "projection_latitude": 0., + "projection_altitude": float(self.mda["projection_parameters"]["h"])} return res @@ -419,17 +419,17 @@ def _get_acq_time(self): Missing timestamps in between are computed using linear interpolation. """ - buf_b = np.frombuffer(self.mda['image_observation_time'], + buf_b = np.frombuffer(self.mda["image_observation_time"], dtype=image_observation_time) # Replace \r by \n before encoding, otherwise encoding will drop all # elements except the last one - buf_s = b''.join(buf_b['times']).replace(b'\r', b'\n').decode() + buf_s = b"".join(buf_b["times"]).replace(b"\r", b"\n").decode() # Split into key:=value pairs; then extract line number and timestamp - splits = buf_s.strip().split('\n') - lines_sparse = [int(s.split(':=')[1]) for s in splits[0::2]] - times_sparse = [float(s.split(':=')[1]) for s in splits[1::2]] + splits = buf_s.strip().split("\n") + lines_sparse = [int(s.split(":=")[1]) for s in splits[0::2]] + times_sparse = [float(s.split(":=")[1]) for s in splits[1::2]] if self.platform == HIMAWARI8: # Only a couple of timestamps in the header, and only the first @@ -454,9 +454,9 @@ def calibrate(self, data, calibration): """Calibrate the data.""" tic = datetime.now() - if calibration == 'counts': + if calibration == "counts": return data - if calibration == 'radiance': + if calibration == "radiance": raise NotImplementedError("Can't calibrate to radiance.") cal = self.calibration_table diff --git a/satpy/readers/hrpt.py b/satpy/readers/hrpt.py index cbde23559c..2a54eed664 100644 --- a/satpy/readers/hrpt.py +++ b/satpy/readers/hrpt.py @@ -48,21 +48,21 @@ AVHRR_CHANNEL_NAMES = ("1", "2", "3a", "3b", "4", "5") -dtype = np.dtype([('frame_sync', '>u2', (6, )), - ('id', [('id', '>u2'), - ('spare', '>u2')]), - ('timecode', '>u2', (4, )), - ('telemetry', [("ramp_calibration", '>u2', (5, )), - ("PRT", '>u2', (3, )), - ("ch3_patch_temp", '>u2'), - ("spare", '>u2'), ]), - ('back_scan', '>u2', (10, 3)), - ('space_data', '>u2', (10, 5)), - ('sync', '>u2'), - ('TIP_data', '>u2', (520, )), - ('spare', '>u2', (127, )), - ('image_data', '>u2', (2048, 5)), - ('aux_sync', '>u2', (100, ))]) +dtype = np.dtype([("frame_sync", ">u2", (6, )), + ("id", [("id", ">u2"), + ("spare", ">u2")]), + ("timecode", ">u2", (4, )), + ("telemetry", [("ramp_calibration", ">u2", (5, )), + ("PRT", ">u2", (3, )), + ("ch3_patch_temp", ">u2"), + ("spare", ">u2"), ]), + ("back_scan", ">u2", (10, 3)), + ("space_data", ">u2", (10, 5)), + ("sync", ">u2"), + ("TIP_data", ">u2", (520, )), + ("spare", ">u2", (127, )), + ("image_data", ">u2", (2048, 5)), + 
("aux_sync", ">u2", (100, ))]) def time_seconds(tc_array, year): @@ -78,9 +78,9 @@ def time_seconds(tc_array, year): word = tc_array[:, 3] msecs += word & 1023 return (np.datetime64( - str(year) + '-01-01T00:00:00Z', 's') + - msecs[:].astype('timedelta64[ms]') + - (day - 1)[:].astype('timedelta64[D]')) + str(year) + "-01-01T00:00:00Z", "s") + + msecs[:].astype("timedelta64[ms]") + + (day - 1)[:].astype("timedelta64[D]")) def bfield(array, bit): @@ -111,13 +111,13 @@ def geo_interpolate(lons32km, lats32km): def _get_channel_index(key): """Get the avhrr channel index.""" - avhrr_channel_index = {'1': 0, - '2': 1, - '3a': 2, - '3b': 2, - '4': 3, - '5': 4} - index = avhrr_channel_index[key['name']] + avhrr_channel_index = {"1": 0, + "2": 1, + "3a": 2, + "3b": 2, + "4": 3, + "5": 4} + index = avhrr_channel_index[key["name"]] return index @@ -128,9 +128,9 @@ def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(HRPTFile, self).__init__(filename, filename_info, filetype_info) self.channels = {i: None for i in AVHRR_CHANNEL_NAMES} - self.units = {i: 'counts' for i in AVHRR_CHANNEL_NAMES} + self.units = {i: "counts" for i in AVHRR_CHANNEL_NAMES} - self.year = filename_info.get('start_time', datetime.utcnow()).year + self.year = filename_info.get("start_time", datetime.utcnow()).year @cached_property def times(self): @@ -151,7 +151,7 @@ def read(self): """Read the file.""" with open(self.filename, "rb") as fp_: data = np.memmap(fp_, dtype=dtype, mode="r") - if np.all(np.median(data['frame_sync'], axis=0) > 1024): + if np.all(np.median(data["frame_sync"], axis=0) > 1024): data = self._data.newbyteorder() return data @@ -163,32 +163,32 @@ def platform_name(self): def get_dataset(self, key, info): """Get the dataset.""" attrs = info.copy() - attrs['platform_name'] = self.platform_name + attrs["platform_name"] = self.platform_name - if key['name'] in ['latitude', 'longitude']: + if key["name"] in ["latitude", "longitude"]: data = self._get_navigation_data(key) else: data = self._get_channel_data(key) - result = xr.DataArray(data, dims=['y', 'x'], attrs=attrs) + result = xr.DataArray(data, dims=["y", "x"], attrs=attrs) mask = self._get_ch3_mask_or_true(key) return result.where(mask) def _get_channel_data(self, key): """Get channel data.""" data = da.from_array(self._data["image_data"][:, :, _get_channel_index(key)], chunks=self._chunks) - if key['calibration'] != 'counts': - if key['name'] in ['1', '2', '3a']: + if key["calibration"] != "counts": + if key["name"] in ["1", "2", "3a"]: data = self.calibrate_solar_channel(data, key) - if key['name'] in ['3b', '4', '5']: + if key["name"] in ["3b", "4", "5"]: data = self.calibrate_thermal_channel(data, key) return data def _get_navigation_data(self, key): """Get navigation data.""" lons, lats = self.lons_lats - if key['name'] == 'latitude': + if key["name"] == "latitude": data = da.from_array(lats, chunks=self._chunks) else: data = da.from_array(lons, chunks=self._chunks) @@ -196,9 +196,9 @@ def _get_navigation_data(self, key): def _get_ch3_mask_or_true(self, key): mask = True - if key['name'] == '3a': + if key["name"] == "3a": mask = np.tile(np.logical_not(self._is3b), (2048, 1)).T - elif key['name'] == '3b': + elif key["name"] == "3b": mask = np.tile(self._is3b, (2048, 1)).T return mask @@ -211,7 +211,7 @@ def calibrate_thermal_channel(self, data, key): from pygac.calibration import calibrate_thermal line_numbers = ( np.round((self.times - self.times[-1]) / - np.timedelta64(166666667, 'ns'))).astype(int) + 
np.timedelta64(166666667, "ns"))).astype(int) line_numbers -= line_numbers[0] prt, ict, space = self.telemetry index = _get_channel_index(key) @@ -224,8 +224,8 @@ def calibrate_solar_channel(self, data, key): """Calibrate a solar channel.""" from pygac.calibration import calibrate_solar julian_days = ((np.datetime64(self.start_time) - - np.datetime64(str(self.year) + '-01-01T00:00:00Z')) - / np.timedelta64(1, 'D')) + - np.datetime64(str(self.year) + "-01-01T00:00:00Z")) + / np.timedelta64(1, "D")) data = calibrate_solar(data, _get_channel_index(key), self.year, julian_days, self.calibrator) return data @@ -234,16 +234,16 @@ def calibrate_solar_channel(self, data, key): def calibrator(self): """Create a calibrator for the data.""" from pygac.calibration import Calibrator - pg_spacecraft = ''.join(self.platform_name.split()).lower() + pg_spacecraft = "".join(self.platform_name.split()).lower() return Calibrator(pg_spacecraft) @cached_property def telemetry(self): """Get the telemetry.""" # This isn't converted to dask arrays as it does not work with pygac - prt = np.mean(self._data["telemetry"]['PRT'], axis=1) - ict = np.mean(self._data['back_scan'], axis=1) - space = np.mean(self._data['space_data'][:, :], axis=1) + prt = np.mean(self._data["telemetry"]["PRT"], axis=1) + ict = np.mean(self._data["back_scan"], axis=1) + space = np.mean(self._data["space_data"][:, :], axis=1) return prt, ict, space diff --git a/satpy/readers/hsaf_grib.py b/satpy/readers/hsaf_grib.py index 69361cb648..a041bf0c73 100644 --- a/satpy/readers/hsaf_grib.py +++ b/satpy/readers/hsaf_grib.py @@ -39,7 +39,7 @@ CHUNK_SIZE = get_legacy_chunk_size() CF_UNITS = { - 'none': '1', + "none": "1", } @@ -67,7 +67,7 @@ def __init__(self, filename, filename_info, filetype_info): @staticmethod def _get_datetime(msg): - dtstr = str(msg['dataDate']) + str(msg['dataTime']).zfill(4) + dtstr = str(msg["dataDate"]) + str(msg["dataTime"]).zfill(4) return datetime.strptime(dtstr, "%Y%m%d%H%M") @property @@ -78,19 +78,19 @@ def analysis_time(self): def get_metadata(self, msg): """Get the metadata.""" try: - center_description = msg['centreDescription'] + center_description = msg["centreDescription"] except (RuntimeError, KeyError): center_description = None ds_info = { - 'filename': self.filename, - 'shortName': msg['shortName'], - 'long_name': msg['name'], - 'units': msg['units'], - 'centreDescription': center_description, - 'data_time': self._analysis_time, - 'nx': msg['Nx'], - 'ny': msg['Ny'], - 'projparams': msg.projparams + "filename": self.filename, + "shortName": msg["shortName"], + "long_name": msg["name"], + "units": msg["units"], + "centreDescription": center_description, + "data_time": self._analysis_time, + "nx": msg["Nx"], + "ny": msg["Ny"], + "projparams": msg.projparams } return ds_info @@ -106,28 +106,28 @@ def _get_area_def(self, msg): """Get the area definition of the datasets in the file.""" proj_param = msg.projparams.copy() - Rx = 2 * np.arcsin(1. / msg['NrInRadiusOfEarth']) / msg['dx'] - Ry = 2 * np.arcsin(1. / msg['NrInRadiusOfEarth']) / msg['dy'] + Rx = 2 * np.arcsin(1. / msg["NrInRadiusOfEarth"]) / msg["dx"] + Ry = 2 * np.arcsin(1. 
/ msg["NrInRadiusOfEarth"]) / msg["dy"] - x_0 = - msg['XpInGridLengths'] - x_1 = msg['Nx'] - msg['XpInGridLengths'] - y_0 = (msg['Ny'] - msg['YpInGridLengths']) * -1 - y_1 = msg['YpInGridLengths'] + x_0 = - msg["XpInGridLengths"] + x_1 = msg["Nx"] - msg["XpInGridLengths"] + y_0 = (msg["Ny"] - msg["YpInGridLengths"]) * -1 + y_1 = msg["YpInGridLengths"] - min_x = (x_0 * Rx) * proj_param['h'] - max_x = (x_1 * Rx) * proj_param['h'] + min_x = (x_0 * Rx) * proj_param["h"] + max_x = (x_1 * Rx) * proj_param["h"] - min_y = (y_0 * Ry) * proj_param['h'] - max_y = (y_1 * Ry) * proj_param['h'] + min_y = (y_0 * Ry) * proj_param["h"] + max_y = (y_1 * Ry) * proj_param["h"] area_extent = (min_x, min_y, max_x, max_y) - area = geometry.AreaDefinition('hsaf_region', - 'A region from H-SAF', - 'geos', + area = geometry.AreaDefinition("hsaf_region", + "A region from H-SAF", + "geos", proj_param, - msg['Nx'], - msg['Ny'], + msg["Nx"], + msg["Ny"], area_extent) return area @@ -139,24 +139,24 @@ def _get_message(self, idx): def get_dataset(self, ds_id, ds_info): """Read a GRIB message into an xarray DataArray.""" - if (ds_id['name'] not in self.filename): - raise IOError("File does not contain {} data".format(ds_id['name'])) + if (ds_id["name"] not in self.filename): + raise IOError("File does not contain {} data".format(ds_id["name"])) msg = self._get_message(1) ds_info = self.get_metadata(msg) - ds_info['end_time'] = ds_info['data_time'] + ds_info["end_time"] = ds_info["data_time"] - if (ds_id['name'] == 'h05' or ds_id['name'] == 'h05B'): + if (ds_id["name"] == "h05" or ds_id["name"] == "h05B"): flen = len(self.filename) timedelt = self.filename[flen-10:flen-8] - ds_info['start_time'] = (ds_info['end_time'] - + ds_info["start_time"] = (ds_info["end_time"] - timedelta(hours=int(timedelt))) else: - ds_info['start_time'] = ds_info['end_time'] - fill = msg['missingValue'] + ds_info["start_time"] = ds_info["end_time"] + fill = msg["missingValue"] data = msg.values.astype(np.float32) - if msg.valid_key('jScansPositively') and msg['jScansPositively'] == 1: + if msg.valid_key("jScansPositively") and msg["jScansPositively"] == 1: data = data[::-1] if isinstance(data, np.ma.MaskedArray): @@ -166,4 +166,4 @@ def get_dataset(self, ds_id, ds_info): data[data == fill] = np.nan data = da.from_array(data, chunks=CHUNK_SIZE) - return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) + return xr.DataArray(data, attrs=ds_info, dims=("y", "x")) diff --git a/satpy/readers/hsaf_h5.py b/satpy/readers/hsaf_h5.py index 73be63b29f..478b91ce2d 100644 --- a/satpy/readers/hsaf_h5.py +++ b/satpy/readers/hsaf_h5.py @@ -42,7 +42,7 @@ def __init__(self, filename, filename_info, filetype_info): super(HSAFFileHandler, self).__init__(filename, filename_info, filetype_info) - self._h5fh = h5py.File(self.filename, 'r') + self._h5fh = h5py.File(self.filename, "r") @property def end_time(self): @@ -52,21 +52,21 @@ def end_time(self): @property def start_time(self): """Get start time.""" - return self.filename_info['sensing_time'] + return self.filename_info["sensing_time"] def _prepare_variable_for_palette(self, dset, ds_info): colormap = np.array(dset) - return xr.DataArray(colormap, attrs=ds_info, dims=('idx', 'RGB')) + return xr.DataArray(colormap, attrs=ds_info, dims=("idx", "RGB")) def get_metadata(self, dset, name): """Get the metadata.""" - ds_info = {'name': name} - if name == 'SC': + ds_info = {"name": name} + if name == "SC": ds_info.update({ - 'filename': self.filename, - 'data_time': self.start_time, - 'nx': dset.shape[1], - 'ny': 
dset.shape[0] + "filename": self.filename, + "data_time": self.start_time, + "nx": dset.shape[1], + "ny": dset.shape[0] }) return ds_info @@ -76,7 +76,7 @@ def get_area_def(self, dsid): Since it is not available in the HDF5 message, using hardcoded one (it's known). """ - if dsid['name'] == 'SC': + if dsid["name"] == "SC": return self._get_area_def() raise NotImplementedError @@ -109,31 +109,31 @@ def _get_area_def(self): units: m """ - fd_def = get_area_def('msg_seviri_fes_3km') + fd_def = get_area_def("msg_seviri_fes_3km") hsaf_def = fd_def[AREA_Y_OFFSET:AREA_Y_OFFSET+916, AREA_X_OFFSET:AREA_X_OFFSET+1902] return hsaf_def def _get_dataset(self, ds_name): - if ds_name == 'SC_pal': - _ds_name = 'colormap' + if ds_name == "SC_pal": + _ds_name = "colormap" else: _ds_name = ds_name return self._h5fh.get(_ds_name) def get_dataset(self, ds_id, ds_info): """Read a HDF5 file into an xarray DataArray.""" - ds = self._get_dataset(ds_id['name']) - ds_info = self.get_metadata(ds, ds_id['name']) + ds = self._get_dataset(ds_id["name"]) + ds_info = self.get_metadata(ds, ds_id["name"]) - if ds_id['name'] == 'SC': - ds_info['start_time'] = self.start_time - ds_info['data_time'] = self.start_time - ds_info['end_time'] = self.end_time + if ds_id["name"] == "SC": + ds_info["start_time"] = self.start_time + ds_info["data_time"] = self.start_time + ds_info["end_time"] = self.end_time data = da.from_array(ds, chunks=CHUNK_SIZE) - return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) + return xr.DataArray(data, attrs=ds_info, dims=("y", "x")) - elif ds_id['name'] == 'SC_pal': + elif ds_id["name"] == "SC_pal": return self._prepare_variable_for_palette(ds, ds_info) diff --git a/satpy/readers/hy2_scat_l2b_h5.py b/satpy/readers/hy2_scat_l2b_h5.py index 64520bae9a..929d7dc934 100644 --- a/satpy/readers/hy2_scat_l2b_h5.py +++ b/satpy/readers/hy2_scat_l2b_h5.py @@ -35,82 +35,82 @@ class HY2SCATL2BH5FileHandler(HDF5FileHandler): @property def start_time(self): """Time for first observation.""" - return datetime.strptime(self['/attr/Range_Beginning_Time'], - '%Y%m%dT%H:%M:%S') + return datetime.strptime(self["/attr/Range_Beginning_Time"], + "%Y%m%dT%H:%M:%S") @property def end_time(self): """Time for final observation.""" - return datetime.strptime(self['/attr/Range_Ending_Time'], - '%Y%m%dT%H:%M:%S') + return datetime.strptime(self["/attr/Range_Ending_Time"], + "%Y%m%dT%H:%M:%S") @property def platform_name(self): """Get the Platform ShortName.""" - return self['/attr/Platform_ShortName'] + return self["/attr/Platform_ShortName"] def get_variable_metadata(self): """Get the variable metadata.""" - info = getattr(self, 'attrs', {}) + info = getattr(self, "attrs", {}) info.update({ - "Equator_Crossing_Longitude": self['/attr/Equator_Crossing_Longitude'], - "Equator_Crossing_Time": self['/attr/Equator_Crossing_Time'], - "Input_L2A_Filename": self['/attr/Input_L2A_Filename'], - "L2B_Actual_WVC_Rows": self['/attr/L2B_Actual_WVC_Rows'], - "Orbit_Inclination": self['/attr/Orbit_Inclination'], - "Orbit_Number": self['/attr/Orbit_Number'], - "Output_L2B_Filename": self['/attr/Output_L2B_Filename'], - "Production_Date_Time": self['/attr/Production_Date_Time'], - "L2B_Expected_WVC_Rows": self['/attr/L2B_Expected_WVC_Rows'] + "Equator_Crossing_Longitude": self["/attr/Equator_Crossing_Longitude"], + "Equator_Crossing_Time": self["/attr/Equator_Crossing_Time"], + "Input_L2A_Filename": self["/attr/Input_L2A_Filename"], + "L2B_Actual_WVC_Rows": self["/attr/L2B_Actual_WVC_Rows"], + "Orbit_Inclination": 
self["/attr/Orbit_Inclination"], + "Orbit_Number": self["/attr/Orbit_Number"], + "Output_L2B_Filename": self["/attr/Output_L2B_Filename"], + "Production_Date_Time": self["/attr/Production_Date_Time"], + "L2B_Expected_WVC_Rows": self["/attr/L2B_Expected_WVC_Rows"] }) try: - info.update({"L2B_Number_WVC_cells": self['/attr/L2B_Number_WVC_cells']}) + info.update({"L2B_Number_WVC_cells": self["/attr/L2B_Number_WVC_cells"]}) except KeyError: - info.update({"L2B_Expected_WVC_Cells": self['/attr/L2B_Expected_WVC_Cells']}) + info.update({"L2B_Expected_WVC_Cells": self["/attr/L2B_Expected_WVC_Cells"]}) return info def get_metadata(self): """Get the metadata.""" - info = getattr(self, 'attrs', {}) + info = getattr(self, "attrs", {}) info.update({ - "WVC_Size": self['/attr/WVC_Size'], - "HDF_Version_Id": self['/attr/HDF_Version_Id'], - "Instrument_ShorName": self['/attr/Instrument_ShorName'], - "L2A_Inputdata_Version": self['/attr/L2A_Inputdata_Version'], - "L2B_Algorithm_Descriptor": self['/attr/L2B_Algorithm_Descriptor'], - "L2B_Data_Version": self['/attr/L2B_Data_Version'], - "L2B_Processing_Type": self['/attr/L2B_Processing_Type'], - "L2B_Processor_Name": self['/attr/L2B_Processor_Name'], - "L2B_Processor_Version": self['/attr/L2B_Processor_Version'], - "Long_Name": self['/attr/Long_Name'], - "Platform_LongName": self['/attr/Platform_LongName'], - "Platform_ShortName": self['/attr/Platform_ShortName'], - "Platform_Type": self['/attr/Platform_Type'], - "Producer_Agency": self['/attr/Producer_Agency'], - "Producer_Institution": self['/attr/Producer_Institution'], - "Rev_Orbit_Perio": self['/attr/Rev_Orbit_Period'], - "Short_Name": self['/attr/Short_Name'], - "Sigma0_Granularity": self['/attr/Sigma0_Granularity'], + "WVC_Size": self["/attr/WVC_Size"], + "HDF_Version_Id": self["/attr/HDF_Version_Id"], + "Instrument_ShorName": self["/attr/Instrument_ShorName"], + "L2A_Inputdata_Version": self["/attr/L2A_Inputdata_Version"], + "L2B_Algorithm_Descriptor": self["/attr/L2B_Algorithm_Descriptor"], + "L2B_Data_Version": self["/attr/L2B_Data_Version"], + "L2B_Processing_Type": self["/attr/L2B_Processing_Type"], + "L2B_Processor_Name": self["/attr/L2B_Processor_Name"], + "L2B_Processor_Version": self["/attr/L2B_Processor_Version"], + "Long_Name": self["/attr/Long_Name"], + "Platform_LongName": self["/attr/Platform_LongName"], + "Platform_ShortName": self["/attr/Platform_ShortName"], + "Platform_Type": self["/attr/Platform_Type"], + "Producer_Agency": self["/attr/Producer_Agency"], + "Producer_Institution": self["/attr/Producer_Institution"], + "Rev_Orbit_Perio": self["/attr/Rev_Orbit_Period"], + "Short_Name": self["/attr/Short_Name"], + "Sigma0_Granularity": self["/attr/Sigma0_Granularity"], }) return info def get_dataset(self, key, info): """Get the dataset.""" - dims = ['y', 'x'] - if self[key['name']].ndim == 3: - dims = ['y', 'x', 'selection'] - data = self[key['name']] + dims = ["y", "x"] + if self[key["name"]].ndim == 3: + dims = ["y", "x", "selection"] + data = self[key["name"]] if "valid range" in data.attrs: - data.attrs.update({'valid_range': data.attrs.pop('valid range')}) - if key['name'] in 'wvc_row_time': - data = data.rename({data.dims[0]: 'y'}) + data.attrs.update({"valid_range": data.attrs.pop("valid range")}) + if key["name"] in "wvc_row_time": + data = data.rename({data.dims[0]: "y"}) else: dim_map = {curr_dim: new_dim for curr_dim, new_dim in zip(data.dims, dims)} data = data.rename(dim_map) data = self._mask_data(data) data = self._scale_data(data) - if key['name'] in 'wvc_lon': + if 
key["name"] in "wvc_lon": _attrs = data.attrs data = xr.where(data > 180, data - 360., data) data.attrs.update(_attrs) @@ -118,17 +118,17 @@ def get_dataset(self, key, info): data.attrs.update(self.get_metadata()) data.attrs.update(self.get_variable_metadata()) if "Platform_ShortName" in data.attrs: - data.attrs.update({'platform_name': data.attrs['Platform_ShortName']}) + data.attrs.update({"platform_name": data.attrs["Platform_ShortName"]}) return data def _scale_data(self, data): - return data * data.attrs['scale_factor'] + data.attrs['add_offset'] + return data * data.attrs["scale_factor"] + data.attrs["add_offset"] def _mask_data(self, data): _attrs = data.attrs - valid_range = data.attrs['valid_range'] - data = xr.where(data == data.attrs['fill_value'], np.nan, data) + valid_range = data.attrs["valid_range"] + data = xr.where(data == data.attrs["fill_value"], np.nan, data) data = xr.where(data < valid_range[0], np.nan, data) data = xr.where(data > valid_range[1], np.nan, data) data.attrs.update(_attrs) diff --git a/satpy/readers/iasi_l2.py b/satpy/readers/iasi_l2.py index 64a060a789..8280416d8b 100644 --- a/satpy/readers/iasi_l2.py +++ b/satpy/readers/iasi_l2.py @@ -45,33 +45,33 @@ # Epoch for the dates EPOCH = dt.datetime(2000, 1, 1) -SHORT_NAMES = {'M01': 'Metop-B', - 'M02': 'Metop-A', - 'M03': 'Metop-C'} - -DSET_NAMES = {'ozone_mixing_ratio': 'O', - 'ozone_mixing_ratio_quality': 'QO', - 'pressure': 'P', - 'pressure_quality': 'QP', - 'temperature': 'T', - 'temperature_quality': 'QT', - 'water_mixing_ratio': 'W', - 'water_mixing_ratio_quality': 'QW', - 'water_total_column': 'WC', - 'ozone_total_column': 'OC', - 'surface_skin_temperature': 'Ts', - 'surface_skin_temperature_quality': 'QTs', - 'emissivity': 'E', - 'emissivity_quality': 'QE'} - -GEO_NAMES = {'latitude': 'Latitude', - 'longitude': 'Longitude', - 'satellite_azimuth_angle': 'SatAzimuth', - 'satellite_zenith_angle': 'SatZenith', - 'sensing_time': {'day': 'SensingTime_day', - 'msec': 'SensingTime_msec'}, - 'solar_azimuth_angle': 'SunAzimuth', - 'solar_zenith_angle': 'SunZenith'} +SHORT_NAMES = {"M01": "Metop-B", + "M02": "Metop-A", + "M03": "Metop-C"} + +DSET_NAMES = {"ozone_mixing_ratio": "O", + "ozone_mixing_ratio_quality": "QO", + "pressure": "P", + "pressure_quality": "QP", + "temperature": "T", + "temperature_quality": "QT", + "water_mixing_ratio": "W", + "water_mixing_ratio_quality": "QW", + "water_total_column": "WC", + "ozone_total_column": "OC", + "surface_skin_temperature": "Ts", + "surface_skin_temperature_quality": "QTs", + "emissivity": "E", + "emissivity_quality": "QE"} + +GEO_NAMES = {"latitude": "Latitude", + "longitude": "Longitude", + "satellite_azimuth_angle": "SatAzimuth", + "satellite_zenith_angle": "SatZenith", + "sensing_time": {"day": "SensingTime_day", + "msec": "SensingTime_msec"}, + "solar_azimuth_angle": "SunAzimuth", + "solar_zenith_angle": "SunZenith"} LOGGER = logging.getLogger(__name__) @@ -88,51 +88,51 @@ def __init__(self, filename, filename_info, filetype_info): self.finfo = filename_info self.lons = None self.lats = None - self.sensor = 'iasi' + self.sensor = "iasi" self.mda = {} - short_name = filename_info['platform_id'] - self.mda['platform_name'] = SHORT_NAMES.get(short_name, short_name) - self.mda['sensor'] = 'iasi' + short_name = filename_info["platform_id"] + self.mda["platform_name"] = SHORT_NAMES.get(short_name, short_name) + self.mda["sensor"] = "iasi" @property def start_time(self): """Get the start time.""" - return self.finfo['start_time'] + return self.finfo["start_time"] 
@property def end_time(self): """Get the end time.""" end_time = dt.datetime.combine(self.start_time.date(), - self.finfo['end_time'].time()) + self.finfo["end_time"].time()) if end_time < self.start_time: end_time += dt.timedelta(days=1) return end_time def get_dataset(self, key, info): """Load a dataset.""" - with h5py.File(self.filename, 'r') as fid: - LOGGER.debug('Reading %s.', key['name']) - if key['name'] in DSET_NAMES: + with h5py.File(self.filename, "r") as fid: + LOGGER.debug("Reading %s.", key["name"]) + if key["name"] in DSET_NAMES: m_data = read_dataset(fid, key) else: m_data = read_geo(fid, key) m_data.attrs.update(info) - m_data.attrs['sensor'] = self.sensor + m_data.attrs["sensor"] = self.sensor return m_data def read_dataset(fid, key): """Read dataset.""" - dsid = DSET_NAMES[key['name']] + dsid = DSET_NAMES[key["name"]] dset = fid["/PWLR/" + dsid] if dset.ndim == 3: - dims = ['y', 'x', 'level'] + dims = ["y", "x", "level"] else: - dims = ['y', 'x'] + dims = ["y", "x"] data = xr.DataArray(da.from_array(dset[()], chunks=CHUNK_SIZE), - name=key['name'], dims=dims).astype(np.float32) + name=key["name"], dims=dims).astype(np.float32) data = xr.where(data > 1e30, np.nan, data) dset_attrs = dict(dset.attrs) @@ -143,9 +143,9 @@ def read_dataset(fid, key): def read_geo(fid, key): """Read geolocation and related datasets.""" - dsid = GEO_NAMES[key['name']] + dsid = GEO_NAMES[key["name"]] add_epoch = False - if "time" in key['name']: + if "time" in key["name"]: days = fid["/L1C/" + dsid["day"]][()] msecs = fid["/L1C/" + dsid["msec"]][()] data = _form_datetimes(days, msecs) @@ -155,10 +155,10 @@ def read_geo(fid, key): data = fid["/L1C/" + dsid][()] dtype = np.float32 data = xr.DataArray(da.from_array(data, chunks=CHUNK_SIZE), - name=key['name'], dims=['y', 'x']).astype(dtype) + name=key["name"], dims=["y", "x"]).astype(dtype) if add_epoch: - data.attrs['sensing_time_epoch'] = EPOCH + data.attrs["sensing_time_epoch"] = EPOCH return data diff --git a/satpy/readers/iasi_l2_so2_bufr.py b/satpy/readers/iasi_l2_so2_bufr.py index 1bb1fbf0e0..500c2b29df 100644 --- a/satpy/readers/iasi_l2_so2_bufr.py +++ b/satpy/readers/iasi_l2_so2_bufr.py @@ -101,9 +101,9 @@ from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size -logger = logging.getLogger('IASIL2SO2BUFR') +logger = logging.getLogger("IASIL2SO2BUFR") CHUNK_SIZE = get_legacy_chunk_size() -data_center_dict = {3: 'METOP-1', 4: 'METOP-2', 5: 'METOP-3'} +data_center_dict = {3: "METOP-1", 4: "METOP-2", 5: "METOP-3"} class IASIL2SO2BUFR(BaseFileHandler): @@ -115,27 +115,27 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): start_time, end_time = self.get_start_end_date() - sc_id = self.get_attribute('satelliteIdentifier') + sc_id = self.get_attribute("satelliteIdentifier") self.metadata = {} - self.metadata['start_time'] = start_time - self.metadata['end_time'] = end_time - self.metadata['SpacecraftName'] = data_center_dict[sc_id] + self.metadata["start_time"] = start_time + self.metadata["end_time"] = end_time + self.metadata["SpacecraftName"] = data_center_dict[sc_id] @property def start_time(self): """Return the start time of data acqusition.""" - return self.metadata['start_time'] + return self.metadata["start_time"] @property def end_time(self): """Return the end time of data acquisition.""" - return self.metadata['end_time'] + return self.metadata["end_time"] @property def platform_name(self): """Return spacecraft name.""" - return 
'{}'.format(self.metadata['SpacecraftName']) + return "{}".format(self.metadata["SpacecraftName"]) def get_start_end_date(self): """Get the first and last date from the bufr file.""" @@ -146,13 +146,13 @@ def get_start_end_date(self): bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break - ec.codes_set(bufr, 'unpack', 1) - year = ec.codes_get(bufr, 'year') - month = ec.codes_get(bufr, 'month') - day = ec.codes_get(bufr, 'day') - hour = ec.codes_get(bufr, 'hour') - minute = ec.codes_get(bufr, 'minute') - second = ec.codes_get(bufr, 'second') + ec.codes_set(bufr, "unpack", 1) + year = ec.codes_get(bufr, "year") + month = ec.codes_get(bufr, "month") + day = ec.codes_get(bufr, "day") + hour = ec.codes_get(bufr, "hour") + minute = ec.codes_get(bufr, "minute") + second = ec.codes_get(bufr, "second") obs_time = datetime(year=year, month=month, day=day, hour=hour, minute=minute, second=second) @@ -181,7 +181,7 @@ def get_attribute(self, key): bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break - ec.codes_set(bufr, 'unpack', 1) + ec.codes_set(bufr, "unpack", 1) attr = ec.codes_get(bufr, key) ec.codes_release(bufr) @@ -198,7 +198,7 @@ def get_array(self, key): if bufr is None: break - ec.codes_set(bufr, 'unpack', 1) + ec.codes_set(bufr, "unpack", 1) values = ec.codes_get_array( bufr, key, float) @@ -225,12 +225,12 @@ def get_array(self, key): def get_dataset(self, dataset_id, dataset_info): """Get dataset using the BUFR key in dataset_info.""" - arr = self.get_array(dataset_info['key']) - arr[arr == dataset_info['fill_value']] = np.nan + arr = self.get_array(dataset_info["key"]) + arr[arr == dataset_info["fill_value"]] = np.nan - xarr = xr.DataArray(arr, dims=["y", "x"], name=dataset_info['name']) - xarr.attrs['sensor'] = 'IASI' - xarr.attrs['platform_name'] = self.platform_name + xarr = xr.DataArray(arr, dims=["y", "x"], name=dataset_info["name"]) + xarr.attrs["sensor"] = "IASI" + xarr.attrs["platform_name"] = self.platform_name xarr.attrs.update(dataset_info) return xarr diff --git a/satpy/readers/ici_l1b_nc.py b/satpy/readers/ici_l1b_nc.py index a4f15c3c35..d6ebea0c56 100644 --- a/satpy/readers/ici_l1b_nc.py +++ b/satpy/readers/ici_l1b_nc.py @@ -65,26 +65,26 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): filename, filename_info, filetype_info, auto_maskandscale=True, ) # Read the variables which are required for the calibration - measurement = 'data/measurement_data' - self._bt_conversion_a = self[f'{measurement}/bt_conversion_a'].values - self._bt_conversion_b = self[f'{measurement}/bt_conversion_b'].values - self._channel_cw = self[f'{measurement}/centre_wavenumber'].values + measurement = "data/measurement_data" + self._bt_conversion_a = self[f"{measurement}/bt_conversion_a"].values + self._bt_conversion_b = self[f"{measurement}/bt_conversion_b"].values + self._channel_cw = self[f"{measurement}/centre_wavenumber"].values self._n_samples = self[measurement].n_samples.size self._filetype_info = filetype_info - self.orthorect = filetype_info.get('orthorect', True) + self.orthorect = filetype_info.get("orthorect", True) @property def start_time(self): """Get observation start time.""" try: start_time = datetime.strptime( - self['/attr/sensing_start_time_utc'], - '%Y%m%d%H%M%S.%f', + self["/attr/sensing_start_time_utc"], + "%Y%m%d%H%M%S.%f", ) except ValueError: start_time = datetime.strptime( - self['/attr/sensing_start_time_utc'], - '%Y-%m-%d %H:%M:%S.%f', + self["/attr/sensing_start_time_utc"], + "%Y-%m-%d %H:%M:%S.%f", ) return start_time @@ -93,25 
+93,25 @@ def end_time(self): """Get observation end time.""" try: end_time = datetime.strptime( - self['/attr/sensing_end_time_utc'], - '%Y%m%d%H%M%S.%f', + self["/attr/sensing_end_time_utc"], + "%Y%m%d%H%M%S.%f", ) except ValueError: end_time = datetime.strptime( - self['/attr/sensing_end_time_utc'], - '%Y-%m-%d %H:%M:%S.%f', + self["/attr/sensing_end_time_utc"], + "%Y-%m-%d %H:%M:%S.%f", ) return end_time @property def platform_name(self): """Return platform name.""" - return self['/attr/spacecraft'] + return self["/attr/spacecraft"] @property def sensor(self): """Return sensor.""" - return self['/attr/instrument'] + return self["/attr/instrument"] @property def ssp_lon(self): @@ -208,7 +208,7 @@ def _interpolate_geo( lons_horn, lats_horn = satint.interpolate() lons[:, :, horn] = lons_horn lats[:, :, horn] = lats_horn - dims = ['y', 'x', third_dim_name] + dims = ["y", "x", third_dim_name] lon = xr.DataArray( lons, attrs=longitude.attrs, @@ -258,16 +258,16 @@ def _interpolate( """Interpolate from tie points to pixel points.""" try: if interpolation_type is InterpolationType.SOLAR_ANGLES: - var_key1 = self.filetype_info['solar_azimuth'] - var_key2 = self.filetype_info['solar_zenith'] + var_key1 = self.filetype_info["solar_azimuth"] + var_key2 = self.filetype_info["solar_zenith"] interp_method = self._interpolate_viewing_angle elif interpolation_type is InterpolationType.OBSERVATION_ANGLES: - var_key1 = self.filetype_info['observation_azimuth'] - var_key2 = self.filetype_info['observation_zenith'] + var_key1 = self.filetype_info["observation_azimuth"] + var_key2 = self.filetype_info["observation_zenith"] interp_method = self._interpolate_viewing_angle else: - var_key1 = self.filetype_info['longitude'] - var_key2 = self.filetype_info['latitude'] + var_key1 = self.filetype_info["longitude"] + var_key2 = self.filetype_info["latitude"] interp_method = self._interpolate_geo return interp_method( self[var_key1], @@ -275,7 +275,7 @@ def _interpolate( self._n_samples, ) except KeyError: - logger.warning(f'Datasets for {interpolation_type.name} interpolation not correctly defined in YAML file') # noqa: E501 + logger.warning(f"Datasets for {interpolation_type.name} interpolation not correctly defined in YAML file") # noqa: E501 return None, None @staticmethod @@ -308,18 +308,18 @@ def _calibrate(self, variable, dataset_info): original metadata. 
""" - calibration_name = dataset_info['calibration'] - if calibration_name == 'brightness_temperature': - chan_index = dataset_info['chan_index'] + calibration_name = dataset_info["calibration"] + if calibration_name == "brightness_temperature": + chan_index = dataset_info["chan_index"] cw = self._channel_cw[chan_index] a = self._bt_conversion_a[chan_index] b = self._bt_conversion_b[chan_index] calibrated_variable = self._calibrate_bt(variable, cw, a, b) calibrated_variable.attrs = variable.attrs - elif calibration_name == 'radiance': + elif calibration_name == "radiance": calibrated_variable = variable else: - raise ValueError("Unknown calibration %s for dataset %s" % (calibration_name, dataset_info['name'])) # noqa: E501 + raise ValueError("Unknown calibration %s for dataset %s" % (calibration_name, dataset_info["name"])) # noqa: E501 return calibrated_variable @@ -345,18 +345,18 @@ def _orthorectify(self, variable, orthorect_data_name): orthorect_data = orthorect_data.sel({dim: variable[dim]}) variable += np.degrees(orthorect_data.values / MEAN_EARTH_RADIUS) except KeyError: - logger.warning('Required dataset %s for orthorectification not available, skipping', orthorect_data_name) # noqa: E501 + logger.warning("Required dataset %s for orthorectification not available, skipping", orthorect_data_name) # noqa: E501 return variable @staticmethod def _standardize_dims(variable): """Standardize dims to y, x.""" - if 'n_scan' in variable.dims: - variable = variable.rename({'n_scan': 'y'}) - if 'n_samples' in variable.dims: - variable = variable.rename({'n_samples': 'x'}) - if variable.dims[0] == 'x': - variable = variable.transpose('y', 'x') + if "n_scan" in variable.dims: + variable = variable.rename({"n_scan": "y"}) + if "n_samples" in variable.dims: + variable = variable.rename({"n_samples": "x"}) + if variable.dims[0] == "x": + variable = variable.transpose("y", "x") return variable def _filter_variable(self, variable, dataset_info): @@ -385,12 +385,12 @@ def _get_third_dimension_name(variable): def _fetch_variable(self, var_key): """Fetch variable.""" if var_key in [ - 'longitude', - 'latitude', - 'observation_zenith', - 'observation_azimuth', - 'solar_zenith', - 'solar_azimuth', + "longitude", + "latitude", + "observation_zenith", + "observation_azimuth", + "solar_zenith", + "solar_azimuth", ] and getattr(self, var_key) is not None: variable = getattr(self, var_key).copy() else: @@ -399,18 +399,18 @@ def _fetch_variable(self, var_key): def get_dataset(self, dataset_id, dataset_info): """Get dataset using file_key in dataset_info.""" - var_key = dataset_info['file_key'] - logger.debug(f'Reading in file to get dataset with key {var_key}.') + var_key = dataset_info["file_key"] + logger.debug(f"Reading in file to get dataset with key {var_key}.") try: variable = self._fetch_variable(var_key) except KeyError: - logger.warning(f'Could not find key {var_key} in NetCDF file, no valid Dataset created') # noqa: E501 + logger.warning(f"Could not find key {var_key} in NetCDF file, no valid Dataset created") # noqa: E501 return None variable = self._filter_variable(variable, dataset_info) - if dataset_info.get('calibration') is not None: + if dataset_info.get("calibration") is not None: variable = self._calibrate(variable, dataset_info) if self.orthorect: - orthorect_data_name = dataset_info.get('orthorect_data', None) + orthorect_data_name = dataset_info.get("orthorect_data", None) if orthorect_data_name is not None: variable = self._orthorectify(variable, orthorect_data_name) variable = 
self._manage_attributes(variable, dataset_info) @@ -420,7 +420,7 @@ def get_dataset(self, dataset_id, dataset_info): def _manage_attributes(self, variable, dataset_info): """Manage attributes of the dataset.""" - variable.attrs.setdefault('units', None) + variable.attrs.setdefault("units", None) variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) return variable @@ -428,21 +428,21 @@ def _manage_attributes(self, variable, dataset_info): def _get_global_attributes(self): """Create a dictionary of global attributes.""" return { - 'filename': self.filename, - 'start_time': self.start_time, - 'end_time': self.end_time, - 'spacecraft_name': self.platform_name, - 'ssp_lon': self.ssp_lon, - 'sensor': self.sensor, - 'filename_start_time': self.filename_info['sensing_start_time'], - 'filename_end_time': self.filename_info['sensing_end_time'], - 'platform_name': self.platform_name, - 'quality_group': self._get_quality_attributes(), + "filename": self.filename, + "start_time": self.start_time, + "end_time": self.end_time, + "spacecraft_name": self.platform_name, + "ssp_lon": self.ssp_lon, + "sensor": self.sensor, + "filename_start_time": self.filename_info["sensing_start_time"], + "filename_end_time": self.filename_info["sensing_end_time"], + "platform_name": self.platform_name, + "quality_group": self._get_quality_attributes(), } def _get_quality_attributes(self): """Get quality attributes.""" - quality_group = self['quality'] + quality_group = self["quality"] quality_dict = {} for key in quality_group: # Add the values (as Numpy array) of each variable in the group diff --git a/satpy/readers/insat3d_img_l1b_h5.py b/satpy/readers/insat3d_img_l1b_h5.py index fb0697be45..a7dcf371cc 100644 --- a/satpy/readers/insat3d_img_l1b_h5.py +++ b/satpy/readers/insat3d_img_l1b_h5.py @@ -120,13 +120,13 @@ class Insat3DIMGL1BH5FileHandler(BaseFileHandler): @property def start_time(self): """Get the start time.""" - start_time = datetime.strptime(self.datatree.attrs['Acquisition_Start_Time'], '%d-%b-%YT%H:%M:%S') + start_time = datetime.strptime(self.datatree.attrs["Acquisition_Start_Time"], "%d-%b-%YT%H:%M:%S") return start_time @property def end_time(self): """Get the end time.""" - end_time = datetime.strptime(self.datatree.attrs['Acquisition_End_Time'], '%d-%b-%YT%H:%M:%S') + end_time = datetime.strptime(self.datatree.attrs["Acquisition_End_Time"], "%d-%b-%YT%H:%M:%S") return end_time @cached_property @@ -154,7 +154,7 @@ def get_dataset(self, ds_id, ds_info): darr = ds["IMG_" + ds_id["name"] + calibration] - nlat, nlon = ds.attrs['Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude'] + nlat, nlon = ds.attrs["Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude"] darr.attrs["orbital_parameters"] = dict(satellite_nominal_longitude=float(nlon), satellite_nominal_latitude=float(nlat), satellite_nominal_altitude=float(ds.attrs["Nominal_Altitude(km)"]), @@ -183,20 +183,20 @@ def get_area_def(self, ds_id): b = 6356752.314245 pdict = { - 'cfac': cfac, - 'lfac': lfac, - 'coff': cols / 2, - 'loff': lines / 2, - 'ncols': cols, - 'nlines': lines, - 'scandir': 'N2S', - 'a': a, - 'b': b, - 'h': h, - 'ssp_lon': 82.0, - 'a_name': "insat3d82", - 'a_desc': "insat3d82", - 'p_id': 'geosmsg' + "cfac": cfac, + "lfac": lfac, + "coff": cols / 2, + "loff": lines / 2, + "ncols": cols, + "nlines": lines, + "scandir": "N2S", + "a": a, + "b": b, + "h": h, + "ssp_lon": 82.0, + "a_name": "insat3d82", + "a_desc": "insat3d82", + "p_id": "geosmsg" } area_extent = get_area_extent(pdict) adef 
= get_area_definition(pdict, area_extent) diff --git a/satpy/readers/li_base_nc.py b/satpy/readers/li_base_nc.py index 57e234b910..848306e77c 100644 --- a/satpy/readers/li_base_nc.py +++ b/satpy/readers/li_base_nc.py @@ -210,14 +210,14 @@ def __init__(self, filename, filename_info, filetype_info, cache_handle=True): # Note: the default dict assignment is need to avoid error when using the fake # netcdf4 file handler in mock unit tests: self._xarray_kwargs = getattr(self, "_xarray_kwargs", {}) - self._xarray_kwargs['decode_times'] = False - self._xarray_kwargs['mask_and_scale'] = False + self._xarray_kwargs["decode_times"] = False + self._xarray_kwargs["mask_and_scale"] = False # Processing level that should be set by derived classes. - self.processing_level = filetype_info.get('processing_level', 'L0') + self.processing_level = filetype_info.get("processing_level", "L0") # This class will only provide support for the LI sensor: - self.sensors = {'li'} + self.sensors = {"li"} # Set of dataset names explicitly provided by this file handler: # This set is required to filter the retrieval of datasets later in the @@ -234,19 +234,19 @@ def __init__(self, filename, filename_info, filetype_info, cache_handle=True): # directly here: self.provided_datasets = set() - self.ds_desc = filetype_info['file_desc'] + self.ds_desc = filetype_info["file_desc"] # Store the extra infos available on specific variables: # Write the correct product type here: - self.product_type = self.ds_desc['product_type'] + self.product_type = self.ds_desc["product_type"] logger.debug("Product type is: %s", self.product_type) - self.variable_transforms = self.ds_desc.get('variable_transforms', {}) + self.variable_transforms = self.ds_desc.get("variable_transforms", {}) # Store the pattern for the default swath coordinates: # Note that we should always have this swath coordinates entry now: - self.swath_coordinates = self.ds_desc.get('swath_coordinates', {}) - patterns = self.swath_coordinates.get('variable_patterns', []) - self.swath_coordinates['patterns'] = [re.compile(pstr) for pstr in patterns] + self.swath_coordinates = self.ds_desc.get("swath_coordinates", {}) + patterns = self.swath_coordinates.get("variable_patterns", []) + self.swath_coordinates["patterns"] = [re.compile(pstr) for pstr in patterns] # check if the current product is in an accumulation grid self.prod_in_accumulation_grid = self.is_prod_in_accumulation_grid() @@ -264,8 +264,8 @@ def __init__(self, filename, filename_info, filetype_info, cache_handle=True): # Ordered list of transform operations supported in this file handler: # those transforms are applied if requested in the 'apply_transforms' method below - self.transform_names = ['use_rescaling', 'seconds_to_timedelta', 'milliseconds_to_timedelta', - 'seconds_to_datetime', 'broadcast_to', 'accumulate_index_offset'] + self.transform_names = ["use_rescaling", "seconds_to_timedelta", "milliseconds_to_timedelta", + "seconds_to_datetime", "broadcast_to", "accumulate_index_offset"] # store internal variables self.internal_variables = {} @@ -276,12 +276,12 @@ def __init__(self, filename, filename_info, filetype_info, cache_handle=True): @property def start_time(self): """Get the start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get the end time.""" - return self.filename_info['end_time'] + return self.filename_info["end_time"] @property def sensor_names(self): @@ -290,7 +290,7 @@ def sensor_names(self): def 
is_prod_in_accumulation_grid(self): """Check if the current product is an accumulated product in geos grid.""" - in_grid = self.swath_coordinates.get('projection', None) == 'mtg_geos_projection' + in_grid = self.swath_coordinates.get("projection", None) == "mtg_geos_projection" return in_grid def get_latlon_names(self): @@ -298,14 +298,14 @@ def get_latlon_names(self): Use default 'latitude' / 'longitude' if not specified. """ - lon_name = self.swath_coordinates.setdefault('longitude', 'longitude') - lat_name = self.swath_coordinates.setdefault('latitude', 'latitude') + lon_name = self.swath_coordinates.setdefault("longitude", "longitude") + lat_name = self.swath_coordinates.setdefault("latitude", "latitude") return lat_name, lon_name def get_projection_config(self): """Retrieve the projection configuration details.""" # We retrieve the projection variable name directly from our swath settings: - proj_var = self.swath_coordinates['projection'] + proj_var = self.swath_coordinates["projection"] geos_proj = self.get_measured_variable(proj_var, fill_value=None) # cast projection attributes to float/str: @@ -317,12 +317,12 @@ def get_projection_config(self): sweep = str(geos_proj.attrs["sweep_angle_axis"]) # use a (semi-major axis) and rf (reverse flattening) to define ellipsoid as recommended by EUM - proj_dict = {'a': major_axis, - 'lon_0': lon_0, - 'h': point_height, + proj_dict = {"a": major_axis, + "lon_0": lon_0, + "h": point_height, "rf": inv_flattening, - 'proj': 'geos', - 'units': 'm', + "proj": "geos", + "units": "m", "sweep": sweep} return proj_dict @@ -330,10 +330,10 @@ def get_projection_config(self): def get_daskified_lon_lat(self, proj_dict): """Get daskified lon and lat array using map_blocks.""" # Get our azimuth/elevation arrays, - azimuth = self.get_measured_variable(self.swath_coordinates['azimuth']) + azimuth = self.get_measured_variable(self.swath_coordinates["azimuth"]) azimuth = self.apply_use_rescaling(azimuth) - elevation = self.get_measured_variable(self.swath_coordinates['elevation']) + elevation = self.get_measured_variable(self.swath_coordinates["elevation"]) elevation = self.apply_use_rescaling(elevation) # Daskify inverse projection computation: @@ -355,9 +355,9 @@ def generate_coords_from_scan_angles(self): # Finally, we should store those arrays as internal variables for later retrieval as # standard datasets: self.internal_variables[lon_name] = xr.DataArray( - da.asarray(lon), dims=['y'], attrs={'standard_name': 'longitude'}) + da.asarray(lon), dims=["y"], attrs={"standard_name": "longitude"}) self.internal_variables[lat_name] = xr.DataArray( - da.asarray(lat), dims=['y'], attrs={'standard_name': 'latitude'}) + da.asarray(lat), dims=["y"], attrs={"standard_name": "latitude"}) def inverse_projection(self, azimuth, elevation, proj_dict): """Compute inverse projection.""" @@ -365,7 +365,7 @@ def inverse_projection(self, azimuth, elevation, proj_dict): projection = Proj(proj_dict) # Retrieve the point height from the projection config: - point_height = proj_dict['h'] + point_height = proj_dict["h"] # Convert scan angles to projection coordinates by multiplying with perspective point height azimuth = azimuth.values * point_height @@ -444,7 +444,7 @@ def apply_fill_value(self, arr, fill_value): if fill_value is not None: if np.isnan(fill_value): fill_value = np.float32(np.nan) - arr = arr.where(arr != arr.attrs.get('_FillValue'), fill_value) + arr = arr.where(arr != arr.attrs.get("_FillValue"), fill_value) return arr def get_variable_search_paths(self, var_paths): 
@@ -461,25 +461,25 @@ def add_provided_dataset(self, ds_infos): """Add a provided dataset to our internal list.""" # Check if we have extra infos for that variable: # Note that if available we should use the alias name instead here: - vname = ds_infos["alias_name"] if 'alias_name' in ds_infos else ds_infos["variable_name"] + vname = ds_infos["alias_name"] if "alias_name" in ds_infos else ds_infos["variable_name"] self.check_variable_extra_info(ds_infos, vname) # We check here if we should include the default coordinates on that dataset: - if self.swath_coordinates is not None and 'coordinates' not in ds_infos: + if self.swath_coordinates is not None and "coordinates" not in ds_infos: # Check if the variable corresponding to this dataset will match one of the valid patterns # for the swath usage: - if any([p.search(vname) is not None for p in self.swath_coordinates['patterns']]): + if any([p.search(vname) is not None for p in self.swath_coordinates["patterns"]]): # Get the target coordinate names, applying the sector name as needed: lat_coord_name, lon_coord_name = self.get_coordinate_names(ds_infos) # Ensure we do not try to add the coordinates on the coordinates themself: - dname = ds_infos['name'] + dname = ds_infos["name"] if dname != lat_coord_name and dname != lon_coord_name: - ds_infos['coordinates'] = [lon_coord_name, lat_coord_name] + ds_infos["coordinates"] = [lon_coord_name, lat_coord_name] self.dataset_infos.append(ds_infos) - self.provided_datasets.add(ds_infos['name']) + self.provided_datasets.add(ds_infos["name"]) def check_variable_extra_info(self, ds_infos, vname): """Check if we have extra infos for that variable.""" @@ -492,8 +492,8 @@ def check_variable_extra_info(self, ds_infos, vname): def get_coordinate_names(self, ds_infos): """Get the target coordinate names, applying the sector name as needed.""" lat_coord_name, lon_coord_name = self.get_latlon_names() - if 'sector_name' in ds_infos: - sname = ds_infos['sector_name'] + if "sector_name" in ds_infos: + sname = ds_infos["sector_name"] lat_coord_name = lat_coord_name.replace("{sector_name}", sname) lon_coord_name = lon_coord_name.replace("{sector_name}", sname) return lat_coord_name, lon_coord_name @@ -501,7 +501,7 @@ def get_coordinate_names(self, ds_infos): def get_dataset_infos(self, dname): """Retrieve the dataset infos corresponding to one of the registered datasets.""" for dsinfos in self.dataset_infos: - if dsinfos['name'] == dname: + if dsinfos["name"] == dname: return dsinfos # nothing found. @@ -514,15 +514,15 @@ def register_dataset(self, var_name, oc_name=None): ds_name = var_name if oc_name is None else f"{var_name}_{oc_name}_sector" ds_info = { - 'name': ds_name, - 'variable_name': var_name, - 'sensor': 'li', - 'file_type': self.filetype_info['file_type'] + "name": ds_name, + "variable_name": var_name, + "sensor": "li", + "file_type": self.filetype_info["file_type"] } # add the sector name: if oc_name is not None: - ds_info['sector_name'] = oc_name + ds_info["sector_name"] = oc_name self.add_provided_dataset(ds_info) @@ -535,7 +535,7 @@ def register_available_datasets(self): self.dataset_infos = [] # Assign the search paths for this product type: - self.search_paths = self.ds_desc.get('search_paths', []) + self.search_paths = self.ds_desc.get("search_paths", []) # Register our coordinates from azimuth/elevation data # if the product is accumulated @@ -553,17 +553,17 @@ def register_available_datasets(self): def register_variable_datasets(self): """Register all the available raw (i.e. 
not in sectors).""" - if 'variables' in self.ds_desc: - all_vars = self.ds_desc['variables'] + if "variables" in self.ds_desc: + all_vars = self.ds_desc["variables"] # No sector to handle so we write simple datasets from the variables: for var_name in all_vars: self.register_dataset(var_name) def register_sector_datasets(self): """Register all the available sector datasets.""" - if 'sectors' in self.ds_desc: - sectors = self.ds_desc['sectors'] - sector_vars = self.ds_desc['sector_variables'] + if "sectors" in self.ds_desc: + sectors = self.ds_desc["sectors"] + sector_vars = self.ds_desc["sector_variables"] # We should generate the datasets per sector: for oc_name in sectors: for var_name in sector_vars: @@ -590,16 +590,16 @@ def apply_use_rescaling(self, data_array, ds_info=None): # Check if we have the scaling elements: attribs = data_array.attrs - if 'scale_factor' in attribs or 'scaling_factor' in attribs or 'add_offset' in attribs: + if "scale_factor" in attribs or "scaling_factor" in attribs or "add_offset" in attribs: # TODO remove scaling_factor fallback after issue in NetCDF is fixed - scale_factor = attribs.setdefault('scale_factor', attribs.get('scaling_factor', 1)) - add_offset = attribs.setdefault('add_offset', 0) + scale_factor = attribs.setdefault("scale_factor", attribs.get("scaling_factor", 1)) + add_offset = attribs.setdefault("add_offset", 0) data_array = (data_array * scale_factor) + add_offset # rescale the valid range accordingly - if 'valid_range' in attribs.keys(): - attribs['valid_range'] = attribs['valid_range'] * scale_factor + add_offset + if "valid_range" in attribs.keys(): + attribs["valid_range"] = attribs["valid_range"] * scale_factor + add_offset data_array.attrs.update(attribs) @@ -607,11 +607,11 @@ def apply_use_rescaling(self, data_array, ds_info=None): def apply_broadcast_to(self, data_array, ds_info): """Apply the broadcast_to transform on a given array.""" - ref_var = self.get_transform_reference('broadcast_to', ds_info) + ref_var = self.get_transform_reference("broadcast_to", ds_info) - logger.debug("Broascasting %s to shape %s", ds_info['name'], ref_var.shape) + logger.debug("Broascasting %s to shape %s", ds_info["name"], ref_var.shape) new_array = da.broadcast_to(data_array, ref_var.shape) - dims = data_array.dims if data_array.ndim > 0 else ('y',) + dims = data_array.dims if data_array.ndim > 0 else ("y",) data_array = xr.DataArray(new_array, coords=data_array.coords, dims=dims, name=data_array.name, attrs=data_array.attrs) return data_array @@ -621,42 +621,42 @@ def apply_accumulate_index_offset(self, data_array, ds_info): # retrieve the __index_offset here, or create it if missing: # And keep track of the shared ds_info dict to reset it later in combine_info() self.current_ds_info = ds_info - offset = ds_info.setdefault('__index_offset', 0) + offset = ds_info.setdefault("__index_offset", 0) - ref_var = self.get_transform_reference('accumulate_index_offset', ds_info) + ref_var = self.get_transform_reference("accumulate_index_offset", ds_info) # Apply the current index_offset already reached on the indices we have in the current dataset: data_array = data_array + offset # Now update the __index_offset adding the number of elements in the reference array: - ds_info['__index_offset'] = offset + ref_var.size + ds_info["__index_offset"] = offset + ref_var.size logger.debug("Adding %d elements for index offset, new value is: %d", - ref_var.size, ds_info['__index_offset']) + ref_var.size, ds_info["__index_offset"]) return data_array def 
apply_seconds_to_datetime(self, data_array, ds_info): """Apply the seconds_to_datetime transform on a given array.""" # Retrieve the epoch timestamp: - epoch_ts = np.datetime64('2000-01-01T00:00:00.000000') + epoch_ts = np.datetime64("2000-01-01T00:00:00.000000") # And add our values as delta times in seconds: # note that we use a resolution of 1ns here: - data_array = epoch_ts + (data_array * 1e9).astype('timedelta64[ns]') + data_array = epoch_ts + (data_array * 1e9).astype("timedelta64[ns]") return data_array def apply_seconds_to_timedelta(self, data_array, _ds_info): """Apply the seconds_to_timedelta transform on a given array.""" # Apply the type conversion in place in the data_array: # note that we use a resolution of 1ns here: - data_array = (data_array * 1e9).astype('timedelta64[ns]') + data_array = (data_array * 1e9).astype("timedelta64[ns]") return data_array def apply_milliseconds_to_timedelta(self, data_array, _ds_info): """Apply the milliseconds_to_timedelta transform on a given array.""" # Apply the type conversion in place in the data_array: # note that we use a resolution of 1ns here: - data_array = (data_array * 1e6).astype('timedelta64[ns]') + data_array = (data_array * 1e6).astype("timedelta64[ns]") return data_array def get_transform_reference(self, transform_name, ds_info): @@ -665,7 +665,7 @@ def get_transform_reference(self, transform_name, ds_info): if "{sector_name}" in var_path: # We really expect to have a sector name for that variable: - var_path = var_path.replace("{sector_name}", ds_info['sector_name']) + var_path = var_path.replace("{sector_name}", ds_info["sector_name"]) # get the variable on that path: ref_var = self.get_measured_variable(var_path) @@ -679,7 +679,7 @@ def apply_transforms(self, data_array, ds_info): for tname in self.transform_names: if tname in ds_info: # Retrieve the transform function: - transform = getattr(self, f'apply_{tname}') + transform = getattr(self, f"apply_{tname}") # Apply the transformation on the dataset: data_array = transform(data_array, ds_info) return data_array @@ -690,7 +690,7 @@ def combine_info(self, all_infos): This is to be able to reset our __index_offset attribute in the shared ds_info currently being updated. 
""" if self.current_ds_info is not None: - del self.current_ds_info['__index_offset'] + del self.current_ds_info["__index_offset"] self.current_ds_info = None return super().combine_info(all_infos) @@ -698,10 +698,10 @@ def combine_info(self, all_infos): def get_transformed_dataset(self, ds_info): """Retrieve a dataset with all transformations applied on it.""" # Extract base variable name: - vname = ds_info['variable_name'] + vname = ds_info["variable_name"] # Note that the sector name might be None below: - sname = ds_info.get('sector_name', None) + sname = ds_info.get("sector_name", None) # Use the sector name as prefix for the variable path if applicable: var_paths = vname if sname is None else f"{sname}/{vname}" @@ -717,17 +717,17 @@ def validate_array_dimensions(self, data_array, ds_info=None): # in order to potentially support data array combination in a satpy scene: if data_array.ndim == 0: # If we have no dimension, we should force creating one here: - data_array = data_array.expand_dims({'y': 1}) + data_array = data_array.expand_dims({"y": 1}) - data_array = data_array.rename({data_array.dims[0]: 'y'}) + data_array = data_array.rename({data_array.dims[0]: "y"}) return data_array def update_array_attributes(self, data_array, ds_info): """Inject the attributes from the ds_info structure into the final data array, ignoring the internal entries.""" # ignore some internal processing only entries: - ignored_attribs = ["__index_offset", "broadcast_to", 'accumulate_index_offset', - 'seconds_to_timedelta', 'seconds_to_datetime'] + ignored_attribs = ["__index_offset", "broadcast_to", "accumulate_index_offset", + "seconds_to_timedelta", "seconds_to_datetime"] for key, value in ds_info.items(): if key not in ignored_attribs: data_array.attrs[key] = value @@ -738,13 +738,13 @@ def get_dataset(self, dataset_id, ds_info=None): """Get a dataset.""" # Retrieve default infos if missing: if ds_info is None: - ds_info = self.get_dataset_infos(dataset_id['name']) + ds_info = self.get_dataset_infos(dataset_id["name"]) # check for potential error: if ds_info is None: raise KeyError(f"No dataset registered for {dataset_id}") - ds_name = ds_info['name'] + ds_name = ds_info["name"] # In case this dataset name is not explicitly provided by this file handler then we # should simply return None. 
if ds_name not in self.provided_datasets: diff --git a/satpy/readers/li_l2_nc.py b/satpy/readers/li_l2_nc.py index 569dc2bf51..4fe0826380 100644 --- a/satpy/readers/li_l2_nc.py +++ b/satpy/readers/li_l2_nc.py @@ -71,14 +71,14 @@ def get_area_def(self, dsid): """Compute area definition for a dataset, only supported for accumulated products.""" var_with_swath_coord = self.is_var_with_swath_coord(dsid) if var_with_swath_coord and self.with_area_def: - return get_area_def('mtg_fci_fdss_2km') + return get_area_def("mtg_fci_fdss_2km") - raise NotImplementedError('Area definition is not supported for accumulated products.') + raise NotImplementedError("Area definition is not supported for accumulated products.") def is_var_with_swath_coord(self, dsid): """Check if the variable corresponding to this dataset is listed as variable with swath coordinates.""" # since the patterns are compiled to regex we use the search() method below to find matches - with_swath_coords = any([p.search(dsid['name']) is not None for p in self.swath_coordinates['patterns']]) + with_swath_coords = any([p.search(dsid["name"]) is not None for p in self.swath_coordinates["patterns"]]) return with_swath_coords def get_array_on_fci_grid(self, data_array: xr.DataArray): @@ -92,8 +92,8 @@ def get_array_on_fci_grid(self, data_array: xr.DataArray): # Note that x and y have origin in the south-west corner of the image # and start with index 1. - rows = self.get_measured_variable('y') - cols = self.get_measured_variable('x') + rows = self.get_measured_variable("y") + cols = self.get_measured_variable("x") attrs = data_array.attrs rows, cols = da.compute(rows, cols) @@ -110,7 +110,7 @@ def get_array_on_fci_grid(self, data_array: xr.DataArray): flattened_result[rows * LI_GRID_SHAPE[0] + cols] = data_array # ... reshape to final 2D grid data_2d = da.reshape(flattened_result, LI_GRID_SHAPE) - xarr = xr.DataArray(da.asarray(data_2d, CHUNK_SIZE), dims=('y', 'x')) + xarr = xr.DataArray(da.asarray(data_2d, CHUNK_SIZE), dims=("y", "x")) xarr.attrs = attrs return xarr diff --git a/satpy/readers/maia.py b/satpy/readers/maia.py index 941bf34208..75591c59d5 100644 --- a/satpy/readers/maia.py +++ b/satpy/readers/maia.py @@ -48,69 +48,69 @@ def __init__(self, filename, filename_info, filetype_info): filename, filename_info, filetype_info) self.finfo = filename_info # set the day date part for end_time from the file name - self.finfo['end_time'] = self.finfo['end_time'].replace( - year=self.finfo['start_time'].year, - month=self.finfo['start_time'].month, - day=self.finfo['start_time'].day) - if self.finfo['end_time'] < self.finfo['start_time']: - myday = self.finfo['end_time'].day - self.finfo['end_time'] = self.finfo['end_time'].replace( + self.finfo["end_time"] = self.finfo["end_time"].replace( + year=self.finfo["start_time"].year, + month=self.finfo["start_time"].month, + day=self.finfo["start_time"].day) + if self.finfo["end_time"] < self.finfo["start_time"]: + myday = self.finfo["end_time"].day + self.finfo["end_time"] = self.finfo["end_time"].replace( day=myday + 1) self.selected = None self.read(self.filename) def read(self, filename): """Read the file.""" - self.h5 = h5py.File(filename, 'r') + self.h5 = h5py.File(filename, "r") missing = -9999. - self.Lat = da.from_array(self.h5[u'DATA/Latitude'], chunks=CHUNK_SIZE) / 10000. - self.Lon = da.from_array(self.h5[u'DATA/Longitude'], chunks=CHUNK_SIZE) / 10000. + self.Lat = da.from_array(self.h5[u"DATA/Latitude"], chunks=CHUNK_SIZE) / 10000. 
+ self.Lon = da.from_array(self.h5[u"DATA/Longitude"], chunks=CHUNK_SIZE) / 10000. self.selected = (self.Lon > missing) self.file_content = {} - for key in self.h5['DATA'].keys(): - self.file_content[key] = da.from_array(self.h5[u'DATA/' + key], chunks=CHUNK_SIZE) - for key in self.h5[u'HEADER'].keys(): - self.file_content[key] = self.h5[u'HEADER/' + key][:] + for key in self.h5["DATA"].keys(): + self.file_content[key] = da.from_array(self.h5[u"DATA/" + key], chunks=CHUNK_SIZE) + for key in self.h5[u"HEADER"].keys(): + self.file_content[key] = self.h5[u"HEADER/" + key][:] # Cloud Mask on pixel mask = 2**0 + 2**1 + 2**2 - lst = self.file_content[u'CloudMask'] & mask + lst = self.file_content[u"CloudMask"] & mask lst = lst / 2**0 self.file_content[u"cma"] = lst # Cloud Mask confidence mask = 2**5 + 2**6 - lst = self.file_content[u'CloudMask'] & mask + lst = self.file_content[u"CloudMask"] & mask lst = lst / 2**5 self.file_content[u"cma_conf"] = lst # Cloud Mask Quality mask = 2**3 + 2**4 - lst = self.file_content[u'CloudMask'] & mask + lst = self.file_content[u"CloudMask"] & mask lst = lst / 2**3 - self.file_content[u'cma_qual'] = lst + self.file_content[u"cma_qual"] = lst # Opaque Cloud mask = 2**21 - lst = self.file_content[u'CloudMask'] & mask + lst = self.file_content[u"CloudMask"] & mask lst = lst / 2**21 - self.file_content[u'opaq_cloud'] = lst + self.file_content[u"opaq_cloud"] = lst # land /water Background mask = 2**15 + 2**16 + 2**17 - lst = self.file_content[u'CloudMask'] & mask + lst = self.file_content[u"CloudMask"] & mask lst = lst / 2**15 - self.file_content[u'land_water_background'] = lst + self.file_content[u"land_water_background"] = lst # CT (Actual CloudType) mask = 2**4 + 2**5 + 2**6 + 2**7 + 2**8 - classif = self.file_content[u'CloudType'] & mask + classif = self.file_content[u"CloudType"] & mask classif = classif / 2**4 - self.file_content['ct'] = classif.astype(np.uint8) + self.file_content["ct"] = classif.astype(np.uint8) def get_platform(self, platform): """Get the platform.""" - if self.file_content['sat_id'] in (14,): + if self.file_content["sat_id"] in (14,): return "viirs" else: return "avhrr" @@ -118,26 +118,26 @@ def get_platform(self, platform): @property def start_time(self): """Get the start time.""" - return self.finfo['start_time'] + return self.finfo["start_time"] @property def end_time(self): """Get the end time.""" - return self.finfo['end_time'] + return self.finfo["end_time"] def get_dataset(self, key, info, out=None): """Get a dataset from the file.""" - logger.debug("Reading %s.", key['name']) - values = self.file_content[key['name']] + logger.debug("Reading %s.", key["name"]) + values = self.file_content[key["name"]] selected = np.array(self.selected) - if key['name'] in ("Latitude", "Longitude"): + if key["name"] in ("Latitude", "Longitude"): values = values / 10000. - if key['name'] in ('Tsurf', 'CloudTopPres', 'CloudTopTemp'): + if key["name"] in ("Tsurf", "CloudTopPres", "CloudTopTemp"): goods = values > -9998. selected = np.array(selected & goods) - if key['name'] in ('Tsurf', "Alt_surface", "CloudTopTemp"): + if key["name"] in ("Tsurf", "Alt_surface", "CloudTopTemp"): values = values / 100. - if key['name'] in ("CloudTopPres"): + if key["name"] in ("CloudTopPres"): values = values / 10. 
else: selected = self.selected @@ -145,10 +145,10 @@ def get_dataset(self, key, info, out=None): fill_value = np.nan - if key['name'] == 'ct': + if key["name"] == "ct": fill_value = 0 - info['_FillValue'] = 0 - ds = DataArray(values, dims=['y', 'x'], attrs=info).where(selected, fill_value) + info["_FillValue"] = 0 + ds = DataArray(values, dims=["y", "x"], attrs=info).where(selected, fill_value) # update dataset info with file_info return ds diff --git a/satpy/readers/meris_nc_sen3.py b/satpy/readers/meris_nc_sen3.py index 61fc761f50..fa69dad2cc 100644 --- a/satpy/readers/meris_nc_sen3.py +++ b/satpy/readers/meris_nc_sen3.py @@ -40,7 +40,7 @@ class NCMERISCal(NCOLCIBase): def __init__(self, filename, filename_info, filetype_info): """Init the meris reader base.""" super(NCMERISCal, self).__init__(filename, filename_info, filetype_info) - self.sensor = 'meris' + self.sensor = "meris" class NCMERISGeo(NCOLCIBase): @@ -49,7 +49,7 @@ class NCMERISGeo(NCOLCIBase): def __init__(self, filename, filename_info, filetype_info): """Init the meris reader base.""" super(NCMERISGeo, self).__init__(filename, filename_info, filetype_info) - self.sensor = 'meris' + self.sensor = "meris" class NCMERIS2(NCOLCI2): @@ -58,24 +58,24 @@ class NCMERIS2(NCOLCI2): def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(NCMERIS2, self).__init__(filename, filename_info, filetype_info) - self.sensor = 'meris' - self.reflectance_prefix = 'M' - self.reflectance_suffix = '_rho_w' + self.sensor = "meris" + self.reflectance_prefix = "M" + self.reflectance_suffix = "_rho_w" def getbitmask(self, wqsf, items=None): """Get the bitmask. Experimental default mask.""" - items = items or ['SEA_ICE', 'MEGLINT', 'HIGHGLINT', - 'HAZE_OVER_WATER', 'WHITECAPS', 'AC_FAIL', 'WHITE_SCATT', - 'LOWRW', 'HIGHRW', 'OUT_OF_RANGE_AAC', 'OUT_OF_SCOPE_AAC', - 'OUT_OF_RANGE_OC_NN', 'OUT_OF_SCOPE_OC_NN', - 'OUT_OF_RANGE_CHL_OC4ME_INPUT', 'OUT_OF_RANGE_CHL_OC4ME'] + items = items or ["SEA_ICE", "MEGLINT", "HIGHGLINT", + "HAZE_OVER_WATER", "WHITECAPS", "AC_FAIL", "WHITE_SCATT", + "LOWRW", "HIGHRW", "OUT_OF_RANGE_AAC", "OUT_OF_SCOPE_AAC", + "OUT_OF_RANGE_OC_NN", "OUT_OF_SCOPE_OC_NN", + "OUT_OF_RANGE_CHL_OC4ME_INPUT", "OUT_OF_RANGE_CHL_OC4ME"] bflags = BitFlags( wqsf, - flag_list=['SEA_ICE', 'MEGLINT', 'HIGHGLINT', 'CASE2_S', 'CASE2_ANOM', - 'HAZE_OVER_WATER', 'WHITECAPS', 'AC_FAIL', 'BPAC_ON', 'WHITE_SCATT', - 'LOWRW', 'HIGHRW', 'OUT_OF_RANGE_AAC', 'OUT_OF_SCOPE_AAC', - 'OUT_OF_RANGE_OC_NN', 'OUT_OF_SCOPE_OC_NN', - 'OUT_OF_RANGE_CHL_OC4ME_INPUT', 'OUT_OF_RANGE_CHL_OC4ME'], + flag_list=["SEA_ICE", "MEGLINT", "HIGHGLINT", "CASE2_S", "CASE2_ANOM", + "HAZE_OVER_WATER", "WHITECAPS", "AC_FAIL", "BPAC_ON", "WHITE_SCATT", + "LOWRW", "HIGHRW", "OUT_OF_RANGE_AAC", "OUT_OF_SCOPE_AAC", + "OUT_OF_RANGE_OC_NN", "OUT_OF_SCOPE_OC_NN", + "OUT_OF_RANGE_CHL_OC4ME_INPUT", "OUT_OF_RANGE_CHL_OC4ME"], ) return reduce(np.logical_or, [bflags[item] for item in items]) @@ -86,7 +86,7 @@ class NCMERISAngles(NCOLCIAngles): def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(NCMERISAngles, self).__init__(filename, filename_info, filetype_info) - self.sensor = 'meris' + self.sensor = "meris" class NCMERISMeteo(NCOLCIMeteo): @@ -95,4 +95,4 @@ class NCMERISMeteo(NCOLCIMeteo): def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(NCMERISMeteo, self).__init__(filename, filename_info, filetype_info) - self.sensor = 'meris' + self.sensor = "meris" diff --git 
a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index b0225ebcb4..905db0654f 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -44,25 +44,25 @@ def _strptime(self, date_attr, time_attr): time = self[time_attr] # "18:27:39.720" # cuts off microseconds because of unknown meaning # is .720 == 720 microseconds or 720000 microseconds - return datetime.strptime(date + " " + time.split('.')[0], "%Y-%m-%d %H:%M:%S") + return datetime.strptime(date + " " + time.split(".")[0], "%Y-%m-%d %H:%M:%S") @property def start_time(self): """Time for first observation.""" - return self._strptime('/attr/Observing Beginning Date', '/attr/Observing Beginning Time') + return self._strptime("/attr/Observing Beginning Date", "/attr/Observing Beginning Time") @property def end_time(self): """Time for final observation.""" - return self._strptime('/attr/Observing Ending Date', '/attr/Observing Ending Time') + return self._strptime("/attr/Observing Ending Date", "/attr/Observing Ending Time") @property def sensor_name(self): """Map sensor name to Satpy 'standard' sensor names.""" - file_sensor = self['/attr/Sensor Identification Code'] + file_sensor = self["/attr/Sensor Identification Code"] sensor = { - 'MERSI': 'mersi-2', - 'MERSI LL': 'mersi-ll', + "MERSI": "mersi-2", + "MERSI LL": "mersi-ll", }.get(file_sensor, file_sensor) return sensor @@ -76,8 +76,8 @@ def _get_single_slope_intercept(self, slope, intercept, cal_index): def _get_coefficients(self, cal_key, cal_index): coeffs = self[cal_key][cal_index] - slope = coeffs.attrs.pop('Slope', None) - intercept = coeffs.attrs.pop('Intercept', None) + slope = coeffs.attrs.pop("Slope", None) + intercept = coeffs.attrs.pop("Intercept", None) if slope is not None: slope, intercept = self._get_single_slope_intercept( slope, intercept, cal_index) @@ -86,37 +86,37 @@ def _get_coefficients(self, cal_key, cal_index): def get_dataset(self, dataset_id, ds_info): """Load data variable and metadata and calibrate if needed.""" - file_key = ds_info.get('file_key', dataset_id['name']) - band_index = ds_info.get('band_index') + file_key = ds_info.get("file_key", dataset_id["name"]) + band_index = ds_info.get("band_index") data = self[file_key] if band_index is not None: data = data[band_index] if data.ndim >= 2: - data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'}) + data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) attrs = data.attrs.copy() # avoid contaminating other band loading attrs.update(ds_info) - if 'rows_per_scan' in self.filetype_info: - attrs.setdefault('rows_per_scan', self.filetype_info['rows_per_scan']) + if "rows_per_scan" in self.filetype_info: + attrs.setdefault("rows_per_scan", self.filetype_info["rows_per_scan"]) data = self._mask_data(data, dataset_id, attrs) - slope = attrs.pop('Slope', None) - intercept = attrs.pop('Intercept', None) - if slope is not None and dataset_id.get('calibration') != 'counts': + slope = attrs.pop("Slope", None) + intercept = attrs.pop("Intercept", None) + if slope is not None and dataset_id.get("calibration") != "counts": if band_index is not None: slope = slope[band_index] intercept = intercept[band_index] data = data * slope + intercept - if dataset_id.get('calibration') == "reflectance": - coeffs = self._get_coefficients(ds_info['calibration_key'], - ds_info['calibration_index']) + if dataset_id.get("calibration") == "reflectance": + coeffs = self._get_coefficients(ds_info["calibration_key"], + ds_info["calibration_index"]) data = coeffs[0] + coeffs[1] * data + coeffs[2] * data 
** 2 - elif dataset_id.get('calibration') == "brightness_temperature": - calibration_index = ds_info['calibration_index'] + elif dataset_id.get("calibration") == "brightness_temperature": + calibration_index = ds_info["calibration_index"] # Converts um^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. - wave_number = 1. / (dataset_id['wavelength'][1] / 1e6) + wave_number = 1. / (dataset_id["wavelength"][1] / 1e6) data = self._get_bt_dataset(data, calibration_index, wave_number) @@ -125,29 +125,29 @@ def get_dataset(self, dataset_id, ds_info): for key, val in attrs.items(): # python 3 only if bytes is not str and isinstance(val, bytes): - data.attrs[key] = val.decode('utf8') + data.attrs[key] = val.decode("utf8") data.attrs.update({ - 'platform_name': self['/attr/Satellite Name'], - 'sensor': self.sensor_name, + "platform_name": self["/attr/Satellite Name"], + "sensor": self.sensor_name, }) return data def _mask_data(self, data, dataset_id, attrs): """Mask the data using fill_value and valid_range attributes.""" - fill_value = attrs.pop('FillValue', np.nan) # covered by valid_range - valid_range = attrs.pop('valid_range', None) - if dataset_id.get('calibration') == 'counts': + fill_value = attrs.pop("FillValue", np.nan) # covered by valid_range + valid_range = attrs.pop("valid_range", None) + if dataset_id.get("calibration") == "counts": # preserve integer type of counts if possible - attrs['_FillValue'] = fill_value + attrs["_FillValue"] = fill_value new_fill = fill_value else: new_fill = np.nan if valid_range is not None: # Due to a bug in the valid_range upper limit in the 10.8(24) and 12.0(25) # in the HDF data, this is hardcoded here. - if dataset_id['name'] in ['24', '25'] and valid_range[1] == 4095: + if dataset_id["name"] in ["24", "25"] and valid_range[1] == 4095: valid_range[1] = 25000 # typically bad_values == 65535, saturated == 65534 # dead detector == 65533 @@ -184,13 +184,13 @@ def _get_bt_dataset(self, data, calibration_index, wave_number): data = data.where(data != 0) # additional corrections from the file - if self.sensor_name == 'mersi-2': - corr_coeff_a = float(self['/attr/TBB_Trans_Coefficient_A'][calibration_index]) - corr_coeff_b = float(self['/attr/TBB_Trans_Coefficient_B'][calibration_index]) - elif self.sensor_name == 'mersi-ll': + if self.sensor_name == "mersi-2": + corr_coeff_a = float(self["/attr/TBB_Trans_Coefficient_A"][calibration_index]) + corr_coeff_b = float(self["/attr/TBB_Trans_Coefficient_B"][calibration_index]) + elif self.sensor_name == "mersi-ll": # MERSI-LL stores these coefficients differently try: - coeffs = self['/attr/TBB_Trans_Coefficient'] + coeffs = self["/attr/TBB_Trans_Coefficient"] corr_coeff_a = coeffs[calibration_index] corr_coeff_b = coeffs[calibration_index + N_TOT_IR_CHANS_LL] except KeyError: diff --git a/satpy/readers/mimic_TPW2_nc.py b/satpy/readers/mimic_TPW2_nc.py index d4b7422ab1..8a22002cf4 100644 --- a/satpy/readers/mimic_TPW2_nc.py +++ b/satpy/readers/mimic_TPW2_nc.py @@ -54,8 +54,8 @@ def __init__(self, filename, filename_info, filetype_info): def available_datasets(self, configured_datasets=None): """Get datasets in file matching gelocation shape (lat/lon).""" - lat_shape = self.file_content.get('/dimension/lat') - lon_shape = self.file_content.get('/dimension/lon') + lat_shape = self.file_content.get("/dimension/lat") + lon_shape = self.file_content.get("/dimension/lon") # Read the lat/lon variables? 
handled_variables = set() @@ -67,9 +67,9 @@ def available_datasets(self, configured_datasets=None): if is_avail is not None: yield is_avail, ds_info - var_name = ds_info.get('file_key', ds_info['name']) + var_name = ds_info.get("file_key", ds_info["name"]) # logger.debug("Evaluating previously configured variable: %s", var_name) - matches = self.file_type_matches(ds_info['file_type']) + matches = self.file_type_matches(ds_info["file_type"]) # we can confidently say that we can provide this dataset and can # provide more info if matches and var_name in self: @@ -98,35 +98,35 @@ def available_datasets(self, configured_datasets=None): handled_variables.add(var_name) # Create new ds_info object new_info = { - 'name': var_name, - 'file_key': var_name, - 'file_type': self.filetype_info['file_type'], + "name": var_name, + "file_key": var_name, + "file_type": self.filetype_info["file_type"], } logger.debug(var_name) yield True, new_info def get_dataset(self, ds_id, info): """Load dataset designated by the given key from file.""" - logger.debug("Getting data for: %s", ds_id['name']) - file_key = info.get('file_key', ds_id['name']) + logger.debug("Getting data for: %s", ds_id["name"]) + file_key = info.get("file_key", ds_id["name"]) data = np.flipud(self[file_key]) - data = xr.DataArray(data, dims=['y', 'x']) + data = xr.DataArray(data, dims=["y", "x"]) data.attrs = self.get_metadata(data, info) - if 'lon' in data.dims: - data.rename({'lon': 'x'}) - if 'lat' in data.dims: - data.rename({'lat': 'y'}) + if "lon" in data.dims: + data.rename({"lon": "x"}) + if "lat" in data.dims: + data.rename({"lat": "y"}) return data def get_area_def(self, dsid): """Flip data up/down and define equirectangular AreaDefintion.""" - flip_lat = np.flipud(self['latArr']) - latlon = np.meshgrid(self['lonArr'], flip_lat) + flip_lat = np.flipud(self["latArr"]) + latlon = np.meshgrid(self["lonArr"], flip_lat) - width = self['lonArr/shape'][0] - height = self['latArr/shape'][0] + width = self["lonArr/shape"][0] + height = self["latArr/shape"][0] lower_left_x = latlon[0][height-1][0] lower_left_y = latlon[1][height-1][0] @@ -136,9 +136,9 @@ def get_area_def(self, dsid): area_extent = (lower_left_x, lower_left_y, upper_right_x, upper_right_y) description = "MIMIC TPW WGS84" - area_id = 'mimic' - proj_id = 'World Geodetic System 1984' - projection = 'EPSG:4326' + area_id = "mimic" + proj_id = "World Geodetic System 1984" + projection = "EPSG:4326" area_def = AreaDefinition(area_id, description, proj_id, projection, width, height, area_extent, ) return area_def @@ -148,24 +148,24 @@ def get_metadata(self, data, info): metadata.update(data.attrs) metadata.update(info) metadata.update({ - 'platform_shortname': 'aggregated microwave', - 'sensor': 'mimic', - 'start_time': self.start_time, - 'end_time': self.end_time, + "platform_shortname": "aggregated microwave", + "sensor": "mimic", + "start_time": self.start_time, + "end_time": self.end_time, }) - metadata.update(self[info.get('file_key')].variable.attrs) + metadata.update(self[info.get("file_key")].variable.attrs) return metadata @property def start_time(self): """Start timestamp of the dataset determined from yaml.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """End timestamp of the dataset same as start_time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def sensor_name(self): diff --git a/satpy/readers/mirs.py 
b/satpy/readers/mirs.py index de02b1dc36..1ee0912b0f 100644 --- a/satpy/readers/mirs.py +++ b/satpy/readers/mirs.py @@ -64,7 +64,7 @@ } SENSOR = {"n18": amsu, "n19": amsu, - "n20": 'atms', + "n20": "atms", "np": amsu, "m1": amsu, "m2": amsu, @@ -173,11 +173,11 @@ def get_coeff_by_sfc(coeff_fn, bt_data, idx): def limb_correct_atms_bt(bt_data, surf_type_mask, coeff_fns, ds_info): """Gather data needed for limb correction.""" - idx = ds_info['channel_index'] + idx = ds_info["channel_index"] LOG.info("Starting ATMS Limb Correction...") - sea_bt = get_coeff_by_sfc(coeff_fns['sea'], bt_data, idx) - land_bt = get_coeff_by_sfc(coeff_fns['land'], bt_data, idx) + sea_bt = get_coeff_by_sfc(coeff_fns["sea"], bt_data, idx) + land_bt = get_coeff_by_sfc(coeff_fns["land"], bt_data, idx) LOG.info("Finishing limb correction") is_sea = (surf_type_mask == 0) @@ -217,8 +217,8 @@ def __init__(self, filename, filename_info, filetype_info, decode_cf=True, mask_and_scale=False, decode_coords=True, - chunks={'Field_of_view': CHUNK_SIZE, - 'Scanline': CHUNK_SIZE}) + chunks={"Field_of_view": CHUNK_SIZE, + "Scanline": CHUNK_SIZE}) # y,x is used in satpy, bands rather than channel using in xrimage self.nc = self.nc.rename_dims({"Scanline": "y", "Field_of_view": "x"}) @@ -232,13 +232,13 @@ def __init__(self, filename, filename_info, filetype_info, @property def platform_shortname(self): """Get platform shortname.""" - return self.filename_info['platform_shortname'] + return self.filename_info["platform_shortname"] @property def _get_platform_name(self): """Get platform name.""" try: - res = PLATFORMS[self.filename_info['platform_shortname'].lower()] + res = PLATFORMS[self.filename_info["platform_shortname"].lower()] except KeyError: res = "mirs" return res.lower() @@ -296,13 +296,13 @@ def force_time(self, key): @property def _get_coeff_filenames(self): """Retrieve necessary files for coefficients if needed.""" - coeff_fn = {'sea': None, 'land': None} + coeff_fn = {"sea": None, "land": None} if self.platform_name == "noaa-20": - coeff_fn['land'] = retrieve("readers/limbcoef_atmsland_noaa20.txt") - coeff_fn['sea'] = retrieve("readers/limbcoef_atmssea_noaa20.txt") - if self.platform_name == 'npp': - coeff_fn['land'] = retrieve("readers/limbcoef_atmsland_snpp.txt") - coeff_fn['sea'] = retrieve("readers/limbcoef_atmssea_snpp.txt") + coeff_fn["land"] = retrieve("readers/limbcoef_atmsland_noaa20.txt") + coeff_fn["sea"] = retrieve("readers/limbcoef_atmssea_noaa20.txt") + if self.platform_name == "npp": + coeff_fn["land"] = retrieve("readers/limbcoef_atmsland_snpp.txt") + coeff_fn["sea"] = retrieve("readers/limbcoef_atmssea_snpp.txt") return coeff_fn @@ -311,10 +311,10 @@ def update_metadata(self, ds_info): metadata = {} metadata.update(ds_info) metadata.update({ - 'sensor': self.sensor, - 'platform_name': self.platform_name, - 'start_time': self.start_time, - 'end_time': self.end_time, + "sensor": self.sensor, + "platform_name": self.platform_name, + "start_time": self.start_time, + "end_time": self.end_time, }) return metadata @@ -325,9 +325,9 @@ def _nan_for_dtype(data_arr_dtype): if data_arr_dtype.type == np.float32: return np.float32(np.nan) if np.issubdtype(data_arr_dtype, np.timedelta64): - return np.timedelta64('NaT') + return np.timedelta64("NaT") if np.issubdtype(data_arr_dtype, np.datetime64): - return np.datetime64('NaT') + return np.datetime64("NaT") return np.nan @staticmethod @@ -375,19 +375,19 @@ def apply_attributes(self, data, ds_info): ds_info.update(data.attrs) # special cases - if ds_info['name'] in 
["latitude", "longitude"]: + if ds_info["name"] in ["latitude", "longitude"]: ds_info["standard_name"] = ds_info.get("standard_name", - ds_info['name']) + ds_info["name"]) # try to assign appropriate units (if "Kelvin" covert to K) units_convert = {"Kelvin": "K"} - data_unit = ds_info.get('units', None) - ds_info['units'] = units_convert.get(data_unit, data_unit) + data_unit = ds_info.get("units", None) + ds_info["units"] = units_convert.get(data_unit, data_unit) - scale = ds_info.pop('scale_factor', 1.0) - offset = ds_info.pop('add_offset', 0.) + scale = ds_info.pop("scale_factor", 1.0) + offset = ds_info.pop("add_offset", 0.) fill_value = ds_info.pop("_FillValue", global_attr_fill) - valid_range = ds_info.pop('valid_range', None) + valid_range = ds_info.pop("valid_range", None) data = self._scale_data(data, scale, offset) data = self._fill_data(data, fill_value, scale, offset) @@ -399,14 +399,14 @@ def apply_attributes(self, data, ds_info): def get_dataset(self, ds_id, ds_info): """Get datasets.""" - if 'dependencies' in ds_info.keys(): - idx = ds_info['channel_index'] - data = self['BT'] + if "dependencies" in ds_info.keys(): + idx = ds_info["channel_index"] + data = self["BT"] data = data.rename(new_name_or_name_dict=ds_info["name"]) data, ds_info = self.apply_attributes(data, ds_info) if self.sensor.lower() == "atms" and self.limb_correction: - sfc_type_mask = self['Sfc_type'] + sfc_type_mask = self["Sfc_type"] data = limb_correct_atms_bt(data, sfc_type_mask, self._get_coeff_filenames, ds_info) @@ -416,7 +416,7 @@ def get_dataset(self, ds_id, ds_info): LOG.info("No Limb Correction applied.") data = data[:, :, idx] else: - data = self[ds_id['name']] + data = self[ds_id["name"]] data, ds_info = self.apply_attributes(data, ds_info) data.attrs = self.update_metadata(ds_info) @@ -440,24 +440,24 @@ def available_datasets(self, configured_datasets=None): continue yaml_info = {} - if self.file_type_matches(ds_info['file_type']): - handled_vars.add(ds_info['name']) + if self.file_type_matches(ds_info["file_type"]): + handled_vars.add(ds_info["name"]) yaml_info = ds_info - if ds_info['name'] == 'BT': + if ds_info["name"] == "BT": yield from self._available_btemp_datasets(yaml_info) yield True, ds_info yield from self._available_new_datasets(handled_vars) def _count_channel_repeat_number(self): """Count channel/polarization pair repetition.""" - freq = self.nc.coords.get('Freq', self.nc.get('Freq')) - polo = self.nc['Polo'] + freq = self.nc.coords.get("Freq", self.nc.get("Freq")) + polo = self.nc["Polo"] chn_total = Counter() normals = [] for idx, (f, p) in enumerate(zip(freq, polo)): normal_f = str(int(f)) - normal_p = 'v' if p == POLO_V else 'h' + normal_p = "v" if p == POLO_V else "h" chn_total[normal_f + normal_p] += 1 normals.append((idx, f, p, normal_f, normal_p)) @@ -471,7 +471,7 @@ def _available_btemp_datasets(self, yaml_info): for idx, _f, _p, normal_f, normal_p in normals: chn_cnt[normal_f + normal_p] += 1 p_count = str(chn_cnt[normal_f + normal_p] - if chn_total[normal_f + normal_p] > 1 else '') + if chn_total[normal_f + normal_p] > 1 else "") new_name = "btemp_{}{}{}".format(normal_f, normal_p, p_count) @@ -479,22 +479,22 @@ def _available_btemp_datasets(self, yaml_info): desc_bt = desc_bt.format(idx, normal_f, normal_p, p_count) ds_info = yaml_info.copy() ds_info.update({ - 'file_type': self.filetype_info['file_type'], - 'name': new_name, - 'description': desc_bt, - 'channel_index': idx, - 'frequency': "{}GHz".format(normal_f), - 'polarization': normal_p, - 'dependencies': ('BT', 
'Sfc_type'), - 'coordinates': ['longitude', 'latitude'] + "file_type": self.filetype_info["file_type"], + "name": new_name, + "description": desc_bt, + "channel_index": idx, + "frequency": "{}GHz".format(normal_f), + "polarization": normal_p, + "dependencies": ("BT", "Sfc_type"), + "coordinates": ["longitude", "latitude"] }) yield True, ds_info def _get_ds_info_for_data_arr(self, var_name): ds_info = { - 'file_type': self.filetype_info['file_type'], - 'name': var_name, - 'coordinates': ["longitude", "latitude"] + "file_type": self.filetype_info["file_type"], + "name": var_name, + "coordinates": ["longitude", "latitude"] } return ds_info @@ -524,7 +524,7 @@ def __getitem__(self, item): data = self.nc[item] # 'Freq' dimension causes issues in other processing - if 'Freq' in data.coords: - data = data.drop_vars('Freq') + if "Freq" in data.coords: + data = data.drop_vars("Freq") return data diff --git a/satpy/readers/modis_l1b.py b/satpy/readers/modis_l1b.py index 5f0627b95d..d2bb1c9661 100644 --- a/satpy/readers/modis_l1b.py +++ b/satpy/readers/modis_l1b.py @@ -92,13 +92,13 @@ class HDFEOSBandReader(HDFEOSBaseFileReader): "H": 500} res_to_possible_variable_names = { - 1000: ['EV_250_Aggr1km_RefSB', - 'EV_500_Aggr1km_RefSB', - 'EV_1KM_RefSB', - 'EV_1KM_Emissive'], - 500: ['EV_250_Aggr500_RefSB', - 'EV_500_RefSB'], - 250: ['EV_250_RefSB'], + 1000: ["EV_250_Aggr1km_RefSB", + "EV_500_Aggr1km_RefSB", + "EV_1KM_RefSB", + "EV_1KM_Emissive"], + 500: ["EV_250_Aggr500_RefSB", + "EV_500_RefSB"], + 250: ["EV_250_RefSB"], } def __init__(self, filename, filename_info, filetype_info, mask_saturated=True, **kwargs): @@ -106,21 +106,21 @@ def __init__(self, filename, filename_info, filetype_info, mask_saturated=True, super().__init__(filename, filename_info, filetype_info, **kwargs) self._mask_saturated = mask_saturated - ds = self.metadata['INVENTORYMETADATA'][ - 'COLLECTIONDESCRIPTIONCLASS']['SHORTNAME']['VALUE'] + ds = self.metadata["INVENTORYMETADATA"][ + "COLLECTIONDESCRIPTIONCLASS"]["SHORTNAME"]["VALUE"] self.resolution = self.res[ds[-3]] def get_dataset(self, key, info): """Read data from file and return the corresponding projectables.""" - if self.resolution != key['resolution']: + if self.resolution != key["resolution"]: return var_name, band_index = self._get_band_variable_name_and_index(key["name"]) subdata = self.sd.select(var_name) var_attrs = subdata.attributes() uncertainty = self.sd.select(var_name + "_Uncert_Indexes") array = xr.DataArray(from_sds(subdata, chunks=CHUNK_SIZE)[band_index, :, :], - dims=['y', 'x']).astype(np.float32) - valid_range = var_attrs['valid_range'] + dims=["y", "x"]).astype(np.float32) + valid_range = var_attrs["valid_range"] valid_min = np.float32(valid_range[0]) valid_max = np.float32(valid_range[1]) if not self._mask_saturated: @@ -219,24 +219,24 @@ def _mask_uncertain_pixels(self, array, uncertainty, band_index): return array def _calibrate_data(self, key, info, array, var_attrs, index): - if key['calibration'] == 'brightness_temperature': - projectable = calibrate_bt(array, var_attrs, index, key['name']) - info.setdefault('units', 'K') - info.setdefault('standard_name', 'toa_brightness_temperature') - elif key['calibration'] == 'reflectance': + if key["calibration"] == "brightness_temperature": + projectable = calibrate_bt(array, var_attrs, index, key["name"]) + info.setdefault("units", "K") + info.setdefault("standard_name", "toa_brightness_temperature") + elif key["calibration"] == "reflectance": projectable = calibrate_refl(array, var_attrs, index) - 
info.setdefault('units', '%') - info.setdefault('standard_name', - 'toa_bidirectional_reflectance') - elif key['calibration'] == 'radiance': + info.setdefault("units", "%") + info.setdefault("standard_name", + "toa_bidirectional_reflectance") + elif key["calibration"] == "radiance": projectable = calibrate_radiance(array, var_attrs, index) - info.setdefault('units', var_attrs.get('radiance_units')) - info.setdefault('standard_name', - 'toa_outgoing_radiance_per_unit_wavelength') - elif key['calibration'] == 'counts': + info.setdefault("units", var_attrs.get("radiance_units")) + info.setdefault("standard_name", + "toa_outgoing_radiance_per_unit_wavelength") + elif key["calibration"] == "counts": projectable = calibrate_counts(array, var_attrs, index) - info.setdefault('units', 'counts') - info.setdefault('standard_name', 'counts') # made up + info.setdefault("units", "counts") + info.setdefault("standard_name", "counts") # made up else: raise ValueError("Unknown calibration for " "key: {}".format(key)) @@ -254,7 +254,7 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): def get_dataset(self, key, info): """Get the dataset.""" - if key['name'] in HDFEOSGeoReader.DATASET_NAMES: + if key["name"] in HDFEOSGeoReader.DATASET_NAMES: return HDFEOSGeoReader.get_dataset(self, key, info) return HDFEOSBandReader.get_dataset(self, key, info) diff --git a/satpy/readers/modis_l2.py b/satpy/readers/modis_l2.py index 1a7fc3ae38..1b526a07a2 100644 --- a/satpy/readers/modis_l2.py +++ b/satpy/readers/modis_l2.py @@ -113,30 +113,30 @@ def _select_hdf_dataset(self, hdf_dataset_name, byte_dimension): dataset = self.sd.select(hdf_dataset_name) dask_arr = from_sds(dataset, chunks=CHUNK_SIZE) attrs = dataset.attributes() - dims = ['y', 'x'] + dims = ["y", "x"] if byte_dimension == 0: - dims = ['i', 'y', 'x'] + dims = ["i", "y", "x"] dask_arr = dask_arr.astype(np.uint8) elif byte_dimension == 2: - dims = ['y', 'x', 'i'] + dims = ["y", "x", "i"] dask_arr = dask_arr.astype(np.uint8) dataset = xr.DataArray(dask_arr, dims=dims, attrs=attrs) - if 'i' in dataset.dims: + if "i" in dataset.dims: # Reorder dimensions for consistency - dataset = dataset.transpose('i', 'y', 'x') + dataset = dataset.transpose("i", "y", "x") return dataset def get_dataset(self, dataset_id, dataset_info): """Get DataArray for specified dataset.""" - dataset_name = dataset_id['name'] + dataset_name = dataset_id["name"] if self.is_geo_loadable_dataset(dataset_name): return HDFEOSGeoReader.get_dataset(self, dataset_id, dataset_info) - dataset_name_in_file = dataset_info['file_key'] + dataset_name_in_file = dataset_info["file_key"] if self.is_imapp_mask_byte1: - dataset_name_in_file = dataset_info.get('imapp_file_key', dataset_name_in_file) + dataset_name_in_file = dataset_info.get("imapp_file_key", dataset_name_in_file) # The dataset asked correspond to a given set of bits of the HDF EOS dataset - if 'byte' in dataset_info and 'byte_dimension' in dataset_info: + if "byte" in dataset_info and "byte_dimension" in dataset_info: dataset = self._extract_and_mask_category_dataset(dataset_id, dataset_info, dataset_name_in_file) else: # No byte manipulation required @@ -147,39 +147,39 @@ def get_dataset(self, dataset_id, dataset_info): def _extract_and_mask_category_dataset(self, dataset_id, dataset_info, var_name): # what dimension is per-byte - byte_dimension = None if self.is_imapp_mask_byte1 else dataset_info['byte_dimension'] + byte_dimension = None if self.is_imapp_mask_byte1 else dataset_info["byte_dimension"] dataset = 
self._select_hdf_dataset(var_name, byte_dimension) # category products always have factor=1/offset=0 so don't apply them # also remove them so they don't screw up future satpy processing - dataset.attrs.pop('scale_factor', None) - dataset.attrs.pop('add_offset', None) + dataset.attrs.pop("scale_factor", None) + dataset.attrs.pop("add_offset", None) # Don't do this byte work if we are using the IMAPP mask_byte1 file if self.is_imapp_mask_byte1: return dataset dataset = _extract_byte_mask(dataset, - dataset_info['byte'], - dataset_info['bit_start'], - dataset_info['bit_count']) + dataset_info["byte"], + dataset_info["bit_start"], + dataset_info["bit_count"]) dataset = self._mask_with_quality_assurance_if_needed(dataset, dataset_info, dataset_id) return dataset def _mask_with_quality_assurance_if_needed(self, dataset, dataset_info, dataset_id): - if not dataset_info.get('quality_assurance', False): + if not dataset_info.get("quality_assurance", False): return dataset # Get quality assurance dataset recursively quality_assurance_dataset_id = dataset_id.from_dict( - dict(name='quality_assurance', resolution=1000) + dict(name="quality_assurance", resolution=1000) ) quality_assurance_dataset_info = { - 'name': 'quality_assurance', - 'resolution': 1000, - 'byte_dimension': 2, - 'byte': 0, - 'bit_start': 0, - 'bit_count': 1, - 'file_key': 'Quality_Assurance' + "name": "quality_assurance", + "resolution": 1000, + "byte_dimension": 2, + "byte": 0, + "bit_start": 0, + "bit_count": 1, + "file_key": "Quality_Assurance" } quality_assurance = self.get_dataset( quality_assurance_dataset_id, quality_assurance_dataset_info diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 517e096db8..1131e40a96 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -49,10 +49,10 @@ logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() -PLATFORMS = {'S2A': "Sentinel-2A", - 'S2B': "Sentinel-2B", - 'S2C': "Sentinel-2C", - 'S2D': "Sentinel-2D"} +PLATFORMS = {"S2A": "Sentinel-2A", + "S2B": "Sentinel-2B", + "S2C": "Sentinel-2C", + "S2D": "Sentinel-2D"} class SAFEMSIL1C(BaseFileHandler): @@ -63,23 +63,23 @@ def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_s super(SAFEMSIL1C, self).__init__(filename, filename_info, filetype_info) del mask_saturated - self._start_time = filename_info['observation_time'] - self._end_time = filename_info['observation_time'] - self._channel = filename_info['band_name'] + self._start_time = filename_info["observation_time"] + self._end_time = filename_info["observation_time"] + self._channel = filename_info["band_name"] self._tile_mda = tile_mda self._mda = mda - self.platform_name = PLATFORMS[filename_info['fmission_id']] + self.platform_name = PLATFORMS[filename_info["fmission_id"]] def get_dataset(self, key, info): """Load a dataset.""" - if self._channel != key['name']: + if self._channel != key["name"]: return - logger.debug('Reading %s.', key['name']) + logger.debug("Reading %s.", key["name"]) proj = self._read_from_file(key) proj.attrs = info.copy() - proj.attrs['units'] = '%' - proj.attrs['platform_name'] = self.platform_name + proj.attrs["units"] = "%" + proj.attrs["platform_name"] = self.platform_name return proj def _read_from_file(self, key): @@ -102,7 +102,7 @@ def end_time(self): def get_area_def(self, dsid): """Get the area def.""" - if self._channel != dsid['name']: + if self._channel != dsid["name"]: return return self._tile_mda.get_area_def(dsid) @@ -113,11 +113,11 @@ class 
SAFEMSIXMLMetadata(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info, mask_saturated=True): """Init the reader.""" super().__init__(filename, filename_info, filetype_info) - self._start_time = filename_info['observation_time'] - self._end_time = filename_info['observation_time'] + self._start_time = filename_info["observation_time"] + self._end_time = filename_info["observation_time"] self.root = ET.parse(self.filename) - self.tile = filename_info['dtile_number'] - self.platform_name = PLATFORMS[filename_info['fmission_id']] + self.tile = filename_info["dtile_number"] + self.platform_name = PLATFORMS[filename_info["fmission_id"]] self.mask_saturated = mask_saturated import bottleneck # noqa import geotiepoints # noqa @@ -138,7 +138,7 @@ class SAFEMSIMDXML(SAFEMSIXMLMetadata): def calibrate_to_reflectances(self, data, band_name): """Calibrate *data* using the radiometric information for the metadata.""" - quantification = int(self.root.find('.//QUANTIFICATION_VALUE').text) + quantification = int(self.root.find(".//QUANTIFICATION_VALUE").text) data = self._sanitize_data(data) return (data + self.band_offset(band_name)) / quantification * 100 @@ -163,14 +163,14 @@ def _band_index(self, band): @cached_property def band_indices(self): """Get the band indices from the metadata.""" - spectral_info = self.root.findall('.//Spectral_Information') + spectral_info = self.root.findall(".//Spectral_Information") band_indices = {spec.attrib["physicalBand"]: int(spec.attrib["bandId"]) for spec in spectral_info} return band_indices @cached_property def band_offsets(self): """Get the band offsets from the metadata.""" - offsets = self.root.find('.//Radiometric_Offset_List') + offsets = self.root.find(".//Radiometric_Offset_List") if offsets is not None: band_offsets = {int(off.attrib["band_id"]): float(off.text) for off in offsets} else: @@ -180,7 +180,7 @@ def band_offsets(self): @cached_property def special_values(self): """Get the special values from the metadata.""" - special_values = self.root.findall('.//Special_Values') + special_values = self.root.findall(".//Special_Values") special_values_dict = {value[0].text: float(value[1].text) for value in special_values} return special_values_dict @@ -214,11 +214,11 @@ def physical_gains(self): def _fill_swath_edges(angles): """Fill gaps at edges of swath.""" - darr = xr.DataArray(angles, dims=['y', 'x']) - darr = darr.bfill('x') - darr = darr.ffill('x') - darr = darr.bfill('y') - darr = darr.ffill('y') + darr = xr.DataArray(angles, dims=["y", "x"]) + darr = darr.bfill("x") + darr = darr.ffill("x") + darr = darr.bfill("y") + darr = darr.ffill("y") angles = darr.data return angles @@ -229,12 +229,12 @@ class SAFEMSITileMDXML(SAFEMSIXMLMetadata): def __init__(self, filename, filename_info, filetype_info, mask_saturated=True): """Init the reader.""" super().__init__(filename, filename_info, filetype_info, mask_saturated) - self.geocoding = self.root.find('.//Tile_Geocoding') + self.geocoding = self.root.find(".//Tile_Geocoding") def get_area_def(self, dsid): """Get the area definition of the dataset.""" - area_extent = self._area_extent(dsid['resolution']) - cols, rows = self._shape(dsid['resolution']) + area_extent = self._area_extent(dsid["resolution"]) + cols, rows = self._shape(dsid["resolution"]) area = geometry.AreaDefinition( self.tile, "On-the-fly area", @@ -249,16 +249,16 @@ def get_area_def(self, dsid): def projection(self): """Get the geographic projection.""" from pyproj import CRS - epsg = 
self.geocoding.find('HORIZONTAL_CS_CODE').text + epsg = self.geocoding.find("HORIZONTAL_CS_CODE").text return CRS(epsg) def _area_extent(self, resolution): cols, rows = self._shape(resolution) geoposition = self.geocoding.find('Geoposition[@resolution="' + str(resolution) + '"]') - ulx = float(geoposition.find('ULX').text) - uly = float(geoposition.find('ULY').text) - xdim = float(geoposition.find('XDIM').text) - ydim = float(geoposition.find('YDIM').text) + ulx = float(geoposition.find("ULX").text) + uly = float(geoposition.find("ULY").text) + xdim = float(geoposition.find("XDIM").text) + ydim = float(geoposition.find("YDIM").text) area_extent = (ulx, uly + rows * ydim, ulx + cols * xdim, uly) return area_extent @@ -292,30 +292,30 @@ def interpolate_angles(self, angles, resolution): def _get_coarse_dataset(self, key, info): """Get the coarse dataset refered to by `key` from the XML data.""" - angles = self.root.find('.//Tile_Angles') - if key['name'] in ['solar_zenith_angle', 'solar_azimuth_angle']: + angles = self.root.find(".//Tile_Angles") + if key["name"] in ["solar_zenith_angle", "solar_azimuth_angle"]: angles = self._get_solar_angles(angles, info) - elif key['name'] in ['satellite_zenith_angle', 'satellite_azimuth_angle']: + elif key["name"] in ["satellite_zenith_angle", "satellite_azimuth_angle"]: angles = self._get_satellite_angles(angles, info) else: angles = None return angles def _get_solar_angles(self, angles, info): - angles = self._get_values_from_tag(angles, info['xml_tag']) + angles = self._get_values_from_tag(angles, info["xml_tag"]) return angles @staticmethod def _get_values_from_tag(xml_tree, xml_tag): - elts = xml_tree.findall(xml_tag + '/Values_List/VALUES') + elts = xml_tree.findall(xml_tag + "/Values_List/VALUES") return np.array([[val for val in elt.text.split()] for elt in elts], dtype=np.float64) def _get_satellite_angles(self, angles, info): arrays = [] - elts = angles.findall(info['xml_tag'] + '[@bandId="1"]') + elts = angles.findall(info["xml_tag"] + '[@bandId="1"]') for elt in elts: - arrays.append(self._get_values_from_tag(elt, info['xml_item'])) + arrays.append(self._get_values_from_tag(elt, info["xml_item"])) angles = np.nanmean(np.dstack(arrays), -1) return angles @@ -327,10 +327,10 @@ def get_dataset(self, key, info): angles = _fill_swath_edges(angles) - res = self.interpolate_angles(angles, key['resolution']) + res = self.interpolate_angles(angles, key["resolution"]) - proj = xr.DataArray(res, dims=['y', 'x']) + proj = xr.DataArray(res, dims=["y", "x"]) proj.attrs = info.copy() - proj.attrs['units'] = 'degrees' - proj.attrs['platform_name'] = self.platform_name + proj.attrs["units"] = "degrees" + proj.attrs["platform_name"] = self.platform_name return proj diff --git a/satpy/readers/msu_gsa_l1b.py b/satpy/readers/msu_gsa_l1b.py index df06239b43..c4e45aa333 100644 --- a/satpy/readers/msu_gsa_l1b.py +++ b/satpy/readers/msu_gsa_l1b.py @@ -37,7 +37,7 @@ class MSUGSAFileHandler(HDF5FileHandler): @property def start_time(self): """Time for timeslot scan start.""" - dtstr = self['/attr/timestamp_without_timezone'] + dtstr = self["/attr/timestamp_without_timezone"] return datetime.strptime(dtstr, "%Y-%m-%dT%H:%M:%S") @property @@ -47,65 +47,65 @@ def satellite_altitude(self): There is no documentation but this appears to be height above surface in meters. 
""" - return float(self['/attr/satellite_observation_point_height']) + return float(self["/attr/satellite_observation_point_height"]) @property def satellite_latitude(self): """Satellite latitude at time of scan.""" - return float(self['/attr/satellite_observation_point_latitude']) + return float(self["/attr/satellite_observation_point_latitude"]) @property def satellite_longitude(self): """Satellite longitude at time of scan.""" - return float(self['/attr/satellite_observation_point_longitude']) + return float(self["/attr/satellite_observation_point_longitude"]) @property def sensor_name(self): """Sensor name is hardcoded.""" - sensor = 'msu_gsa' + sensor = "msu_gsa" return sensor @property def platform_name(self): """Platform name is also hardcoded.""" - platform = 'Arctica-M-N1' + platform = "Arctica-M-N1" return platform @staticmethod def _apply_scale_offset(in_data): """Apply the scale and offset to data.""" - scl = in_data.attrs['scale'] - off = in_data.attrs['offset'] + scl = in_data.attrs["scale"] + off = in_data.attrs["offset"] return in_data * scl + off def get_dataset(self, dataset_id, ds_info): """Load data variable and metadata and calibrate if needed.""" - file_key = ds_info.get('file_key', dataset_id['name']) + file_key = ds_info.get("file_key", dataset_id["name"]) data = self[file_key] attrs = data.attrs.copy() # avoid contaminating other band loading attrs.update(ds_info) # The fill value also needs to be applied - fill_val = attrs.pop('fill_value') + fill_val = attrs.pop("fill_value") data = data.where(data != fill_val, np.nan) # Data has a scale and offset that we must apply data = self._apply_scale_offset(data) # Data is given as radiance values, we must convert if we want reflectance - if dataset_id.get('calibration') == "reflectance": - solconst = float(attrs.pop('F_solar_constant')) + if dataset_id.get("calibration") == "reflectance": + solconst = float(attrs.pop("F_solar_constant")) data = np.pi * data / solconst # Satpy expects reflectance values in 0-100 range data = data * 100. 
data.attrs = attrs data.attrs.update({ - 'platform_name': self.platform_name, - 'sensor': self.sensor_name, - 'sat_altitude': self.satellite_altitude, - 'sat_latitude': self.satellite_latitude, - 'sat_longitude': self.satellite_longitude, + "platform_name": self.platform_name, + "sensor": self.sensor_name, + "sat_altitude": self.satellite_altitude, + "sat_latitude": self.satellite_latitude, + "sat_longitude": self.satellite_longitude, }) return data diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index 121e7d7d1b..979483513a 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -173,16 +173,16 @@ MVIRI_FIELD_OF_VIEW = 18.0 """[Handbook] section 5.3.2.1.""" -CHANNELS = ['VIS', 'WV', 'IR'] +CHANNELS = ["VIS", "WV", "IR"] ANGLES = [ - 'solar_zenith_angle', - 'solar_azimuth_angle', - 'satellite_zenith_angle', - 'satellite_azimuth_angle' + "solar_zenith_angle", + "solar_azimuth_angle", + "satellite_zenith_angle", + "satellite_azimuth_angle" ] OTHER_REFLECTANCES = [ - 'u_independent_toa_bidirectional_reflectance', - 'u_structured_toa_bidirectional_reflectance' + "u_independent_toa_bidirectional_reflectance", + "u_structured_toa_bidirectional_reflectance" ] HIGH_RESOL = 2250 @@ -200,19 +200,19 @@ def __init__(self, coefs): def calibrate(self, counts, calibration): """Calibrate IR/WV counts to the given calibration.""" - if calibration == 'counts': + if calibration == "counts": return counts - elif calibration in ('radiance', 'brightness_temperature'): + elif calibration in ("radiance", "brightness_temperature"): return self._calibrate_rad_bt(counts, calibration) else: raise KeyError( - 'Invalid IR/WV calibration: {}'.format(calibration.name) + "Invalid IR/WV calibration: {}".format(calibration.name) ) def _calibrate_rad_bt(self, counts, calibration): """Calibrate counts to radiance or brightness temperature.""" rad = self._counts_to_radiance(counts) - if calibration == 'radiance': + if calibration == "radiance": return rad bt = self._radiance_to_brightness_temperature(rad) return bt @@ -222,7 +222,7 @@ def _counts_to_radiance(self, counts): Reference: [PUG], equations (4.1) and (4.2). """ - rad = self.coefs['a'] + self.coefs['b'] * counts + rad = self.coefs["a"] + self.coefs["b"] * counts return rad.where(rad > 0, np.float32(np.nan)) def _radiance_to_brightness_temperature(self, rad): @@ -230,7 +230,7 @@ def _radiance_to_brightness_temperature(self, rad): Reference: [PUG], equations (5.1) and (5.2). 
""" - bt = self.coefs['bt_b'] / (np.log(rad) - self.coefs['bt_a']) + bt = self.coefs["bt_b"] / (np.log(rad) - self.coefs["bt_a"]) return bt.where(bt > 0, np.float32(np.nan)) @@ -252,19 +252,19 @@ def __init__(self, coefs, solar_zenith_angle=None): def calibrate(self, counts, calibration): """Calibrate VIS counts.""" - if calibration == 'counts': + if calibration == "counts": return counts - elif calibration in ('radiance', 'reflectance'): + elif calibration in ("radiance", "reflectance"): return self._calibrate_rad_refl(counts, calibration) else: raise KeyError( - 'Invalid VIS calibration: {}'.format(calibration.name) + "Invalid VIS calibration: {}".format(calibration.name) ) def _calibrate_rad_refl(self, counts, calibration): """Calibrate counts to radiance or reflectance.""" rad = self._counts_to_radiance(counts) - if calibration == 'radiance': + if calibration == "radiance": return rad refl = self._radiance_to_reflectance(rad) refl = self.update_refl_attrs(refl) @@ -275,11 +275,11 @@ def _counts_to_radiance(self, counts): Reference: [PUG], equations (7) and (8). """ - years_since_launch = self.coefs['years_since_launch'] - a_cf = (self.coefs['a0'] + - self.coefs['a1'] * years_since_launch + - self.coefs['a2'] * years_since_launch ** 2) - mean_count_space_vis = self.coefs['mean_count_space'] + years_since_launch = self.coefs["years_since_launch"] + a_cf = (self.coefs["a0"] + + self.coefs["a1"] * years_since_launch + + self.coefs["a2"] * years_since_launch ** 2) + mean_count_space_vis = self.coefs["mean_count_space"] rad = (counts - mean_count_space_vis) * a_cf return rad.where(rad > 0, np.float32(np.nan)) @@ -298,17 +298,17 @@ def _radiance_to_reflectance(self, rad): ) # direct illumination only cos_sza = np.cos(np.deg2rad(sza)) refl = ( - (np.pi * self.coefs['distance_sun_earth'] ** 2) / - (self.coefs['solar_irradiance'] * cos_sza) * + (np.pi * self.coefs["distance_sun_earth"] ** 2) / + (self.coefs["solar_irradiance"] * cos_sza) * rad ) return self.refl_factor_to_percent(refl) def update_refl_attrs(self, refl): """Update attributes of reflectance datasets.""" - refl.attrs['sun_earth_distance_correction_applied'] = True - refl.attrs['sun_earth_distance_correction_factor'] = self.coefs[ - 'distance_sun_earth'].item() + refl.attrs["sun_earth_distance_correction_applied"] = True + refl.attrs["sun_earth_distance_correction_factor"] = self.coefs[ + "distance_sun_earth"].item() return refl @staticmethod @@ -328,24 +328,24 @@ def get_area_def(self, im_size, projection_longitude): def _get_proj_params(self, im_size, projection_longitude): """Get projection parameters for the given settings.""" - area_name = 'geos_mviri_{0}x{0}'.format(im_size) + area_name = "geos_mviri_{0}x{0}".format(im_size) lfac, cfac, loff, coff = self._get_factors_offsets(im_size) return { - 'ssp_lon': projection_longitude, - 'a': EQUATOR_RADIUS, - 'b': POLE_RADIUS, - 'h': ALTITUDE, - 'units': 'm', - 'loff': loff - im_size, - 'coff': coff, - 'lfac': -lfac, - 'cfac': -cfac, - 'nlines': im_size, - 'ncols': im_size, - 'scandir': 'S2N', # Reference: [PUG] section 2. - 'p_id': area_name, - 'a_name': area_name, - 'a_desc': 'MVIRI Geostationary Projection' + "ssp_lon": projection_longitude, + "a": EQUATOR_RADIUS, + "b": POLE_RADIUS, + "h": ALTITUDE, + "units": "m", + "loff": loff - im_size, + "coff": coff, + "lfac": -lfac, + "cfac": -cfac, + "nlines": im_size, + "ncols": im_size, + "scandir": "S2N", # Reference: [PUG] section 2. 
+ "p_id": area_name, + "a_name": area_name, + "a_desc": "MVIRI Geostationary Projection" } def _get_factors_offsets(self, im_size): @@ -382,7 +382,7 @@ def interp_tiepoints(ds, target_x, target_y): # No tiepoint coordinates specified in the files. Use dimensions # to calculate tiepoint sampling and assign tiepoint coordinates # accordingly. - sampling = target_x.size // ds.coords['x'].size + sampling = target_x.size // ds.coords["x"].size ds = ds.assign_coords(x=target_x.values[::sampling], y=target_y.values[::sampling]) @@ -406,11 +406,11 @@ def interp_acq_time(time2d, target_y): Mean scanline acquisition timestamps """ # Compute mean timestamp per scanline - time = time2d.mean(dim='x') + time = time2d.mean(dim="x") # If required, repeat timestamps in y-direction to obtain higher # resolution - y = time.coords['y'].values + y = time.coords["y"].values if y.size < target_y.size: reps = target_y.size // y.size y_rep = np.repeat(y, reps) @@ -476,15 +476,15 @@ def __getitem__(self, item): def _should_dims_be_renamed(self, ds): """Determine whether dataset dimensions need to be renamed.""" - return 'y_ir_wv' in ds.dims or 'y_tie' in ds.dims + return "y_ir_wv" in ds.dims or "y_tie" in ds.dims def _rename_dims(self, ds): """Rename dataset dimensions to match satpy's expectations.""" new_names = { - 'y_ir_wv': 'y', - 'x_ir_wv': 'x', - 'y_tie': 'y', - 'x_tie': 'x' + "y_ir_wv": "y", + "x_ir_wv": "x", + "y_tie": "y", + "x_tie": "x" } for old_name, new_name in new_names.items(): if old_name in ds.dims: @@ -492,7 +492,7 @@ def _rename_dims(self, ds): return ds def _coordinates_not_assigned(self, ds): - return 'y' in ds.dims and 'y' not in ds.coords + return "y" in ds.dims and "y" not in ds.coords def _reassign_coords(self, ds): """Re-assign coordinates. @@ -500,14 +500,14 @@ def _reassign_coords(self, ds): For some reason xarray doesn't assign coordinates to all high resolution data variables. """ - return ds.assign_coords({'y': self.nc.coords['y'], - 'x': self.nc.coords['x']}) + return ds.assign_coords({"y": self.nc.coords["y"], + "x": self.nc.coords["x"]}) def _cleanup_attrs(self, ds): """Cleanup dataset attributes.""" # Remove ancillary_variables attribute to avoid downstream # satpy warnings. - ds.attrs.pop('ancillary_variables', None) + ds.attrs.pop("ancillary_variables", None) def get_time(self): """Get time coordinate. @@ -515,29 +515,29 @@ def get_time(self): Variable is sometimes named "time" and sometimes "time_ir_wv". 
""" try: - return self['time_ir_wv'] + return self["time_ir_wv"] except KeyError: - return self['time'] + return self["time"] def get_xy_coords(self, resolution): """Get x and y coordinates for the given resolution.""" if is_high_resol(resolution): - return self.nc.coords['x'], self.nc.coords['y'] - return self.nc.coords['x_ir_wv'], self.nc.coords['x_ir_wv'] + return self.nc.coords["x"], self.nc.coords["y"] + return self.nc.coords["x_ir_wv"], self.nc.coords["x_ir_wv"] def get_image_size(self, resolution): """Get image size for the given resolution.""" if is_high_resol(resolution): - return self.nc.coords['y'].size - return self.nc.coords['y_ir_wv'].size + return self.nc.coords["y"].size + return self.nc.coords["y_ir_wv"].size class FiduceoMviriBase(BaseFileHandler): """Baseclass for FIDUCEO MVIRI file handlers.""" nc_keys = { - 'WV': 'count_wv', - 'IR': 'count_ir' + "WV": "count_wv", + "IR": "count_ir" } def __init__(self, filename, filename_info, filetype_info, @@ -555,16 +555,16 @@ def __init__(self, filename, filename_info, filetype_info, self.mask_bad_quality = mask_bad_quality nc_raw = xr.open_dataset( filename, - chunks={'x': CHUNK_SIZE, - 'y': CHUNK_SIZE, - 'x_ir_wv': CHUNK_SIZE, - 'y_ir_wv': CHUNK_SIZE} + chunks={"x": CHUNK_SIZE, + "y": CHUNK_SIZE, + "x_ir_wv": CHUNK_SIZE, + "y_ir_wv": CHUNK_SIZE} ) self.nc = DatasetWrapper(nc_raw) # Projection longitude is not provided in the file, read it from the # filename. - self.projection_longitude = float(filename_info['projection_longitude']) + self.projection_longitude = float(filename_info["projection_longitude"]) self.calib_coefs = self._get_calib_coefs() self._get_angles = functools.lru_cache(maxsize=8)( @@ -576,12 +576,12 @@ def __init__(self, filename, filename_info, filetype_info, def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" - name = dataset_id['name'] - resolution = dataset_id['resolution'] + name = dataset_id["name"] + resolution = dataset_id["resolution"] if name in ANGLES: ds = self._get_angles(name, resolution) elif name in CHANNELS: - ds = self._get_channel(name, resolution, dataset_id['calibration']) + ds = self._get_channel(name, resolution, dataset_id["calibration"]) else: ds = self._get_other_dataset(name) ds = self._cleanup_coords(ds) @@ -590,7 +590,7 @@ def get_dataset(self, dataset_id, dataset_info): def get_area_def(self, dataset_id): """Get area definition of the given dataset.""" - im_size = self.nc.get_image_size(dataset_id['resolution']) + im_size = self.nc.get_image_size(dataset_id["resolution"]) nav = Navigator() return nav.get_area_def( im_size=im_size, @@ -605,13 +605,13 @@ def _get_channel(self, name, resolution, calibration): channel=name, calibration=calibration ) - if name == 'VIS': - qc = VisQualityControl(self.nc['quality_pixel_bitmask']) + if name == "VIS": + qc = VisQualityControl(self.nc["quality_pixel_bitmask"]) if self.mask_bad_quality: ds = qc.mask(ds) else: qc.check() - ds['acq_time'] = self._get_acq_time(resolution) + ds["acq_time"] = self._get_acq_time(resolution) return ds def _get_angles_uncached(self, name, resolution): @@ -638,10 +638,10 @@ def _get_other_dataset(self, name): def _update_attrs(self, ds, info): """Update dataset attributes.""" ds.attrs.update(info) - ds.attrs.update({'platform': self.filename_info['platform'], - 'sensor': self.filename_info['sensor']}) - ds.attrs['raw_metadata'] = self.nc.attrs - ds.attrs['orbital_parameters'] = self._get_orbital_parameters() + ds.attrs.update({"platform": self.filename_info["platform"], + "sensor": 
self.filename_info["sensor"]}) + ds.attrs["raw_metadata"] = self.nc.attrs + ds.attrs["orbital_parameters"] = self._get_orbital_parameters() def _cleanup_coords(self, ds): """Cleanup dataset coordinates. @@ -651,11 +651,11 @@ def _cleanup_coords(self, ds): can assign projection coordinates upstream (based on the area definition). """ - return ds.drop_vars(['y', 'x']) + return ds.drop_vars(["y", "x"]) def _calibrate(self, ds, channel, calibration): """Calibrate the given dataset.""" - if channel == 'VIS': + if channel == "VIS": return self._calibrate_vis(ds, channel, calibration) calib = IRWVCalibrator(self.calib_coefs[channel]) return calib.calibrate(ds, calibration) @@ -671,21 +671,21 @@ def _get_calib_coefs(self): Note: Only coefficients present in both file types. """ coefs = { - 'VIS': { - 'distance_sun_earth': self.nc['distance_sun_earth'], - 'solar_irradiance': self.nc['solar_irradiance_vis'] + "VIS": { + "distance_sun_earth": self.nc["distance_sun_earth"], + "solar_irradiance": self.nc["solar_irradiance_vis"] }, - 'IR': { - 'a': self.nc['a_ir'], - 'b': self.nc['b_ir'], - 'bt_a': self.nc['bt_a_ir'], - 'bt_b': self.nc['bt_b_ir'] + "IR": { + "a": self.nc["a_ir"], + "b": self.nc["b_ir"], + "bt_a": self.nc["bt_a_ir"], + "bt_b": self.nc["bt_b_ir"] }, - 'WV': { - 'a': self.nc['a_wv'], - 'b': self.nc['b_wv'], - 'bt_a': self.nc['bt_a_wv'], - 'bt_b': self.nc['bt_b_wv'] + "WV": { + "a": self.nc["a_wv"], + "b": self.nc["b_wv"], + "bt_a": self.nc["bt_a_wv"], + "bt_b": self.nc["bt_b_wv"] }, } @@ -710,15 +710,15 @@ def _get_acq_time_uncached(self, resolution): def _get_orbital_parameters(self): """Get the orbital parameters.""" orbital_parameters = { - 'projection_longitude': self.projection_longitude, - 'projection_latitude': 0.0, - 'projection_altitude': ALTITUDE + "projection_longitude": self.projection_longitude, + "projection_latitude": 0.0, + "projection_altitude": ALTITUDE } ssp_lon, ssp_lat = self._get_ssp_lonlat() if not np.isnan(ssp_lon) and not np.isnan(ssp_lat): orbital_parameters.update({ - 'satellite_actual_longitude': ssp_lon, - 'satellite_actual_latitude': ssp_lat, + "satellite_actual_longitude": ssp_lon, + "satellite_actual_latitude": ssp_lat, # altitude not available }) return orbital_parameters @@ -733,13 +733,13 @@ def _get_ssp_lonlat(self): Returns: Subsatellite longitude and latitude """ - ssp_lon = self._get_ssp('longitude') - ssp_lat = self._get_ssp('latitude') + ssp_lon = self._get_ssp("longitude") + ssp_lat = self._get_ssp("latitude") return ssp_lon, ssp_lat def _get_ssp(self, coord): - key_start = 'sub_satellite_{}_start'.format(coord) - key_end = 'sub_satellite_{}_end'.format(coord) + key_start = "sub_satellite_{}_start".format(coord) + key_end = "sub_satellite_{}_end".format(coord) try: sub_lonlat = np.nanmean( [self.nc[key_start].values, @@ -755,42 +755,42 @@ class FiduceoMviriEasyFcdrFileHandler(FiduceoMviriBase): """File handler for FIDUCEO MVIRI Easy FCDR.""" nc_keys = FiduceoMviriBase.nc_keys.copy() - nc_keys['VIS'] = 'toa_bidirectional_reflectance_vis' + nc_keys["VIS"] = "toa_bidirectional_reflectance_vis" def _calibrate_vis(self, ds, channel, calibration): """Calibrate VIS channel. Easy FCDR provides reflectance only, no counts or radiance. """ - if calibration == 'reflectance': + if calibration == "reflectance": coefs = self.calib_coefs[channel] cal = VISCalibrator(coefs) refl = cal.refl_factor_to_percent(ds) refl = cal.update_refl_attrs(refl) return refl - elif calibration in ('counts', 'radiance'): - raise ValueError('Cannot calibrate to {}. 
Easy FCDR provides ' - 'reflectance only.'.format(calibration.name)) + elif calibration in ("counts", "radiance"): + raise ValueError("Cannot calibrate to {}. Easy FCDR provides " + "reflectance only.".format(calibration.name)) else: - raise KeyError('Invalid calibration: {}'.format(calibration.name)) + raise KeyError("Invalid calibration: {}".format(calibration.name)) class FiduceoMviriFullFcdrFileHandler(FiduceoMviriBase): """File handler for FIDUCEO MVIRI Full FCDR.""" nc_keys = FiduceoMviriBase.nc_keys.copy() - nc_keys['VIS'] = 'count_vis' + nc_keys["VIS"] = "count_vis" def _get_calib_coefs(self): """Add additional VIS coefficients only present in full FCDR.""" coefs = super()._get_calib_coefs() - coefs['VIS'].update({ - 'years_since_launch': np.float32(self.nc['years_since_launch']), - 'a0': np.float32(self.nc['a0_vis']), - 'a1': np.float32(self.nc['a1_vis']), - 'a2': np.float32(self.nc['a2_vis']), - 'mean_count_space': np.float32( - self.nc['mean_count_space_vis'] + coefs["VIS"].update({ + "years_since_launch": np.float32(self.nc["years_since_launch"]), + "a0": np.float32(self.nc["a0_vis"]), + "a1": np.float32(self.nc["a1_vis"]), + "a2": np.float32(self.nc["a2_vis"]), + "mean_count_space": np.float32( + self.nc["mean_count_space_vis"] ) }) return coefs @@ -798,7 +798,7 @@ def _get_calib_coefs(self): def _calibrate_vis(self, ds, channel, calibration): """Calibrate VIS channel.""" sza = None - if calibration == 'reflectance': - sza = self._get_angles('solar_zenith_angle', HIGH_RESOL) + if calibration == "reflectance": + sza = self._get_angles("solar_zenith_angle", HIGH_RESOL) cal = VISCalibrator(self.calib_coefs[channel], sza) return cal.calibrate(ds, calibration) diff --git a/satpy/readers/mws_l1b.py b/satpy/readers/mws_l1b.py index 528675eeb5..372a59ac37 100644 --- a/satpy/readers/mws_l1b.py +++ b/satpy/readers/mws_l1b.py @@ -33,23 +33,23 @@ # parameter name and values are the paths to the variable inside the netcdf AUX_DATA = { - 'scantime_utc': 'data/navigation/mws_scantime_utc', - 'solar_azimuth': 'data/navigation/mws_solar_azimuth_angle', - 'solar_zenith': 'data/navigation/mws_solar_zenith_angle', - 'satellite_azimuth': 'data/navigation/mws_satellite_azimuth_angle', - 'satellite_zenith': 'data/navigation/mws_satellite_zenith_angle', - 'surface_type': 'data/navigation/mws_surface_type', - 'terrain_elevation': 'data/navigation/mws_terrain_elevation', - 'mws_lat': 'data/navigation/mws_lat', - 'mws_lon': 'data/navigation/mws_lon', + "scantime_utc": "data/navigation/mws_scantime_utc", + "solar_azimuth": "data/navigation/mws_solar_azimuth_angle", + "solar_zenith": "data/navigation/mws_solar_zenith_angle", + "satellite_azimuth": "data/navigation/mws_satellite_azimuth_angle", + "satellite_zenith": "data/navigation/mws_satellite_zenith_angle", + "surface_type": "data/navigation/mws_surface_type", + "terrain_elevation": "data/navigation/mws_terrain_elevation", + "mws_lat": "data/navigation/mws_lat", + "mws_lon": "data/navigation/mws_lon", } -MWS_CHANNEL_NAMES_TO_NUMBER = {'1': 1, '2': 2, '3': 3, '4': 4, - '5': 5, '6': 6, '7': 7, '8': 8, - '9': 9, '10': 10, '11': 11, '12': 12, - '13': 13, '14': 14, '15': 15, '16': 16, - '17': 17, '18': 18, '19': 19, '20': 20, - '21': 21, '22': 22, '23': 23, '24': 24} +MWS_CHANNEL_NAMES_TO_NUMBER = {"1": 1, "2": 2, "3": 3, "4": 4, + "5": 5, "6": 6, "7": 7, "8": 8, + "9": 9, "10": 10, "11": 11, "12": 12, + "13": 13, "14": 14, "15": 15, "16": 16, + "17": 17, "18": 18, "19": 19, "20": 20, + "21": 21, "22": 22, "23": 23, "24": 24} MWS_CHANNEL_NAMES = 
list(MWS_CHANNEL_NAMES_TO_NUMBER.keys()) MWS_CHANNELS = set(MWS_CHANNEL_NAMES) @@ -90,9 +90,9 @@ def __init__(self, filename, filename_info, filetype_info): filetype_info, cache_var_size=10000, cache_handle=True) - logger.debug('Reading: {}'.format(self.filename)) - logger.debug('Start: {}'.format(self.start_time)) - logger.debug('End: {}'.format(self.end_time)) + logger.debug("Reading: {}".format(self.filename)) + logger.debug("Start: {}".format(self.start_time)) + logger.debug("End: {}".format(self.end_time)) self._cache = {} @@ -101,57 +101,57 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Get start time.""" - return datetime.strptime(self['/attr/sensing_start_time_utc'], - '%Y-%m-%d %H:%M:%S.%f') + return datetime.strptime(self["/attr/sensing_start_time_utc"], + "%Y-%m-%d %H:%M:%S.%f") @property def end_time(self): """Get end time.""" - return datetime.strptime(self['/attr/sensing_end_time_utc'], - '%Y-%m-%d %H:%M:%S.%f') + return datetime.strptime(self["/attr/sensing_end_time_utc"], + "%Y-%m-%d %H:%M:%S.%f") @property def sensor(self): """Get the sensor name.""" - return self['/attr/instrument'] + return self["/attr/instrument"] @property def platform_name(self): """Get the platform name.""" - return self._platform_name_translate.get(self['/attr/spacecraft']) + return self._platform_name_translate.get(self["/attr/spacecraft"]) @property def sub_satellite_longitude_start(self): """Get the longitude of sub-satellite point at start of the product.""" - return self['status/satellite/subsat_longitude_start'].data.item() + return self["status/satellite/subsat_longitude_start"].data.item() @property def sub_satellite_latitude_start(self): """Get the latitude of sub-satellite point at start of the product.""" - return self['status/satellite/subsat_latitude_start'].data.item() + return self["status/satellite/subsat_latitude_start"].data.item() @property def sub_satellite_longitude_end(self): """Get the longitude of sub-satellite point at end of the product.""" - return self['status/satellite/subsat_longitude_end'].data.item() + return self["status/satellite/subsat_longitude_end"].data.item() @property def sub_satellite_latitude_end(self): """Get the latitude of sub-satellite point at end of the product.""" - return self['status/satellite/subsat_latitude_end'].data.item() + return self["status/satellite/subsat_latitude_end"].data.item() def get_dataset(self, dataset_id, dataset_info): """Get dataset using file_key in dataset_info.""" - logger.debug('Reading {} from {}'.format(dataset_id['name'], self.filename)) + logger.debug("Reading {} from {}".format(dataset_id["name"], self.filename)) - var_key = dataset_info['file_key'] - if _get_aux_data_name_from_dsname(dataset_id['name']) is not None: - variable = self._get_dataset_aux_data(dataset_id['name']) - elif any(lb in dataset_id['name'] for lb in MWS_CHANNELS): - logger.debug(f'Reading in file to get dataset with key {var_key}.') + var_key = dataset_info["file_key"] + if _get_aux_data_name_from_dsname(dataset_id["name"]) is not None: + variable = self._get_dataset_aux_data(dataset_id["name"]) + elif any(lb in dataset_id["name"] for lb in MWS_CHANNELS): + logger.debug(f"Reading in file to get dataset with key {var_key}.") variable = self._get_dataset_channel(dataset_id, dataset_info) else: - logger.warning(f'Could not find key {var_key} in NetCDF file, no valid Dataset created') # noqa: E501 + logger.warning(f"Could not find key {var_key} in NetCDF file, no valid Dataset created") # noqa: E501 
return None variable = self._manage_attributes(variable, dataset_info) @@ -162,10 +162,10 @@ def get_dataset(self, dataset_id, dataset_info): @staticmethod def _standardize_dims(variable): """Standardize dims to y, x.""" - if 'n_scans' in variable.dims: - variable = variable.rename({'n_fovs': 'x', 'n_scans': 'y'}) - if variable.dims[0] == 'x': - variable = variable.transpose('y', 'x') + if "n_scans" in variable.dims: + variable = variable.rename({"n_fovs": "x", "n_scans": "y"}) + if variable.dims[0] == "x": + variable = variable.transpose("y", "x") return variable @staticmethod @@ -178,7 +178,7 @@ def _drop_coords(variable): def _manage_attributes(self, variable, dataset_info): """Manage attributes of the dataset.""" - variable.attrs.setdefault('units', None) + variable.attrs.setdefault("units", None) variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) return variable @@ -192,8 +192,8 @@ def _get_dataset_channel(self, key, dataset_info): """ # Get the dataset # Get metadata for given dataset - grp_pth = dataset_info['file_key'] - channel_index = get_channel_index_from_name(key['name']) + grp_pth = dataset_info["file_key"] + channel_index = get_channel_index_from_name(key["name"]) data = self[grp_pth][:, :, channel_index] attrs = data.attrs.copy() @@ -203,7 +203,7 @@ def _get_dataset_channel(self, key, dataset_info): default_fillvals.get(data.dtype.str[1:], np.nan)) vr = attrs.get("valid_range", [-np.inf, np.inf]) - if key['calibration'] == "counts": + if key["calibration"] == "counts": attrs["_FillValue"] = fv nfv = fv else: @@ -212,18 +212,18 @@ def _get_dataset_channel(self, key, dataset_info): data = data.where(data <= vr[1], nfv) # Manage the attributes of the dataset - data.attrs.setdefault('units', None) + data.attrs.setdefault("units", None) data.attrs.update(dataset_info) - dataset_attrs = getattr(data, 'attrs', {}) + dataset_attrs = getattr(data, "attrs", {}) dataset_attrs.update(dataset_info) dataset_attrs.update({ "platform_name": self.platform_name, "sensor": self.sensor, - "orbital_parameters": {'sub_satellite_latitude_start': self.sub_satellite_latitude_start, - 'sub_satellite_longitude_start': self.sub_satellite_longitude_start, - 'sub_satellite_latitude_end': self.sub_satellite_latitude_end, - 'sub_satellite_longitude_end': self.sub_satellite_longitude_end}, + "orbital_parameters": {"sub_satellite_latitude_start": self.sub_satellite_latitude_start, + "sub_satellite_longitude_start": self.sub_satellite_longitude_start, + "sub_satellite_latitude_end": self.sub_satellite_latitude_end, + "sub_satellite_longitude_end": self.sub_satellite_longitude_end}, }) try: @@ -237,10 +237,10 @@ def _get_dataset_channel(self, key, dataset_info): def _get_dataset_aux_data(self, dsname): """Get the auxiliary data arrays using the index map.""" # Geolocation and navigation data: - if dsname in ['mws_lat', 'mws_lon', - 'solar_azimuth', 'solar_zenith', - 'satellite_azimuth', 'satellite_zenith', - 'surface_type', 'terrain_elevation']: + if dsname in ["mws_lat", "mws_lon", + "solar_azimuth", "solar_zenith", + "satellite_azimuth", "satellite_zenith", + "surface_type", "terrain_elevation"]: var_key = AUX_DATA.get(dsname) else: raise NotImplementedError(f"Dataset {dsname!r} not supported!") @@ -252,30 +252,30 @@ def _get_dataset_aux_data(self, dsname): raise # Scale the data: - if 'scale_factor' in variable.attrs and 'add_offset' in variable.attrs: - missing_value = variable.attrs['missing_value'] + if "scale_factor" in variable.attrs and "add_offset" in 
variable.attrs: + missing_value = variable.attrs["missing_value"] variable.data = da.where(variable.data == missing_value, np.nan, - variable.data * variable.attrs['scale_factor'] + variable.attrs['add_offset']) + variable.data * variable.attrs["scale_factor"] + variable.attrs["add_offset"]) return variable def _get_global_attributes(self): """Create a dictionary of global attributes.""" return { - 'filename': self.filename, - 'start_time': self.start_time, - 'end_time': self.end_time, - 'spacecraft_name': self.platform_name, - 'sensor': self.sensor, - 'filename_start_time': self.filename_info['start_time'], - 'filename_end_time': self.filename_info['end_time'], - 'platform_name': self.platform_name, - 'quality_group': self._get_quality_attributes(), + "filename": self.filename, + "start_time": self.start_time, + "end_time": self.end_time, + "spacecraft_name": self.platform_name, + "sensor": self.sensor, + "filename_start_time": self.filename_info["start_time"], + "filename_end_time": self.filename_info["end_time"], + "platform_name": self.platform_name, + "quality_group": self._get_quality_attributes(), } def _get_quality_attributes(self): """Get quality attributes.""" - quality_group = self['quality'] + quality_group = self["quality"] quality_dict = {} for key in quality_group: # Add the values (as Numpy array) of each variable in the group diff --git a/satpy/readers/netcdf_utils.py b/satpy/readers/netcdf_utils.py index ac98f81d8d..cb5c38d1cf 100644 --- a/satpy/readers/netcdf_utils.py +++ b/satpy/readers/netcdf_utils.py @@ -103,7 +103,7 @@ def __init__(self, filename, filename_info, filetype_info, file_handle = self._get_file_handle() except IOError: LOG.exception( - 'Failed reading file %s. Possibly corrupted file', self.filename) + "Failed reading file %s. Possibly corrupted file", self.filename) raise self._set_file_handle_auto_maskandscale(file_handle, auto_maskandscale) @@ -123,7 +123,7 @@ def __init__(self, filename, filename_info, filetype_info, file_handle.close() def _get_file_handle(self): - return netCDF4.Dataset(self.filename, 'r') + return netCDF4.Dataset(self.filename, "r") @staticmethod def _set_file_handle_auto_maskandscale(file_handle, auto_maskandscale): @@ -132,8 +132,8 @@ def _set_file_handle_auto_maskandscale(file_handle, auto_maskandscale): def _set_xarray_kwargs(self, xarray_kwargs, auto_maskandscale): self._xarray_kwargs = xarray_kwargs or {} - self._xarray_kwargs.setdefault('chunks', CHUNK_SIZE) - self._xarray_kwargs.setdefault('mask_and_scale', auto_maskandscale) + self._xarray_kwargs.setdefault("chunks", CHUNK_SIZE) + self._xarray_kwargs.setdefault("mask_and_scale", auto_maskandscale) def collect_metadata(self, name, obj): """Collect all file variables and attributes for the provided file object. 
@@ -171,11 +171,11 @@ def _collect_variable_info(self, var_name, var_obj): def _collect_listed_variables(self, file_handle, listed_variables): variable_name_replacements = self.filetype_info.get("variable_name_replacements") for itm in self._get_required_variable_names(listed_variables, variable_name_replacements): - parts = itm.split('/') + parts = itm.split("/") grp = file_handle for p in parts[:-1]: if p == "attr": - n = '/'.join(parts) + n = "/".join(parts) self.file_content[n] = self._get_attr_value(grp, parts[-1]) break grp = grp[p] @@ -188,7 +188,7 @@ def _collect_listed_variables(self, file_handle, listed_variables): def _get_required_variable_names(listed_variables, variable_name_replacements): variable_names = [] for var in listed_variables: - if variable_name_replacements and '{' in var: + if variable_name_replacements and "{" in var: _compose_replacement_names(variable_name_replacements, var, variable_names) else: variable_names.append(var) @@ -290,7 +290,7 @@ def _get_variable(self, key, val): # these datasets are closed and inaccessible when the file is # closed, need to reopen # TODO: Handle HDF4 versus NetCDF3 versus NetCDF4 - parts = key.rsplit('/', 1) + parts = key.rsplit("/", 1) if len(parts) == 2: group, key = parts else: @@ -392,7 +392,7 @@ def _get_file_handle(self): import h5netcdf f_obj = open_file_or_filename(self.filename) self._use_h5netcdf = True - return h5netcdf.File(f_obj, 'r') + return h5netcdf.File(f_obj, "r") def __getitem__(self, key): """Get item for given key.""" diff --git a/satpy/readers/nucaps.py b/satpy/readers/nucaps.py index 806a20cece..2c9e2ba39f 100644 --- a/satpy/readers/nucaps.py +++ b/satpy/readers/nucaps.py @@ -70,10 +70,10 @@ class NUCAPSFileHandler(NetCDF4FileHandler): def __init__(self, *args, **kwargs): """Initialize file handler.""" # remove kwargs that reader instance used that file handler does not - kwargs.pop('mask_surface', None) - kwargs.pop('mask_quality', None) - kwargs.setdefault('xarray_kwargs', {}).setdefault( - 'decode_times', False) + kwargs.pop("mask_surface", None) + kwargs.pop("mask_quality", None) + kwargs.setdefault("xarray_kwargs", {}).setdefault( + "decode_times", False) super(NUCAPSFileHandler, self).__init__(*args, **kwargs) def __contains__(self, item): @@ -88,25 +88,25 @@ def _parse_datetime(self, datestr): def start_time(self): """Get start time.""" try: - return self._parse_datetime(self['/attr/time_coverage_start']) + return self._parse_datetime(self["/attr/time_coverage_start"]) except KeyError: # If attribute not present, use time from file name - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" try: - return self._parse_datetime(self['/attr/time_coverage_end']) + return self._parse_datetime(self["/attr/time_coverage_end"]) except KeyError: # If attribute not present, use time from file name - return self.filename_info['end_time'] + return self.filename_info["end_time"] @property def start_orbit_number(self): """Return orbit number for the beginning of the swath.""" try: - return int(self['/attr/start_orbit_number']) + return int(self["/attr/start_orbit_number"]) except KeyError: return 0 @@ -114,7 +114,7 @@ def start_orbit_number(self): def end_orbit_number(self): """Return orbit number for the end of the swath.""" try: - return int(self['/attr/end_orbit_number']) + return int(self["/attr/end_orbit_number"]) except KeyError: return 0 @@ -122,29 +122,29 @@ def end_orbit_number(self): def platform_name(self): """Return standard 
platform name for the file's data.""" try: - res = self['/attr/platform_name'] + res = self["/attr/platform_name"] if isinstance(res, np.ndarray): return str(res.astype(str)) return res except KeyError: - return self.filename_info['platform_shortname'] + return self.filename_info["platform_shortname"] @property def sensor_names(self): """Return standard sensor or instrument name for the file's data.""" try: - res = self['/attr/instrument_name'] - res = [x.strip() for x in res.split(',')] + res = self["/attr/instrument_name"] + res = [x.strip() for x in res.split(",")] if len(res) == 1: return res[0].lower() except KeyError: - res = ['CrIS', 'ATMS', 'VIIRS'] + res = ["CrIS", "ATMS", "VIIRS"] return set(name.lower() for name in res) def get_shape(self, ds_id, ds_info): """Return data array shape for item specified.""" - var_path = ds_info.get('file_key', '{}'.format(ds_id['name'])) - if var_path + '/shape' not in self: + var_path = ds_info.get("file_key", "{}".format(ds_id["name"])) + if var_path + "/shape" not in self: # loading a scalar value shape = 1 else: @@ -157,11 +157,11 @@ def get_shape(self, ds_id, ds_info): def get_metadata(self, dataset_id, ds_info): """Get metadata.""" - var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) + var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) shape = self.get_shape(dataset_id, ds_info) - file_units = ds_info.get('file_units', - self.get(var_path + '/attr/units')) - ds_info.update(getattr(self[var_path], 'attrs', {})) + file_units = ds_info.get("file_units", + self.get(var_path + "/attr/units")) + ds_info.update(getattr(self[var_path], "attrs", {})) # don't overwrite information in the files attrs because the same # `.attrs` is used for each separate Temperature pressure level dataset # Plus, if someone gets metadata multiple times then we are screwed @@ -176,22 +176,22 @@ def get_metadata(self, dataset_id, ds_info): "start_orbit": self.start_orbit_number, "end_orbit": self.end_orbit_number, }) - if 'standard_name' not in info: - sname_path = var_path + '/attr/standard_name' - info['standard_name'] = self.get(sname_path) - if dataset_id['name'] != 'Quality_Flag': - anc_vars = info.get('ancillary_variables', []) - if 'Quality_Flag' not in anc_vars: - anc_vars.append('Quality_Flag') - info['ancillary_variables'] = anc_vars + if "standard_name" not in info: + sname_path = var_path + "/attr/standard_name" + info["standard_name"] = self.get(sname_path) + if dataset_id["name"] != "Quality_Flag": + anc_vars = info.get("ancillary_variables", []) + if "Quality_Flag" not in anc_vars: + anc_vars.append("Quality_Flag") + info["ancillary_variables"] = anc_vars return info def get_dataset(self, dataset_id, ds_info): """Load data array and metadata for specified dataset.""" - var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) + var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) metadata = self.get_metadata(dataset_id, ds_info) - valid_min, valid_max = self[var_path + '/attr/valid_range'] - fill_value = self.get(var_path + '/attr/_FillValue') + valid_min, valid_max = self[var_path + "/attr/valid_range"] + fill_value = self.get(var_path + "/attr/_FillValue") d_tmp = self[var_path] if "index" in ds_info: @@ -200,19 +200,19 @@ def get_dataset(self, dataset_id, ds_info): d_tmp = d_tmp[..., int(ds_info["pressure_index"])] # this is a pressure based field # include surface_pressure as metadata - sp = self['Surface_Pressure'] + sp = self["Surface_Pressure"] # Older format - if 'number_of_FORs' in sp.dims: 
- sp = sp.rename({'number_of_FORs': 'y'}) + if "number_of_FORs" in sp.dims: + sp = sp.rename({"number_of_FORs": "y"}) # Newer format - if 'Number_of_CrIS_FORs' in sp.dims: - sp = sp.rename({'Number_of_CrIS_FORs': 'y'}) - if 'surface_pressure' in ds_info: - ds_info['surface_pressure'] = xr.concat((ds_info['surface_pressure'], sp), dim='y') + if "Number_of_CrIS_FORs" in sp.dims: + sp = sp.rename({"Number_of_CrIS_FORs": "y"}) + if "surface_pressure" in ds_info: + ds_info["surface_pressure"] = xr.concat((ds_info["surface_pressure"], sp), dim="y") else: - ds_info['surface_pressure'] = sp + ds_info["surface_pressure"] = sp # include all the pressure levels - ds_info.setdefault('pressure_levels', self['Pressure'][0]) + ds_info.setdefault("pressure_levels", self["Pressure"][0]) data = d_tmp if valid_min is not None and valid_max is not None: @@ -221,16 +221,16 @@ def get_dataset(self, dataset_id, ds_info): if fill_value is not None: data = data.where(data != fill_value) # this _FillValue is no longer valid - metadata.pop('_FillValue', None) - data.attrs.pop('_FillValue', None) + metadata.pop("_FillValue", None) + data.attrs.pop("_FillValue", None) data.attrs.update(metadata) # Older format - if 'number_of_FORs' in data.dims: - data = data.rename({'number_of_FORs': 'y'}) + if "number_of_FORs" in data.dims: + data = data.rename({"number_of_FORs": "y"}) # Newer format - if 'Number_of_CrIS_FORs' in data.dims: - data = data.rename({'Number_of_CrIS_FORs': 'y'}) + if "Number_of_CrIS_FORs" in data.dims: + data = data.rename({"Number_of_CrIS_FORs": "y"}) return data @@ -248,8 +248,8 @@ def __init__(self, config_files, mask_surface=True, mask_quality=True, **kwargs) self.pressure_dataset_names = defaultdict(list) super(NUCAPSReader, self).__init__(config_files, **kwargs) - self.mask_surface = self.info.get('mask_surface', mask_surface) - self.mask_quality = self.info.get('mask_quality', mask_quality) + self.mask_surface = self.info.get("mask_surface", mask_surface) + self.mask_quality = self.info.get("mask_quality", mask_quality) def load_ds_ids_from_config(self): """Convert config dataset entries to DataIDs. @@ -263,7 +263,7 @@ def load_ds_ids_from_config(self): super(NUCAPSReader, self).load_ds_ids_from_config() for ds_id in list(self.all_ids.keys()): ds_info = self.all_ids[ds_id] - if ds_info.get('pressure_based', False): + if ds_info.get("pressure_based", False): for idx, lvl_num in enumerate(ALL_PRESSURE_LEVELS): if lvl_num < 5.0: suffix = "_{:0.03f}mb".format(lvl_num) @@ -271,14 +271,14 @@ def load_ds_ids_from_config(self): suffix = "_{:0.0f}mb".format(lvl_num) new_info = ds_info.copy() - new_info['pressure_level'] = lvl_num - new_info['pressure_index'] = idx - new_info['file_key'] = '{}'.format(ds_id['name']) - new_info['name'] = ds_id['name'] + suffix - new_ds_id = ds_id._replace(name=new_info['name']) - new_info['id'] = new_ds_id + new_info["pressure_level"] = lvl_num + new_info["pressure_index"] = idx + new_info["file_key"] = "{}".format(ds_id["name"]) + new_info["name"] = ds_id["name"] + suffix + new_ds_id = ds_id._replace(name=new_info["name"]) + new_info["id"] = new_ds_id self.all_ids[new_ds_id] = new_info - self.pressure_dataset_names[ds_id['name']].append(new_info['name']) + self.pressure_dataset_names[ds_id["name"]].append(new_info["name"]) def load(self, dataset_keys, previous_datasets=None, pressure_levels=None): """Load data from one or more set of files. 
@@ -294,7 +294,7 @@ def load(self, dataset_keys, previous_datasets=None, pressure_levels=None): # Add pressure levels to the datasets to load if needed so # we can do further filtering after loading - plevels_ds_id = self.get_dataset_key('Pressure_Levels') + plevels_ds_id = self.get_dataset_key("Pressure_Levels") remove_plevels = False if plevels_ds_id not in dataset_keys: dataset_keys.add(plevels_ds_id) @@ -353,7 +353,7 @@ def _remove_data_at_pressure_levels(datasets_loaded, plevels_ds, pressure_levels if cond is not None: datasets_loaded[ds_id] = ds_obj.where(cond, drop=True) - datasets_loaded[ds_id].attrs['pressure_levels'] = new_plevels + datasets_loaded[ds_id].attrs["pressure_levels"] = new_plevels def _get_pressure_level_condition(plevels_ds, pressure_levels): @@ -399,8 +399,8 @@ def _mask_data_with_quality_flag(datasets_loaded, dataset_keys): for ds_id in sorted(dataset_keys): ds = datasets_loaded[ds_id] quality_flag = [ - x for x in ds.attrs.get('ancillary_variables', []) - if x.attrs.get('name') == 'Quality_Flag'] + x for x in ds.attrs.get("ancillary_variables", []) + if x.attrs.get("name") == "Quality_Flag"] if not quality_flag: continue diff --git a/satpy/readers/nwcsaf_msg2013_hdf5.py b/satpy/readers/nwcsaf_msg2013_hdf5.py index a8fdf45f3c..40a6441655 100644 --- a/satpy/readers/nwcsaf_msg2013_hdf5.py +++ b/satpy/readers/nwcsaf_msg2013_hdf5.py @@ -38,10 +38,10 @@ logger = logging.getLogger(__name__) -PLATFORM_NAMES = {'MSG1': 'Meteosat-8', - 'MSG2': 'Meteosat-9', - 'MSG3': 'Meteosat-10', - 'MSG4': 'Meteosat-11', } +PLATFORM_NAMES = {"MSG1": "Meteosat-8", + "MSG2": "Meteosat-9", + "MSG3": "Meteosat-10", + "MSG4": "Meteosat-11", } class Hdf5NWCSAF(HDF5FileHandler): @@ -56,27 +56,27 @@ def __init__(self, filename, filename_info, filetype_info): def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" - file_key = ds_info.get('file_key', dataset_id['name']) + file_key = ds_info.get("file_key", dataset_id["name"]) data = self[file_key] nodata = None - if 'SCALING_FACTOR' in data.attrs and 'OFFSET' in data.attrs: + if "SCALING_FACTOR" in data.attrs and "OFFSET" in data.attrs: dtype = np.dtype(data.data) - if dataset_id['name'] in ['ctth_alti']: - data.attrs['valid_range'] = (0, 27000) - data.attrs['_FillValue'] = np.nan + if dataset_id["name"] in ["ctth_alti"]: + data.attrs["valid_range"] = (0, 27000) + data.attrs["_FillValue"] = np.nan - if dataset_id['name'] in ['ctth_alti', 'ctth_pres', 'ctth_tempe', 'ctth_effective_cloudiness']: - dtype = np.dtype('float32') + if dataset_id["name"] in ["ctth_alti", "ctth_pres", "ctth_tempe", "ctth_effective_cloudiness"]: + dtype = np.dtype("float32") nodata = 255 - if dataset_id['name'] in ['ct']: - data.attrs['valid_range'] = (0, 20) - data.attrs['_FillValue'] = 255 + if dataset_id["name"] in ["ct"]: + data.attrs["valid_range"] = (0, 20) + data.attrs["_FillValue"] = 255 # data.attrs['palette_meanings'] = list(range(21)) attrs = data.attrs - scaled_data = (data * data.attrs['SCALING_FACTOR'] + data.attrs['OFFSET']).astype(dtype) + scaled_data = (data * data.attrs["SCALING_FACTOR"] + data.attrs["OFFSET"]).astype(dtype) if nodata: scaled_data = scaled_data.where(data != nodata) scaled_data = scaled_data.where(scaled_data >= 0) @@ -92,18 +92,18 @@ def get_dataset(self, dataset_id, ds_info): def get_area_def(self, dsid): """Get the area definition of the datasets in the file.""" - if dsid['name'].endswith('_pal'): + if dsid["name"].endswith("_pal"): raise NotImplementedError - cfac = self.file_content['/attr/CFAC'] - lfac = 
self.file_content['/attr/LFAC'] - coff = self.file_content['/attr/COFF'] - loff = self.file_content['/attr/LOFF'] - numcols = int(self.file_content['/attr/NC']) - numlines = int(self.file_content['/attr/NL']) + cfac = self.file_content["/attr/CFAC"] + lfac = self.file_content["/attr/LFAC"] + coff = self.file_content["/attr/COFF"] + loff = self.file_content["/attr/LOFF"] + numcols = int(self.file_content["/attr/NC"]) + numlines = int(self.file_content["/attr/NL"]) aex = get_area_extent(cfac, lfac, coff, loff, numcols, numlines) - pname = self.file_content['/attr/PROJECTION_NAME'] + pname = self.file_content["/attr/PROJECTION_NAME"] proj = {} if pname.startswith("GEOS"): proj["proj"] = "geos" @@ -114,8 +114,8 @@ def get_area_def(self, dsid): else: raise NotImplementedError("Only geos projection supported yet.") - area_def = AreaDefinition(self.file_content['/attr/REGION_NAME'], - self.file_content['/attr/REGION_NAME'], + area_def = AreaDefinition(self.file_content["/attr/REGION_NAME"], + self.file_content["/attr/REGION_NAME"], pname, proj, numcols, @@ -127,7 +127,7 @@ def get_area_def(self, dsid): @property def start_time(self): """Return the start time of the object.""" - return datetime.strptime(self.file_content['/attr/IMAGE_ACQUISITION_TIME'], '%Y%m%d%H%M') + return datetime.strptime(self.file_content["/attr/IMAGE_ACQUISITION_TIME"], "%Y%m%d%H%M") def get_area_extent(cfac, lfac, coff, loff, numcols, numlines): diff --git a/satpy/readers/nwcsaf_nc.py b/satpy/readers/nwcsaf_nc.py index 7ecc5f43f4..1b3d65cb96 100644 --- a/satpy/readers/nwcsaf_nc.py +++ b/satpy/readers/nwcsaf_nc.py @@ -42,36 +42,36 @@ CHUNK_SIZE = get_legacy_chunk_size() -SENSOR = {'NOAA-19': 'avhrr-3', - 'NOAA-18': 'avhrr-3', - 'NOAA-15': 'avhrr-3', - 'Metop-A': 'avhrr-3', - 'Metop-B': 'avhrr-3', - 'Metop-C': 'avhrr-3', - 'EOS-Aqua': 'modis', - 'EOS-Terra': 'modis', - 'Suomi-NPP': 'viirs', - 'NOAA-20': 'viirs', - 'NOAA-21': 'viirs', - 'NOAA-22': 'viirs', - 'NOAA-23': 'viirs', - 'JPSS-1': 'viirs', - 'Metop-SG-A1': 'metimage', - 'Metop-SG-A2': 'metimage', - 'Metop-SG-A3': 'metimage', - 'GOES-16': 'abi', - 'GOES-17': 'abi', - 'Himawari-8': 'ahi', - 'Himawari-9': 'ahi', +SENSOR = {"NOAA-19": "avhrr-3", + "NOAA-18": "avhrr-3", + "NOAA-15": "avhrr-3", + "Metop-A": "avhrr-3", + "Metop-B": "avhrr-3", + "Metop-C": "avhrr-3", + "EOS-Aqua": "modis", + "EOS-Terra": "modis", + "Suomi-NPP": "viirs", + "NOAA-20": "viirs", + "NOAA-21": "viirs", + "NOAA-22": "viirs", + "NOAA-23": "viirs", + "JPSS-1": "viirs", + "Metop-SG-A1": "metimage", + "Metop-SG-A2": "metimage", + "Metop-SG-A3": "metimage", + "GOES-16": "abi", + "GOES-17": "abi", + "Himawari-8": "ahi", + "Himawari-9": "ahi", } -PLATFORM_NAMES = {'MSG1': 'Meteosat-8', - 'MSG2': 'Meteosat-9', - 'MSG3': 'Meteosat-10', - 'MSG4': 'Meteosat-11', - 'GOES16': 'GOES-16', - 'GOES17': 'GOES-17', +PLATFORM_NAMES = {"MSG1": "Meteosat-8", + "MSG2": "Meteosat-9", + "MSG3": "Meteosat-10", + "MSG4": "Meteosat-11", + "GOES16": "GOES-16", + "GOES17": "GOES-17", } @@ -93,8 +93,8 @@ def __init__(self, filename, filename_info, filetype_info): mask_and_scale=False, chunks=CHUNK_SIZE) - self.nc = self.nc.rename({'nx': 'x', 'ny': 'y'}) - self.sw_version = self.nc.attrs['source'] + self.nc = self.nc.rename({"nx": "x", "ny": "y"}) + self.sw_version = self.nc.attrs["source"] self.pps = False self.platform_name = None @@ -104,12 +104,12 @@ def __init__(self, filename, filename_info, filetype_info): try: # NWCSAF/Geo: try: - kwrgs = {'sat_id': self.nc.attrs['satellite_identifier']} + kwrgs = {"sat_id": 
self.nc.attrs["satellite_identifier"]} except KeyError: - kwrgs = {'sat_id': self.nc.attrs['satellite_identifier'].astype(str)} + kwrgs = {"sat_id": self.nc.attrs["satellite_identifier"].astype(str)} except KeyError: # NWCSAF/PPS: - kwrgs = {'platform_name': self.nc.attrs['platform']} + kwrgs = {"platform_name": self.nc.attrs["platform"]} self.set_platform_and_sensor(**kwrgs) @@ -121,17 +121,17 @@ def set_platform_and_sensor(self, **kwargs): """Set some metadata: platform_name, sensors, and pps (identifying PPS or Geo).""" try: # NWCSAF/Geo - self.platform_name = PLATFORM_NAMES.get(kwargs['sat_id'], kwargs['sat_id']) + self.platform_name = PLATFORM_NAMES.get(kwargs["sat_id"], kwargs["sat_id"]) except KeyError: # NWCSAF/PPS - self.platform_name = kwargs['platform_name'] + self.platform_name = kwargs["platform_name"] self.pps = True - self.sensor = set([SENSOR.get(self.platform_name, 'seviri')]) + self.sensor = set([SENSOR.get(self.platform_name, "seviri")]) def remove_timedim(self, var): """Remove time dimension from dataset.""" - if self.pps and var.dims[0] == 'time': + if self.pps and var.dims[0] == "time": data = var[0, :, :] data.attrs = var.attrs var = data @@ -140,19 +140,19 @@ def remove_timedim(self, var): def drop_xycoords(self, variable): """Drop x, y coords when y is scan line number.""" try: - if variable.coords['y'].attrs['long_name'] == "scan line number": - return variable.drop_vars(['y', 'x']) + if variable.coords["y"].attrs["long_name"] == "scan line number": + return variable.drop_vars(["y", "x"]) except KeyError: pass return variable def get_dataset(self, dsid, info): """Load a dataset.""" - dsid_name = dsid['name'] + dsid_name = dsid["name"] if dsid_name in self.cache: - logger.debug('Get the data set from cache: %s.', dsid_name) + logger.debug("Get the data set from cache: %s.", dsid_name) return self.cache[dsid_name] - if dsid_name in ['lon', 'lat'] and dsid_name not in self.nc: + if dsid_name in ["lon", "lat"] and dsid_name not in self.nc: # Get full resolution lon,lat from the reduced (tie points) grid lon, lat = self.upsample_geolocation() if dsid_name == "lon": @@ -160,7 +160,7 @@ def get_dataset(self, dsid, info): else: return lat - logger.debug('Reading %s.', dsid_name) + logger.debug("Reading %s.", dsid_name) file_key = self._get_filekeys(dsid_name, info) variable = self.nc[file_key] variable = self.remove_timedim(variable) @@ -204,91 +204,91 @@ def scale_dataset(self, variable, info): """ variable = remove_empties(variable) - scale = variable.attrs.get('scale_factor', np.array(1)) - offset = variable.attrs.get('add_offset', np.array(0)) - if '_FillValue' in variable.attrs: - variable.attrs['scaled_FillValue'] = variable.attrs['_FillValue'] * scale + offset + scale = variable.attrs.get("scale_factor", np.array(1)) + offset = variable.attrs.get("add_offset", np.array(0)) + if "_FillValue" in variable.attrs: + variable.attrs["scaled_FillValue"] = variable.attrs["_FillValue"] * scale + offset if np.issubdtype((scale + offset).dtype, np.floating) or np.issubdtype(variable.dtype, np.floating): variable = self._mask_variable(variable) attrs = variable.attrs.copy() variable = variable * scale + offset variable.attrs = attrs - if 'valid_range' in variable.attrs: - variable.attrs['valid_range'] = variable.attrs['valid_range'] * scale + offset + if "valid_range" in variable.attrs: + variable.attrs["valid_range"] = variable.attrs["valid_range"] * scale + offset - variable.attrs.pop('add_offset', None) - variable.attrs.pop('scale_factor', None) + 
variable.attrs.pop("add_offset", None) + variable.attrs.pop("scale_factor", None) - variable.attrs.update({'platform_name': self.platform_name, - 'sensor': self.sensor}) + variable.attrs.update({"platform_name": self.platform_name, + "sensor": self.sensor}) - if not variable.attrs.get('standard_name', '').endswith('status_flag'): + if not variable.attrs.get("standard_name", "").endswith("status_flag"): # TODO: do we really need to add units to everything ? - variable.attrs.setdefault('units', '1') + variable.attrs.setdefault("units", "1") - ancillary_names = variable.attrs.get('ancillary_variables', '') + ancillary_names = variable.attrs.get("ancillary_variables", "") try: - variable.attrs['ancillary_variables'] = ancillary_names.split() + variable.attrs["ancillary_variables"] = ancillary_names.split() except AttributeError: pass - if 'palette_meanings' in variable.attrs: + if "palette_meanings" in variable.attrs: variable = self._prepare_variable_for_palette(variable, info) - if 'standard_name' in info: - variable.attrs.setdefault('standard_name', info['standard_name']) + if "standard_name" in info: + variable.attrs.setdefault("standard_name", info["standard_name"]) variable = self._adjust_variable_for_legacy_software(variable) return variable @staticmethod def _mask_variable(variable): - if '_FillValue' in variable.attrs: + if "_FillValue" in variable.attrs: variable = variable.where( - variable != variable.attrs['_FillValue']) - variable.attrs['_FillValue'] = np.nan - if 'valid_range' in variable.attrs: + variable != variable.attrs["_FillValue"]) + variable.attrs["_FillValue"] = np.nan + if "valid_range" in variable.attrs: variable = variable.where( - variable <= variable.attrs['valid_range'][1]) + variable <= variable.attrs["valid_range"][1]) variable = variable.where( - variable >= variable.attrs['valid_range'][0]) - if 'valid_max' in variable.attrs: + variable >= variable.attrs["valid_range"][0]) + if "valid_max" in variable.attrs: variable = variable.where( - variable <= variable.attrs['valid_max']) - if 'valid_min' in variable.attrs: + variable <= variable.attrs["valid_max"]) + if "valid_min" in variable.attrs: variable = variable.where( - variable >= variable.attrs['valid_min']) + variable >= variable.attrs["valid_min"]) return variable def _prepare_variable_for_palette(self, variable, info): try: - so_dataset = self.nc[self._get_varname_in_file(info, info_type='scale_offset_dataset')] + so_dataset = self.nc[self._get_varname_in_file(info, info_type="scale_offset_dataset")] except KeyError: scale = 1 offset = 0 fill_value = 255 else: - scale = so_dataset.attrs['scale_factor'] - offset = so_dataset.attrs['add_offset'] - fill_value = so_dataset.attrs['_FillValue'] - variable.attrs['palette_meanings'] = [int(val) - for val in variable.attrs['palette_meanings'].split()] - - if fill_value not in variable.attrs['palette_meanings'] and 'fill_value_color' in variable.attrs: - variable.attrs['palette_meanings'] = [fill_value] + variable.attrs['palette_meanings'] - variable = xr.DataArray(da.vstack((np.array(variable.attrs['fill_value_color']), variable.data)), + scale = so_dataset.attrs["scale_factor"] + offset = so_dataset.attrs["add_offset"] + fill_value = so_dataset.attrs["_FillValue"] + variable.attrs["palette_meanings"] = [int(val) + for val in variable.attrs["palette_meanings"].split()] + + if fill_value not in variable.attrs["palette_meanings"] and "fill_value_color" in variable.attrs: + variable.attrs["palette_meanings"] = [fill_value] + variable.attrs["palette_meanings"] + 
variable = xr.DataArray(da.vstack((np.array(variable.attrs["fill_value_color"]), variable.data)), coords=variable.coords, dims=variable.dims, attrs=variable.attrs) - val, idx = np.unique(variable.attrs['palette_meanings'], return_index=True) - variable.attrs['palette_meanings'] = val * scale + offset + val, idx = np.unique(variable.attrs["palette_meanings"], return_index=True) + variable.attrs["palette_meanings"] = val * scale + offset variable = variable[idx] return variable def _adjust_variable_for_legacy_software(self, variable): - if self.sw_version == 'NWC/PPS version v2014' and variable.attrs.get('standard_name') == 'cloud_top_altitude': + if self.sw_version == "NWC/PPS version v2014" and variable.attrs.get("standard_name") == "cloud_top_altitude": # pps 2014 valid range and palette don't match - variable.attrs['valid_range'] = (0., 9000.) - if (self.sw_version == 'NWC/PPS version v2014' and - variable.attrs.get('long_name') == 'RGB Palette for ctth_alti'): + variable.attrs["valid_range"] = (0., 9000.) + if (self.sw_version == "NWC/PPS version v2014" and + variable.attrs.get("long_name") == "RGB Palette for ctth_alti"): # pps 2014 palette has the nodata color (black) first variable = variable[1:, :] return variable @@ -298,12 +298,12 @@ def _upsample_geolocation_uncached(self): from geotiepoints import SatelliteInterpolator # Read the fields needed: - col_indices = self.nc['nx_reduced'].values - row_indices = self.nc['ny_reduced'].values - lat_reduced = self.scale_dataset(self.nc['lat_reduced'], {}) - lon_reduced = self.scale_dataset(self.nc['lon_reduced'], {}) + col_indices = self.nc["nx_reduced"].values + row_indices = self.nc["ny_reduced"].values + lat_reduced = self.scale_dataset(self.nc["lat_reduced"], {}) + lon_reduced = self.scale_dataset(self.nc["lon_reduced"], {}) - shape = (self.nc['y'].shape[0], self.nc['x'].shape[0]) + shape = (self.nc["y"].shape[0], self.nc["x"].shape[0]) cols_full = np.arange(shape[1]) rows_full = np.arange(shape[0]) @@ -313,8 +313,8 @@ def _upsample_geolocation_uncached(self): (rows_full, cols_full)) lons, lats = satint.interpolate() - lon = xr.DataArray(lons, attrs=lon_reduced.attrs, dims=['y', 'x']) - lat = xr.DataArray(lats, attrs=lat_reduced.attrs, dims=['y', 'x']) + lon = xr.DataArray(lons, attrs=lon_reduced.attrs, dims=["y", "x"]) + lat = xr.DataArray(lats, attrs=lat_reduced.attrs, dims=["y", "x"]) lat = self.drop_xycoords(lat) lon = self.drop_xycoords(lon) return lon, lat @@ -328,15 +328,15 @@ def get_area_def(self, dsid): # PPS: raise NotImplementedError - if dsid['name'].endswith('_pal'): + if dsid["name"].endswith("_pal"): raise NotImplementedError crs, area_extent = self._get_projection() crs, area_extent = self._ensure_crs_extents_in_meters(crs, area_extent) - nlines, ncols = self.nc[dsid['name']].shape - area = AreaDefinition('some_area_name', + nlines, ncols = self.nc[dsid["name"]].shape + area = AreaDefinition("some_area_name", "On-the-fly area", - 'geosmsg', + "geosmsg", crs, ncols, nlines, @@ -347,7 +347,7 @@ def get_area_def(self, dsid): @staticmethod def _ensure_crs_extents_in_meters(crs, area_extent): """Fix units in Earth shape, satellite altitude and 'units' attribute.""" - if 'kilo' in crs.axis_info[0].unit_name: + if "kilo" in crs.axis_info[0].unit_name: proj_dict = crs.to_dict() proj_dict["units"] = "m" if "a" in proj_dict: @@ -372,12 +372,12 @@ def __del__(self): @property def start_time(self): """Return the start time of the object.""" - return read_nwcsaf_time(self.nc.attrs['time_coverage_start']) + return 
read_nwcsaf_time(self.nc.attrs["time_coverage_start"]) @property def end_time(self): """Return the end time of the object.""" - return read_nwcsaf_time(self.nc.attrs['time_coverage_end']) + return read_nwcsaf_time(self.nc.attrs["time_coverage_end"]) @property def sensor_names(self): @@ -387,26 +387,26 @@ def sensor_names(self): def _get_projection(self): """Get projection from the NetCDF4 attributes.""" try: - proj_str = self.nc.attrs['gdal_projection'] + proj_str = self.nc.attrs["gdal_projection"] except TypeError: - proj_str = self.nc.attrs['gdal_projection'].decode() + proj_str = self.nc.attrs["gdal_projection"].decode() # Check the a/b/h units - radius_a = proj_str.split('+a=')[-1].split()[0] + radius_a = proj_str.split("+a=")[-1].split()[0] if float(radius_a) > 10e3: - units = 'm' + units = "m" scale = 1.0 else: - units = 'km' + units = "km" scale = 1e3 - if 'units' not in proj_str: - proj_str = proj_str + ' +units=' + units + if "units" not in proj_str: + proj_str = proj_str + " +units=" + units - area_extent = (float(self.nc.attrs['gdal_xgeo_up_left']) / scale, - float(self.nc.attrs['gdal_ygeo_low_right']) / scale, - float(self.nc.attrs['gdal_xgeo_low_right']) / scale, - float(self.nc.attrs['gdal_ygeo_up_left']) / scale) + area_extent = (float(self.nc.attrs["gdal_xgeo_up_left"]) / scale, + float(self.nc.attrs["gdal_ygeo_low_right"]) / scale, + float(self.nc.attrs["gdal_xgeo_low_right"]) / scale, + float(self.nc.attrs["gdal_ygeo_up_left"]) / scale) crs = CRS.from_string(proj_str) return crs, area_extent @@ -427,9 +427,9 @@ def read_nwcsaf_time(time_value): try: # MSG: try: - return datetime.strptime(time_value, '%Y-%m-%dT%H:%M:%SZ') + return datetime.strptime(time_value, "%Y-%m-%dT%H:%M:%SZ") except TypeError: # Remove this in summer 2024 (this is not needed since h5netcdf 0.14) - return datetime.strptime(time_value.astype(str), '%Y-%m-%dT%H:%M:%SZ') + return datetime.strptime(time_value.astype(str), "%Y-%m-%dT%H:%M:%SZ") except ValueError: # PPS: - return datetime.strptime(time_value, '%Y%m%dT%H%M%S%fZ') + return datetime.strptime(time_value, "%Y%m%dT%H%M%S%fZ") diff --git a/satpy/readers/oceancolorcci_l3_nc.py b/satpy/readers/oceancolorcci_l3_nc.py index b4ce1f7772..075e885b36 100644 --- a/satpy/readers/oceancolorcci_l3_nc.py +++ b/satpy/readers/oceancolorcci_l3_nc.py @@ -46,48 +46,48 @@ def _parse_datetime(datestr): @property def start_time(self): """Get the start time.""" - return self._parse_datetime(self['/attr/time_coverage_start']) + return self._parse_datetime(self["/attr/time_coverage_start"]) @property def end_time(self): """Get the end time.""" - return self._parse_datetime(self['/attr/time_coverage_end']) + return self._parse_datetime(self["/attr/time_coverage_end"]) @property def composite_period(self): """Determine composite period from filename information.""" - comp1 = self.filename_info['composite_period_1'] - comp2 = self.filename_info['composite_period_2'] - if comp2 == 'MONTHLY' and comp1 == "1M": - return 'monthly' - elif comp1 == '1D': - return 'daily' - elif comp1 == '5D': - return '5-day' - elif comp1 == '8D': - return '8-day' + comp1 = self.filename_info["composite_period_1"] + comp2 = self.filename_info["composite_period_2"] + if comp2 == "MONTHLY" and comp1 == "1M": + return "monthly" + elif comp1 == "1D": + return "daily" + elif comp1 == "5D": + return "5-day" + elif comp1 == "8D": + return "8-day" else: raise ValueError(f"Unknown data compositing period: {comp1}_{comp2}") def _update_attrs(self, dataset, dataset_info): """Update dataset attributes.""" - 
dataset.attrs.update(self[dataset_info['nc_key']].attrs) + dataset.attrs.update(self[dataset_info["nc_key"]].attrs) dataset.attrs.update(dataset_info) - dataset.attrs['sensor'] = 'merged' - dataset.attrs['composite_period'] = self.composite_period + dataset.attrs["sensor"] = "merged" + dataset.attrs["composite_period"] = self.composite_period # remove attributes from original file which don't apply anymore dataset.attrs.pop("nc_key") def get_dataset(self, dataset_id, ds_info): """Get dataset.""" - dataset = da.squeeze(self[ds_info['nc_key']]) - if '_FillValue' in dataset.attrs: - dataset.data = da.where(dataset.data == dataset.attrs['_FillValue'], np.nan, dataset.data) + dataset = da.squeeze(self[ds_info["nc_key"]]) + if "_FillValue" in dataset.attrs: + dataset.data = da.where(dataset.data == dataset.attrs["_FillValue"], np.nan, dataset.data) self._update_attrs(dataset, ds_info) - if 'lat' in dataset.dims: - dataset = dataset.rename({'lat': 'y'}) - if 'lon' in dataset.dims: - dataset = dataset.rename({'lon': 'x'}) + if "lat" in dataset.dims: + dataset = dataset.rename({"lat": "y"}) + if "lon" in dataset.dims: + dataset = dataset.rename({"lon": "x"}) return dataset def get_area_def(self, dsid): @@ -96,23 +96,23 @@ def get_area_def(self, dsid): There is no area definition in the file itself, so we have to compute it from the metadata, which specifies the area extent and pixel resolution. """ - proj_param = 'EPSG:4326' + proj_param = "EPSG:4326" - lon_res = float(self['/attr/geospatial_lon_resolution']) - lat_res = float(self['/attr/geospatial_lat_resolution']) + lon_res = float(self["/attr/geospatial_lon_resolution"]) + lat_res = float(self["/attr/geospatial_lat_resolution"]) - min_lon = self['/attr/geospatial_lon_min'] - max_lon = self['/attr/geospatial_lon_max'] - min_lat = self['/attr/geospatial_lat_min'] - max_lat = self['/attr/geospatial_lat_max'] + min_lon = self["/attr/geospatial_lon_min"] + max_lon = self["/attr/geospatial_lon_max"] + min_lat = self["/attr/geospatial_lat_min"] + max_lat = self["/attr/geospatial_lat_max"] area_extent = (min_lon, min_lat, max_lon, max_lat) lon_size = np.round((max_lon - min_lon) / lon_res).astype(int) lat_size = np.round((max_lat - min_lat) / lat_res).astype(int) - area = geometry.AreaDefinition('gridded_occci', - 'Full globe gridded area', - 'longlat', + area = geometry.AreaDefinition("gridded_occci", + "Full globe gridded area", + "longlat", proj_param, lon_size, lat_size, diff --git a/satpy/readers/olci_nc.py b/satpy/readers/olci_nc.py index 112f5455ac..84b21c3284 100644 --- a/satpy/readers/olci_nc.py +++ b/satpy/readers/olci_nc.py @@ -59,9 +59,9 @@ CHUNK_SIZE = get_legacy_chunk_size() -PLATFORM_NAMES = {'S3A': 'Sentinel-3A', - 'S3B': 'Sentinel-3B', - 'ENV': 'Environmental Satellite'} +PLATFORM_NAMES = {"S3A": "Sentinel-3A", + "S3B": "Sentinel-3B", + "ENV": "Environmental Satellite"} class BitFlags: @@ -70,16 +70,16 @@ class BitFlags: def __init__(self, value, flag_list=None): """Init the flags.""" self._value = value - flag_list = flag_list or ['INVALID', 'WATER', 'LAND', 'CLOUD', 'SNOW_ICE', - 'INLAND_WATER', 'TIDAL', 'COSMETIC', 'SUSPECT', - 'HISOLZEN', 'SATURATED', 'MEGLINT', 'HIGHGLINT', - 'WHITECAPS', 'ADJAC', 'WV_FAIL', 'PAR_FAIL', - 'AC_FAIL', 'OC4ME_FAIL', 'OCNN_FAIL', - 'Extra_1', - 'KDM_FAIL', - 'Extra_2', - 'CLOUD_AMBIGUOUS', 'CLOUD_MARGIN', 'BPAC_ON', 'WHITE_SCATT', - 'LOWRW', 'HIGHRW'] + flag_list = flag_list or ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE", + "INLAND_WATER", "TIDAL", "COSMETIC", "SUSPECT", + "HISOLZEN", 
"SATURATED", "MEGLINT", "HIGHGLINT", + "WHITECAPS", "ADJAC", "WV_FAIL", "PAR_FAIL", + "AC_FAIL", "OC4ME_FAIL", "OCNN_FAIL", + "Extra_1", + "KDM_FAIL", + "Extra_2", + "CLOUD_AMBIGUOUS", "CLOUD_MARGIN", "BPAC_ON", "WHITE_SCATT", + "LOWRW", "HIGHRW"] self.meaning = {f: i for i, f in enumerate(flag_list)} def __getitem__(self, item): @@ -108,11 +108,11 @@ def __init__(self, filename, filename_info, filetype_info, """Init the olci reader base.""" super().__init__(filename, filename_info, filetype_info) self._engine = engine - self._start_time = filename_info['start_time'] - self._end_time = filename_info['end_time'] + self._start_time = filename_info["start_time"] + self._end_time = filename_info["end_time"] # TODO: get metadata from the manifest file (xfdumanifest.xml) - self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] - self.sensor = 'olci' + self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]] + self.sensor = "olci" @cached_property def nc(self): @@ -124,7 +124,7 @@ def nc(self): engine=self._engine, chunks={self.cols_name: CHUNK_SIZE, self.rows_name: CHUNK_SIZE}) - return dataset.rename({self.cols_name: 'x', self.rows_name: 'y'}) + return dataset.rename({self.cols_name: "x", self.rows_name: "y"}) @property def start_time(self): @@ -138,8 +138,8 @@ def end_time(self): def get_dataset(self, key, info): """Load a dataset.""" - logger.debug('Reading %s.', key['name']) - variable = self.nc[key['name']] + logger.debug("Reading %s.", key["name"]) + variable = self.nc[key["name"]] return variable @@ -158,9 +158,9 @@ class NCOLCIChannelBase(NCOLCIBase): def __init__(self, filename, filename_info, filetype_info, engine=None): """Init the file handler.""" super().__init__(filename, filename_info, filetype_info, engine) - self.channel = filename_info.get('dataset_name') - self.reflectance_prefix = 'Oa' - self.reflectance_suffix = '_reflectance' + self.channel = filename_info.get("dataset_name") + self.reflectance_prefix = "Oa" + self.reflectance_suffix = "_reflectance" class NCOLCI1B(NCOLCIChannelBase): @@ -178,28 +178,28 @@ def _get_items(idx, solar_flux): def _get_solar_flux(self, band): """Get the solar flux for the band.""" - solar_flux = self.cal['solar_flux'].isel(bands=band).values - d_index = self.cal['detector_index'].fillna(0).astype(int) + solar_flux = self.cal["solar_flux"].isel(bands=band).values + d_index = self.cal["detector_index"].fillna(0).astype(int) return da.map_blocks(self._get_items, d_index.data, solar_flux=solar_flux, dtype=solar_flux.dtype) def get_dataset(self, key, info): """Load a dataset.""" - if self.channel != key['name']: + if self.channel != key["name"]: return - logger.debug('Reading %s.', key['name']) + logger.debug("Reading %s.", key["name"]) - radiances = self.nc[self.channel + '_radiance'] + radiances = self.nc[self.channel + "_radiance"] - if key['calibration'] == 'reflectance': - idx = int(key['name'][2:]) - 1 + if key["calibration"] == "reflectance": + idx = int(key["name"][2:]) - 1 sflux = self._get_solar_flux(idx) radiances = radiances / sflux * np.pi * 100 - radiances.attrs['units'] = '%' + radiances.attrs["units"] = "%" - radiances.attrs['platform_name'] = self.platform_name - radiances.attrs['sensor'] = self.sensor + radiances.attrs["platform_name"] = self.platform_name + radiances.attrs["sensor"] = self.sensor radiances.attrs.update(key.to_dict()) return radiances @@ -215,20 +215,20 @@ def __init__(self, filename, filename_info, filetype_info, engine=None, unlog=Fa def get_dataset(self, key, info): """Load a dataset.""" - if 
self.channel is not None and self.channel != key['name']: + if self.channel is not None and self.channel != key["name"]: return - logger.debug('Reading %s.', key['name']) + logger.debug("Reading %s.", key["name"]) if self.channel is not None and self.channel.startswith(self.reflectance_prefix): dataset = self.nc[self.channel + self.reflectance_suffix] else: - dataset = self.nc[info['nc_key']] + dataset = self.nc[info["nc_key"]] - if key['name'] == 'wqsf': - dataset.attrs['_FillValue'] = 1 - elif key['name'] == 'mask': + if key["name"] == "wqsf": + dataset.attrs["_FillValue"] = 1 + elif key["name"] == "mask": dataset = self.getbitmask(dataset, self.mask_items) - dataset.attrs['platform_name'] = self.platform_name - dataset.attrs['sensor'] = self.sensor + dataset.attrs["platform_name"] = self.platform_name + dataset.attrs["sensor"] = self.sensor dataset.attrs.update(key.to_dict()) if self.unlog: dataset = self.delog(dataset) @@ -262,8 +262,8 @@ def __init__(self, filename, filename_info, filetype_info, engine=None, **kwargs): """Init the file handler.""" super().__init__(filename, filename_info, filetype_info, engine) - self.l_step = self.nc.attrs['al_subsampling_factor'] - self.c_step = self.nc.attrs['ac_subsampling_factor'] + self.l_step = self.nc.attrs["al_subsampling_factor"] + self.c_step = self.nc.attrs["ac_subsampling_factor"] def _do_interpolate(self, data): @@ -287,7 +287,7 @@ def _do_interpolate(self, data): int_data = satint.interpolate() return [xr.DataArray(da.from_array(x, chunks=(CHUNK_SIZE, CHUNK_SIZE)), - dims=['y', 'x']) for x in int_data] + dims=["y", "x"]) for x in int_data] @property def _need_interpolation(self): @@ -297,37 +297,37 @@ def _need_interpolation(self): class NCOLCIAngles(NCOLCILowResData): """File handler for the OLCI angles.""" - datasets = {'satellite_azimuth_angle': 'OAA', - 'satellite_zenith_angle': 'OZA', - 'solar_azimuth_angle': 'SAA', - 'solar_zenith_angle': 'SZA'} + datasets = {"satellite_azimuth_angle": "OAA", + "satellite_zenith_angle": "OZA", + "solar_azimuth_angle": "SAA", + "solar_zenith_angle": "SZA"} def get_dataset(self, key, info): """Load a dataset.""" - if key['name'] not in self.datasets: + if key["name"] not in self.datasets: return - logger.debug('Reading %s.', key['name']) + logger.debug("Reading %s.", key["name"]) if self._need_interpolation: - if key['name'].startswith('satellite'): + if key["name"].startswith("satellite"): azi, zen = self.satellite_angles - elif key['name'].startswith('solar'): + elif key["name"].startswith("solar"): azi, zen = self.sun_angles else: - raise NotImplementedError("Don't know how to read " + key['name']) + raise NotImplementedError("Don't know how to read " + key["name"]) - if 'zenith' in key['name']: + if "zenith" in key["name"]: values = zen - elif 'azimuth' in key['name']: + elif "azimuth" in key["name"]: values = azi else: - raise NotImplementedError("Don't know how to read " + key['name']) + raise NotImplementedError("Don't know how to read " + key["name"]) else: - values = self.nc[self.datasets[key['name']]] + values = self.nc[self.datasets[key["name"]]] - values.attrs['platform_name'] = self.platform_name - values.attrs['sensor'] = self.sensor + values.attrs["platform_name"] = self.platform_name + values.attrs["sensor"] = self.sensor values.attrs.update(key.to_dict()) return values @@ -335,16 +335,16 @@ def get_dataset(self, key, info): @cached_property def sun_angles(self): """Return the sun angles.""" - zen = self.nc[self.datasets['solar_zenith_angle']] - azi = 
self.nc[self.datasets['solar_azimuth_angle']] + zen = self.nc[self.datasets["solar_zenith_angle"]] + azi = self.nc[self.datasets["solar_azimuth_angle"]] azi, zen = self._interpolate_angles(azi, zen) return azi, zen @cached_property def satellite_angles(self): """Return the satellite angles.""" - zen = self.nc[self.datasets['satellite_zenith_angle']] - azi = self.nc[self.datasets['satellite_azimuth_angle']] + zen = self.nc[self.datasets["satellite_zenith_angle"]] + azi = self.nc[self.datasets["satellite_azimuth_angle"]] azi, zen = self._interpolate_angles(azi, zen) return azi, zen @@ -362,7 +362,7 @@ def _interpolate_angles(self, azi, zen): class NCOLCIMeteo(NCOLCILowResData): """File handler for the OLCI meteo data.""" - datasets = ['humidity', 'sea_level_pressure', 'total_columnar_water_vapour', 'total_ozone'] + datasets = ["humidity", "sea_level_pressure", "total_columnar_water_vapour", "total_ozone"] def __init__(self, filename, filename_info, filetype_info, engine=None): @@ -377,27 +377,27 @@ def __init__(self, filename, filename_info, filetype_info, def get_dataset(self, key, info): """Load a dataset.""" - if key['name'] not in self.datasets: + if key["name"] not in self.datasets: return - logger.debug('Reading %s.', key['name']) + logger.debug("Reading %s.", key["name"]) - if self._need_interpolation and self.cache.get(key['name']) is None: + if self._need_interpolation and self.cache.get(key["name"]) is None: - data = self.nc[key['name']] + data = self.nc[key["name"]] values, = self._do_interpolate(data) values.attrs = data.attrs - self.cache[key['name']] = values + self.cache[key["name"]] = values - elif key['name'] in self.cache: - values = self.cache[key['name']] + elif key["name"] in self.cache: + values = self.cache[key["name"]] else: - values = self.nc[key['name']] + values = self.nc[key["name"]] - values.attrs['platform_name'] = self.platform_name - values.attrs['sensor'] = self.sensor + values.attrs["platform_name"] = self.platform_name + values.attrs["sensor"] = self.sensor values.attrs.update(key.to_dict()) return values diff --git a/satpy/readers/omps_edr.py b/satpy/readers/omps_edr.py index 9de71d4efa..5421ae2cd2 100644 --- a/satpy/readers/omps_edr.py +++ b/satpy/readers/omps_edr.py @@ -36,26 +36,26 @@ class EDRFileHandler(HDF5FileHandler): @property def start_orbit_number(self): """Get the start orbit number.""" - return self.filename_info['orbit'] + return self.filename_info["orbit"] @property def end_orbit_number(self): """Get the end orbit number.""" - return self.filename_info['orbit'] + return self.filename_info["orbit"] @property def platform_name(self): """Get the platform name.""" - return self.filename_info['platform_shortname'] + return self.filename_info["platform_shortname"] @property def sensor_name(self): """Get the sensor name.""" - return self.filename_info['instrument_shortname'] + return self.filename_info["instrument_shortname"] def get_shape(self, ds_id, ds_info): """Get the shape.""" - return self[ds_info['file_key'] + '/shape'] + return self[ds_info["file_key"] + "/shape"] def adjust_scaling_factors(self, factors, file_units, output_units): """Adjust scaling factors.""" @@ -68,20 +68,20 @@ def adjust_scaling_factors(self, factors, file_units, output_units): def get_metadata(self, dataset_id, ds_info): """Get the metadata.""" - var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) - info = getattr(self[var_path], 'attrs', {}).copy() - info.pop('DIMENSION_LIST', None) + var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) 
+ info = getattr(self[var_path], "attrs", {}).copy() + info.pop("DIMENSION_LIST", None) info.update(ds_info) - file_units = ds_info.get('file_units') + file_units = ds_info.get("file_units") if file_units is None: - file_units = self.get(var_path + '/attr/units', self.get(var_path + '/attr/Units')) + file_units = self.get(var_path + "/attr/units", self.get(var_path + "/attr/Units")) if file_units is None: raise KeyError("File variable '{}' has no units attribute".format(var_path)) - if file_units == 'deg': - file_units = 'degrees' - elif file_units == 'Unitless': - file_units = '1' + if file_units == "deg": + file_units = "degrees" + elif file_units == "Unitless": + file_units = "1" info.update({ "shape": self.get_shape(dataset_id, ds_info), @@ -93,32 +93,32 @@ def get_metadata(self, dataset_id, ds_info): "end_orbit": self.end_orbit_number, }) info.update(dataset_id.to_dict()) - if 'standard_name' not in ds_info: - info['standard_name'] = self.get(var_path + '/attr/Title', dataset_id['name']) + if "standard_name" not in ds_info: + info["standard_name"] = self.get(var_path + "/attr/Title", dataset_id["name"]) return info def get_dataset(self, dataset_id, ds_info): """Get the dataset.""" - var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) + var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) metadata = self.get_metadata(dataset_id, ds_info) - valid_min, valid_max = self.get(var_path + '/attr/valid_range', - self.get(var_path + '/attr/ValidRange', (None, None))) + valid_min, valid_max = self.get(var_path + "/attr/valid_range", + self.get(var_path + "/attr/ValidRange", (None, None))) if valid_min is None or valid_max is None: - valid_min = self.get(var_path + '/attr/valid_min', None) - valid_max = self.get(var_path + '/attr/valid_max', None) + valid_min = self.get(var_path + "/attr/valid_min", None) + valid_max = self.get(var_path + "/attr/valid_max", None) if valid_min is None or valid_max is None: raise KeyError("File variable '{}' has no valid range attribute".format(var_path)) - fill_name = var_path + '/attr/{}'.format(self._fill_name) + fill_name = var_path + "/attr/{}".format(self._fill_name) if fill_name in self: fill_value = self[fill_name] else: fill_value = None data = self[var_path] - scale_factor_path = var_path + '/attr/ScaleFactor' + scale_factor_path = var_path + "/attr/ScaleFactor" if scale_factor_path in self: scale_factor = self[scale_factor_path] - scale_offset = self[var_path + '/attr/Offset'] + scale_offset = self[var_path + "/attr/Offset"] else: scale_factor = None scale_offset = None @@ -130,14 +130,14 @@ def get_dataset(self, dataset_id, ds_info): data = data.where(data != fill_value) factors = (scale_factor, scale_offset) - factors = self.adjust_scaling_factors(factors, metadata['file_units'], ds_info.get("units")) + factors = self.adjust_scaling_factors(factors, metadata["file_units"], ds_info.get("units")) if factors[0] != 1 or factors[1] != 0: data = data * factors[0] + factors[1] data.attrs.update(metadata) - if 'DIMENSION_LIST' in data.attrs: - data.attrs.pop('DIMENSION_LIST') - dimensions = self.get_reference(var_path, 'DIMENSION_LIST') + if "DIMENSION_LIST" in data.attrs: + data.attrs.pop("DIMENSION_LIST") + dimensions = self.get_reference(var_path, "DIMENSION_LIST") for dim, coord in zip(data.dims, dimensions): data.coords[dim] = coord[0] return data diff --git a/satpy/readers/safe_sar_l2_ocn.py b/satpy/readers/safe_sar_l2_ocn.py index 267f8683f8..c5b63dd8be 100644 --- a/satpy/readers/safe_sar_l2_ocn.py +++ 
b/satpy/readers/safe_sar_l2_ocn.py @@ -45,15 +45,15 @@ def __init__(self, filename, filename_info, filetype_info): super(SAFENC, self).__init__(filename, filename_info, filetype_info) - self._start_time = filename_info['start_time'] - self._end_time = filename_info['end_time'] + self._start_time = filename_info["start_time"] + self._end_time = filename_info["end_time"] # For some SAFE packages, fstart_time differs, but start_time is the same # To avoid over writing exiting file with same start_time, a solution is to # use fstart_time - self._fstart_time = filename_info['fstart_time'] - self._fend_time = filename_info['fend_time'] + self._fstart_time = filename_info["fstart_time"] + self._fend_time = filename_info["fend_time"] - self._polarization = filename_info['polarization'] + self._polarization = filename_info["polarization"] self.lats = None self.lons = None @@ -63,19 +63,19 @@ def __init__(self, filename, filename_info, filetype_info): self.nc = xr.open_dataset(filename, decode_cf=True, mask_and_scale=False, - chunks={'owiAzSize': CHUNK_SIZE, - 'owiRaSize': CHUNK_SIZE}) - self.nc = self.nc.rename({'owiAzSize': 'y'}) - self.nc = self.nc.rename({'owiRaSize': 'x'}) + chunks={"owiAzSize": CHUNK_SIZE, + "owiRaSize": CHUNK_SIZE}) + self.nc = self.nc.rename({"owiAzSize": "y"}) + self.nc = self.nc.rename({"owiRaSize": "x"}) self.filename = filename def get_dataset(self, key, info): """Load a dataset.""" - if key['name'] in ['owiLat', 'owiLon']: + if key["name"] in ["owiLat", "owiLon"]: if self.lons is None or self.lats is None: - self.lons = self.nc['owiLon'] - self.lats = self.nc['owiLat'] - if key['name'] == 'owiLat': + self.lons = self.nc["owiLon"] + self.lats = self.nc["owiLat"] + if key["name"] == "owiLat": res = self.lats else: res = self.lons @@ -83,11 +83,11 @@ def get_dataset(self, key, info): else: res = self._get_data_channels(key, info) - if 'missionName' in self.nc.attrs: - res.attrs.update({'platform_name': self.nc.attrs['missionName']}) + if "missionName" in self.nc.attrs: + res.attrs.update({"platform_name": self.nc.attrs["missionName"]}) - res.attrs.update({'fstart_time': self._fstart_time}) - res.attrs.update({'fend_time': self._fend_time}) + res.attrs.update({"fstart_time": self._fstart_time}) + res.attrs.update({"fend_time": self._fend_time}) if not self._shape: self._shape = res.shape @@ -95,23 +95,23 @@ def get_dataset(self, key, info): return res def _get_data_channels(self, key, info): - res = self.nc[key['name']] - if key['name'] in ['owiHs', 'owiWl', 'owiDirmet']: - res = xr.DataArray(res, dims=['y', 'x', 'oswPartitions']) - elif key['name'] in ['owiNrcs', 'owiNesz', 'owiNrcsNeszCorr']: - res = xr.DataArray(res, dims=['y', 'x', 'oswPolarisation']) - elif key['name'] in ['owiPolarisationName']: - res = xr.DataArray(res, dims=['owiPolarisation']) - elif key['name'] in ['owiCalConstObsi', 'owiCalConstInci']: - res = xr.DataArray(res, dims=['owiIncSize']) - elif key['name'].startswith('owi'): - res = xr.DataArray(res, dims=['y', 'x']) + res = self.nc[key["name"]] + if key["name"] in ["owiHs", "owiWl", "owiDirmet"]: + res = xr.DataArray(res, dims=["y", "x", "oswPartitions"]) + elif key["name"] in ["owiNrcs", "owiNesz", "owiNrcsNeszCorr"]: + res = xr.DataArray(res, dims=["y", "x", "oswPolarisation"]) + elif key["name"] in ["owiPolarisationName"]: + res = xr.DataArray(res, dims=["owiPolarisation"]) + elif key["name"] in ["owiCalConstObsi", "owiCalConstInci"]: + res = xr.DataArray(res, dims=["owiIncSize"]) + elif key["name"].startswith("owi"): + res = xr.DataArray(res, 
dims=["y", "x"]) else: - res = xr.DataArray(res, dims=['y', 'x']) + res = xr.DataArray(res, dims=["y", "x"]) res.attrs.update(info) - if '_FillValue' in res.attrs: - res = res.where(res != res.attrs['_FillValue']) - res.attrs['_FillValue'] = np.nan + if "_FillValue" in res.attrs: + res = res.where(res != res.attrs["_FillValue"]) + res.attrs["_FillValue"] = np.nan return res @property diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 4b2d214187..19e5396b61 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -80,11 +80,11 @@ def _dictify(r): def _get_calibration_name(calibration): """Get the proper calibration name.""" - calibration_name = getattr(calibration, "name", calibration) or 'gamma' - if calibration_name == 'sigma_nought': - calibration_name = 'sigmaNought' - elif calibration_name == 'beta_nought': - calibration_name = 'betaNought' + calibration_name = getattr(calibration, "name", calibration) or "gamma" + if calibration_name == "sigma_nought": + calibration_name = "sigmaNought" + elif calibration_name == "beta_nought": + calibration_name = "betaNought" return calibration_name @@ -96,17 +96,17 @@ def __init__(self, filename, filename_info, filetype_info, """Init the xml filehandler.""" super(SAFEXML, self).__init__(filename, filename_info, filetype_info) - self._start_time = filename_info['start_time'] - self._end_time = filename_info['end_time'] - self._polarization = filename_info['polarization'] + self._start_time = filename_info["start_time"] + self._end_time = filename_info["end_time"] + self._polarization = filename_info["polarization"] self.root = ET.parse(self.filename) self.hdr = {} if header_file is not None: self.hdr = header_file.get_metadata() else: self.hdr = self.get_metadata() - self._image_shape = (self.hdr['product']['imageAnnotation']['imageInformation']['numberOfLines'], - self.hdr['product']['imageAnnotation']['imageInformation']['numberOfSamples']) + self._image_shape = (self.hdr["product"]["imageAnnotation"]["imageInformation"]["numberOfLines"], + self.hdr["product"]["imageAnnotation"]["imageInformation"]["numberOfSamples"]) def get_metadata(self): """Convert the xml metadata to dict.""" @@ -169,7 +169,7 @@ def get_dataset(self, key, info, chunks=None): def get_calibration_constant(self): """Load the calibration constant.""" - return float(self.root.find('.//absoluteCalibrationConstant').text) + return float(self.root.find(".//absoluteCalibrationConstant").text) def _get_calibration_uncached(self, calibration, chunks=None): """Get the calibration array.""" @@ -280,9 +280,9 @@ def _assemble_azimuth_noise_blocks(self, chunks): # relying mostly on dask arrays. 
slices = self._create_dask_slices_from_blocks(chunks) populated_array = da.vstack(slices).rechunk(chunks) - populated_array = xr.DataArray(populated_array, dims=['y', 'x'], - coords={'x': np.arange(self._image_shape[1]), - 'y': np.arange(self._image_shape[0])}) + populated_array = xr.DataArray(populated_array, dims=["y", "x"], + coords={"x": np.arange(self._image_shape[1]), + "y": np.arange(self._image_shape[0])}) return populated_array def _create_dask_slices_from_blocks(self, chunks): @@ -306,7 +306,7 @@ def _create_dask_slice_from_block_line(self, current_line, chunks): def _get_array_pieces_for_current_line(self, current_line): """Get the array pieces that cover the current line.""" current_blocks = self._find_blocks_covering_line(current_line) - current_blocks.sort(key=(lambda x: x.coords['x'][0])) + current_blocks.sort(key=(lambda x: x.coords["x"][0])) next_line = self._get_next_start_line(current_blocks, current_line) current_y = np.arange(current_line, next_line) pieces = [arr.sel(y=current_y) for arr in current_blocks] @@ -316,12 +316,12 @@ def _find_blocks_covering_line(self, current_line): """Find the blocks covering a given line.""" current_blocks = [] for block in self.blocks: - if block.coords['y'][0] <= current_line <= block.coords['y'][-1]: + if block.coords["y"][0] <= current_line <= block.coords["y"][-1]: current_blocks.append(block) return current_blocks def _get_next_start_line(self, current_blocks, current_line): - next_line = min((arr.coords['y'][-1] for arr in current_blocks)) + 1 + next_line = min((arr.coords["y"][-1] for arr in current_blocks)) + 1 blocks_starting_soon = [block for block in self.blocks if current_line < block.coords["y"][0] < next_line] if blocks_starting_soon: next_start_line = min((arr.coords["y"][0] for arr in blocks_starting_soon)) @@ -330,21 +330,21 @@ def _get_next_start_line(self, current_blocks, current_line): def _get_padded_dask_pieces(self, pieces, chunks): """Get the padded pieces of a slice.""" - pieces = sorted(pieces, key=(lambda x: x.coords['x'][0])) + pieces = sorted(pieces, key=(lambda x: x.coords["x"][0])) dask_pieces = [] previous_x_end = -1 piece = pieces[0] - next_x_start = piece.coords['x'][0].item() - y_shape = len(piece.coords['y']) + next_x_start = piece.coords["x"][0].item() + y_shape = len(piece.coords["y"]) x_shape = (next_x_start - previous_x_end - 1) self._fill_dask_pieces(dask_pieces, (y_shape, x_shape), chunks) for i, piece in enumerate(pieces): dask_pieces.append(piece.data) - previous_x_end = piece.coords['x'][-1].item() + previous_x_end = piece.coords["x"][-1].item() try: - next_x_start = pieces[i + 1].coords['x'][0].item() + next_x_start = pieces[i + 1].coords["x"][0].item() except IndexError: next_x_start = self._image_shape[1] @@ -405,35 +405,35 @@ def expand(self, chunks): new_arr = (da.ones((len(y_coord), len(x_coord)), chunks=chunks) * np.interp(y_coord, self.lines, data)[:, np.newaxis]) new_arr = xr.DataArray(new_arr, - dims=['y', 'x'], - coords={'x': x_coord, - 'y': y_coord}) + dims=["y", "x"], + coords={"x": x_coord, + "y": y_coord}) return new_arr @property def first_pixel(self): - return int(self.element.find('firstRangeSample').text) + return int(self.element.find("firstRangeSample").text) @property def last_pixel(self): - return int(self.element.find('lastRangeSample').text) + return int(self.element.find("lastRangeSample").text) @property def first_line(self): - return int(self.element.find('firstAzimuthLine').text) + return int(self.element.find("firstAzimuthLine").text) @property def 
last_line(self): - return int(self.element.find('lastAzimuthLine').text) + return int(self.element.find("lastAzimuthLine").text) @property def lines(self): - lines = self.element.find('line').text.split() + lines = self.element.find("line").text.split() return np.array(lines).astype(int) @property def lut(self): - lut = self.element.find('noiseAzimuthLut').text.split() + lut = self.element.find("noiseAzimuthLut").text.split() return np.array(lut).astype(float) @@ -458,8 +458,8 @@ def _read_xml_array(self): x = [] data = [] for elt in elements: - new_x = elt.find('pixel').text.split() - y += [int(elt.find('line').text)] * len(new_x) + new_x = elt.find("pixel").text.split() + y += [int(elt.find("line").text)] * len(new_x) x += [int(val) for val in new_x] data += [float(val) for val in elt.find(self.element_tag).text.split()] @@ -488,7 +488,7 @@ def interpolate_xarray(xpoints, ypoints, values, shape, hchunks = range(0, shape[1], blocksize) token = tokenize(blocksize, xpoints, ypoints, values, shape) - name = 'interpolate-' + token + name = "interpolate-" + token spline = RectBivariateSpline(xpoints, ypoints, values.T) @@ -507,7 +507,7 @@ def interpolator(xnew, ynew): res = da.Array(dskx, name, shape=list(shape), chunks=(blocksize, blocksize), dtype=values.dtype) - return DataArray(res, dims=('y', 'x')) + return DataArray(res, dims=("y", "x")) def intp(grid_x, grid_y, interpolator): @@ -536,7 +536,7 @@ def interpolate_xarray_linear(xpoints, ypoints, values, shape, chunks=CHUNK_SIZE interpolator((0, 0)) res = da.map_blocks(intp, grid_x, grid_y, interpolator=interpolator) - return DataArray(res, dims=('y', 'x')) + return DataArray(res, dims=("y", "x")) class SAFEGRD(BaseFileHandler): @@ -552,19 +552,19 @@ def __init__(self, filename, filename_info, filetype_info, calfh, noisefh, annot super(SAFEGRD, self).__init__(filename, filename_info, filetype_info) - self._start_time = filename_info['start_time'] - self._end_time = filename_info['end_time'] + self._start_time = filename_info["start_time"] + self._end_time = filename_info["end_time"] - self._polarization = filename_info['polarization'] + self._polarization = filename_info["polarization"] - self._mission_id = filename_info['mission_id'] + self._mission_id = filename_info["mission_id"] self.calibration = calfh self.noise = noisefh self.annotation = annotationfh self.read_lock = Lock() - self.filehandle = rasterio.open(self.filename, 'r', sharing=False) + self.filehandle = rasterio.open(self.filename, "r", sharing=False) self.get_lonlatalts = functools.lru_cache(maxsize=2)( self._get_lonlatalts_uncached ) @@ -574,37 +574,37 @@ def get_dataset(self, key, info): if self._polarization != key["polarization"]: return - logger.debug('Reading %s.', key['name']) + logger.debug("Reading %s.", key["name"]) - if key['name'] in ['longitude', 'latitude', 'altitude']: - logger.debug('Constructing coordinate arrays.') + if key["name"] in ["longitude", "latitude", "altitude"]: + logger.debug("Constructing coordinate arrays.") arrays = dict() - arrays['longitude'], arrays['latitude'], arrays['altitude'] = self.get_lonlatalts() + arrays["longitude"], arrays["latitude"], arrays["altitude"] = self.get_lonlatalts() - data = arrays[key['name']] + data = arrays[key["name"]] data.attrs.update(info) else: data = xr.open_dataset(self.filename, engine="rasterio", chunks={"band": 1, "y": CHUNK_SIZE, "x": CHUNK_SIZE})["band_data"].squeeze() - data = data.assign_coords(x=np.arange(len(data.coords['x'])), - y=np.arange(len(data.coords['y']))) + data = 
data.assign_coords(x=np.arange(len(data.coords["x"])), + y=np.arange(len(data.coords["y"]))) data = self._calibrate_and_denoise(data, key) data.attrs.update(info) - data.attrs.update({'platform_name': self._mission_id}) + data.attrs.update({"platform_name": self._mission_id}) - data = self._change_quantity(data, key['quantity']) + data = self._change_quantity(data, key["quantity"]) return data @staticmethod def _change_quantity(data, quantity): """Change quantity to dB if needed.""" - if quantity == 'dB': + if quantity == "dB": data.data = 10 * np.log10(data.data) - data.attrs['units'] = 'dB' + data.attrs["units"] = "dB" else: - data.attrs['units'] = '1' + data.attrs["units"] = "1" return data @@ -627,17 +627,17 @@ def _get_digital_number(self, data): def _denoise(self, dn, chunks): """Denoise the data.""" - logger.debug('Reading noise data.') + logger.debug("Reading noise data.") noise = self.noise.get_noise_correction(chunks=chunks).fillna(0) dn = dn - noise return dn def _calibrate(self, dn, chunks, key): """Calibrate the data.""" - logger.debug('Reading calibration data.') - cal = self.calibration.get_calibration(key['calibration'], chunks=chunks) + logger.debug("Reading calibration data.") + cal = self.calibration.get_calibration(key["calibration"], chunks=chunks) cal_constant = self.calibration.get_calibration_constant() - logger.debug('Calibrating.') + logger.debug("Calibrating.") data = ((dn + cal_constant) / (cal ** 2)).clip(min=0) return data @@ -661,12 +661,12 @@ def _get_lonlatalts_uncached(self): latitudes = interpolate_xarray(xpoints, ypoints, gcp_lats, band.shape) altitudes = interpolate_xarray(xpoints, ypoints, gcp_alts, band.shape) - longitudes.attrs['gcps'] = gcps - longitudes.attrs['crs'] = crs - latitudes.attrs['gcps'] = gcps - latitudes.attrs['crs'] = crs - altitudes.attrs['gcps'] = gcps - altitudes.attrs['crs'] = crs + longitudes.attrs["gcps"] = gcps + longitudes.attrs["crs"] = crs + latitudes.attrs["gcps"] = gcps + latitudes.attrs["crs"] = crs + altitudes.attrs["gcps"] = gcps + altitudes.attrs["crs"] = crs return longitudes, latitudes, altitudes diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 7add1df265..cf99b57e7d 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -196,7 +196,7 @@ class SatpyCFFileHandler(BaseFileHandler): """File handler for Satpy's CF netCDF files.""" - def __init__(self, filename, filename_info, filetype_info, numeric_name_prefix='CHANNEL_'): + def __init__(self, filename, filename_info, filetype_info, numeric_name_prefix="CHANNEL_"): """Initialize file handler.""" super().__init__(filename, filename_info, filetype_info) self.engine = None @@ -205,12 +205,12 @@ def __init__(self, filename, filename_info, filetype_info, numeric_name_prefix=' @property def start_time(self): """Get start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def sensor_names(self): @@ -239,11 +239,11 @@ def _existing_datasets(self, configured_datasets=None): def fix_modifier_attr(self, ds_info): """Fix modifiers attribute.""" # Empty modifiers are read as [], which causes problems later - if 'modifiers' in ds_info and not ds_info['modifiers']: - ds_info['modifiers'] = () + if "modifiers" in ds_info and not ds_info["modifiers"]: + ds_info["modifiers"] = () try: try: - ds_info['modifiers'] = 
tuple(ds_info['modifiers'].split(' ')) + ds_info["modifiers"] = tuple(ds_info["modifiers"].split(" ")) except AttributeError: pass except KeyError: @@ -252,14 +252,14 @@ def fix_modifier_attr(self, ds_info): def _assign_ds_info(self, var_name, val): """Assign ds_info.""" ds_info = dict(val.attrs) - ds_info['file_type'] = self.filetype_info['file_type'] - ds_info['name'] = ds_info['nc_store_name'] = var_name - if 'original_name' in ds_info: - ds_info['name'] = ds_info['original_name'] + ds_info["file_type"] = self.filetype_info["file_type"] + ds_info["name"] = ds_info["nc_store_name"] = var_name + if "original_name" in ds_info: + ds_info["name"] = ds_info["original_name"] elif self._numeric_name_prefix and var_name.startswith(self._numeric_name_prefix): - ds_info['name'] = var_name.replace(self._numeric_name_prefix, '') + ds_info["name"] = var_name.replace(self._numeric_name_prefix, "") try: - ds_info['wavelength'] = WavelengthRange.from_cf(ds_info['wavelength']) + ds_info["wavelength"] = WavelengthRange.from_cf(ds_info["wavelength"]) except KeyError: pass return ds_info @@ -278,15 +278,15 @@ def _coordinate_datasets(self, configured_datasets=None): nc = xr.open_dataset(self.filename, engine=self.engine) for var_name, val in nc.coords.items(): ds_info = dict(val.attrs) - ds_info['file_type'] = self.filetype_info['file_type'] - ds_info['name'] = var_name + ds_info["file_type"] = self.filetype_info["file_type"] + ds_info["name"] = var_name self.fix_modifier_attr(ds_info) yield True, ds_info def _compare_attr(self, _ds_id_dict, key, data): - if key in ['name', 'modifiers']: + if key in ["name", "modifiers"]: return True - elif key == 'wavelength': + elif key == "wavelength": return _ds_id_dict[key] == WavelengthRange.from_cf(data.attrs[key]) else: return data.attrs[key] == _ds_id_dict[key] @@ -303,15 +303,15 @@ def _dataid_attrs_equal(self, ds_id, data): def get_dataset(self, ds_id, ds_info): """Get dataset.""" - logger.debug("Getting data for: %s", ds_id['name']) + logger.debug("Getting data for: %s", ds_id["name"]) nc = xr.open_dataset(self.filename, engine=self.engine, - chunks={'y': CHUNK_SIZE, 'x': CHUNK_SIZE}) - name = ds_info.get('nc_store_name', ds_id['name']) - data = nc[ds_info.get('file_key', name)] + chunks={"y": CHUNK_SIZE, "x": CHUNK_SIZE}) + name = ds_info.get("nc_store_name", ds_id["name"]) + data = nc[ds_info.get("file_key", name)] if not self._dataid_attrs_equal(ds_id, data): return - if name != ds_id['name']: - data = data.rename(ds_id['name']) + if name != ds_id["name"]: + data = data.rename(ds_id["name"]) data.attrs.update(nc.attrs) # For now add global attributes to all datasets if "orbital_parameters" in data.attrs: data.attrs["orbital_parameters"] = _str2dict(data.attrs["orbital_parameters"]) diff --git a/satpy/readers/scatsat1_l2b.py b/satpy/readers/scatsat1_l2b.py index e507cdb2bc..9989bf3d86 100644 --- a/satpy/readers/scatsat1_l2b.py +++ b/satpy/readers/scatsat1_l2b.py @@ -30,38 +30,38 @@ class SCATSAT1L2BFileHandler(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info): super(SCATSAT1L2BFileHandler, self).__init__(filename, filename_info, filetype_info) self.h5f = h5py.File(self.filename, "r") - h5data = self.h5f['science_data'] + h5data = self.h5f["science_data"] - self.filename_info['start_time'] = datetime.strptime(h5data.attrs['Range Beginning Date'], '%Y-%jT%H:%M:%S.%f') - self.filename_info['end_time'] = datetime.strptime(h5data.attrs['Range Ending Date'], '%Y-%jT%H:%M:%S.%f') + self.filename_info["start_time"] = 
datetime.strptime(h5data.attrs["Range Beginning Date"], "%Y-%jT%H:%M:%S.%f") + self.filename_info["end_time"] = datetime.strptime(h5data.attrs["Range Ending Date"], "%Y-%jT%H:%M:%S.%f") self.lons = None self.lats = None - self.wind_speed_scale = float(h5data.attrs['Wind Speed Selection Scale']) - self.wind_direction_scale = float(h5data.attrs['Wind Direction Selection Scale']) - self.latitude_scale = float(h5data.attrs['Latitude Scale']) - self.longitude_scale = float(h5data.attrs['Longitude Scale']) + self.wind_speed_scale = float(h5data.attrs["Wind Speed Selection Scale"]) + self.wind_direction_scale = float(h5data.attrs["Wind Direction Selection Scale"]) + self.latitude_scale = float(h5data.attrs["Latitude Scale"]) + self.longitude_scale = float(h5data.attrs["Longitude Scale"]) def get_dataset(self, key, info): - h5data = self.h5f['science_data'] - stdname = info.get('standard_name') + h5data = self.h5f["science_data"] + stdname = info.get("standard_name") - if stdname in ['latitude', 'longitude']: + if stdname in ["latitude", "longitude"]: if self.lons is None or self.lats is None: - self.lons = h5data['Longitude'][:]*self.longitude_scale - self.lats = h5data['Latitude'][:]*self.latitude_scale + self.lons = h5data["Longitude"][:]*self.longitude_scale + self.lats = h5data["Latitude"][:]*self.latitude_scale - if info['standard_name'] == 'longitude': + if info["standard_name"] == "longitude": return Dataset(self.lons, id=key, **info) else: return Dataset(self.lats, id=key, **info) - if stdname in ['wind_speed']: - windspeed = h5data['Wind_speed_selection'][:, :] * self.wind_speed_scale + if stdname in ["wind_speed"]: + windspeed = h5data["Wind_speed_selection"][:, :] * self.wind_speed_scale return Dataset(windspeed, id=key, **info) - if stdname in ['wind_direction']: - wind_direction = h5data['Wind_direction_selection'][:, :] * self.wind_direction_scale + if stdname in ["wind_direction"]: + wind_direction = h5data["Wind_direction_selection"][:, :] * self.wind_direction_scale return Dataset(wind_direction, id=key, **info) diff --git a/satpy/readers/scmi.py b/satpy/readers/scmi.py index f53073c751..a4b8620f8b 100644 --- a/satpy/readers/scmi.py +++ b/satpy/readers/scmi.py @@ -54,7 +54,7 @@ CHUNK_SIZE = get_legacy_chunk_size() # NetCDF doesn't support multi-threaded reading, trick it by opening # as one whole chunk then split it up before we do any calculations -LOAD_CHUNK_SIZE = int(os.getenv('PYTROLL_LOAD_CHUNK_SIZE', -1)) +LOAD_CHUNK_SIZE = int(os.getenv("PYTROLL_LOAD_CHUNK_SIZE", -1)) logger = logging.getLogger(__name__) @@ -69,20 +69,20 @@ def __init__(self, filename, filename_info, filetype_info): self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, - chunks={'x': LOAD_CHUNK_SIZE, 'y': LOAD_CHUNK_SIZE}) - self.platform_name = self.nc.attrs['satellite_id'] + chunks={"x": LOAD_CHUNK_SIZE, "y": LOAD_CHUNK_SIZE}) + self.platform_name = self.nc.attrs["satellite_id"] self.sensor = self._get_sensor() - self.nlines = self.nc.dims['y'] - self.ncols = self.nc.dims['x'] + self.nlines = self.nc.dims["y"] + self.ncols = self.nc.dims["x"] self.coords = {} def _get_sensor(self): """Determine the sensor for this file.""" # sometimes Himawari-8 (or 9) data is stored in SCMI format - is_h8 = 'H8' in self.platform_name - is_h9 = 'H9' in self.platform_name + is_h8 = "H8" in self.platform_name + is_h9 = "H9" in self.platform_name is_ahi = is_h8 or is_h9 - return 'ahi' if is_ahi else 'abi' + return "ahi" if is_ahi else "abi" @property def sensor_names(self): @@ -99,9 +99,9 @@ def 
__getitem__(self, item): """ data = self.nc[item] attrs = data.attrs - factor = data.attrs.get('scale_factor') - offset = data.attrs.get('add_offset') - fill = data.attrs.get('_FillValue') + factor = data.attrs.get("scale_factor") + offset = data.attrs.get("add_offset") + fill = data.attrs.get("_FillValue") if fill is not None: data = data.where(data != fill) if factor is not None: @@ -114,8 +114,8 @@ def __getitem__(self, item): # handle coordinates (and recursive fun) new_coords = {} # 'time' dimension causes issues in other processing - if 'time' in data.coords: - data = data.drop_vars('time') + if "time" in data.coords: + data = data.drop_vars("time") if item in data.coords: self.coords[item] = data for coord_name in data.coords.keys(): @@ -131,60 +131,60 @@ def get_shape(self, key, info): def get_dataset(self, key, info): """Load a dataset.""" - logger.debug('Reading in get_dataset %s.', key['name']) - var_name = info.get('file_key', self.filetype_info.get('file_key')) + logger.debug("Reading in get_dataset %s.", key["name"]) + var_name = info.get("file_key", self.filetype_info.get("file_key")) if var_name: data = self[var_name] - elif 'Sectorized_CMI' in self.nc: - data = self['Sectorized_CMI'] - elif 'data' in self.nc: - data = self['data'] + elif "Sectorized_CMI" in self.nc: + data = self["Sectorized_CMI"] + elif "data" in self.nc: + data = self["data"] # NetCDF doesn't support multi-threaded reading, trick it by opening # as one whole chunk then split it up before we do any calculations - data = data.chunk({'x': CHUNK_SIZE, 'y': CHUNK_SIZE}) + data = data.chunk({"x": CHUNK_SIZE, "y": CHUNK_SIZE}) # convert to satpy standard units - factor = data.attrs.pop('scale_factor', 1) - offset = data.attrs.pop('add_offset', 0) - units = data.attrs.get('units', 1) + factor = data.attrs.pop("scale_factor", 1) + offset = data.attrs.pop("add_offset", 0) + units = data.attrs.get("units", 1) # the '*1' unit is some weird convention added/needed by AWIPS - if units in ['1', '*1'] and key['calibration'] == 'reflectance': + if units in ["1", "*1"] and key["calibration"] == "reflectance": data *= 100 factor *= 100 # used for valid_min/max - data.attrs['units'] = '%' + data.attrs["units"] = "%" # set up all the attributes that might be useful to the user/satpy - data.attrs.update({'platform_name': self.platform_name, - 'sensor': data.attrs.get('sensor', self.sensor), + data.attrs.update({"platform_name": self.platform_name, + "sensor": data.attrs.get("sensor", self.sensor), }) - if 'satellite_longitude' in self.nc.attrs: - data.attrs['orbital_parameters'] = { - 'projection_longitude': self.nc.attrs['satellite_longitude'], - 'projection_latitude': self.nc.attrs['satellite_latitude'], - 'projection_altitude': self.nc.attrs['satellite_altitude'], + if "satellite_longitude" in self.nc.attrs: + data.attrs["orbital_parameters"] = { + "projection_longitude": self.nc.attrs["satellite_longitude"], + "projection_latitude": self.nc.attrs["satellite_latitude"], + "projection_altitude": self.nc.attrs["satellite_altitude"], } - scene_id = self.nc.attrs.get('scene_id') + scene_id = self.nc.attrs.get("scene_id") if scene_id is not None: - data.attrs['scene_id'] = scene_id + data.attrs["scene_id"] = scene_id data.attrs.update(key.to_dict()) - data.attrs.pop('_FillValue', None) - if 'valid_min' in data.attrs: - vmin = data.attrs.pop('valid_min') - vmax = data.attrs.pop('valid_max') + data.attrs.pop("_FillValue", None) + if "valid_min" in data.attrs: + vmin = data.attrs.pop("valid_min") + vmax = 
data.attrs.pop("valid_max") vmin = vmin * factor + offset vmax = vmax * factor + offset - data.attrs['valid_min'] = vmin - data.attrs['valid_max'] = vmax + data.attrs["valid_min"] = vmin + data.attrs["valid_max"] = vmax return data def _get_cf_grid_mapping_var(self): """Figure out which grid mapping should be used.""" - gmaps = ['fixedgrid_projection', 'goes_imager_projection', - 'lambert_projection', 'polar_projection', - 'mercator_projection'] - if 'grid_mapping' in self.filename_info: - gmaps = [self.filename_info.get('grid_mapping')] + gmaps + gmaps = ["fixedgrid_projection", "goes_imager_projection", + "lambert_projection", "polar_projection", + "mercator_projection"] + if "grid_mapping" in self.filename_info: + gmaps = [self.filename_info.get("grid_mapping")] + gmaps for grid_mapping in gmaps: if grid_mapping in self.nc: return self.nc[grid_mapping] @@ -192,12 +192,12 @@ def _get_cf_grid_mapping_var(self): def _get_proj4_name(self, projection): """Map CF projection name to PROJ.4 name.""" - gmap_name = projection.attrs['grid_mapping_name'] + gmap_name = projection.attrs["grid_mapping_name"] proj = { - 'geostationary': 'geos', - 'lambert_conformal_conic': 'lcc', - 'polar_stereographic': 'stere', - 'mercator': 'merc', + "geostationary": "geos", + "lambert_conformal_conic": "lcc", + "polar_stereographic": "stere", + "mercator": "merc", }.get(gmap_name, gmap_name) return proj @@ -205,42 +205,42 @@ def _get_proj_specific_params(self, projection): """Convert CF projection parameters to PROJ.4 dict.""" proj = self._get_proj4_name(projection) proj_dict = { - 'proj': proj, - 'a': float(projection.attrs['semi_major_axis']), - 'b': float(projection.attrs['semi_minor_axis']), - 'units': 'm', + "proj": proj, + "a": float(projection.attrs["semi_major_axis"]), + "b": float(projection.attrs["semi_minor_axis"]), + "units": "m", } - if proj == 'geos': - proj_dict['h'] = float(projection.attrs['perspective_point_height']) - proj_dict['sweep'] = projection.attrs.get('sweep_angle_axis', 'y') - proj_dict['lon_0'] = float(projection.attrs['longitude_of_projection_origin']) - proj_dict['lat_0'] = float(projection.attrs.get('latitude_of_projection_origin', 0.0)) - elif proj == 'lcc': - proj_dict['lat_0'] = float(projection.attrs['standard_parallel']) - proj_dict['lon_0'] = float(projection.attrs['longitude_of_central_meridian']) - proj_dict['lat_1'] = float(projection.attrs['latitude_of_projection_origin']) - elif proj == 'stere': - proj_dict['lat_ts'] = float(projection.attrs['standard_parallel']) - proj_dict['lon_0'] = float(projection.attrs['straight_vertical_longitude_from_pole']) - proj_dict['lat_0'] = float(projection.attrs['latitude_of_projection_origin']) - elif proj == 'merc': - proj_dict['lat_ts'] = float(projection.attrs['standard_parallel']) - proj_dict['lat_0'] = proj_dict['lat_ts'] - proj_dict['lon_0'] = float(projection.attrs['longitude_of_projection_origin']) + if proj == "geos": + proj_dict["h"] = float(projection.attrs["perspective_point_height"]) + proj_dict["sweep"] = projection.attrs.get("sweep_angle_axis", "y") + proj_dict["lon_0"] = float(projection.attrs["longitude_of_projection_origin"]) + proj_dict["lat_0"] = float(projection.attrs.get("latitude_of_projection_origin", 0.0)) + elif proj == "lcc": + proj_dict["lat_0"] = float(projection.attrs["standard_parallel"]) + proj_dict["lon_0"] = float(projection.attrs["longitude_of_central_meridian"]) + proj_dict["lat_1"] = float(projection.attrs["latitude_of_projection_origin"]) + elif proj == "stere": + proj_dict["lat_ts"] = 
float(projection.attrs["standard_parallel"]) + proj_dict["lon_0"] = float(projection.attrs["straight_vertical_longitude_from_pole"]) + proj_dict["lat_0"] = float(projection.attrs["latitude_of_projection_origin"]) + elif proj == "merc": + proj_dict["lat_ts"] = float(projection.attrs["standard_parallel"]) + proj_dict["lat_0"] = proj_dict["lat_ts"] + proj_dict["lon_0"] = float(projection.attrs["longitude_of_projection_origin"]) else: raise ValueError("Can't handle projection '{}'".format(proj)) return proj_dict def _calc_extents(self, proj_dict): """Calculate area extents from x/y variables.""" - h = float(proj_dict.get('h', 1.)) # force to 64-bit float - x = self['x'] - y = self['y'] - x_units = x.attrs.get('units', 'rad') - if x_units == 'meters': + h = float(proj_dict.get("h", 1.)) # force to 64-bit float + x = self["x"] + y = self["y"] + x_units = x.attrs.get("units", "rad") + if x_units == "meters": h_factor = 1. factor = 1. - elif x_units == 'microradian': + elif x_units == "microradian": h_factor = h factor = 1e6 else: # radians @@ -260,7 +260,7 @@ def get_area_def(self, key): projection = self._get_cf_grid_mapping_var() proj_dict = self._get_proj_specific_params(projection) area_extent = self._calc_extents(proj_dict) - area_name = '{}_{}'.format(self.sensor, proj_dict['proj']) + area_name = "{}_{}".format(self.sensor, proj_dict["proj"]) return geometry.AreaDefinition( area_name, "SCMI file area", @@ -273,7 +273,7 @@ def get_area_def(self, key): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs['start_date_time'], '%Y%j%H%M%S') + return datetime.strptime(self.nc.attrs["start_date_time"], "%Y%j%H%M%S") @property def end_time(self): diff --git a/satpy/readers/seadas_l2.py b/satpy/readers/seadas_l2.py index 281a0132af..03fa648330 100644 --- a/satpy/readers/seadas_l2.py +++ b/satpy/readers/seadas_l2.py @@ -57,9 +57,9 @@ def _rows_per_scan(self): def _platform_name(self): platform = self[self.platform_attr_name] - platform_dict = {'NPP': 'Suomi-NPP', - 'JPSS-1': 'NOAA-20', - 'JPSS-2': 'NOAA-21'} + platform_dict = {"NPP": "Suomi-NPP", + "JPSS-1": "NOAA-20", + "JPSS-2": "NOAA-21"} return platform_dict.get(platform, platform) @property diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 131fe39ad4..0df97d83a7 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -179,14 +179,14 @@ CHUNK_SIZE = get_legacy_chunk_size() PLATFORM_DICT = { - 'MET08': 'Meteosat-8', - 'MET09': 'Meteosat-9', - 'MET10': 'Meteosat-10', - 'MET11': 'Meteosat-11', - 'MSG1': 'Meteosat-8', - 'MSG2': 'Meteosat-9', - 'MSG3': 'Meteosat-10', - 'MSG4': 'Meteosat-11', + "MET08": "Meteosat-8", + "MET09": "Meteosat-9", + "MET10": "Meteosat-10", + "MET11": "Meteosat-11", + "MSG1": "Meteosat-8", + "MSG2": "Meteosat-9", + "MSG3": "Meteosat-10", + "MSG4": "Meteosat-11", } REPEAT_CYCLE_DURATION = 15 @@ -212,19 +212,19 @@ 11: "IR_134", 12: "HRV"} -VIS_CHANNELS = ['HRV', 'VIS006', 'VIS008', 'IR_016'] +VIS_CHANNELS = ["HRV", "VIS006", "VIS008", "IR_016"] # Polynomial coefficients for spectral-effective BT fits BTFIT = dict() # [A, B, C] -BTFIT['IR_039'] = [0.0, 1.011751900, -3.550400] -BTFIT['WV_062'] = [0.00001805700, 1.000255533, -1.790930] -BTFIT['WV_073'] = [0.00000231818, 1.000668281, -0.456166] -BTFIT['IR_087'] = [-0.00002332000, 1.011803400, -1.507390] -BTFIT['IR_097'] = [-0.00002055330, 1.009370670, -1.030600] -BTFIT['IR_108'] = [-0.00007392770, 1.032889800, -3.296740] -BTFIT['IR_120'] = [-0.00007009840, 1.031314600, -3.181090] 
-BTFIT['IR_134'] = [-0.00007293450, 1.030424800, -2.645950] +BTFIT["IR_039"] = [0.0, 1.011751900, -3.550400] +BTFIT["WV_062"] = [0.00001805700, 1.000255533, -1.790930] +BTFIT["WV_073"] = [0.00000231818, 1.000668281, -0.456166] +BTFIT["IR_087"] = [-0.00002332000, 1.011803400, -1.507390] +BTFIT["IR_097"] = [-0.00002055330, 1.009370670, -1.030600] +BTFIT["IR_108"] = [-0.00007392770, 1.032889800, -3.296740] +BTFIT["IR_120"] = [-0.00007009840, 1.031314600, -3.181090] +BTFIT["IR_134"] = [-0.00007293450, 1.030424800, -2.645950] SATNUM = {321: "8", 322: "9", @@ -234,124 +234,124 @@ CALIB = dict() # Meteosat 8 -CALIB[321] = {'HRV': {'F': 78.7599}, - 'VIS006': {'F': 65.2296}, - 'VIS008': {'F': 73.0127}, - 'IR_016': {'F': 62.3715}, - 'IR_039': {'VC': 2567.33, - 'ALPHA': 0.9956, - 'BETA': 3.41}, - 'WV_062': {'VC': 1598.103, - 'ALPHA': 0.9962, - 'BETA': 2.218}, - 'WV_073': {'VC': 1362.081, - 'ALPHA': 0.9991, - 'BETA': 0.478}, - 'IR_087': {'VC': 1149.069, - 'ALPHA': 0.9996, - 'BETA': 0.179}, - 'IR_097': {'VC': 1034.343, - 'ALPHA': 0.9999, - 'BETA': 0.06}, - 'IR_108': {'VC': 930.647, - 'ALPHA': 0.9983, - 'BETA': 0.625}, - 'IR_120': {'VC': 839.66, - 'ALPHA': 0.9988, - 'BETA': 0.397}, - 'IR_134': {'VC': 752.387, - 'ALPHA': 0.9981, - 'BETA': 0.578}} +CALIB[321] = {"HRV": {"F": 78.7599}, + "VIS006": {"F": 65.2296}, + "VIS008": {"F": 73.0127}, + "IR_016": {"F": 62.3715}, + "IR_039": {"VC": 2567.33, + "ALPHA": 0.9956, + "BETA": 3.41}, + "WV_062": {"VC": 1598.103, + "ALPHA": 0.9962, + "BETA": 2.218}, + "WV_073": {"VC": 1362.081, + "ALPHA": 0.9991, + "BETA": 0.478}, + "IR_087": {"VC": 1149.069, + "ALPHA": 0.9996, + "BETA": 0.179}, + "IR_097": {"VC": 1034.343, + "ALPHA": 0.9999, + "BETA": 0.06}, + "IR_108": {"VC": 930.647, + "ALPHA": 0.9983, + "BETA": 0.625}, + "IR_120": {"VC": 839.66, + "ALPHA": 0.9988, + "BETA": 0.397}, + "IR_134": {"VC": 752.387, + "ALPHA": 0.9981, + "BETA": 0.578}} # Meteosat 9 -CALIB[322] = {'HRV': {'F': 79.0113}, - 'VIS006': {'F': 65.2065}, - 'VIS008': {'F': 73.1869}, - 'IR_016': {'F': 61.9923}, - 'IR_039': {'VC': 2568.832, - 'ALPHA': 0.9954, - 'BETA': 3.438}, - 'WV_062': {'VC': 1600.548, - 'ALPHA': 0.9963, - 'BETA': 2.185}, - 'WV_073': {'VC': 1360.330, - 'ALPHA': 0.9991, - 'BETA': 0.47}, - 'IR_087': {'VC': 1148.620, - 'ALPHA': 0.9996, - 'BETA': 0.179}, - 'IR_097': {'VC': 1035.289, - 'ALPHA': 0.9999, - 'BETA': 0.056}, - 'IR_108': {'VC': 931.7, - 'ALPHA': 0.9983, - 'BETA': 0.64}, - 'IR_120': {'VC': 836.445, - 'ALPHA': 0.9988, - 'BETA': 0.408}, - 'IR_134': {'VC': 751.792, - 'ALPHA': 0.9981, - 'BETA': 0.561}} +CALIB[322] = {"HRV": {"F": 79.0113}, + "VIS006": {"F": 65.2065}, + "VIS008": {"F": 73.1869}, + "IR_016": {"F": 61.9923}, + "IR_039": {"VC": 2568.832, + "ALPHA": 0.9954, + "BETA": 3.438}, + "WV_062": {"VC": 1600.548, + "ALPHA": 0.9963, + "BETA": 2.185}, + "WV_073": {"VC": 1360.330, + "ALPHA": 0.9991, + "BETA": 0.47}, + "IR_087": {"VC": 1148.620, + "ALPHA": 0.9996, + "BETA": 0.179}, + "IR_097": {"VC": 1035.289, + "ALPHA": 0.9999, + "BETA": 0.056}, + "IR_108": {"VC": 931.7, + "ALPHA": 0.9983, + "BETA": 0.64}, + "IR_120": {"VC": 836.445, + "ALPHA": 0.9988, + "BETA": 0.408}, + "IR_134": {"VC": 751.792, + "ALPHA": 0.9981, + "BETA": 0.561}} # Meteosat 10 -CALIB[323] = {'HRV': {'F': 78.9416}, - 'VIS006': {'F': 65.5148}, - 'VIS008': {'F': 73.1807}, - 'IR_016': {'F': 62.0208}, - 'IR_039': {'VC': 2547.771, - 'ALPHA': 0.9915, - 'BETA': 2.9002}, - 'WV_062': {'VC': 1595.621, - 'ALPHA': 0.9960, - 'BETA': 2.0337}, - 'WV_073': {'VC': 1360.337, - 'ALPHA': 0.9991, - 'BETA': 0.4340}, - 'IR_087': {'VC': 
1148.130, - 'ALPHA': 0.9996, - 'BETA': 0.1714}, - 'IR_097': {'VC': 1034.715, - 'ALPHA': 0.9999, - 'BETA': 0.0527}, - 'IR_108': {'VC': 929.842, - 'ALPHA': 0.9983, - 'BETA': 0.6084}, - 'IR_120': {'VC': 838.659, - 'ALPHA': 0.9988, - 'BETA': 0.3882}, - 'IR_134': {'VC': 750.653, - 'ALPHA': 0.9982, - 'BETA': 0.5390}} +CALIB[323] = {"HRV": {"F": 78.9416}, + "VIS006": {"F": 65.5148}, + "VIS008": {"F": 73.1807}, + "IR_016": {"F": 62.0208}, + "IR_039": {"VC": 2547.771, + "ALPHA": 0.9915, + "BETA": 2.9002}, + "WV_062": {"VC": 1595.621, + "ALPHA": 0.9960, + "BETA": 2.0337}, + "WV_073": {"VC": 1360.337, + "ALPHA": 0.9991, + "BETA": 0.4340}, + "IR_087": {"VC": 1148.130, + "ALPHA": 0.9996, + "BETA": 0.1714}, + "IR_097": {"VC": 1034.715, + "ALPHA": 0.9999, + "BETA": 0.0527}, + "IR_108": {"VC": 929.842, + "ALPHA": 0.9983, + "BETA": 0.6084}, + "IR_120": {"VC": 838.659, + "ALPHA": 0.9988, + "BETA": 0.3882}, + "IR_134": {"VC": 750.653, + "ALPHA": 0.9982, + "BETA": 0.5390}} # Meteosat 11 -CALIB[324] = {'HRV': {'F': 79.0035}, - 'VIS006': {'F': 65.2656}, - 'VIS008': {'F': 73.1692}, - 'IR_016': {'F': 61.9416}, - 'IR_039': {'VC': 2555.280, - 'ALPHA': 0.9916, - 'BETA': 2.9438}, - 'WV_062': {'VC': 1596.080, - 'ALPHA': 0.9959, - 'BETA': 2.0780}, - 'WV_073': {'VC': 1361.748, - 'ALPHA': 0.9990, - 'BETA': 0.4929}, - 'IR_087': {'VC': 1147.433, - 'ALPHA': 0.9996, - 'BETA': 0.1731}, - 'IR_097': {'VC': 1034.851, - 'ALPHA': 0.9998, - 'BETA': 0.0597}, - 'IR_108': {'VC': 931.122, - 'ALPHA': 0.9983, - 'BETA': 0.6256}, - 'IR_120': {'VC': 839.113, - 'ALPHA': 0.9988, - 'BETA': 0.4002}, - 'IR_134': {'VC': 748.585, - 'ALPHA': 0.9981, - 'BETA': 0.5635}} +CALIB[324] = {"HRV": {"F": 79.0035}, + "VIS006": {"F": 65.2656}, + "VIS008": {"F": 73.1692}, + "IR_016": {"F": 61.9416}, + "IR_039": {"VC": 2555.280, + "ALPHA": 0.9916, + "BETA": 2.9438}, + "WV_062": {"VC": 1596.080, + "ALPHA": 0.9959, + "BETA": 2.0780}, + "WV_073": {"VC": 1361.748, + "ALPHA": 0.9990, + "BETA": 0.4929}, + "IR_087": {"VC": 1147.433, + "ALPHA": 0.9996, + "BETA": 0.1731}, + "IR_097": {"VC": 1034.851, + "ALPHA": 0.9998, + "BETA": 0.0597}, + "IR_108": {"VC": 931.122, + "ALPHA": 0.9983, + "BETA": 0.6256}, + "IR_120": {"VC": 839.113, + "ALPHA": 0.9988, + "BETA": 0.4002}, + "IR_134": {"VC": 748.585, + "ALPHA": 0.9981, + "BETA": 0.5635}} def get_cds_time(days, msecs): @@ -370,12 +370,12 @@ def get_cds_time(days, msecs): """ if np.isscalar(days): - days = np.array([days], dtype='int64') - msecs = np.array([msecs], dtype='int64') + days = np.array([days], dtype="int64") + msecs = np.array([msecs], dtype="int64") - time = np.datetime64('1958-01-01').astype('datetime64[ms]') + \ - days.astype('timedelta64[D]') + msecs.astype('timedelta64[ms]') - time[time == np.datetime64('1958-01-01 00:00')] = np.datetime64("NaT") + time = np.datetime64("1958-01-01").astype("datetime64[ms]") + \ + days.astype("timedelta64[D]") + msecs.astype("timedelta64[ms]") + time[time == np.datetime64("1958-01-01 00:00")] = np.datetime64("NaT") if len(time) == 1: return time[0] @@ -384,9 +384,9 @@ def get_cds_time(days, msecs): def add_scanline_acq_time(dataset, acq_time): """Add scanline acquisition time to the given dataset.""" - dataset.coords['acq_time'] = ('y', acq_time) - dataset.coords['acq_time'].attrs[ - 'long_name'] = 'Mean scanline acquisition time' + dataset.coords["acq_time"] = ("y", acq_time) + dataset.coords["acq_time"].attrs[ + "long_name"] = "Mean scanline acquisition time" def dec10216(inbuf): @@ -436,53 +436,53 @@ class MpefProductHeader(object): def get(self): """Return numpy record_array 
for MPEF product header.""" record = [ - ('MPEF_File_Id', np.int16), - ('MPEF_Header_Version', np.uint8), - ('ManualDissAuthRequest', bool), - ('ManualDisseminationAuth', bool), - ('DisseminationAuth', bool), - ('NominalTime', time_cds_short), - ('ProductQuality', np.uint8), - ('ProductCompleteness', np.uint8), - ('ProductTimeliness', np.uint8), - ('ProcessingInstanceId', np.int8), - ('ImagesUsed', self.images_used, (4,)), - ('BaseAlgorithmVersion', + ("MPEF_File_Id", np.int16), + ("MPEF_Header_Version", np.uint8), + ("ManualDissAuthRequest", bool), + ("ManualDisseminationAuth", bool), + ("DisseminationAuth", bool), + ("NominalTime", time_cds_short), + ("ProductQuality", np.uint8), + ("ProductCompleteness", np.uint8), + ("ProductTimeliness", np.uint8), + ("ProcessingInstanceId", np.int8), + ("ImagesUsed", self.images_used, (4,)), + ("BaseAlgorithmVersion", issue_revision), - ('ProductAlgorithmVersion', + ("ProductAlgorithmVersion", issue_revision), - ('InstanceServerName', 'S2'), - ('SpacecraftName', 'S2'), - ('Mission', 'S3'), - ('RectificationLongitude', 'S5'), - ('Encoding', 'S1'), - ('TerminationSpace', 'S1'), - ('EncodingVersion', np.uint16), - ('Channel', np.uint8), - ('ImageLocation', 'S3'), - ('GsicsCalMode', np.bool_), - ('GsicsCalValidity', np.bool_), - ('Padding', 'S2'), - ('OffsetToData', np.uint32), - ('Padding2', 'S9'), - ('RepeatCycle', 'S15'), + ("InstanceServerName", "S2"), + ("SpacecraftName", "S2"), + ("Mission", "S3"), + ("RectificationLongitude", "S5"), + ("Encoding", "S1"), + ("TerminationSpace", "S1"), + ("EncodingVersion", np.uint16), + ("Channel", np.uint8), + ("ImageLocation", "S3"), + ("GsicsCalMode", np.bool_), + ("GsicsCalValidity", np.bool_), + ("Padding", "S2"), + ("OffsetToData", np.uint32), + ("Padding2", "S9"), + ("RepeatCycle", "S15"), ] - return np.dtype(record).newbyteorder('>') + return np.dtype(record).newbyteorder(">") @property def images_used(self): """Return structure for images_used.""" record = [ - ('Padding1', 'S2'), - ('ExpectedImage', time_cds_short), - ('ImageReceived', bool), - ('Padding2', 'S1'), - ('UsedImageStart_Day', np.uint16), - ('UsedImageStart_Millsec', np.uint32), - ('Padding3', 'S2'), - ('UsedImageEnd_Day', np.uint16), - ('UsedImageEndt_Millsec', np.uint32), + ("Padding1", "S2"), + ("ExpectedImage", time_cds_short), + ("ImageReceived", bool), + ("Padding2", "S1"), + ("UsedImageStart_Day", np.uint16), + ("UsedImageStart_Millsec", np.uint32), + ("Padding3", "S2"), + ("UsedImageEnd_Day", np.uint16), + ("UsedImageEndt_Millsec", np.uint32), ] return record @@ -522,7 +522,7 @@ def ir_calibrate(self, data, channel_name, cal_type): # effective radiances return self._erads2bt(data, channel_name) else: - raise NotImplementedError('Unknown calibration type') + raise NotImplementedError("Unknown calibration type") def _srads2bt(self, data, channel_name): """Convert spectral radiance to brightness temperature.""" @@ -566,36 +566,36 @@ def __init__(self, platform_id, channel_name, coefs, calib_mode, scan_time): scan_time=self._scan_time ) - valid_modes = ('NOMINAL', 'GSICS') + valid_modes = ("NOMINAL", "GSICS") if self._calib_mode not in valid_modes: raise ValueError( - 'Invalid calibration mode: {}. Choose one of {}'.format( + "Invalid calibration mode: {}. 
Choose one of {}".format( self._calib_mode, valid_modes) ) def calibrate(self, data, calibration): """Calibrate the given data.""" - if calibration == 'counts': + if calibration == "counts": res = data - elif calibration in ['radiance', 'reflectance', - 'brightness_temperature']: + elif calibration in ["radiance", "reflectance", + "brightness_temperature"]: gain, offset = self.get_gain_offset() res = self._algo.convert_to_radiance( data.astype(np.float32), gain, offset ) else: raise ValueError( - 'Invalid calibration {} for channel {}'.format( + "Invalid calibration {} for channel {}".format( calibration, self._channel_name ) ) - if calibration == 'reflectance': + if calibration == "reflectance": solar_irradiance = CALIB[self._platform_id][self._channel_name]["F"] res = self._algo.vis_calibrate(res, solar_irradiance) - elif calibration == 'brightness_temperature': + elif calibration == "brightness_temperature": res = self._algo.ir_calibrate( - res, self._channel_name, self._coefs['radiance_type'] + res, self._channel_name, self._coefs["radiance_type"] ) return res @@ -608,14 +608,14 @@ def get_gain_offset(self): nominal coefficients. External coefficients take precedence over internal coefficients. """ - coefs = self._coefs['coefs'] + coefs = self._coefs["coefs"] # Select internal coefficients for the given calibration mode - internal_gain = coefs['NOMINAL']['gain'] - internal_offset = coefs['NOMINAL']['offset'] - if self._calib_mode == 'GSICS': - gsics_gain = coefs['GSICS']['gain'] - gsics_offset = coefs['GSICS']['offset'] * gsics_gain + internal_gain = coefs["NOMINAL"]["gain"] + internal_offset = coefs["NOMINAL"]["offset"] + if self._calib_mode == "GSICS": + gsics_gain = coefs["GSICS"]["gain"] + gsics_offset = coefs["GSICS"]["offset"] * gsics_gain if gsics_gain != 0 and gsics_offset != 0: # If no GSICS coefficients are available for a certain channel, # they are set to zero in the file. @@ -623,8 +623,8 @@ def get_gain_offset(self): internal_offset = gsics_offset # Override with external coefficients, if any. 
- gain = coefs['EXTERNAL'].get('gain', internal_gain) - offset = coefs['EXTERNAL'].get('offset', internal_offset) + gain = coefs["EXTERNAL"].get("gain", internal_gain) + offset = coefs["EXTERNAL"].get("offset", internal_offset) return gain, offset @@ -689,9 +689,9 @@ def evaluate(self, time): Returns: Earth-centered cartesian coordinates (x, y, z) in meters """ - domain = [np.datetime64(self.start_time).astype('int64'), - np.datetime64(self.end_time).astype('int64')] - time = np.datetime64(time).astype('int64') + domain = [np.datetime64(self.start_time).astype("int64"), + np.datetime64(self.end_time).astype("int64")] + time = np.datetime64(time).astype("int64") x, y, z = chebyshev_3d(self.coefs, time, domain) return x * 1000, y * 1000, z * 1000 # km -> m @@ -718,10 +718,10 @@ def get_satpos(orbit_polynomial, time, semi_major_axis, semi_minor_axis): """ x, y, z = orbit_polynomial.evaluate(time) geocent = pyproj.CRS( - proj='geocent', a=semi_major_axis, b=semi_minor_axis, units='m' + proj="geocent", a=semi_major_axis, b=semi_minor_axis, units="m" ) latlong = pyproj.CRS( - proj='latlong', a=semi_major_axis, b=semi_minor_axis, units='m' + proj="latlong", a=semi_major_axis, b=semi_minor_axis, units="m" ) transformer = pyproj.Transformer.from_crs(geocent, latlong) lon, lat, alt = transformer.transform(x, y, z) @@ -750,10 +750,10 @@ def __init__(self, orbit_polynomials): self.orbit_polynomials = orbit_polynomials # Left/right boundaries of time intervals for which the polynomials are # valid. - self.valid_from = orbit_polynomials['StartTime'][0, :].astype( - 'datetime64[us]') - self.valid_to = orbit_polynomials['EndTime'][0, :].astype( - 'datetime64[us]') + self.valid_from = orbit_polynomials["StartTime"][0, :].astype( + "datetime64[us]") + self.valid_to = orbit_polynomials["EndTime"][0, :].astype( + "datetime64[us]") def get_orbit_polynomial(self, time, max_delta=6): """Get orbit polynomial valid for the given time. @@ -782,16 +782,16 @@ def get_orbit_polynomial(self, time, max_delta=6): match = self._get_enclosing_interval(time) except ValueError: warnings.warn( - 'No orbit polynomial valid for {}. Using closest ' - 'match.'.format(time), + "No orbit polynomial valid for {}. 
Using closest " + "match.".format(time), stacklevel=2 ) match = self._get_closest_interval_within(time, max_delta) return OrbitPolynomial( coefs=( - self.orbit_polynomials['X'][match], - self.orbit_polynomials['Y'][match], - self.orbit_polynomials['Z'][match] + self.orbit_polynomials["X"][match], + self.orbit_polynomials["Y"][match], + self.orbit_polynomials["Z"][match] ), start_time=self.valid_from[match], end_time=self.valid_to[match] @@ -819,12 +819,12 @@ def _get_closest_interval_within(self, time, threshold): Index of closest interval """ closest_match, distance = self._get_closest_interval(time) - threshold_diff = np.timedelta64(threshold, 'h') + threshold_diff = np.timedelta64(threshold, "h") if distance < threshold_diff: return closest_match raise NoValidOrbitParams( - 'Unable to find orbit coefficients valid for {} +/- {}' - 'hours'.format(time, threshold) + "Unable to find orbit coefficients valid for {} +/- {}" + "hours".format(time, threshold) ) def _get_closest_interval(self, time): @@ -836,7 +836,7 @@ def _get_closest_interval(self, time): intervals_centre = self.valid_from + 0.5 * ( self.valid_to - self.valid_from ) - diffs_us = (time - intervals_centre).astype('i8') + diffs_us = (time - intervals_centre).astype("i8") closest_match = np.argmin(np.fabs(diffs_us)) distance = abs(intervals_centre[closest_match] - time) return closest_match, distance @@ -864,15 +864,15 @@ def calculate_area_extent(area_dict): # For Earth model 2 and full disk VISIR, (center_point - west - 0.5 + we_offset) must be -1856.5 . # See MSG Level 1.5 Image Data Format Description Figure 7 - Alignment and numbering of the non-HRV pixels. """ - center_point = area_dict['center_point'] - east = area_dict['east'] - west = area_dict['west'] - south = area_dict['south'] - north = area_dict['north'] - column_step = area_dict['column_step'] - line_step = area_dict['line_step'] - column_offset = area_dict.get('column_offset', 0) - line_offset = area_dict.get('line_offset', 0) + center_point = area_dict["center_point"] + east = area_dict["east"] + west = area_dict["west"] + south = area_dict["south"] + north = area_dict["north"] + column_step = area_dict["column_step"] + line_step = area_dict["line_step"] + column_offset = area_dict.get("column_offset", 0) + line_offset = area_dict.get("line_offset", 0) ll_c = (center_point - east + 0.5 + column_offset) * column_step ll_l = (north - center_point + 0.5 + line_offset) * line_step @@ -885,18 +885,18 @@ def calculate_area_extent(area_dict): def create_coef_dict(coefs_nominal, coefs_gsics, radiance_type, ext_coefs): """Create coefficient dictionary expected by calibration class.""" return { - 'coefs': { - 'NOMINAL': { - 'gain': coefs_nominal[0], - 'offset': coefs_nominal[1], + "coefs": { + "NOMINAL": { + "gain": coefs_nominal[0], + "offset": coefs_nominal[1], }, - 'GSICS': { - 'gain': coefs_gsics[0], - 'offset': coefs_gsics[1] + "GSICS": { + "gain": coefs_gsics[0], + "offset": coefs_gsics[1] }, - 'EXTERNAL': ext_coefs + "EXTERNAL": ext_coefs }, - 'radiance_type': radiance_type + "radiance_type": radiance_type } @@ -916,7 +916,7 @@ def pad_data_horizontally(data, final_size, east_bound, west_bound): """Pad the data given east and west bounds and the desired size.""" nlines = final_size[0] if west_bound - east_bound != data.shape[1] - 1: - raise IndexError('East and west bounds do not match data shape') + raise IndexError("East and west bounds do not match data shape") padding_east = get_padding_area((nlines, east_bound - 1), data.dtype) padding_west = 
get_padding_area((nlines, (final_size[1] - west_bound)), data.dtype) @@ -928,7 +928,7 @@ def pad_data_vertically(data, final_size, south_bound, north_bound): """Pad the data given south and north bounds and the desired size.""" ncols = final_size[1] if north_bound - south_bound != data.shape[0] - 1: - raise IndexError('South and north bounds do not match data shape') + raise IndexError("South and north bounds do not match data shape") padding_south = get_padding_area((south_bound - 1, ncols), data.dtype) padding_north = get_padding_area(((final_size[0] - north_bound), ncols), data.dtype) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 2b153edfcc..83fc82f687 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -252,33 +252,33 @@ from satpy.utils import get_legacy_chunk_size CHUNK_SIZE = get_legacy_chunk_size() -logger = logging.getLogger('hrit_msg') +logger = logging.getLogger("hrit_msg") # MSG implementation: -key_header = np.dtype([('key_number', 'u1'), - ('seed', '>f8')]) - -segment_identification = np.dtype([('GP_SC_ID', '>i2'), - ('spectral_channel_id', '>i1'), - ('segment_sequence_number', '>u2'), - ('planned_start_segment_number', '>u2'), - ('planned_end_segment_number', '>u2'), - ('data_field_representation', '>i1')]) - -image_segment_line_quality = np.dtype([('line_number_in_grid', '>i4'), - ('line_mean_acquisition', - [('days', '>u2'), - ('milliseconds', '>u4')]), - ('line_validity', 'u1'), - ('line_radiometric_quality', 'u1'), - ('line_geometric_quality', 'u1')]) +key_header = np.dtype([("key_number", "u1"), + ("seed", ">f8")]) + +segment_identification = np.dtype([("GP_SC_ID", ">i2"), + ("spectral_channel_id", ">i1"), + ("segment_sequence_number", ">u2"), + ("planned_start_segment_number", ">u2"), + ("planned_end_segment_number", ">u2"), + ("data_field_representation", ">i1")]) + +image_segment_line_quality = np.dtype([("line_number_in_grid", ">i4"), + ("line_mean_acquisition", + [("days", ">u2"), + ("milliseconds", ">u4")]), + ("line_validity", "u1"), + ("line_radiometric_quality", "u1"), + ("line_geometric_quality", "u1")]) msg_variable_length_headers = { - image_segment_line_quality: 'image_segment_line_quality'} + image_segment_line_quality: "image_segment_line_quality"} -msg_text_headers = {image_data_function: 'image_data_function', - annotation_header: 'annotation_header', - ancillary_text: 'ancillary_text'} +msg_text_headers = {image_data_function: "image_data_function", + annotation_header: "annotation_header", + ancillary_text: "ancillary_text"} msg_hdr_map = base_hdr_map.copy() msg_hdr_map.update({7: key_header, @@ -287,23 +287,23 @@ }) -orbit_coef = np.dtype([('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('X', '>f8', (8, )), - ('Y', '>f8', (8, )), - ('Z', '>f8', (8, )), - ('VX', '>f8', (8, )), - ('VY', '>f8', (8, )), - ('VZ', '>f8', (8, ))]) +orbit_coef = np.dtype([("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("X", ">f8", (8, )), + ("Y", ">f8", (8, )), + ("Z", ">f8", (8, )), + ("VX", ">f8", (8, )), + ("VY", ">f8", (8, )), + ("VZ", ">f8", (8, ))]) -attitude_coef = np.dtype([('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('XofSpinAxis', '>f8', (8, )), - ('YofSpinAxis', '>f8', (8, )), - ('ZofSpinAxis', '>f8', (8, ))]) +attitude_coef = np.dtype([("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("XofSpinAxis", ">f8", (8, )), + ("YofSpinAxis", ">f8", (8, )), + ("ZofSpinAxis", ">f8", (8, ))]) -cuc_time = np.dtype([('coarse', 'u1', 
(4, )), - ('fine', 'u1', (3, ))]) +cuc_time = np.dtype([("coarse", "u1", (4, )), + ("fine", "u1", (3, ))]) class HRITMSGPrologueEpilogueBase(HRITFileHandler): @@ -328,7 +328,7 @@ def reduce(self, max_size): class HRITMSGPrologueFileHandler(HRITMSGPrologueEpilogueBase): """SEVIRI HRIT prologue reader.""" - def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', + def __init__(self, filename, filename_info, filetype_info, calib_mode="nominal", ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=None, fill_hrv=None, mask_bad_quality_scan_lines=None): """Initialize the reader.""" @@ -340,22 +340,22 @@ def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', self.prologue = {} self.read_prologue() - service = filename_info['service'] - if service == '': - self.mda['service'] = '0DEG' + service = filename_info["service"] + if service == "": + self.mda["service"] = "0DEG" else: - self.mda['service'] = service + self.mda["service"] = service def read_prologue(self): """Read the prologue metadata.""" with utils.generic_open(self.filename, mode="rb") as fp_: - fp_.seek(self.mda['total_header_length']) + fp_.seek(self.mda["total_header_length"]) data = np.frombuffer(fp_.read(hrit_prologue.itemsize), dtype=hrit_prologue, count=1) self.prologue.update(recarray2dict(data)) try: impf = np.frombuffer(fp_.read(impf_configuration.itemsize), dtype=impf_configuration, count=1)[0] except ValueError: - logger.info('No IMPF configuration field found in prologue.') + logger.info("No IMPF configuration field found in prologue.") else: self.prologue.update(recarray2dict(impf)) @@ -368,8 +368,8 @@ def satpos(self): Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ a, b = self.get_earth_radii() - poly_finder = OrbitPolynomialFinder(self.prologue['SatelliteStatus'][ - 'Orbit']['OrbitPolynomial']) + poly_finder = OrbitPolynomialFinder(self.prologue["SatelliteStatus"][ + "Orbit"]["OrbitPolynomial"]) orbit_polynomial = poly_finder.get_orbit_polynomial(self.observation_start_time) return get_satpos( orbit_polynomial=orbit_polynomial, @@ -385,10 +385,10 @@ def get_earth_radii(self): Equatorial radius, polar radius [m] """ - earth_model = self.prologue['GeometricProcessing']['EarthModel'] - a = earth_model['EquatorialRadius'] * 1000 - b = (earth_model['NorthPolarRadius'] + - earth_model['SouthPolarRadius']) / 2.0 * 1000 + earth_model = self.prologue["GeometricProcessing"]["EarthModel"] + a = earth_model["EquatorialRadius"] * 1000 + b = (earth_model["NorthPolarRadius"] + + earth_model["SouthPolarRadius"]) / 2.0 * 1000 return a, b def reduce(self, max_size): @@ -399,7 +399,7 @@ def reduce(self, max_size): class HRITMSGEpilogueFileHandler(HRITMSGPrologueEpilogueBase): """SEVIRI HRIT epilogue reader.""" - def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', + def __init__(self, filename, filename_info, filetype_info, calib_mode="nominal", ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=None, fill_hrv=None, mask_bad_quality_scan_lines=None): """Initialize the reader.""" @@ -411,16 +411,16 @@ def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', self.epilogue = {} self.read_epilogue() - service = filename_info['service'] - if service == '': - self.mda['service'] = '0DEG' + service = filename_info["service"] + if service == "": + self.mda["service"] = "0DEG" else: - self.mda['service'] = service + self.mda["service"] = service def read_epilogue(self): """Read the 
epilogue metadata.""" with utils.generic_open(self.filename, mode="rb") as fp_: - fp_.seek(self.mda['total_header_length']) + fp_.seek(self.mda["total_header_length"]) data = np.frombuffer(fp_.read(hrit_epilogue.itemsize), dtype=hrit_epilogue, count=1) self.epilogue.update(recarray2dict(data)) @@ -454,7 +454,7 @@ class HRITMSGFileHandler(HRITFileHandler): """ def __init__(self, filename, filename_info, filetype_info, - prologue, epilogue, calib_mode='nominal', + prologue, epilogue, calib_mode="nominal", ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=100, fill_hrv=True, mask_bad_quality_scan_lines=True): @@ -480,27 +480,27 @@ def __init__(self, filename, filename_info, filetype_info, def _get_header(self): """Read the header info, and fill the metadata dictionary.""" - earth_model = self.prologue['GeometricProcessing']['EarthModel'] - self.mda['offset_corrected'] = earth_model['TypeOfEarthModel'] == 2 + earth_model = self.prologue["GeometricProcessing"]["EarthModel"] + self.mda["offset_corrected"] = earth_model["TypeOfEarthModel"] == 2 # Projection a, b = self.prologue_.get_earth_radii() - self.mda['projection_parameters']['a'] = a - self.mda['projection_parameters']['b'] = b - ssp = self.prologue['ImageDescription'][ - 'ProjectionDescription']['LongitudeOfSSP'] - self.mda['projection_parameters']['SSP_longitude'] = ssp - self.mda['projection_parameters']['SSP_latitude'] = 0.0 + self.mda["projection_parameters"]["a"] = a + self.mda["projection_parameters"]["b"] = b + ssp = self.prologue["ImageDescription"][ + "ProjectionDescription"]["LongitudeOfSSP"] + self.mda["projection_parameters"]["SSP_longitude"] = ssp + self.mda["projection_parameters"]["SSP_latitude"] = 0.0 # Orbital parameters - self.mda['orbital_parameters']['satellite_nominal_longitude'] = self.prologue['SatelliteStatus'][ - 'SatelliteDefinition']['NominalLongitude'] - self.mda['orbital_parameters']['satellite_nominal_latitude'] = 0.0 + self.mda["orbital_parameters"]["satellite_nominal_longitude"] = self.prologue["SatelliteStatus"][ + "SatelliteDefinition"]["NominalLongitude"] + self.mda["orbital_parameters"]["satellite_nominal_latitude"] = 0.0 try: actual_lon, actual_lat, actual_alt = self.prologue_.satpos - self.mda['orbital_parameters']['satellite_actual_longitude'] = actual_lon - self.mda['orbital_parameters']['satellite_actual_latitude'] = actual_lat - self.mda['orbital_parameters']['satellite_actual_altitude'] = actual_alt + self.mda["orbital_parameters"]["satellite_actual_longitude"] = actual_lon + self.mda["orbital_parameters"]["satellite_actual_latitude"] = actual_lat + self.mda["orbital_parameters"]["satellite_actual_altitude"] = actual_alt except NoValidOrbitParams as err: logger.warning(err) @@ -508,46 +508,46 @@ def _get_header(self): self.platform_id = self.prologue["SatelliteStatus"][ "SatelliteDefinition"]["SatelliteId"] self.platform_name = "Meteosat-" + SATNUM[self.platform_id] - self.mda['platform_name'] = self.platform_name - service = self._filename_info['service'] - if service == '': - self.mda['service'] = '0DEG' + self.mda["platform_name"] = self.platform_name + service = self._filename_info["service"] + if service == "": + self.mda["service"] = "0DEG" else: - self.mda['service'] = service - self.channel_name = CHANNEL_NAMES[self.mda['spectral_channel_id']] + self.mda["service"] = service + self.channel_name = CHANNEL_NAMES[self.mda["spectral_channel_id"]] @property def _repeat_cycle_duration(self): """Get repeat cycle duration from epilogue.""" - if 
self.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: + if self.epilogue["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] == 1: return 5 return REPEAT_CYCLE_DURATION @property def nominal_start_time(self): """Get the start time and round it according to scan law.""" - tm = self.prologue['ImageAcquisition'][ - 'PlannedAcquisitionTime']['TrueRepeatCycleStart'] + tm = self.prologue["ImageAcquisition"][ + "PlannedAcquisitionTime"]["TrueRepeatCycleStart"] return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): """Get the end time and round it according to scan law.""" - tm = self.prologue['ImageAcquisition'][ - 'PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] + tm = self.prologue["ImageAcquisition"][ + "PlannedAcquisitionTime"]["PlannedRepeatCycleEnd"] return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): """Get the observation start time.""" - return self.epilogue['ImageProductionStats'][ - 'ActualScanningSummary']['ForwardScanStart'] + return self.epilogue["ImageProductionStats"][ + "ActualScanningSummary"]["ForwardScanStart"] @property def observation_end_time(self): """Get the observation end time.""" - return self.epilogue['ImageProductionStats'][ - 'ActualScanningSummary']['ForwardScanEnd'] + return self.epilogue["ImageProductionStats"][ + "ActualScanningSummary"]["ForwardScanEnd"] @property def start_time(self): @@ -572,7 +572,7 @@ def _get_area_extent(self, pdict): """ aex = get_area_extent(pdict) - if not self.mda['offset_corrected']: + if not self.mda["offset_corrected"]: # Geo-referencing offset present. Adjust area extent to match the shifted data. Note that we have to adjust # the corners in the *opposite* direction, i.e. S-E. Think of it as if the coastlines were fixed, and you # dragged the image to S-E until coastlines and data area aligned correctly. 
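The nominal_start_time and nominal_end_time properties above snap the planned acquisition timestamps onto the repeat-cycle grid via round_nom_time. As a minimal, self-contained sketch of that kind of rounding (this is not satpy's round_nom_time, and the 15-minute default is only an example value):

    from datetime import datetime, timedelta

    def round_to_cycle(tm: datetime, cycle: timedelta = timedelta(minutes=15)) -> datetime:
        # Illustration only: round a timestamp to the nearest repeat-cycle slot.
        offset = (tm - datetime.min) % cycle
        if offset >= cycle / 2:
            return tm - offset + cycle
        return tm - offset

    # round_to_cycle(datetime(2023, 5, 1, 12, 7, 42)) -> 2023-05-01 12:15:00

The _repeat_cycle_duration check above matters here because a ReducedScan cycle repeats every 5 minutes instead of the usual interval, so the rounding step used by these properties changes with it.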
@@ -589,80 +589,80 @@ def _get_area_extent(self, pdict): def get_area_def(self, dsid): """Get the area definition of the band.""" # Common parameters for both HRV and other channels - nlines = int(self.mda['number_of_lines']) - loff = np.float32(self.mda['loff']) + nlines = int(self.mda["number_of_lines"]) + loff = np.float32(self.mda["loff"]) pdict = dict() - pdict['cfac'] = np.int32(self.mda['cfac']) - pdict['lfac'] = np.int32(self.mda['lfac']) - pdict['coff'] = np.float32(self.mda['coff']) - - pdict['a'] = self.mda['projection_parameters']['a'] - pdict['b'] = self.mda['projection_parameters']['b'] - pdict['h'] = self.mda['projection_parameters']['h'] - pdict['ssp_lon'] = self.mda['projection_parameters']['SSP_longitude'] - - pdict['nlines'] = nlines - pdict['ncols'] = int(self.mda['number_of_columns']) - if (self.prologue['ImageDescription']['Level15ImageProduction'] - ['ImageProcDirection'] == 0): - pdict['scandir'] = 'N2S' + pdict["cfac"] = np.int32(self.mda["cfac"]) + pdict["lfac"] = np.int32(self.mda["lfac"]) + pdict["coff"] = np.float32(self.mda["coff"]) + + pdict["a"] = self.mda["projection_parameters"]["a"] + pdict["b"] = self.mda["projection_parameters"]["b"] + pdict["h"] = self.mda["projection_parameters"]["h"] + pdict["ssp_lon"] = self.mda["projection_parameters"]["SSP_longitude"] + + pdict["nlines"] = nlines + pdict["ncols"] = int(self.mda["number_of_columns"]) + if (self.prologue["ImageDescription"]["Level15ImageProduction"] + ["ImageProcDirection"] == 0): + pdict["scandir"] = "N2S" else: - pdict['scandir'] = 'S2N' + pdict["scandir"] = "S2N" - area_naming_input_dict = {'platform_name': 'msg', - 'instrument_name': 'seviri', - 'resolution': int(dsid['resolution']) + area_naming_input_dict = {"platform_name": "msg", + "instrument_name": "seviri", + "resolution": int(dsid["resolution"]) } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('seviri', pdict['ssp_lon'])}) + **get_service_mode("seviri", pdict["ssp_lon"])}) # Compute area definition for non-HRV channels: - if dsid['name'] != 'HRV': - pdict['loff'] = loff - nlines + if dsid["name"] != "HRV": + pdict["loff"] = loff - nlines aex = self._get_area_extent(pdict) - pdict['a_name'] = area_naming['area_id'] - pdict['a_desc'] = area_naming['description'] - pdict['p_id'] = "" + pdict["a_name"] = area_naming["area_id"] + pdict["a_desc"] = area_naming["description"] + pdict["p_id"] = "" area = get_area_definition(pdict, aex) self.area = area return self.area - segment_number = self.mda['segment_sequence_number'] + segment_number = self.mda["segment_sequence_number"] current_first_line = ((segment_number - - self.mda['planned_start_segment_number']) - * pdict['nlines']) + self.mda["planned_start_segment_number"]) + * pdict["nlines"]) # Or, if we are processing HRV: - pdict['a_name'] = area_naming['area_id'] - pdict['p_id'] = "" - bounds = self.epilogue['ImageProductionStats']['ActualL15CoverageHRV'].copy() + pdict["a_name"] = area_naming["area_id"] + pdict["p_id"] = "" + bounds = self.epilogue["ImageProductionStats"]["ActualL15CoverageHRV"].copy() if self.fill_hrv: - bounds['UpperEastColumnActual'] = 1 - bounds['UpperWestColumnActual'] = HRV_NUM_COLUMNS - bounds['LowerEastColumnActual'] = 1 - bounds['LowerWestColumnActual'] = HRV_NUM_COLUMNS - pdict['ncols'] = HRV_NUM_COLUMNS + bounds["UpperEastColumnActual"] = 1 + bounds["UpperWestColumnActual"] = HRV_NUM_COLUMNS + bounds["LowerEastColumnActual"] = 1 + bounds["LowerWestColumnActual"] = HRV_NUM_COLUMNS + pdict["ncols"] = HRV_NUM_COLUMNS 
upper_south_line = bounds[ - 'LowerNorthLineActual'] - current_first_line - 1 - upper_south_line = min(max(upper_south_line, 0), pdict['nlines']) - lower_coff = (5566 - bounds['LowerEastColumnActual'] + 1) - upper_coff = (5566 - bounds['UpperEastColumnActual'] + 1) + "LowerNorthLineActual"] - current_first_line - 1 + upper_south_line = min(max(upper_south_line, 0), pdict["nlines"]) + lower_coff = (5566 - bounds["LowerEastColumnActual"] + 1) + upper_coff = (5566 - bounds["UpperEastColumnActual"] + 1) # First we look at the lower window - pdict['nlines'] = upper_south_line - pdict['loff'] = loff - upper_south_line - pdict['coff'] = lower_coff - pdict['a_desc'] = area_naming['description'] + pdict["nlines"] = upper_south_line + pdict["loff"] = loff - upper_south_line + pdict["coff"] = lower_coff + pdict["a_desc"] = area_naming["description"] lower_area_extent = self._get_area_extent(pdict) lower_area = get_area_definition(pdict, lower_area_extent) # Now the upper window - pdict['nlines'] = nlines - upper_south_line - pdict['loff'] = loff - pdict['nlines'] - upper_south_line - pdict['coff'] = upper_coff - pdict['a_desc'] = area_naming['description'] + pdict["nlines"] = nlines - upper_south_line + pdict["loff"] = loff - pdict["nlines"] - upper_south_line + pdict["coff"] = upper_coff + pdict["a_desc"] = area_naming["description"] upper_area_extent = self._get_area_extent(pdict) upper_area = get_area_definition(pdict, upper_area_extent) @@ -674,13 +674,13 @@ def get_area_def(self, dsid): def get_dataset(self, key, info): """Get the dataset.""" res = super(HRITMSGFileHandler, self).get_dataset(key, info) - res = self.calibrate(res, key['calibration']) + res = self.calibrate(res, key["calibration"]) - is_calibration = key['calibration'] in ['radiance', 'reflectance', 'brightness_temperature'] + is_calibration = key["calibration"] in ["radiance", "reflectance", "brightness_temperature"] if is_calibration and self.mask_bad_quality_scan_lines: # noqa: E129 res = self._mask_bad_quality(res) - if key['name'] == 'HRV' and self.fill_hrv: + if key["name"] == "HRV" and self.fill_hrv: res = self.pad_hrv_data(res) self._update_attrs(res, info) self._add_scanline_acq_time(res) @@ -688,17 +688,17 @@ def get_dataset(self, key, info): def pad_hrv_data(self, res): """Add empty pixels around the HRV.""" - logger.debug('Padding HRV data to full disk') - nlines = int(self.mda['number_of_lines']) + logger.debug("Padding HRV data to full disk") + nlines = int(self.mda["number_of_lines"]) - segment_number = self.mda['segment_sequence_number'] + segment_number = self.mda["segment_sequence_number"] current_first_line = (segment_number - - self.mda['planned_start_segment_number']) * nlines - bounds = self.epilogue['ImageProductionStats']['ActualL15CoverageHRV'] + - self.mda["planned_start_segment_number"]) * nlines + bounds = self.epilogue["ImageProductionStats"]["ActualL15CoverageHRV"] upper_south_line = bounds[ - 'LowerNorthLineActual'] - current_first_line - 1 + "LowerNorthLineActual"] - current_first_line - 1 upper_south_line = min(max(upper_south_line, 0), nlines) data_list = list() @@ -706,18 +706,18 @@ def pad_hrv_data(self, res): # we have some of the lower window data_lower = pad_data_horizontally(res[:upper_south_line, :].data, (upper_south_line, HRV_NUM_COLUMNS), - bounds['LowerEastColumnActual'], - bounds['LowerWestColumnActual']) + bounds["LowerEastColumnActual"], + bounds["LowerWestColumnActual"]) data_list.append(data_lower) if upper_south_line < nlines: # we have some of the upper window data_upper = 
pad_data_horizontally(res[upper_south_line:, :].data, (nlines - upper_south_line, HRV_NUM_COLUMNS), - bounds['UpperEastColumnActual'], - bounds['UpperWestColumnActual']) + bounds["UpperEastColumnActual"], + bounds["UpperWestColumnActual"]) data_list.append(data_upper) - return xr.DataArray(da.vstack(data_list), dims=('y', 'x'), attrs=res.attrs.copy()) + return xr.DataArray(da.vstack(data_list), dims=("y", "x"), attrs=res.attrs.copy()) def calibrate(self, data, calibration): """Calibrate the data.""" @@ -733,9 +733,9 @@ def calibrate(self, data, calibration): def _mask_bad_quality(self, data): """Mask scanlines with bad quality.""" - line_validity = self.mda['image_segment_line_quality']['line_validity'] - line_radiometric_quality = self.mda['image_segment_line_quality']['line_radiometric_quality'] - line_geometric_quality = self.mda['image_segment_line_quality']['line_geometric_quality'] + line_validity = self.mda["image_segment_line_quality"]["line_validity"] + line_radiometric_quality = self.mda["image_segment_line_quality"]["line_radiometric_quality"] + line_geometric_quality = self.mda["image_segment_line_quality"]["line_geometric_quality"] data = mask_bad_quality(data, line_validity, line_geometric_quality, line_radiometric_quality) return data @@ -743,7 +743,7 @@ def _get_raw_mda(self): """Compile raw metadata to be included in the dataset attributes.""" # Metadata from segment header (excluding items which vary among the different segments) raw_mda = copy.deepcopy(self.mda) - for key in ('image_segment_line_quality', 'segment_sequence_number', 'annotation_header', 'loff'): + for key in ("image_segment_line_quality", "segment_sequence_number", "annotation_header", "loff"): raw_mda.pop(key, None) # Metadata from prologue and epilogue (large arrays removed) @@ -754,50 +754,50 @@ def _get_raw_mda(self): def _add_scanline_acq_time(self, dataset): """Add scanline acquisition time to the given dataset.""" - tline = self.mda['image_segment_line_quality']['line_mean_acquisition'] - acq_time = get_cds_time(days=tline['days'], msecs=tline['milliseconds']) + tline = self.mda["image_segment_line_quality"]["line_mean_acquisition"] + acq_time = get_cds_time(days=tline["days"], msecs=tline["milliseconds"]) add_scanline_acq_time(dataset, acq_time) def _update_attrs(self, res, info): """Update dataset attributes.""" - res.attrs['units'] = info['units'] - res.attrs['wavelength'] = info['wavelength'] - res.attrs['standard_name'] = info['standard_name'] - res.attrs['platform_name'] = self.platform_name - res.attrs['sensor'] = 'seviri' - res.attrs['nominal_start_time'] = self.nominal_start_time, - res.attrs['nominal_end_time'] = self.nominal_end_time, - res.attrs['time_parameters'] = { - 'nominal_start_time': self.nominal_start_time, - 'nominal_end_time': self.nominal_end_time, - 'observation_start_time': self.observation_start_time, - 'observation_end_time': self.observation_end_time, + res.attrs["units"] = info["units"] + res.attrs["wavelength"] = info["wavelength"] + res.attrs["standard_name"] = info["standard_name"] + res.attrs["platform_name"] = self.platform_name + res.attrs["sensor"] = "seviri" + res.attrs["nominal_start_time"] = self.nominal_start_time, + res.attrs["nominal_end_time"] = self.nominal_end_time, + res.attrs["time_parameters"] = { + "nominal_start_time": self.nominal_start_time, + "nominal_end_time": self.nominal_end_time, + "observation_start_time": self.observation_start_time, + "observation_end_time": self.observation_end_time, } - res.attrs['orbital_parameters'] = { - 
'projection_longitude': self.mda['projection_parameters']['SSP_longitude'], - 'projection_latitude': self.mda['projection_parameters']['SSP_latitude'], - 'projection_altitude': self.mda['projection_parameters']['h']} - res.attrs['orbital_parameters'].update(self.mda['orbital_parameters']) - res.attrs['georef_offset_corrected'] = self.mda['offset_corrected'] + res.attrs["orbital_parameters"] = { + "projection_longitude": self.mda["projection_parameters"]["SSP_longitude"], + "projection_latitude": self.mda["projection_parameters"]["SSP_latitude"], + "projection_altitude": self.mda["projection_parameters"]["h"]} + res.attrs["orbital_parameters"].update(self.mda["orbital_parameters"]) + res.attrs["georef_offset_corrected"] = self.mda["offset_corrected"] if self.include_raw_metadata: - res.attrs['raw_metadata'] = self._get_raw_mda() + res.attrs["raw_metadata"] = self._get_raw_mda() def _get_calib_coefs(self, channel_name): """Get coefficients for calibration from counts to radiance.""" - band_idx = self.mda['spectral_channel_id'] - 1 + band_idx = self.mda["spectral_channel_id"] - 1 coefs_nominal = self.prologue["RadiometricProcessing"][ "Level15ImageCalibration"] - coefs_gsics = self.prologue["RadiometricProcessing"]['MPEFCalFeedback'] - radiance_types = self.prologue['ImageDescription'][ - 'Level15ImageProduction']['PlannedChanProcessing'] + coefs_gsics = self.prologue["RadiometricProcessing"]["MPEFCalFeedback"] + radiance_types = self.prologue["ImageDescription"][ + "Level15ImageProduction"]["PlannedChanProcessing"] return create_coef_dict( coefs_nominal=( - coefs_nominal['CalSlope'][band_idx], - coefs_nominal['CalOffset'][band_idx] + coefs_nominal["CalSlope"][band_idx], + coefs_nominal["CalOffset"][band_idx] ), coefs_gsics=( - coefs_gsics['GSICSCalCoeff'][band_idx], - coefs_gsics['GSICSOffsetCount'][band_idx] + coefs_gsics["GSICSCalCoeff"][band_idx], + coefs_gsics["GSICSOffsetCount"][band_idx] ), ext_coefs=self.ext_calib_coefs.get(channel_name, {}), radiance_type=radiance_types[band_idx] @@ -808,7 +808,7 @@ def pad_data(data, final_size, east_bound, west_bound): """Pad the data given east and west bounds and the desired size.""" nlines = final_size[0] if west_bound - east_bound != data.shape[1] - 1: - raise IndexError('East and west bounds do not match data shape') + raise IndexError("East and west bounds do not match data shape") padding_east = da.zeros((nlines, east_bound - 1), dtype=data.dtype, chunks=CHUNK_SIZE) padding_west = da.zeros((nlines, (final_size[1] - west_bound)), diff --git a/satpy/readers/seviri_l1b_icare.py b/satpy/readers/seviri_l1b_icare.py index 38c4360744..b2ceb30313 100644 --- a/satpy/readers/seviri_l1b_icare.py +++ b/satpy/readers/seviri_l1b_icare.py @@ -86,18 +86,18 @@ def __init__(self, filename, filename_info, filetype_info): filename_info, filetype_info) # These are VIS bands - self.ref_bands = ['HRV', 'VIS006', 'VIS008', 'IR_016'] + self.ref_bands = ["HRV", "VIS006", "VIS008", "IR_016"] # And these are IR bands - self.bt_bands = ['IR_039', 'IR_062', 'IR_073', - 'IR_087', 'IR_097', 'IR_108', - 'IR_120', 'IR_134', - 'WV_062', 'WV_073'] + self.bt_bands = ["IR_039", "IR_062", "IR_073", + "IR_087", "IR_097", "IR_108", + "IR_120", "IR_134", + "WV_062", "WV_073"] @property def sensor_name(self): """Get the sensor name.""" # the sensor and platform names are stored together, eg: MSG1/SEVIRI - attr = self['/attr/Sensors'] + attr = self["/attr/Sensors"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)).lower() else: @@ -105,14 +105,14 @@ def sensor_name(self): 
plat = attr[0:4] sens = attr[5:] # icare uses non-standard platform names - if plat == 'msg1': - plat = 'Meteosat-08' - elif plat == 'msg2': - plat = 'Meteosat-09' - elif plat == 'msg3': - plat = 'Meteosat-10' - elif plat == 'msg4': - plat = 'Meteosat-11' + if plat == "msg1": + plat = "Meteosat-08" + elif plat == "msg2": + plat = "Meteosat-09" + elif plat == "msg3": + plat = "Meteosat-10" + elif plat == "msg4": + plat = "Meteosat-11" else: raise NameError("Unsupported satellite platform:"+plat) return [plat, sens] @@ -120,7 +120,7 @@ def sensor_name(self): @property def satlon(self): """Get the satellite longitude.""" - attr = self['/attr/Sub_Satellite_Longitude'] + attr = self["/attr/Sub_Satellite_Longitude"] if isinstance(attr, np.ndarray): attr = float(attr.astype(str)) return attr @@ -128,7 +128,7 @@ def satlon(self): @property def projlon(self): """Get the projection longitude.""" - attr = self['/attr/Projection_Longitude'] + attr = self["/attr/Projection_Longitude"] if isinstance(attr, np.ndarray): attr = float(attr.astype(str)) return attr @@ -136,11 +136,11 @@ def projlon(self): @property def projection(self): """Get the projection.""" - attr = self['/attr/Geographic_Projection'] + attr = self["/attr/Geographic_Projection"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)) attr = attr.lower() - if attr != 'geos': + if attr != "geos": raise NotImplementedError("Only the GEOS projection is supported.\ This is:", attr) return attr @@ -148,7 +148,7 @@ def projection(self): @property def zone(self): """Get the zone.""" - attr = self['/attr/Zone'] + attr = self["/attr/Zone"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)).lower() return attr @@ -156,7 +156,7 @@ def zone(self): @property def res(self): """Get the resolution.""" - attr = self['/attr/Nadir_Pixel_Size'] + attr = self["/attr/Nadir_Pixel_Size"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)).lower() return float(attr) @@ -164,7 +164,7 @@ def res(self): @property def end_time(self): """Get the end time.""" - attr = self['/attr/End_Acquisition_Date'] + attr = self["/attr/End_Acquisition_Date"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)) # In some versions milliseconds are present, sometimes not. @@ -177,7 +177,7 @@ def end_time(self): @property def start_time(self): """Get the start time.""" - attr = self['/attr/Beginning_Acquisition_Date'] + attr = self["/attr/Beginning_Acquisition_Date"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)) # In some versions milliseconds are present, sometimes not. 
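The comment closing the hunk above notes that milliseconds are sometimes present in the acquisition date attribute and sometimes not. A small, self-contained way to handle both variants (the format strings here are assumptions for illustration, not necessarily the exact ones used by the ICARE files) could be:

    from datetime import datetime

    def parse_acquisition_date(value: str) -> datetime:
        # Try the millisecond variant first, then fall back to whole seconds.
        # Both format strings are illustrative assumptions.
        for fmt in ("%Y-%m-%dT%H:%M:%S.%fZ", "%Y-%m-%dT%H:%M:%SZ"):
            try:
                return datetime.strptime(value, fmt)
            except ValueError:
                continue
        raise ValueError("Unrecognised acquisition date: {!r}".format(value))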
@@ -190,7 +190,7 @@ def start_time(self): @property def alt(self): """Get the altitude.""" - attr = self['/attr/Altitude'] + attr = self["/attr/Altitude"] if isinstance(attr, np.ndarray): attr = attr.astype(str) attr = float(attr) @@ -201,7 +201,7 @@ def alt(self): @property def geoloc(self): """Get the geolocation.""" - attr = self['/attr/Geolocation'] + attr = self["/attr/Geolocation"] if isinstance(attr, np.ndarray): attr = attr.astype(str) cfac = float(attr[0]) @@ -217,32 +217,32 @@ def get_metadata(self, data, ds_info): mda.update(ds_info) geoloc = self.geoloc mda.update({ - 'start_time': self.start_time, - 'end_time': self.end_time, - 'platform_name': self.sensor_name[0], - 'sensor': self.sensor_name[1], - 'zone': self.zone, - 'projection_altitude': self.alt, - 'cfac': geoloc[0], - 'lfac': geoloc[1], - 'coff': geoloc[2], - 'loff': geoloc[3], - 'resolution': self.res, - 'satellite_actual_longitude': self.satlon, - 'projection_longitude': self.projlon, - 'projection_type': self.projection + "start_time": self.start_time, + "end_time": self.end_time, + "platform_name": self.sensor_name[0], + "sensor": self.sensor_name[1], + "zone": self.zone, + "projection_altitude": self.alt, + "cfac": geoloc[0], + "lfac": geoloc[1], + "coff": geoloc[2], + "loff": geoloc[3], + "resolution": self.res, + "satellite_actual_longitude": self.satlon, + "projection_longitude": self.projlon, + "projection_type": self.projection }) return mda def _get_dsname(self, ds_id): """Return the correct dataset name based on requested band.""" - if ds_id['name'] in self.ref_bands: - ds_get_name = 'Normalized_Radiance' - elif ds_id['name'] in self.bt_bands: - ds_get_name = 'Brightness_Temperature' + if ds_id["name"] in self.ref_bands: + ds_get_name = "Normalized_Radiance" + elif ds_id["name"] in self.bt_bands: + ds_get_name = "Brightness_Temperature" else: - raise NameError("Datset type "+ds_id['name']+" is not supported.") + raise NameError("Datset type "+ds_id["name"]+" is not supported.") return ds_get_name def get_dataset(self, ds_id, ds_info): @@ -250,50 +250,50 @@ def get_dataset(self, ds_id, ds_info): ds_get_name = self._get_dsname(ds_id) data = self[ds_get_name] data.attrs = self.get_metadata(data, ds_info) - fill = data.attrs.pop('_FillValue') - offset = data.attrs.get('add_offset') - scale_factor = data.attrs.get('scale_factor') + fill = data.attrs.pop("_FillValue") + offset = data.attrs.get("add_offset") + scale_factor = data.attrs.get("scale_factor") data = data.where(data != fill) data = data.astype(np.float32) if scale_factor is not None and offset is not None: data = data * scale_factor data = data + offset # Now we correct range from 0-1 to 0-100 for VIS: - if ds_id['name'] in self.ref_bands: + if ds_id["name"] in self.ref_bands: data = data * 100. return data def get_area_def(self, ds_id): """Get the area def.""" ds_get_name = self._get_dsname(ds_id) - ds_shape = self[ds_get_name + '/shape'] + ds_shape = self[ds_get_name + "/shape"] geoloc = self.geoloc pdict = {} - pdict['cfac'] = np.int32(geoloc[0]) - pdict['lfac'] = np.int32(geoloc[1]) - pdict['coff'] = np.float32(geoloc[2]) - pdict['loff'] = -np.float32(geoloc[3]) + pdict["cfac"] = np.int32(geoloc[0]) + pdict["lfac"] = np.int32(geoloc[1]) + pdict["coff"] = np.float32(geoloc[2]) + pdict["loff"] = -np.float32(geoloc[3]) # Unfortunately this dataset does not store a, b or h. 
# We assume a and b here, and calculate h from altitude # a and b are from SEVIRI data HRIT header (201912101300) - pdict['a'] = 6378169 - pdict['b'] = 6356583.8 - pdict['h'] = self.alt - pdict['a'] - pdict['ssp_lon'] = self.projlon - pdict['ncols'] = int(ds_shape[0]) - pdict['nlines'] = int(ds_shape[1]) + pdict["a"] = 6378169 + pdict["b"] = 6356583.8 + pdict["h"] = self.alt - pdict["a"] + pdict["ssp_lon"] = self.projlon + pdict["ncols"] = int(ds_shape[0]) + pdict["nlines"] = int(ds_shape[1]) # Force scandir to SEVIRI default, not known from file - pdict['scandir'] = 'S2N' - pdict['a_name'] = 'geosmsg' - if ds_id['name'] == 'HRV': - pdict['a_desc'] = 'MSG/SEVIRI HRV channel area' - pdict['p_id'] = 'msg_hires' + pdict["scandir"] = "S2N" + pdict["a_name"] = "geosmsg" + if ds_id["name"] == "HRV": + pdict["a_desc"] = "MSG/SEVIRI HRV channel area" + pdict["p_id"] = "msg_hires" else: - pdict['a_desc'] = 'MSG/SEVIRI low resolution channel area' - pdict['p_id'] = 'msg_lowres' + pdict["a_desc"] = "MSG/SEVIRI low resolution channel area" + pdict["p_id"] = "msg_lowres" aex = get_area_extent(pdict) area = get_area_definition(pdict, aex) diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index cdad865f0c..81f2d01300 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -140,9 +140,9 @@ from satpy.readers.utils import reduce_mda from satpy.utils import get_legacy_chunk_size -logger = logging.getLogger('native_msg') +logger = logging.getLogger("native_msg") CHUNK_SIZE = get_legacy_chunk_size() -ASCII_STARTSWITH = b'FormatName : NATIVE' +ASCII_STARTSWITH = b"FormatName : NATIVE" class NativeMSGFileHandler(BaseFileHandler): @@ -170,7 +170,7 @@ class NativeMSGFileHandler(BaseFileHandler): """ def __init__(self, filename, filename_info, filetype_info, - calib_mode='nominal', fill_disk=False, ext_calib_coefs=None, + calib_mode="nominal", fill_disk=False, ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=100): """Initialize the reader.""" super(NativeMSGFileHandler, self).__init__(filename, @@ -199,33 +199,33 @@ def __init__(self, filename, filename_info, filetype_info, @property def _repeat_cycle_duration(self): """Get repeat cycle duration from the trailer.""" - if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: + if self.trailer["15TRAILER"]["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] == 1: return 5 return REPEAT_CYCLE_DURATION @property def nominal_start_time(self): """Get the repeat cycle nominal start time from file header and round it to expected nominal time slot.""" - tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['TrueRepeatCycleStart'] + tm = self.header["15_DATA_HEADER"]["ImageAcquisition"]["PlannedAcquisitionTime"]["TrueRepeatCycleStart"] return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): """Get the repeat cycle nominal end time from file header and round it to expected nominal time slot.""" - tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] + tm = self.header["15_DATA_HEADER"]["ImageAcquisition"]["PlannedAcquisitionTime"]["PlannedRepeatCycleEnd"] return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): """Get observation start time from trailer.""" - return self.trailer['15TRAILER']['ImageProductionStats'][ - 
'ActualScanningSummary']['ForwardScanStart'] + return self.trailer["15TRAILER"]["ImageProductionStats"][ + "ActualScanningSummary"]["ForwardScanStart"] @property def observation_end_time(self): """Get observation end time from trailer.""" - return self.trailer['15TRAILER']['ImageProductionStats'][ - 'ActualScanningSummary']['ForwardScanEnd'] + return self.trailer["15TRAILER"]["ImageProductionStats"][ + "ActualScanningSummary"]["ForwardScanEnd"] @property def start_time(self): @@ -240,8 +240,8 @@ def end_time(self): def _get_data_dtype(self): """Get the dtype of the file based on the actual available channels.""" pkhrec = [ - ('GP_PK_HEADER', GSDTRecords.gp_pk_header), - ('GP_PK_SH1', GSDTRecords.gp_pk_sh1) + ("GP_PK_HEADER", GSDTRecords.gp_pk_header), + ("GP_PK_SH1", GSDTRecords.gp_pk_sh1) ] pk_head_dtype = np.dtype(pkhrec) @@ -264,14 +264,14 @@ def get_lrec(cols): # each pixel is 10-bits -> one line of data has 25% more bytes # than the number of columns suggest (10/8 = 1.25) - visir_rec = get_lrec(int(self.mda['number_of_columns'] * 1.25)) + visir_rec = get_lrec(int(self.mda["number_of_columns"] * 1.25)) number_of_visir_channels = len( - [s for s in self.mda['channel_list'] if not s == 'HRV']) - drec = [('visir', (visir_rec, number_of_visir_channels))] + [s for s in self.mda["channel_list"] if not s == "HRV"]) + drec = [("visir", (visir_rec, number_of_visir_channels))] - if self.mda['available_channels']['HRV']: - hrv_rec = get_lrec(int(self.mda['hrv_number_of_columns'] * 1.25)) - drec.append(('hrv', (hrv_rec, 3))) + if self.mda["available_channels"]["HRV"]: + hrv_rec = get_lrec(int(self.mda["hrv_number_of_columns"] * 1.25)) + drec.append(("hrv", (hrv_rec, 3))) return np.dtype(drec) @@ -282,51 +282,51 @@ def _get_memmap(self): hdr_size = self.header_type.itemsize return np.memmap(fp, dtype=data_dtype, - shape=(self.mda['number_of_lines'],), + shape=(self.mda["number_of_lines"],), offset=hdr_size, mode="r") def _read_header(self): """Read the header info.""" self.header.update(read_header(self.filename)) - if '15_SECONDARY_PRODUCT_HEADER' not in self.header: + if "15_SECONDARY_PRODUCT_HEADER" not in self.header: # No archive header, that means we have a complete file # including all channels. 
- self.header['15_SECONDARY_PRODUCT_HEADER'] = DEFAULT_15_SECONDARY_PRODUCT_HEADER + self.header["15_SECONDARY_PRODUCT_HEADER"] = DEFAULT_15_SECONDARY_PRODUCT_HEADER - data15hd = self.header['15_DATA_HEADER'] - sec15hd = self.header['15_SECONDARY_PRODUCT_HEADER'] + data15hd = self.header["15_DATA_HEADER"] + sec15hd = self.header["15_SECONDARY_PRODUCT_HEADER"] # Set the list of available channels: - self.mda['available_channels'] = get_available_channels(self.header) - self.mda['channel_list'] = [i for i in CHANNEL_NAMES.values() - if self.mda['available_channels'][i]] + self.mda["available_channels"] = get_available_channels(self.header) + self.mda["channel_list"] = [i for i in CHANNEL_NAMES.values() + if self.mda["available_channels"][i]] self.platform_id = data15hd[ - 'SatelliteStatus']['SatelliteDefinition']['SatelliteId'] - self.mda['platform_name'] = "Meteosat-" + SATNUM[self.platform_id] - self.mda['offset_corrected'] = data15hd['GeometricProcessing'][ - 'EarthModel']['TypeOfEarthModel'] == 2 + "SatelliteStatus"]["SatelliteDefinition"]["SatelliteId"] + self.mda["platform_name"] = "Meteosat-" + SATNUM[self.platform_id] + self.mda["offset_corrected"] = data15hd["GeometricProcessing"][ + "EarthModel"]["TypeOfEarthModel"] == 2 - equator_radius = data15hd['GeometricProcessing'][ - 'EarthModel']['EquatorialRadius'] * 1000. + equator_radius = data15hd["GeometricProcessing"][ + "EarthModel"]["EquatorialRadius"] * 1000. north_polar_radius = data15hd[ - 'GeometricProcessing']['EarthModel']['NorthPolarRadius'] * 1000. + "GeometricProcessing"]["EarthModel"]["NorthPolarRadius"] * 1000. south_polar_radius = data15hd[ - 'GeometricProcessing']['EarthModel']['SouthPolarRadius'] * 1000. + "GeometricProcessing"]["EarthModel"]["SouthPolarRadius"] * 1000. polar_radius = (north_polar_radius + south_polar_radius) * 0.5 - ssp_lon = data15hd['ImageDescription'][ - 'ProjectionDescription']['LongitudeOfSSP'] + ssp_lon = data15hd["ImageDescription"][ + "ProjectionDescription"]["LongitudeOfSSP"] - self.mda['projection_parameters'] = {'a': equator_radius, - 'b': polar_radius, - 'h': 35785831.00, - 'ssp_longitude': ssp_lon} + self.mda["projection_parameters"] = {"a": equator_radius, + "b": polar_radius, + "h": 35785831.00, + "ssp_longitude": ssp_lon} - north = int(sec15hd['NorthLineSelectedRectangle']['Value']) - east = int(sec15hd['EastColumnSelectedRectangle']['Value']) - south = int(sec15hd['SouthLineSelectedRectangle']['Value']) - west = int(sec15hd['WestColumnSelectedRectangle']['Value']) + north = int(sec15hd["NorthLineSelectedRectangle"]["Value"]) + east = int(sec15hd["EastColumnSelectedRectangle"]["Value"]) + south = int(sec15hd["SouthLineSelectedRectangle"]["Value"]) + west = int(sec15hd["WestColumnSelectedRectangle"]["Value"]) ncolumns = west - east + 1 nrows = north - south + 1 @@ -335,9 +335,9 @@ def _read_header(self): # the maximum, if so it is a rapid scanning service # or region of interest file if (nrows < VISIR_NUM_LINES) or (ncolumns < VISIR_NUM_COLUMNS): - self.mda['is_full_disk'] = False + self.mda["is_full_disk"] = False else: - self.mda['is_full_disk'] = True + self.mda["is_full_disk"] = True # If the number of columns in the file is not divisible by 4, # UMARF will add extra columns to the file @@ -349,7 +349,7 @@ def _read_header(self): # Check the VISIR calculated column dimension against # the header information - cols_visir_hdr = int(sec15hd['NumberColumnsVISIR']['Value']) + cols_visir_hdr = int(sec15hd["NumberColumnsVISIR"]["Value"]) if cols_visir_hdr != cols_visir: logger.warning( 
"Number of VISIR columns from the header is incorrect!") @@ -358,21 +358,21 @@ def _read_header(self): # HRV Channel - check if the area is reduced in east west # direction as this affects the number of columns in the file - cols_hrv_hdr = int(sec15hd['NumberColumnsHRV']['Value']) + cols_hrv_hdr = int(sec15hd["NumberColumnsHRV"]["Value"]) if ncolumns < VISIR_NUM_COLUMNS: cols_hrv = cols_hrv_hdr else: cols_hrv = int(cols_hrv_hdr / 2) # self.mda represents the 16bit dimensions not 10bit - self.mda['number_of_lines'] = int(sec15hd['NumberLinesVISIR']['Value']) - self.mda['number_of_columns'] = cols_visir - self.mda['hrv_number_of_lines'] = int(sec15hd["NumberLinesHRV"]['Value']) - self.mda['hrv_number_of_columns'] = cols_hrv + self.mda["number_of_lines"] = int(sec15hd["NumberLinesVISIR"]["Value"]) + self.mda["number_of_columns"] = cols_visir + self.mda["hrv_number_of_lines"] = int(sec15hd["NumberLinesHRV"]["Value"]) + self.mda["hrv_number_of_columns"] = cols_hrv - if '15_MAIN_PRODUCT_HEADER' not in self.header: + if "15_MAIN_PRODUCT_HEADER" not in self.header: logger.info("Quality flag check was not possible due to missing 15_MAIN_PRODUCT_HEADER.") - elif self.header['15_MAIN_PRODUCT_HEADER']['QQOV']['Value'] == 'NOK': + elif self.header["15_MAIN_PRODUCT_HEADER"]["QQOV"]["Value"] == "NOK": warnings.warn( "The quality flag for this file indicates not OK. " "Use this data with caution!", @@ -384,7 +384,7 @@ def _read_trailer(self): hdr_size = self.header_type.itemsize data_size = (self._get_data_dtype().itemsize * - self.mda['number_of_lines']) + self.mda["number_of_lines"]) with open(self.filename) as fp: fp.seek(hdr_size + data_size) @@ -417,27 +417,27 @@ def get_area_def(self, dataset_id): """ pdict = dict() - pdict['a'] = self.mda['projection_parameters']['a'] - pdict['b'] = self.mda['projection_parameters']['b'] - pdict['h'] = self.mda['projection_parameters']['h'] - pdict['ssp_lon'] = self.mda['projection_parameters']['ssp_longitude'] - - area_naming_input_dict = {'platform_name': 'msg', - 'instrument_name': 'seviri', - 'resolution': int(dataset_id['resolution']) + pdict["a"] = self.mda["projection_parameters"]["a"] + pdict["b"] = self.mda["projection_parameters"]["b"] + pdict["h"] = self.mda["projection_parameters"]["h"] + pdict["ssp_lon"] = self.mda["projection_parameters"]["ssp_longitude"] + + area_naming_input_dict = {"platform_name": "msg", + "instrument_name": "seviri", + "resolution": int(dataset_id["resolution"]) } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('seviri', pdict['ssp_lon'])}) + **get_service_mode("seviri", pdict["ssp_lon"])}) - pdict['a_name'] = area_naming['area_id'] - pdict['a_desc'] = area_naming['description'] - pdict['p_id'] = "" + pdict["a_name"] = area_naming["area_id"] + pdict["a_desc"] = area_naming["description"] + pdict["p_id"] = "" area_extent = self.get_area_extent(dataset_id) areas = list() - for aex, nlines, ncolumns in zip(area_extent['area_extent'], area_extent['nlines'], area_extent['ncolumns']): - pdict['nlines'] = nlines - pdict['ncols'] = ncolumns + for aex, nlines, ncolumns in zip(area_extent["area_extent"], area_extent["nlines"], area_extent["ncolumns"]): + pdict["nlines"] = nlines + pdict["ncols"] = ncolumns areas.append(get_area_definition(pdict, aex)) if len(areas) == 2: @@ -459,80 +459,80 @@ def get_area_extent(self, dataset_id): of the area extent is documented in a `developer's memo `_. 
""" - data15hd = self.header['15_DATA_HEADER'] + data15hd = self.header["15_DATA_HEADER"] # check for Earth model as this affects the north-south and # west-east offsets # section 3.1.4.2 of MSG Level 1.5 Image Data Format Description - earth_model = data15hd['GeometricProcessing']['EarthModel'][ - 'TypeOfEarthModel'] + earth_model = data15hd["GeometricProcessing"]["EarthModel"][ + "TypeOfEarthModel"] if earth_model == 2: ns_offset = 0 we_offset = 0 elif earth_model == 1: ns_offset = -0.5 we_offset = 0.5 - if dataset_id['name'] == 'HRV': + if dataset_id["name"] == "HRV": ns_offset = -1.5 we_offset = 1.5 else: raise NotImplementedError( - 'Unrecognised Earth model: {}'.format(earth_model) + "Unrecognised Earth model: {}".format(earth_model) ) - if dataset_id['name'] == 'HRV': - grid_origin = data15hd['ImageDescription']['ReferenceGridHRV']['GridOrigin'] + if dataset_id["name"] == "HRV": + grid_origin = data15hd["ImageDescription"]["ReferenceGridHRV"]["GridOrigin"] center_point = (HRV_NUM_COLUMNS / 2) - 2 - column_step = data15hd['ImageDescription']['ReferenceGridHRV']['ColumnDirGridStep'] * 1000.0 - line_step = data15hd['ImageDescription']['ReferenceGridHRV']['LineDirGridStep'] * 1000.0 + column_step = data15hd["ImageDescription"]["ReferenceGridHRV"]["ColumnDirGridStep"] * 1000.0 + line_step = data15hd["ImageDescription"]["ReferenceGridHRV"]["LineDirGridStep"] * 1000.0 nlines_fulldisk = HRV_NUM_LINES ncolumns_fulldisk = HRV_NUM_COLUMNS else: - grid_origin = data15hd['ImageDescription']['ReferenceGridVIS_IR']['GridOrigin'] + grid_origin = data15hd["ImageDescription"]["ReferenceGridVIS_IR"]["GridOrigin"] center_point = VISIR_NUM_COLUMNS / 2 - column_step = data15hd['ImageDescription']['ReferenceGridVIS_IR']['ColumnDirGridStep'] * 1000.0 - line_step = data15hd['ImageDescription']['ReferenceGridVIS_IR']['LineDirGridStep'] * 1000.0 + column_step = data15hd["ImageDescription"]["ReferenceGridVIS_IR"]["ColumnDirGridStep"] * 1000.0 + line_step = data15hd["ImageDescription"]["ReferenceGridVIS_IR"]["LineDirGridStep"] * 1000.0 nlines_fulldisk = VISIR_NUM_LINES ncolumns_fulldisk = VISIR_NUM_COLUMNS # Calculations assume grid origin is south-east corner # section 7.2.4 of MSG Level 1.5 Image Data Format Description - origins = {0: 'NW', 1: 'SW', 2: 'SE', 3: 'NE'} + origins = {0: "NW", 1: "SW", 2: "SE", 3: "NE"} if grid_origin != 2: - msg = 'Grid origin not supported number: {}, {} corner'.format( + msg = "Grid origin not supported number: {}, {} corner".format( grid_origin, origins[grid_origin] ) raise NotImplementedError(msg) - aex_data = {'area_extent': [], 'nlines': [], 'ncolumns': []} + aex_data = {"area_extent": [], "nlines": [], "ncolumns": []} img_bounds = self.image_boundaries.get_img_bounds(dataset_id, self.is_roi()) for south_bound, north_bound, east_bound, west_bound in zip(*img_bounds.values()): if self.fill_disk: east_bound, west_bound = 1, ncolumns_fulldisk - if not self.mda['is_full_disk']: + if not self.mda["is_full_disk"]: south_bound, north_bound = 1, nlines_fulldisk nlines = north_bound - south_bound + 1 ncolumns = west_bound - east_bound + 1 - area_dict = {'center_point': center_point, - 'east': east_bound, - 'west': west_bound, - 'south': south_bound, - 'north': north_bound, - 'column_step': column_step, - 'line_step': line_step, - 'column_offset': we_offset, - 'line_offset': ns_offset + area_dict = {"center_point": center_point, + "east": east_bound, + "west": west_bound, + "south": south_bound, + "north": north_bound, + "column_step": column_step, + "line_step": line_step, + 
"column_offset": we_offset, + "line_offset": ns_offset } aex = calculate_area_extent(area_dict) - aex_data['area_extent'].append(aex) - aex_data['nlines'].append(nlines) - aex_data['ncolumns'].append(ncolumns) + aex_data["area_extent"].append(aex) + aex_data["nlines"].append(nlines) + aex_data["ncolumns"].append(ncolumns) return aex_data @@ -543,28 +543,28 @@ def is_roi(self): of the SEVIRI disk. Hence, if the data does not cover the full disk, nor the standard RSS region in RSS mode, it's assumed to be ROI data. """ - is_rapid_scan = self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] + is_rapid_scan = self.trailer["15TRAILER"]["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] # Standard RSS data is assumed to cover the three northmost segments, thus consisting of all 3712 columns and # the 1392 northmost lines - nlines = int(self.mda['number_of_lines']) - ncolumns = int(self.mda['number_of_columns']) - north_bound = int(self.header['15_SECONDARY_PRODUCT_HEADER']['NorthLineSelectedRectangle']['Value']) + nlines = int(self.mda["number_of_lines"]) + ncolumns = int(self.mda["number_of_columns"]) + north_bound = int(self.header["15_SECONDARY_PRODUCT_HEADER"]["NorthLineSelectedRectangle"]["Value"]) is_top3segments = (ncolumns == VISIR_NUM_COLUMNS and nlines == 1392 and north_bound == VISIR_NUM_LINES) - return not self.mda['is_full_disk'] and not (is_rapid_scan and is_top3segments) + return not self.mda["is_full_disk"] and not (is_rapid_scan and is_top3segments) def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" - if dataset_id['name'] not in self.mda['channel_list']: - raise KeyError('Channel % s not available in the file' % dataset_id['name']) - elif dataset_id['name'] not in ['HRV']: + if dataset_id["name"] not in self.mda["channel_list"]: + raise KeyError("Channel % s not available in the file" % dataset_id["name"]) + elif dataset_id["name"] not in ["HRV"]: data = self._get_visir_channel(dataset_id) else: data = self._get_hrv_channel() - xarr = xr.DataArray(data, dims=['y', 'x']).where(data != 0).astype(np.float32) + xarr = xr.DataArray(data, dims=["y", "x"]).where(data != 0).astype(np.float32) if xarr is None: return None @@ -573,34 +573,34 @@ def get_dataset(self, dataset_id, dataset_info): self._add_scanline_acq_time(dataset, dataset_id) self._update_attrs(dataset, dataset_info) - if self.fill_disk and not (dataset_id['name'] != 'HRV' and self.mda['is_full_disk']): + if self.fill_disk and not (dataset_id["name"] != "HRV" and self.mda["is_full_disk"]): padder = Padder(dataset_id, self.image_boundaries.get_img_bounds(dataset_id, self.is_roi()), - self.mda['is_full_disk']) + self.mda["is_full_disk"]) dataset = padder.pad_data(dataset) return dataset def _get_visir_channel(self, dataset_id): - shape = (self.mda['number_of_lines'], self.mda['number_of_columns']) + shape = (self.mda["number_of_lines"], self.mda["number_of_columns"]) # Check if there is only 1 channel in the list as a change # is needed in the array assignment ie channel id is not present - if len(self.mda['channel_list']) == 1: - raw = self.dask_array['visir']['line_data'] + if len(self.mda["channel_list"]) == 1: + raw = self.dask_array["visir"]["line_data"] else: - i = self.mda['channel_list'].index(dataset_id['name']) - raw = self.dask_array['visir']['line_data'][:, i, :] + i = self.mda["channel_list"].index(dataset_id["name"]) + raw = self.dask_array["visir"]["line_data"][:, i, :] data = dec10216(raw.flatten()) data = data.reshape(shape) return data 
def _get_hrv_channel(self): - shape = (self.mda['hrv_number_of_lines'], self.mda['hrv_number_of_columns']) - shape_layer = (self.mda['number_of_lines'], self.mda['hrv_number_of_columns']) + shape = (self.mda["hrv_number_of_lines"], self.mda["hrv_number_of_columns"]) + shape_layer = (self.mda["number_of_lines"], self.mda["hrv_number_of_columns"]) data_list = [] for i in range(3): - raw = self.dask_array['hrv']['line_data'][:, i, :] + raw = self.dask_array["hrv"]["line_data"][:, i, :] data = dec10216(raw.flatten()) data = data.reshape(shape_layer) data_list.append(data) @@ -610,7 +610,7 @@ def _get_hrv_channel(self): def calibrate(self, data, dataset_id): """Calibrate the data.""" tic = datetime.now() - channel_name = dataset_id['name'] + channel_name = dataset_id["name"] calib = SEVIRICalibrationHandler( platform_id=self.platform_id, channel_name=channel_name, @@ -618,7 +618,7 @@ def calibrate(self, data, dataset_id): calib_mode=self.calib_mode, scan_time=self.observation_start_time ) - res = calib.calibrate(data, dataset_id['calibration']) + res = calib.calibrate(data, dataset_id["calibration"]) logger.debug("Calibration time " + str(datetime.now() - tic)) return res @@ -629,20 +629,20 @@ def _get_calib_coefs(self, channel_name): # hence, this channel index needs to refer to full channel list band_idx = list(CHANNEL_NAMES.values()).index(channel_name) - coefs_nominal = self.header['15_DATA_HEADER'][ - 'RadiometricProcessing']['Level15ImageCalibration'] - coefs_gsics = self.header['15_DATA_HEADER'][ - 'RadiometricProcessing']['MPEFCalFeedback'] - radiance_types = self.header['15_DATA_HEADER']['ImageDescription'][ - 'Level15ImageProduction']['PlannedChanProcessing'] + coefs_nominal = self.header["15_DATA_HEADER"][ + "RadiometricProcessing"]["Level15ImageCalibration"] + coefs_gsics = self.header["15_DATA_HEADER"][ + "RadiometricProcessing"]["MPEFCalFeedback"] + radiance_types = self.header["15_DATA_HEADER"]["ImageDescription"][ + "Level15ImageProduction"]["PlannedChanProcessing"] return create_coef_dict( coefs_nominal=( - coefs_nominal['CalSlope'][band_idx], - coefs_nominal['CalOffset'][band_idx] + coefs_nominal["CalSlope"][band_idx], + coefs_nominal["CalOffset"][band_idx] ), coefs_gsics=( - coefs_gsics['GSICSCalCoeff'][band_idx], - coefs_gsics['GSICSOffsetCount'][band_idx] + coefs_gsics["GSICSCalCoeff"][band_idx], + coefs_gsics["GSICSOffsetCount"][band_idx] ), ext_coefs=self.ext_calib_coefs.get(channel_name, {}), radiance_type=radiance_types[band_idx] @@ -650,69 +650,69 @@ def _get_calib_coefs(self, channel_name): def _add_scanline_acq_time(self, dataset, dataset_id): """Add scanline acquisition time to the given dataset.""" - if dataset_id['name'] == 'HRV': + if dataset_id["name"] == "HRV": tline = self._get_acq_time_hrv() else: tline = self._get_acq_time_visir(dataset_id) - acq_time = get_cds_time(days=tline['Days'], msecs=tline['Milliseconds']) + acq_time = get_cds_time(days=tline["Days"], msecs=tline["Milliseconds"]) add_scanline_acq_time(dataset, acq_time) def _get_acq_time_hrv(self): """Get raw acquisition time for HRV channel.""" - tline = self.dask_array['hrv']['acq_time'] + tline = self.dask_array["hrv"]["acq_time"] tline0 = tline[:, 0] tline1 = tline[:, 1] tline2 = tline[:, 2] return da.stack((tline0, tline1, tline2), axis=1).reshape( - self.mda['hrv_number_of_lines']).compute() + self.mda["hrv_number_of_lines"]).compute() def _get_acq_time_visir(self, dataset_id): """Get raw acquisition time for VIS/IR channels.""" # Check if there is only 1 channel in the list as a change # is 
needed in the array assignment, i.e. channel id is not present - if len(self.mda['channel_list']) == 1: - return self.dask_array['visir']['acq_time'].compute() - i = self.mda['channel_list'].index(dataset_id['name']) - return self.dask_array['visir']['acq_time'][:, i].compute() + if len(self.mda["channel_list"]) == 1: + return self.dask_array["visir"]["acq_time"].compute() + i = self.mda["channel_list"].index(dataset_id["name"]) + return self.dask_array["visir"]["acq_time"][:, i].compute() def _update_attrs(self, dataset, dataset_info): """Update dataset attributes.""" - dataset.attrs['units'] = dataset_info['units'] - dataset.attrs['wavelength'] = dataset_info['wavelength'] - dataset.attrs['standard_name'] = dataset_info['standard_name'] - dataset.attrs['platform_name'] = self.mda['platform_name'] - dataset.attrs['sensor'] = 'seviri' - dataset.attrs['georef_offset_corrected'] = self.mda[ - 'offset_corrected'] - dataset.attrs['time_parameters'] = { - 'nominal_start_time': self.nominal_start_time, - 'nominal_end_time': self.nominal_end_time, - 'observation_start_time': self.observation_start_time, - 'observation_end_time': self.observation_end_time, + dataset.attrs["units"] = dataset_info["units"] + dataset.attrs["wavelength"] = dataset_info["wavelength"] + dataset.attrs["standard_name"] = dataset_info["standard_name"] + dataset.attrs["platform_name"] = self.mda["platform_name"] + dataset.attrs["sensor"] = "seviri" + dataset.attrs["georef_offset_corrected"] = self.mda[ + "offset_corrected"] + dataset.attrs["time_parameters"] = { + "nominal_start_time": self.nominal_start_time, + "nominal_end_time": self.nominal_end_time, + "observation_start_time": self.observation_start_time, + "observation_end_time": self.observation_end_time, } - dataset.attrs['orbital_parameters'] = self._get_orbital_parameters() + dataset.attrs["orbital_parameters"] = self._get_orbital_parameters() if self.include_raw_metadata: - dataset.attrs['raw_metadata'] = reduce_mda( + dataset.attrs["raw_metadata"] = reduce_mda( self.header, max_size=self.mda_max_array_size ) def _get_orbital_parameters(self): orbital_parameters = { - 'projection_longitude': self.mda['projection_parameters'][ - 'ssp_longitude'], - 'projection_latitude': 0., - 'projection_altitude': self.mda['projection_parameters']['h'], - 'satellite_nominal_longitude': self.header['15_DATA_HEADER'][ - 'SatelliteStatus']['SatelliteDefinition'][ - 'NominalLongitude'], - 'satellite_nominal_latitude': 0.0 + "projection_longitude": self.mda["projection_parameters"][ + "ssp_longitude"], + "projection_latitude": 0., + "projection_altitude": self.mda["projection_parameters"]["h"], + "satellite_nominal_longitude": self.header["15_DATA_HEADER"][ + "SatelliteStatus"]["SatelliteDefinition"][ + "NominalLongitude"], + "satellite_nominal_latitude": 0.0 } try: actual_lon, actual_lat, actual_alt = self.satpos orbital_parameters.update({ - 'satellite_actual_longitude': actual_lon, - 'satellite_actual_latitude': actual_lat, - 'satellite_actual_altitude': actual_alt + "satellite_actual_longitude": actual_lon, + "satellite_actual_latitude": actual_lat, + "satellite_actual_altitude": actual_alt }) except NoValidOrbitParams as err: logger.warning(err) @@ -726,14 +726,14 @@ def satpos(self): Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ - poly_finder = OrbitPolynomialFinder(self.header['15_DATA_HEADER'][ - 'SatelliteStatus']['Orbit']['OrbitPolynomial']) + poly_finder = OrbitPolynomialFinder(self.header["15_DATA_HEADER"][ + 
"SatelliteStatus"]["Orbit"]["OrbitPolynomial"]) orbit_polynomial = poly_finder.get_orbit_polynomial(self.start_time) return get_satpos( orbit_polynomial=orbit_polynomial, time=self.observation_start_time, - semi_major_axis=self.mda['projection_parameters']['a'], - semi_minor_axis=self.mda['projection_parameters']['b'] + semi_major_axis=self.mda["projection_parameters"]["a"], + semi_minor_axis=self.mda["projection_parameters"]["b"] ) @@ -755,7 +755,7 @@ def get_img_bounds(self, dataset_id, is_roi): Lists (rather than scalars) are returned since the HRV data in FES mode contain data from two windows/areas. """ - if dataset_id['name'] == 'HRV' and not is_roi: + if dataset_id["name"] == "HRV" and not is_roi: img_bounds = self._get_hrv_actual_img_bounds() else: img_bounds = self._get_selected_img_bounds(dataset_id) @@ -766,28 +766,28 @@ def get_img_bounds(self, dataset_id, is_roi): def _get_hrv_actual_img_bounds(self): """Get HRV (if not ROI) image boundaries from the ActualL15CoverageHRV information stored in the trailer.""" - hrv_bounds = self._trailer['15TRAILER']['ImageProductionStats']['ActualL15CoverageHRV'] + hrv_bounds = self._trailer["15TRAILER"]["ImageProductionStats"]["ActualL15CoverageHRV"] - img_bounds = {'south_bound': [], 'north_bound': [], 'east_bound': [], 'west_bound': []} - for hrv_window in ['Lower', 'Upper']: - img_bounds['south_bound'].append(hrv_bounds['%sSouthLineActual' % hrv_window]) - img_bounds['north_bound'].append(hrv_bounds['%sNorthLineActual' % hrv_window]) - img_bounds['east_bound'].append(hrv_bounds['%sEastColumnActual' % hrv_window]) - img_bounds['west_bound'].append(hrv_bounds['%sWestColumnActual' % hrv_window]) + img_bounds = {"south_bound": [], "north_bound": [], "east_bound": [], "west_bound": []} + for hrv_window in ["Lower", "Upper"]: + img_bounds["south_bound"].append(hrv_bounds["%sSouthLineActual" % hrv_window]) + img_bounds["north_bound"].append(hrv_bounds["%sNorthLineActual" % hrv_window]) + img_bounds["east_bound"].append(hrv_bounds["%sEastColumnActual" % hrv_window]) + img_bounds["west_bound"].append(hrv_bounds["%sWestColumnActual" % hrv_window]) # Data from the upper hrv window are only available in FES mode - if not self._mda['is_full_disk']: + if not self._mda["is_full_disk"]: break return img_bounds def _get_selected_img_bounds(self, dataset_id): """Get VISIR and HRV (if ROI) image boundaries from the SelectedRectangle information stored in the header.""" - sec15hd = self._header['15_SECONDARY_PRODUCT_HEADER'] - south_bound = int(sec15hd['SouthLineSelectedRectangle']['Value']) - east_bound = int(sec15hd['EastColumnSelectedRectangle']['Value']) + sec15hd = self._header["15_SECONDARY_PRODUCT_HEADER"] + south_bound = int(sec15hd["SouthLineSelectedRectangle"]["Value"]) + east_bound = int(sec15hd["EastColumnSelectedRectangle"]["Value"]) - if dataset_id['name'] == 'HRV': + if dataset_id["name"] == "HRV": nlines, ncolumns = self._get_hrv_img_shape() south_bound = self._convert_visir_bound_to_hrv(south_bound) east_bound = self._convert_visir_bound_to_hrv(east_bound) @@ -797,19 +797,19 @@ def _get_selected_img_bounds(self, dataset_id): north_bound = south_bound + nlines - 1 west_bound = east_bound + ncolumns - 1 - img_bounds = {'south_bound': [south_bound], 'north_bound': [north_bound], - 'east_bound': [east_bound], 'west_bound': [west_bound]} + img_bounds = {"south_bound": [south_bound], "north_bound": [north_bound], + "east_bound": [east_bound], "west_bound": [west_bound]} return img_bounds def _get_hrv_img_shape(self): - nlines = 
int(self._mda['hrv_number_of_lines']) - ncolumns = int(self._mda['hrv_number_of_columns']) + nlines = int(self._mda["hrv_number_of_lines"]) + ncolumns = int(self._mda["hrv_number_of_columns"]) return nlines, ncolumns def _get_visir_img_shape(self): - nlines = int(self._mda['number_of_lines']) - ncolumns = int(self._mda['number_of_columns']) + nlines = int(self._mda["number_of_lines"]) + ncolumns = int(self._mda["number_of_columns"]) return nlines, ncolumns @staticmethod @@ -824,7 +824,7 @@ def _check_for_valid_bounds(img_bounds): no_empty = (min(len_img_bounds) > 0) if not (same_lengths and no_empty): - raise ValueError('Invalid image boundaries') + raise ValueError("Invalid image boundaries") class Padder: @@ -835,14 +835,14 @@ def __init__(self, dataset_id, img_bounds, is_full_disk): self._img_bounds = img_bounds self._is_full_disk = is_full_disk - if dataset_id['name'] == 'HRV': + if dataset_id["name"] == "HRV": self._final_shape = (HRV_NUM_LINES, HRV_NUM_COLUMNS) else: self._final_shape = (VISIR_NUM_LINES, VISIR_NUM_COLUMNS) def pad_data(self, dataset): """Pad data to full disk with empty pixels.""" - logger.debug('Padding data to full disk') + logger.debug("Padding data to full disk") data_list = [] for south_bound, north_bound, east_bound, west_bound in zip(*self._img_bounds.values()): @@ -857,7 +857,7 @@ def pad_data(self, dataset): if not self._is_full_disk: padded_data = pad_data_vertically(padded_data, self._final_shape, south_bound, north_bound) - return xr.DataArray(padded_data, dims=('y', 'x'), attrs=dataset.attrs.copy()) + return xr.DataArray(padded_data, dims=("y", "x"), attrs=dataset.attrs.copy()) def _extract_data_to_pad(self, dataset, south_bound, north_bound): """Extract the data that shall be padded. @@ -875,19 +875,19 @@ def _extract_data_to_pad(self, dataset, south_bound, north_bound): def get_available_channels(header): """Get the available channels from the header information.""" - channels_str = header['15_SECONDARY_PRODUCT_HEADER'][ - 'SelectedBandIDs']['Value'] + channels_str = header["15_SECONDARY_PRODUCT_HEADER"][ + "SelectedBandIDs"]["Value"] available_channels = {} for idx, char in zip(range(12), channels_str): - available_channels[CHANNEL_NAMES[idx + 1]] = (char == 'X') + available_channels[CHANNEL_NAMES[idx + 1]] = (char == "X") return available_channels def has_archive_header(filename): """Check whether the file includes an ASCII archive header.""" - with open(filename, mode='rb') as istream: + with open(filename, mode="rb") as istream: return istream.read(36) == ASCII_STARTSWITH diff --git a/satpy/readers/seviri_l1b_native_hdr.py b/satpy/readers/seviri_l1b_native_hdr.py index 8c0212a6f2..56c5c0c3c9 100644 --- a/satpy/readers/seviri_l1b_native_hdr.py +++ b/satpy/readers/seviri_l1b_native_hdr.py @@ -39,37 +39,37 @@ class GSDTRecords(object): # 4 bytes gp_cpu_address = [ - ('Qualifier_1', np.uint8), - ('Qualifier_2', np.uint8), - ('Qualifier_3', np.uint8), - ('Qualifier_4', np.uint8) + ("Qualifier_1", np.uint8), + ("Qualifier_2", np.uint8), + ("Qualifier_3", np.uint8), + ("Qualifier_4", np.uint8) ] # 22 bytes gp_pk_header = [ - ('HeaderVersionNo', np.uint8), - ('PacketType', np.uint8), - ('SubHeaderType', np.uint8), - ('SourceFacilityId', gp_fac_id), - ('SourceEnvId', gp_fac_env), - ('SourceInstanceId', np.uint8), - ('SourceSUId', gp_su_id), - ('SourceCPUId', gp_cpu_address), - ('DestFacilityId', gp_fac_id), - ('DestEnvId', gp_fac_env), - ('SequenceCount', np.uint16), - ('PacketLength', np.int32) + ("HeaderVersionNo", np.uint8), + ("PacketType", np.uint8), + 
("SubHeaderType", np.uint8), + ("SourceFacilityId", gp_fac_id), + ("SourceEnvId", gp_fac_env), + ("SourceInstanceId", np.uint8), + ("SourceSUId", gp_su_id), + ("SourceCPUId", gp_cpu_address), + ("DestFacilityId", gp_fac_id), + ("DestEnvId", gp_fac_env), + ("SequenceCount", np.uint16), + ("PacketLength", np.int32) ] # 16 bytes gp_pk_sh1 = [ - ('SubHeaderVersionNo', np.uint8), - ('ChecksumFlag', bool), - ('Acknowledgement', (np.uint8, 4)), - ('ServiceType', gp_svce_type), - ('ServiceSubtype', np.uint8), - ('PacketTime', time_cds_short), - ('SpacecraftId', gp_sc_id) + ("SubHeaderVersionNo", np.uint8), + ("ChecksumFlag", bool), + ("Acknowledgement", (np.uint8, 4)), + ("ServiceType", gp_svce_type), + ("ServiceSubtype", np.uint8), + ("PacketTime", time_cds_short), + ("SpacecraftId", gp_sc_id) ] @@ -83,17 +83,17 @@ def get(self, with_archive_header): record = [] if with_archive_header: record += [ - ('15_MAIN_PRODUCT_HEADER', L15MainProductHeaderRecord().get()), - ('15_SECONDARY_PRODUCT_HEADER', + ("15_MAIN_PRODUCT_HEADER", L15MainProductHeaderRecord().get()), + ("15_SECONDARY_PRODUCT_HEADER", L15SecondaryProductHeaderRecord().get()), ] record += [ - ('GP_PK_HEADER', GSDTRecords.gp_pk_header), - ('GP_PK_SH1', GSDTRecords.gp_pk_sh1), - ('15_DATA_HEADER', L15DataHeaderRecord().get()) + ("GP_PK_HEADER", GSDTRecords.gp_pk_header), + ("GP_PK_SH1", GSDTRecords.gp_pk_sh1), + ("15_DATA_HEADER", L15DataHeaderRecord().get()) ] - return np.dtype(record).newbyteorder('>') + return np.dtype(record).newbyteorder(">") class L15PhData(object): @@ -101,8 +101,8 @@ class L15PhData(object): # 80 bytes l15_ph_data = [ - ('Name', 'S30'), - ('Value', 'S50') + ("Name", "S30"), + ("Value", "S50") ] @@ -118,39 +118,39 @@ def get(self): l15_ph_data = L15PhData.l15_ph_data l15_ph_data_identification = [ - ('Name', 'S30'), - ('Size', 'S16'), - ('Address', 'S16') + ("Name", "S30"), + ("Size", "S16"), + ("Address", "S16") ] # 3674 bytes record = [ - ('FormatName', l15_ph_data), - ('FormatDocumentName', l15_ph_data), - ('FormatDocumentMajorVersion', l15_ph_data), - ('FormatDocumentMinorVersion', l15_ph_data), - ('CreationDateTime', l15_ph_data), - ('CreatingCentre', l15_ph_data), - ('DataSetIdentification', (l15_ph_data_identification, 27)), - ('TotalFileSize', l15_ph_data), - ('GORT', l15_ph_data), - ('ASTI', l15_ph_data), - ('LLOS', l15_ph_data), - ('SNIT', l15_ph_data), - ('AIID', l15_ph_data), - ('SSBT', l15_ph_data), - ('SSST', l15_ph_data), - ('RRCC', l15_ph_data), - ('RRBT', l15_ph_data), - ('RRST', l15_ph_data), - ('PPRC', l15_ph_data), - ('PPDT', l15_ph_data), - ('GPLV', l15_ph_data), - ('APNM', l15_ph_data), - ('AARF', l15_ph_data), - ('UUDT', l15_ph_data), - ('QQOV', l15_ph_data), - ('UDSP', l15_ph_data) + ("FormatName", l15_ph_data), + ("FormatDocumentName", l15_ph_data), + ("FormatDocumentMajorVersion", l15_ph_data), + ("FormatDocumentMinorVersion", l15_ph_data), + ("CreationDateTime", l15_ph_data), + ("CreatingCentre", l15_ph_data), + ("DataSetIdentification", (l15_ph_data_identification, 27)), + ("TotalFileSize", l15_ph_data), + ("GORT", l15_ph_data), + ("ASTI", l15_ph_data), + ("LLOS", l15_ph_data), + ("SNIT", l15_ph_data), + ("AIID", l15_ph_data), + ("SSBT", l15_ph_data), + ("SSST", l15_ph_data), + ("RRCC", l15_ph_data), + ("RRBT", l15_ph_data), + ("RRST", l15_ph_data), + ("PPRC", l15_ph_data), + ("PPDT", l15_ph_data), + ("GPLV", l15_ph_data), + ("APNM", l15_ph_data), + ("AARF", l15_ph_data), + ("UUDT", l15_ph_data), + ("QQOV", l15_ph_data), + ("UDSP", l15_ph_data) ] return record @@ -169,24 +169,24 @@ def 
get(self): # 1440 bytes record = [ - ('ABID', l15_ph_data), - ('SMOD', l15_ph_data), - ('APXS', l15_ph_data), - ('AVPA', l15_ph_data), - ('LSCD', l15_ph_data), - ('LMAP', l15_ph_data), - ('QDLC', l15_ph_data), - ('QDLP', l15_ph_data), - ('QQAI', l15_ph_data), - ('SelectedBandIDs', l15_ph_data), - ('SouthLineSelectedRectangle', l15_ph_data), - ('NorthLineSelectedRectangle', l15_ph_data), - ('EastColumnSelectedRectangle', l15_ph_data), - ('WestColumnSelectedRectangle', l15_ph_data), - ('NumberLinesVISIR', l15_ph_data), - ('NumberColumnsVISIR', l15_ph_data), - ('NumberLinesHRV', l15_ph_data), - ('NumberColumnsHRV', l15_ph_data) + ("ABID", l15_ph_data), + ("SMOD", l15_ph_data), + ("APXS", l15_ph_data), + ("AVPA", l15_ph_data), + ("LSCD", l15_ph_data), + ("LMAP", l15_ph_data), + ("QDLC", l15_ph_data), + ("QDLP", l15_ph_data), + ("QQAI", l15_ph_data), + ("SelectedBandIDs", l15_ph_data), + ("SouthLineSelectedRectangle", l15_ph_data), + ("NorthLineSelectedRectangle", l15_ph_data), + ("EastColumnSelectedRectangle", l15_ph_data), + ("WestColumnSelectedRectangle", l15_ph_data), + ("NumberLinesVISIR", l15_ph_data), + ("NumberColumnsVISIR", l15_ph_data), + ("NumberLinesHRV", l15_ph_data), + ("NumberColumnsHRV", l15_ph_data) ] return record @@ -203,14 +203,14 @@ def get(self): """Get header record data.""" # 445248 bytes record = [ - ('15HeaderVersion', np.uint8), - ('SatelliteStatus', self.satellite_status), - ('ImageAcquisition', self.image_acquisition), - ('CelestialEvents', self.celestial_events), - ('ImageDescription', self.image_description), - ('RadiometricProcessing', self.radiometric_processing), - ('GeometricProcessing', self.geometric_processing), - ('IMPFConfiguration', self.impf_configuration)] + ("15HeaderVersion", np.uint8), + ("SatelliteStatus", self.satellite_status), + ("ImageAcquisition", self.image_acquisition), + ("CelestialEvents", self.celestial_events), + ("ImageDescription", self.image_description), + ("RadiometricProcessing", self.radiometric_processing), + ("GeometricProcessing", self.geometric_processing), + ("IMPFConfiguration", self.impf_configuration)] return record @@ -219,72 +219,72 @@ def satellite_status(self): """Get satellite status data.""" # 7 bytes satellite_definition = [ - ('SatelliteId', np.uint16), - ('NominalLongitude', np.float32), - ('SatelliteStatus', np.uint8)] + ("SatelliteId", np.uint16), + ("NominalLongitude", np.float32), + ("SatelliteStatus", np.uint8)] # 28 bytes satellite_operations = [ - ('LastManoeuvreFlag', bool), - ('LastManoeuvreStartTime', time_cds_short), - ('LastManoeuvreEndTime', time_cds_short), - ('LastManoeuvreType', np.uint8), - ('NextManoeuvreFlag', bool), - ('NextManoeuvreStartTime', time_cds_short), - ('NextManoeuvreEndTime', time_cds_short), - ('NextManoeuvreType', np.uint8)] + ("LastManoeuvreFlag", bool), + ("LastManoeuvreStartTime", time_cds_short), + ("LastManoeuvreEndTime", time_cds_short), + ("LastManoeuvreType", np.uint8), + ("NextManoeuvreFlag", bool), + ("NextManoeuvreStartTime", time_cds_short), + ("NextManoeuvreEndTime", time_cds_short), + ("NextManoeuvreType", np.uint8)] # 396 bytes orbit_coeff = [ - ('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('X', (np.float64, 8)), - ('Y', (np.float64, 8)), - ('Z', (np.float64, 8)), - ('VX', (np.float64, 8)), - ('VY', (np.float64, 8)), - ('VZ', (np.float64, 8))] + ("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("X", (np.float64, 8)), + ("Y", (np.float64, 8)), + ("Z", (np.float64, 8)), + ("VX", (np.float64, 8)), + ("VY", (np.float64, 8)), + ("VZ", 
(np.float64, 8))] # 39612 bytes orbit = [ - ('PeriodStartTime', time_cds_short), - ('PeriodEndTime', time_cds_short), - ('OrbitPolynomial', (orbit_coeff, 100))] + ("PeriodStartTime", time_cds_short), + ("PeriodEndTime", time_cds_short), + ("OrbitPolynomial", (orbit_coeff, 100))] # 204 bytes attitude_coeff = [ - ('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('XofSpinAxis', (np.float64, 8)), - ('YofSpinAxis', (np.float64, 8)), - ('ZofSpinAxis', (np.float64, 8))] + ("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("XofSpinAxis", (np.float64, 8)), + ("YofSpinAxis", (np.float64, 8)), + ("ZofSpinAxis", (np.float64, 8))] # 20420 bytes attitude = [ - ('PeriodStartTime', time_cds_short), - ('PeriodEndTime', time_cds_short), - ('PrincipleAxisOffsetAngle', np.float64), - ('AttitudePolynomial', (attitude_coeff, 100))] + ("PeriodStartTime", time_cds_short), + ("PeriodEndTime", time_cds_short), + ("PrincipleAxisOffsetAngle", np.float64), + ("AttitudePolynomial", (attitude_coeff, 100))] # 59 bytes utc_correlation = [ - ('PeriodStartTime', time_cds_short), - ('PeriodEndTime', time_cds_short), - ('OnBoardTimeStart', (np.uint8, 7)), - ('VarOnBoardTimeStart', np.float64), - ('A1', np.float64), - ('VarA1', np.float64), - ('A2', np.float64), - ('VarA2', np.float64)] + ("PeriodStartTime", time_cds_short), + ("PeriodEndTime", time_cds_short), + ("OnBoardTimeStart", (np.uint8, 7)), + ("VarOnBoardTimeStart", np.float64), + ("A1", np.float64), + ("VarA1", np.float64), + ("A2", np.float64), + ("VarA2", np.float64)] # 60134 bytes record = [ - ('SatelliteDefinition', satellite_definition), - ('SatelliteOperations', satellite_operations), - ('Orbit', orbit), - ('Attitude', attitude), - ('SpinRetreatRCStart', np.float64), - ('UTCCorrelation', utc_correlation)] + ("SatelliteDefinition", satellite_definition), + ("SatelliteOperations", satellite_operations), + ("Orbit", orbit), + ("Attitude", attitude), + ("SpinRetreatRCStart", np.float64), + ("UTCCorrelation", utc_correlation)] return record @@ -292,72 +292,72 @@ def satellite_status(self): def image_acquisition(self): """Get image acquisition data.""" planned_acquisition_time = [ - ('TrueRepeatCycleStart', time_cds_expanded), - ('PlanForwardScanEnd', time_cds_expanded), - ('PlannedRepeatCycleEnd', time_cds_expanded)] + ("TrueRepeatCycleStart", time_cds_expanded), + ("PlanForwardScanEnd", time_cds_expanded), + ("PlannedRepeatCycleEnd", time_cds_expanded)] radiometer_status = [ - ('ChannelStatus', (np.uint8, 12)), - ('DetectorStatus', (np.uint8, 42))] + ("ChannelStatus", (np.uint8, 12)), + ("DetectorStatus", (np.uint8, 42))] hrv_frame_offsets = [ - ('MDUNomHRVDelay1', np.uint16), - ('MDUNomHRVDelay2', np.uint16), - ('Spare', np.uint16), - ('MDUNomHRVBreakLine', np.uint16)] + ("MDUNomHRVDelay1", np.uint16), + ("MDUNomHRVDelay2", np.uint16), + ("Spare", np.uint16), + ("MDUNomHRVBreakLine", np.uint16)] operation_parameters = [ - ('L0_LineCounter', np.uint16), - ('K1_RetraceLines', np.uint16), - ('K2_PauseDeciseconds', np.uint16), - ('K3_RetraceLines', np.uint16), - ('K4_PauseDeciseconds', np.uint16), - ('K5_RetraceLines', np.uint16), - ('XDeepSpaceWindowPosition', np.uint8)] + ("L0_LineCounter", np.uint16), + ("K1_RetraceLines", np.uint16), + ("K2_PauseDeciseconds", np.uint16), + ("K3_RetraceLines", np.uint16), + ("K4_PauseDeciseconds", np.uint16), + ("K5_RetraceLines", np.uint16), + ("XDeepSpaceWindowPosition", np.uint8)] radiometer_settings = [ - ('MDUSamplingDelays', (np.uint16, 42)), - ('HRVFrameOffsets', hrv_frame_offsets), - 
('DHSSSynchSelection', np.uint8), - ('MDUOutGain', (np.uint16, 42)), - ('MDUCoarseGain', (np.uint8, 42)), - ('MDUFineGain', (np.uint16, 42)), - ('MDUNumericalOffset', (np.uint16, 42)), - ('PUGain', (np.uint16, 42)), - ('PUOffset', (np.uint16, 27)), - ('PUBias', (np.uint16, 15)), - ('OperationParameters', operation_parameters), - ('RefocusingLines', np.uint16), - ('RefocusingDirection', np.uint8), - ('RefocusingPosition', np.uint16), - ('ScanRefPosFlag', bool), - ('ScanRefPosNumber', np.uint16), - ('ScanRefPosVal', np.float32), - ('ScanFirstLine', np.uint16), - ('ScanLastLine', np.uint16), - ('RetraceStartLine', np.uint16)] + ("MDUSamplingDelays", (np.uint16, 42)), + ("HRVFrameOffsets", hrv_frame_offsets), + ("DHSSSynchSelection", np.uint8), + ("MDUOutGain", (np.uint16, 42)), + ("MDUCoarseGain", (np.uint8, 42)), + ("MDUFineGain", (np.uint16, 42)), + ("MDUNumericalOffset", (np.uint16, 42)), + ("PUGain", (np.uint16, 42)), + ("PUOffset", (np.uint16, 27)), + ("PUBias", (np.uint16, 15)), + ("OperationParameters", operation_parameters), + ("RefocusingLines", np.uint16), + ("RefocusingDirection", np.uint8), + ("RefocusingPosition", np.uint16), + ("ScanRefPosFlag", bool), + ("ScanRefPosNumber", np.uint16), + ("ScanRefPosVal", np.float32), + ("ScanFirstLine", np.uint16), + ("ScanLastLine", np.uint16), + ("RetraceStartLine", np.uint16)] decontamination = [ - ('DecontaminationNow', bool), - ('DecontaminationStart', time_cds_short), - ('DecontaminationEnd', time_cds_short)] + ("DecontaminationNow", bool), + ("DecontaminationStart", time_cds_short), + ("DecontaminationEnd", time_cds_short)] radiometer_operations = [ - ('LastGainChangeFlag', bool), - ('LastGainChangeTime', time_cds_short), - ('Decontamination', decontamination), - ('BBCalScheduled', bool), - ('BBCalibrationType', np.uint8), - ('BBFirstLine', np.uint16), - ('BBLastLine', np.uint16), - ('ColdFocalPlaneOpTemp', np.uint16), - ('WarmFocalPlaneOpTemp', np.uint16)] + ("LastGainChangeFlag", bool), + ("LastGainChangeTime", time_cds_short), + ("Decontamination", decontamination), + ("BBCalScheduled", bool), + ("BBCalibrationType", np.uint8), + ("BBFirstLine", np.uint16), + ("BBLastLine", np.uint16), + ("ColdFocalPlaneOpTemp", np.uint16), + ("WarmFocalPlaneOpTemp", np.uint16)] record = [ - ('PlannedAcquisitionTime', planned_acquisition_time), - ('RadiometerStatus', radiometer_status), - ('RadiometerSettings', radiometer_settings), - ('RadiometerOperations', radiometer_operations)] + ("PlannedAcquisitionTime", planned_acquisition_time), + ("RadiometerStatus", radiometer_status), + ("RadiometerSettings", radiometer_settings), + ("RadiometerOperations", radiometer_operations)] return record @@ -365,39 +365,39 @@ def image_acquisition(self): def celestial_events(self): """Get celestial events data.""" earth_moon_sun_coeff = [ - ('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('AlphaCoef', (np.float64, 8)), - ('BetaCoef', (np.float64, 8))] + ("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("AlphaCoef", (np.float64, 8)), + ("BetaCoef", (np.float64, 8))] star_coeff = [ - ('StarId', np.uint16), - ('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('AlphaCoef', (np.float64, 8)), - ('BetaCoef', (np.float64, 8))] + ("StarId", np.uint16), + ("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("AlphaCoef", (np.float64, 8)), + ("BetaCoef", (np.float64, 8))] ephemeris = [ - ('PeriodTimeStart', time_cds_short), - ('PeriodTimeEnd', time_cds_short), - ('RelatedOrbitFileTime', 'S15'), - 
('RelatedAttitudeFileTime', 'S15'), - ('EarthEphemeris', (earth_moon_sun_coeff, 100)), - ('MoonEphemeris', (earth_moon_sun_coeff, 100)), - ('SunEphemeris', (earth_moon_sun_coeff, 100)), - ('StarEphemeris', (star_coeff, (20, 100)))] + ("PeriodTimeStart", time_cds_short), + ("PeriodTimeEnd", time_cds_short), + ("RelatedOrbitFileTime", "S15"), + ("RelatedAttitudeFileTime", "S15"), + ("EarthEphemeris", (earth_moon_sun_coeff, 100)), + ("MoonEphemeris", (earth_moon_sun_coeff, 100)), + ("SunEphemeris", (earth_moon_sun_coeff, 100)), + ("StarEphemeris", (star_coeff, (20, 100)))] relation_to_image = [ - ('TypeOfEclipse', np.uint8), - ('EclipseStartTime', time_cds_short), - ('EclipseEndTime', time_cds_short), - ('VisibleBodiesInImage', np.uint8), - ('BodiesCloseToFOV', np.uint8), - ('ImpactOnImageQuality', np.uint8)] + ("TypeOfEclipse", np.uint8), + ("EclipseStartTime", time_cds_short), + ("EclipseEndTime", time_cds_short), + ("VisibleBodiesInImage", np.uint8), + ("BodiesCloseToFOV", np.uint8), + ("ImpactOnImageQuality", np.uint8)] record = [ - ('CelestialBodiesPosition', ephemeris), - ('RelationToImage', relation_to_image)] + ("CelestialBodiesPosition", ephemeris), + ("RelationToImage", relation_to_image)] return record @@ -405,44 +405,44 @@ def celestial_events(self): def image_description(self): """Get image description data.""" projection_description = [ - ('TypeOfProjection', np.uint8), - ('LongitudeOfSSP', np.float32)] + ("TypeOfProjection", np.uint8), + ("LongitudeOfSSP", np.float32)] reference_grid = [ - ('NumberOfLines', np.int32), - ('NumberOfColumns', np.int32), - ('LineDirGridStep', np.float32), - ('ColumnDirGridStep', np.float32), - ('GridOrigin', np.uint8)] + ("NumberOfLines", np.int32), + ("NumberOfColumns", np.int32), + ("LineDirGridStep", np.float32), + ("ColumnDirGridStep", np.float32), + ("GridOrigin", np.uint8)] planned_coverage_vis_ir = [ - ('SouthernLinePlanned', np.int32), - ('NorthernLinePlanned', np.int32), - ('EasternColumnPlanned', np.int32), - ('WesternColumnPlanned', np.int32)] + ("SouthernLinePlanned", np.int32), + ("NorthernLinePlanned", np.int32), + ("EasternColumnPlanned", np.int32), + ("WesternColumnPlanned", np.int32)] planned_coverage_hrv = [ - ('LowerSouthLinePlanned', np.int32), - ('LowerNorthLinePlanned', np.int32), - ('LowerEastColumnPlanned', np.int32), - ('LowerWestColumnPlanned', np.int32), - ('UpperSouthLinePlanned', np.int32), - ('UpperNorthLinePlanned', np.int32), - ('UpperEastColumnPlanned', np.int32), - ('UpperWestColumnPlanned', np.int32)] + ("LowerSouthLinePlanned", np.int32), + ("LowerNorthLinePlanned", np.int32), + ("LowerEastColumnPlanned", np.int32), + ("LowerWestColumnPlanned", np.int32), + ("UpperSouthLinePlanned", np.int32), + ("UpperNorthLinePlanned", np.int32), + ("UpperEastColumnPlanned", np.int32), + ("UpperWestColumnPlanned", np.int32)] level_15_image_production = [ - ('ImageProcDirection', np.uint8), - ('PixelGenDirection', np.uint8), - ('PlannedChanProcessing', (np.uint8, 12))] + ("ImageProcDirection", np.uint8), + ("PixelGenDirection", np.uint8), + ("PlannedChanProcessing", (np.uint8, 12))] record = [ - ('ProjectionDescription', projection_description), - ('ReferenceGridVIS_IR', reference_grid), - ('ReferenceGridHRV', reference_grid), - ('PlannedCoverageVIS_IR', planned_coverage_vis_ir), - ('PlannedCoverageHRV', planned_coverage_hrv), - ('Level15ImageProduction', level_15_image_production)] + ("ProjectionDescription", projection_description), + ("ReferenceGridVIS_IR", reference_grid), + ("ReferenceGridHRV", reference_grid), + 
("PlannedCoverageVIS_IR", planned_coverage_vis_ir), + ("PlannedCoverageHRV", planned_coverage_hrv), + ("Level15ImageProduction", level_15_image_production)] return record @@ -450,122 +450,122 @@ def image_description(self): def radiometric_processing(self): """Get radiometric processing data.""" rp_summary = [ - ('RadianceLinearization', (bool, 12)), - ('DetectorEqualization', (bool, 12)), - ('OnboardCalibrationResult', (bool, 12)), - ('MPEFCalFeedback', (bool, 12)), - ('MTFAdaptation', (bool, 12)), - ('StrayLightCorrection', (bool, 12))] + ("RadianceLinearization", (bool, 12)), + ("DetectorEqualization", (bool, 12)), + ("OnboardCalibrationResult", (bool, 12)), + ("MPEFCalFeedback", (bool, 12)), + ("MTFAdaptation", (bool, 12)), + ("StrayLightCorrection", (bool, 12))] level_15_image_calibration = [ - ('CalSlope', np.float64), - ('CalOffset', np.float64)] + ("CalSlope", np.float64), + ("CalOffset", np.float64)] time_cuc_size = [ - ('CT1', np.uint8), - ('CT2', np.uint8), - ('CT3', np.uint8), - ('CT4', np.uint8), - ('FT1', np.uint8), - ('FT2', np.uint8), - ('FT3', np.uint8)] + ("CT1", np.uint8), + ("CT2", np.uint8), + ("CT3", np.uint8), + ("CT4", np.uint8), + ("FT1", np.uint8), + ("FT2", np.uint8), + ("FT3", np.uint8)] cold_fp_temperature = [ - ('FCUNominalColdFocalPlaneTemp', np.uint16), - ('FCURedundantColdFocalPlaneTemp', np.uint16)] + ("FCUNominalColdFocalPlaneTemp", np.uint16), + ("FCURedundantColdFocalPlaneTemp", np.uint16)] warm_fp_temperature = [ - ('FCUNominalWarmFocalPlaneVHROTemp', np.uint16), - ('FCURedundantWarmFocalPlaneVHROTemp', np.uint16)] + ("FCUNominalWarmFocalPlaneVHROTemp", np.uint16), + ("FCURedundantWarmFocalPlaneVHROTemp", np.uint16)] scan_mirror_temperature = [ - ('FCUNominalScanMirrorSensor1Temp', np.uint16), - ('FCURedundantScanMirrorSensor1Temp', np.uint16), - ('FCUNominalScanMirrorSensor2Temp', np.uint16), - ('FCURedundantScanMirrorSensor2Temp', np.uint16)] + ("FCUNominalScanMirrorSensor1Temp", np.uint16), + ("FCURedundantScanMirrorSensor1Temp", np.uint16), + ("FCUNominalScanMirrorSensor2Temp", np.uint16), + ("FCURedundantScanMirrorSensor2Temp", np.uint16)] m1m2m3_temperature = [ - ('FCUNominalM1MirrorSensor1Temp', np.uint16), - ('FCURedundantM1MirrorSensor1Temp', np.uint16), - ('FCUNominalM1MirrorSensor2Temp', np.uint16), - ('FCURedundantM1MirrorSensor2Temp', np.uint16), - ('FCUNominalM23AssemblySensor1Temp', np.uint8), - ('FCURedundantM23AssemblySensor1Temp', np.uint8), - ('FCUNominalM23AssemblySensor2Temp', np.uint8), - ('FCURedundantM23AssemblySensor2Temp', np.uint8)] + ("FCUNominalM1MirrorSensor1Temp", np.uint16), + ("FCURedundantM1MirrorSensor1Temp", np.uint16), + ("FCUNominalM1MirrorSensor2Temp", np.uint16), + ("FCURedundantM1MirrorSensor2Temp", np.uint16), + ("FCUNominalM23AssemblySensor1Temp", np.uint8), + ("FCURedundantM23AssemblySensor1Temp", np.uint8), + ("FCUNominalM23AssemblySensor2Temp", np.uint8), + ("FCURedundantM23AssemblySensor2Temp", np.uint8)] baffle_temperature = [ - ('FCUNominalM1BaffleTemp', np.uint16), - ('FCURedundantM1BaffleTemp', np.uint16)] + ("FCUNominalM1BaffleTemp", np.uint16), + ("FCURedundantM1BaffleTemp", np.uint16)] blackbody_temperature = [ - ('FCUNominalBlackBodySensorTemp', np.uint16), - ('FCURedundantBlackBodySensorTemp', np.uint16)] + ("FCUNominalBlackBodySensorTemp", np.uint16), + ("FCURedundantBlackBodySensorTemp", np.uint16)] fcu_mode = [ - ('FCUNominalSMMStatus', 'S2'), - ('FCURedundantSMMStatus', 'S2')] + ("FCUNominalSMMStatus", "S2"), + ("FCURedundantSMMStatus", "S2")] extracted_bb_data = [ - ('NumberOfPixelsUsed', 
np.uint32), - ('MeanCount', np.float32), - ('RMS', np.float32), - ('MaxCount', np.uint16), - ('MinCount', np.uint16), - ('BB_Processing_Slope', np.float64), - ('BB_Processing_Offset', np.float64)] + ("NumberOfPixelsUsed", np.uint32), + ("MeanCount", np.float32), + ("RMS", np.float32), + ("MaxCount", np.uint16), + ("MinCount", np.uint16), + ("BB_Processing_Slope", np.float64), + ("BB_Processing_Offset", np.float64)] bb_related_data = [ - ('OnBoardBBTime', time_cuc_size), - ('MDUOutGain', (np.uint16, 42)), - ('MDUCoarseGain', (np.uint8, 42)), - ('MDUFineGain', (np.uint16, 42)), - ('MDUNumericalOffset', (np.uint16, 42)), - ('PUGain', (np.uint16, 42)), - ('PUOffset', (np.uint16, 27)), - ('PUBias', (np.uint16, 15)), - ('DCRValues', (np.uint8, 63)), - ('X_DeepSpaceWindowPosition', np.int8), - ('ColdFPTemperature', cold_fp_temperature), - ('WarmFPTemperature', warm_fp_temperature), - ('ScanMirrorTemperature', scan_mirror_temperature), - ('M1M2M3Temperature', m1m2m3_temperature), - ('BaffleTemperature', baffle_temperature), - ('BlackBodyTemperature', blackbody_temperature), - ('FCUMode', fcu_mode), - ('ExtractedBBData', (extracted_bb_data, 12))] + ("OnBoardBBTime", time_cuc_size), + ("MDUOutGain", (np.uint16, 42)), + ("MDUCoarseGain", (np.uint8, 42)), + ("MDUFineGain", (np.uint16, 42)), + ("MDUNumericalOffset", (np.uint16, 42)), + ("PUGain", (np.uint16, 42)), + ("PUOffset", (np.uint16, 27)), + ("PUBias", (np.uint16, 15)), + ("DCRValues", (np.uint8, 63)), + ("X_DeepSpaceWindowPosition", np.int8), + ("ColdFPTemperature", cold_fp_temperature), + ("WarmFPTemperature", warm_fp_temperature), + ("ScanMirrorTemperature", scan_mirror_temperature), + ("M1M2M3Temperature", m1m2m3_temperature), + ("BaffleTemperature", baffle_temperature), + ("BlackBodyTemperature", blackbody_temperature), + ("FCUMode", fcu_mode), + ("ExtractedBBData", (extracted_bb_data, 12))] black_body_data_used = [ - ('BBObservationUTC', time_cds_expanded), - ('BBRelatedData', bb_related_data)] + ("BBObservationUTC", time_cds_expanded), + ("BBRelatedData", bb_related_data)] impf_cal_data = [ - ('ImageQualityFlag', np.uint8), - ('ReferenceDataFlag', np.uint8), - ('AbsCalMethod', np.uint8), - ('Pad1', 'S1'), - ('AbsCalWeightVic', np.float32), - ('AbsCalWeightXsat', np.float32), - ('AbsCalCoeff', np.float32), - ('AbsCalError', np.float32), - ('GSICSCalCoeff', np.float32), - ('GSICSCalError', np.float32), - ('GSICSOffsetCount', np.float32)] + ("ImageQualityFlag", np.uint8), + ("ReferenceDataFlag", np.uint8), + ("AbsCalMethod", np.uint8), + ("Pad1", "S1"), + ("AbsCalWeightVic", np.float32), + ("AbsCalWeightXsat", np.float32), + ("AbsCalCoeff", np.float32), + ("AbsCalError", np.float32), + ("GSICSCalCoeff", np.float32), + ("GSICSCalError", np.float32), + ("GSICSOffsetCount", np.float32)] rad_proc_mtf_adaptation = [ - ('VIS_IRMTFCorrectionE_W', (np.float32, (33, 16))), - ('VIS_IRMTFCorrectionN_S', (np.float32, (33, 16))), - ('HRVMTFCorrectionE_W', (np.float32, (9, 16))), - ('HRVMTFCorrectionN_S', (np.float32, (9, 16))), - ('StraylightCorrection', (np.float32, (12, 8, 8)))] + ("VIS_IRMTFCorrectionE_W", (np.float32, (33, 16))), + ("VIS_IRMTFCorrectionN_S", (np.float32, (33, 16))), + ("HRVMTFCorrectionE_W", (np.float32, (9, 16))), + ("HRVMTFCorrectionN_S", (np.float32, (9, 16))), + ("StraylightCorrection", (np.float32, (12, 8, 8)))] record = [ - ('RPSummary', rp_summary), - ('Level15ImageCalibration', (level_15_image_calibration, 12)), - ('BlackBodyDataUsed', black_body_data_used), - ('MPEFCalFeedback', (impf_cal_data, 12)), - ('RadTransform', 
(np.float32, (42, 64))), - ('RadProcMTFAdaptation', rad_proc_mtf_adaptation)] + ("RPSummary", rp_summary), + ("Level15ImageCalibration", (level_15_image_calibration, 12)), + ("BlackBodyDataUsed", black_body_data_used), + ("MPEFCalFeedback", (impf_cal_data, 12)), + ("RadTransform", (np.float32, (42, 64))), + ("RadProcMTFAdaptation", rad_proc_mtf_adaptation)] return record @@ -573,20 +573,20 @@ def radiometric_processing(self): def geometric_processing(self): """Get geometric processing data.""" opt_axis_distances = [ - ('E-WFocalPlane', (np.float32, 42)), - ('N_SFocalPlane', (np.float32, 42))] + ("E-WFocalPlane", (np.float32, 42)), + ("N_SFocalPlane", (np.float32, 42))] earth_model = [ - ('TypeOfEarthModel', np.uint8), - ('EquatorialRadius', np.float64), - ('NorthPolarRadius', np.float64), - ('SouthPolarRadius', np.float64)] + ("TypeOfEarthModel", np.uint8), + ("EquatorialRadius", np.float64), + ("NorthPolarRadius", np.float64), + ("SouthPolarRadius", np.float64)] record = [ - ('OptAxisDistances', opt_axis_distances), - ('EarthModel', earth_model), - ('AtmosphericModel', (np.float32, (12, 360))), - ('ResamplingFunctions', (np.uint8, 12))] + ("OptAxisDistances", opt_axis_distances), + ("EarthModel", earth_model), + ("AtmosphericModel", (np.float32, (12, 360))), + ("ResamplingFunctions", (np.uint8, 12))] return record @@ -594,8 +594,8 @@ def geometric_processing(self): def impf_configuration(self): """Get impf configuration information.""" overall_configuration = [ - ('Issue', np.uint16), - ('Revision', np.uint16) + ("Issue", np.uint16), + ("Revision", np.uint16) ] sw_version = overall_configuration @@ -603,82 +603,82 @@ def impf_configuration(self): info_base_versions = sw_version su_configuration = [ - ('SWVersion', sw_version), - ('InfoBaseVersions', (info_base_versions, 10)) + ("SWVersion", sw_version), + ("InfoBaseVersions", (info_base_versions, 10)) ] su_details = [ - ('SUId', GSDTRecords.gp_su_id), - ('SUIdInstance', np.int8), - ('SUMode', np.uint8), - ('SUState', np.uint8), - ('SUConfiguration', su_configuration) + ("SUId", GSDTRecords.gp_su_id), + ("SUIdInstance", np.int8), + ("SUMode", np.uint8), + ("SUState", np.uint8), + ("SUConfiguration", su_configuration) ] equalisation_params = [ - ('ConstCoeff', np.float32), - ('LinearCoeff', np.float32), - ('QuadraticCoeff', np.float32) + ("ConstCoeff", np.float32), + ("LinearCoeff", np.float32), + ("QuadraticCoeff", np.float32) ] black_body_data_for_warm_start = [ - ('GTotalForMethod1', (np.float64, 12)), - ('GTotalForMethod2', (np.float64, 12)), - ('GTotalForMethod3', (np.float64, 12)), - ('GBackForMethod1', (np.float64, 12)), - ('GBackForMethod2', (np.float64, 12)), - ('GBackForMethod3', (np.float64, 12)), - ('RatioGTotalToGBack', (np.float64, 12)), - ('GainInFrontOpticsCont', (np.float64, 12)), - ('CalibrationConstants', (np.float32, 12)), - ('maxIncidentRadiance', (np.float64, 12)), - ('TimeOfColdObsSeconds', np.float64), - ('TimeOfColdObsNanoSecs', np.float64), - ('IncidenceRadiance', (np.float64, 12)), - ('TempCal', np.float64), - ('TempM1', np.float64), - ('TempScan', np.float64), - ('TempM1Baf', np.float64), - ('TempCalSurround', np.float64) + ("GTotalForMethod1", (np.float64, 12)), + ("GTotalForMethod2", (np.float64, 12)), + ("GTotalForMethod3", (np.float64, 12)), + ("GBackForMethod1", (np.float64, 12)), + ("GBackForMethod2", (np.float64, 12)), + ("GBackForMethod3", (np.float64, 12)), + ("RatioGTotalToGBack", (np.float64, 12)), + ("GainInFrontOpticsCont", (np.float64, 12)), + ("CalibrationConstants", (np.float32, 12)), + 
("maxIncidentRadiance", (np.float64, 12)), + ("TimeOfColdObsSeconds", np.float64), + ("TimeOfColdObsNanoSecs", np.float64), + ("IncidenceRadiance", (np.float64, 12)), + ("TempCal", np.float64), + ("TempM1", np.float64), + ("TempScan", np.float64), + ("TempM1Baf", np.float64), + ("TempCalSurround", np.float64) ] mirror_parameters = [ - ('MaxFeedbackVoltage', np.float64), - ('MinFeedbackVoltage', np.float64), - ('MirrorSlipEstimate', np.float64) + ("MaxFeedbackVoltage", np.float64), + ("MinFeedbackVoltage", np.float64), + ("MirrorSlipEstimate", np.float64) ] hktm_parameters = [ - ('TimeS0Packet', time_cds_short), - ('TimeS1Packet', time_cds_short), - ('TimeS2Packet', time_cds_short), - ('TimeS3Packet', time_cds_short), - ('TimeS4Packet', time_cds_short), - ('TimeS5Packet', time_cds_short), - ('TimeS6Packet', time_cds_short), - ('TimeS7Packet', time_cds_short), - ('TimeS8Packet', time_cds_short), - ('TimeS9Packet', time_cds_short), - ('TimeSYPacket', time_cds_short), - ('TimePSPacket', time_cds_short) + ("TimeS0Packet", time_cds_short), + ("TimeS1Packet", time_cds_short), + ("TimeS2Packet", time_cds_short), + ("TimeS3Packet", time_cds_short), + ("TimeS4Packet", time_cds_short), + ("TimeS5Packet", time_cds_short), + ("TimeS6Packet", time_cds_short), + ("TimeS7Packet", time_cds_short), + ("TimeS8Packet", time_cds_short), + ("TimeS9Packet", time_cds_short), + ("TimeSYPacket", time_cds_short), + ("TimePSPacket", time_cds_short) ] warm_start_params = [ - ('ScanningLaw', (np.float64, 1527)), - ('RadFramesAlignment', (np.float64, 3)), - ('ScanningLawVariation', (np.float32, 2)), - ('EqualisationParams', (equalisation_params, 42)), - ('BlackBodyDataForWarmStart', black_body_data_for_warm_start), - ('MirrorParameters', mirror_parameters), - ('LastSpinPeriod', np.float64), - ('HKTMParameters', hktm_parameters), - ('WSPReserved', (np.uint8, 3312)) + ("ScanningLaw", (np.float64, 1527)), + ("RadFramesAlignment", (np.float64, 3)), + ("ScanningLawVariation", (np.float32, 2)), + ("EqualisationParams", (equalisation_params, 42)), + ("BlackBodyDataForWarmStart", black_body_data_for_warm_start), + ("MirrorParameters", mirror_parameters), + ("LastSpinPeriod", np.float64), + ("HKTMParameters", hktm_parameters), + ("WSPReserved", (np.uint8, 3312)) ] record = [ - ('OverallConfiguration', overall_configuration), - ('SUDetails', (su_details, 50)), - ('WarmStartParams', warm_start_params) + ("OverallConfiguration", overall_configuration), + ("SUDetails", (su_details, 50)), + ("WarmStartParams", warm_start_params) ] return record @@ -695,23 +695,23 @@ def get(self): """Get header record data.""" # 380363 bytes record = [ - ('GP_PK_HEADER', GSDTRecords.gp_pk_header), - ('GP_PK_SH1', GSDTRecords.gp_pk_sh1), - ('15TRAILER', self.seviri_l15_trailer) + ("GP_PK_HEADER", GSDTRecords.gp_pk_header), + ("GP_PK_SH1", GSDTRecords.gp_pk_sh1), + ("15TRAILER", self.seviri_l15_trailer) ] - return np.dtype(record).newbyteorder('>') + return np.dtype(record).newbyteorder(">") @property def seviri_l15_trailer(self): """Get file trailer data.""" record = [ - ('15TrailerVersion', np.uint8), - ('ImageProductionStats', self.image_production_stats), - ('NavigationExtractionResults', self.navigation_extraction_results), - ('RadiometricQuality', self.radiometric_quality), - ('GeometricQuality', self.geometric_quality), - ('TimelinessAndCompleteness', self.timeliness_and_completeness) + ("15TrailerVersion", np.uint8), + ("ImageProductionStats", self.image_production_stats), + ("NavigationExtractionResults", self.navigation_extraction_results), + 
("RadiometricQuality", self.radiometric_quality), + ("GeometricQuality", self.geometric_quality), + ("TimelinessAndCompleteness", self.timeliness_and_completeness) ] return record @@ -721,69 +721,69 @@ def image_production_stats(self): gp_sc_id = GSDTRecords.gp_sc_id actual_scanning_summary = [ - ('NominalImageScanning', np.uint8), - ('ReducedScan', np.uint8), - ('ForwardScanStart', time_cds_short), - ('ForwardScanEnd', time_cds_short) + ("NominalImageScanning", np.uint8), + ("ReducedScan", np.uint8), + ("ForwardScanStart", time_cds_short), + ("ForwardScanEnd", time_cds_short) ] radiometric_behaviour = [ - ('NominalBehaviour', np.uint8), - ('RadScanIrregularity', np.uint8), - ('RadStoppage', np.uint8), - ('RepeatCycleNotCompleted', np.uint8), - ('GainChangeTookPlace', np.uint8), - ('DecontaminationTookPlace', np.uint8), - ('NoBBCalibrationAchieved', np.uint8), - ('IncorrectTemperature', np.uint8), - ('InvalidBBData', np.uint8), - ('InvalidAuxOrHKTMData', np.uint8), - ('RefocusingMechanismActuated', np.uint8), - ('MirrorBackToReferencePos', np.uint8) + ("NominalBehaviour", np.uint8), + ("RadScanIrregularity", np.uint8), + ("RadStoppage", np.uint8), + ("RepeatCycleNotCompleted", np.uint8), + ("GainChangeTookPlace", np.uint8), + ("DecontaminationTookPlace", np.uint8), + ("NoBBCalibrationAchieved", np.uint8), + ("IncorrectTemperature", np.uint8), + ("InvalidBBData", np.uint8), + ("InvalidAuxOrHKTMData", np.uint8), + ("RefocusingMechanismActuated", np.uint8), + ("MirrorBackToReferencePos", np.uint8) ] reception_summary_stats = [ - ('PlannedNumberOfL10Lines', (np.uint32, 12)), - ('NumberOfMissingL10Lines', (np.uint32, 12)), - ('NumberOfCorruptedL10Lines', (np.uint32, 12)), - ('NumberOfReplacedL10Lines', (np.uint32, 12)) + ("PlannedNumberOfL10Lines", (np.uint32, 12)), + ("NumberOfMissingL10Lines", (np.uint32, 12)), + ("NumberOfCorruptedL10Lines", (np.uint32, 12)), + ("NumberOfReplacedL10Lines", (np.uint32, 12)) ] l15_image_validity = [ - ('NominalImage', np.uint8), - ('NonNominalBecauseIncomplete', np.uint8), - ('NonNominalRadiometricQuality', np.uint8), - ('NonNominalGeometricQuality', np.uint8), - ('NonNominalTimeliness', np.uint8), - ('IncompleteL15', np.uint8), + ("NominalImage", np.uint8), + ("NonNominalBecauseIncomplete", np.uint8), + ("NonNominalRadiometricQuality", np.uint8), + ("NonNominalGeometricQuality", np.uint8), + ("NonNominalTimeliness", np.uint8), + ("IncompleteL15", np.uint8), ] actual_l15_coverage_vis_ir = [ - ('SouthernLineActual', np.int32), - ('NorthernLineActual', np.int32), - ('EasternColumnActual', np.int32), - ('WesternColumnActual', np.int32) + ("SouthernLineActual", np.int32), + ("NorthernLineActual", np.int32), + ("EasternColumnActual", np.int32), + ("WesternColumnActual", np.int32) ] actual_l15_coverage_hrv = [ - ('LowerSouthLineActual', np.int32), - ('LowerNorthLineActual', np.int32), - ('LowerEastColumnActual', np.int32), - ('LowerWestColumnActual', np.int32), - ('UpperSouthLineActual', np.int32), - ('UpperNorthLineActual', np.int32), - ('UpperEastColumnActual', np.int32), - ('UpperWestColumnActual', np.int32), + ("LowerSouthLineActual", np.int32), + ("LowerNorthLineActual", np.int32), + ("LowerEastColumnActual", np.int32), + ("LowerWestColumnActual", np.int32), + ("UpperSouthLineActual", np.int32), + ("UpperNorthLineActual", np.int32), + ("UpperEastColumnActual", np.int32), + ("UpperWestColumnActual", np.int32), ] record = [ - ('SatelliteId', gp_sc_id), - ('ActualScanningSummary', actual_scanning_summary), - ('RadiometricBehaviour', radiometric_behaviour), - 
('ReceptionSummaryStats', reception_summary_stats), - ('L15ImageValidity', (l15_image_validity, 12)), - ('ActualL15CoverageVIS_IR', actual_l15_coverage_vis_ir), - ('ActualL15CoverageHRV', actual_l15_coverage_hrv) + ("SatelliteId", gp_sc_id), + ("ActualScanningSummary", actual_scanning_summary), + ("RadiometricBehaviour", radiometric_behaviour), + ("ReceptionSummaryStats", reception_summary_stats), + ("L15ImageValidity", (l15_image_validity, 12)), + ("ActualL15CoverageVIS_IR", actual_l15_coverage_vis_ir), + ("ActualL15CoverageHRV", actual_l15_coverage_hrv) ] return record @@ -792,47 +792,47 @@ def image_production_stats(self): def navigation_extraction_results(self): """Get navigation extraction data.""" horizon_observation = [ - ('HorizonId', np.uint8), - ('Alpha', np.float64), - ('AlphaConfidence', np.float64), - ('Beta', np.float64), - ('BetaConfidence', np.float64), - ('ObservationTime', time_cds), - ('SpinRate', np.float64), - ('AlphaDeviation', np.float64), - ('BetaDeviation', np.float64) + ("HorizonId", np.uint8), + ("Alpha", np.float64), + ("AlphaConfidence", np.float64), + ("Beta", np.float64), + ("BetaConfidence", np.float64), + ("ObservationTime", time_cds), + ("SpinRate", np.float64), + ("AlphaDeviation", np.float64), + ("BetaDeviation", np.float64) ] star_observation = [ - ('StarId', np.uint16), - ('Alpha', np.float64), - ('AlphaConfidence', np.float64), - ('Beta', np.float64), - ('BetaConfidence', np.float64), - ('ObservationTime', time_cds), - ('SpinRate', np.float64), - ('AlphaDeviation', np.float64), - ('BetaDeviation', np.float64) + ("StarId", np.uint16), + ("Alpha", np.float64), + ("AlphaConfidence", np.float64), + ("Beta", np.float64), + ("BetaConfidence", np.float64), + ("ObservationTime", time_cds), + ("SpinRate", np.float64), + ("AlphaDeviation", np.float64), + ("BetaDeviation", np.float64) ] landmark_observation = [ - ('LandmarkId', np.uint16), - ('LandmarkLongitude', np.float64), - ('LandmarkLatitude', np.float64), - ('Alpha', np.float64), - ('AlphaConfidence', np.float64), - ('Beta', np.float64), - ('BetaConfidence', np.float64), - ('ObservationTime', time_cds), - ('SpinRate', np.float64), - ('AlphaDeviation', np.float64), - ('BetaDeviation', np.float64) + ("LandmarkId", np.uint16), + ("LandmarkLongitude", np.float64), + ("LandmarkLatitude", np.float64), + ("Alpha", np.float64), + ("AlphaConfidence", np.float64), + ("Beta", np.float64), + ("BetaConfidence", np.float64), + ("ObservationTime", time_cds), + ("SpinRate", np.float64), + ("AlphaDeviation", np.float64), + ("BetaDeviation", np.float64) ] record = [ - ('ExtractedHorizons', (horizon_observation, 4)), - ('ExtractedStars', (star_observation, 20)), - ('ExtractedLandmarks', (landmark_observation, 50)) + ("ExtractedHorizons", (horizon_observation, 4)), + ("ExtractedStars", (star_observation, 20)), + ("ExtractedLandmarks", (landmark_observation, 50)) ] return record @@ -841,85 +841,85 @@ def navigation_extraction_results(self): def radiometric_quality(self): """Get radiometric quality record data.""" l10_rad_quality = [ - ('FullImageMinimumCount', np.uint16), - ('FullImageMaximumCount', np.uint16), - ('EarthDiskMinimumCount', np.uint16), - ('EarthDiskMaximumCount', np.uint16), - ('MoonMinimumCount', np.uint16), - ('MoonMaximumCount', np.uint16), - ('FullImageMeanCount', np.float32), - ('FullImageStandardDeviation', np.float32), - ('EarthDiskMeanCount', np.float32), - ('EarthDiskStandardDeviation', np.float32), - ('MoonMeanCount', np.float32), - ('MoonStandardDeviation', np.float32), - ('SpaceMeanCount', 
np.float32), - ('SpaceStandardDeviation', np.float32), - ('SESpaceCornerMeanCount', np.float32), - ('SESpaceCornerStandardDeviation', np.float32), - ('SWSpaceCornerMeanCount', np.float32), - ('SWSpaceCornerStandardDeviation', np.float32), - ('NESpaceCornerMeanCount', np.float32), - ('NESpaceCornerStandardDeviation', np.float32), - ('NWSpaceCornerMeanCount', np.float32), - ('NWSpaceCornerStandardDeviation', np.float32), - ('4SpaceCornersMeanCount', np.float32), - ('4SpaceCornersStandardDeviation', np.float32), - ('FullImageHistogram', (np.uint32, 256)), - ('EarthDiskHistogram', (np.uint32, 256)), - ('ImageCentreSquareHistogram', (np.uint32, 256)), - ('SESpaceCornerHistogram', (np.uint32, 128)), - ('SWSpaceCornerHistogram', (np.uint32, 128)), - ('NESpaceCornerHistogram', (np.uint32, 128)), - ('NWSpaceCornerHistogram', (np.uint32, 128)), - ('FullImageEntropy', (np.float32, 3)), - ('EarthDiskEntropy', (np.float32, 3)), - ('ImageCentreSquareEntropy', (np.float32, 3)), - ('SESpaceCornerEntropy', (np.float32, 3)), - ('SWSpaceCornerEntropy', (np.float32, 3)), - ('NESpaceCornerEntropy', (np.float32, 3)), - ('NWSpaceCornerEntropy', (np.float32, 3)), - ('4SpaceCornersEntropy', (np.float32, 3)), - ('ImageCentreSquarePSD_EW', (np.float32, 128)), - ('FullImagePSD_EW', (np.float32, 128)), - ('ImageCentreSquarePSD_NS', (np.float32, 128)), - ('FullImagePSD_NS', (np.float32, 128)) + ("FullImageMinimumCount", np.uint16), + ("FullImageMaximumCount", np.uint16), + ("EarthDiskMinimumCount", np.uint16), + ("EarthDiskMaximumCount", np.uint16), + ("MoonMinimumCount", np.uint16), + ("MoonMaximumCount", np.uint16), + ("FullImageMeanCount", np.float32), + ("FullImageStandardDeviation", np.float32), + ("EarthDiskMeanCount", np.float32), + ("EarthDiskStandardDeviation", np.float32), + ("MoonMeanCount", np.float32), + ("MoonStandardDeviation", np.float32), + ("SpaceMeanCount", np.float32), + ("SpaceStandardDeviation", np.float32), + ("SESpaceCornerMeanCount", np.float32), + ("SESpaceCornerStandardDeviation", np.float32), + ("SWSpaceCornerMeanCount", np.float32), + ("SWSpaceCornerStandardDeviation", np.float32), + ("NESpaceCornerMeanCount", np.float32), + ("NESpaceCornerStandardDeviation", np.float32), + ("NWSpaceCornerMeanCount", np.float32), + ("NWSpaceCornerStandardDeviation", np.float32), + ("4SpaceCornersMeanCount", np.float32), + ("4SpaceCornersStandardDeviation", np.float32), + ("FullImageHistogram", (np.uint32, 256)), + ("EarthDiskHistogram", (np.uint32, 256)), + ("ImageCentreSquareHistogram", (np.uint32, 256)), + ("SESpaceCornerHistogram", (np.uint32, 128)), + ("SWSpaceCornerHistogram", (np.uint32, 128)), + ("NESpaceCornerHistogram", (np.uint32, 128)), + ("NWSpaceCornerHistogram", (np.uint32, 128)), + ("FullImageEntropy", (np.float32, 3)), + ("EarthDiskEntropy", (np.float32, 3)), + ("ImageCentreSquareEntropy", (np.float32, 3)), + ("SESpaceCornerEntropy", (np.float32, 3)), + ("SWSpaceCornerEntropy", (np.float32, 3)), + ("NESpaceCornerEntropy", (np.float32, 3)), + ("NWSpaceCornerEntropy", (np.float32, 3)), + ("4SpaceCornersEntropy", (np.float32, 3)), + ("ImageCentreSquarePSD_EW", (np.float32, 128)), + ("FullImagePSD_EW", (np.float32, 128)), + ("ImageCentreSquarePSD_NS", (np.float32, 128)), + ("FullImagePSD_NS", (np.float32, 128)) ] l15_rad_quality = [ - ('FullImageMinimumCount', np.uint16), - ('FullImageMaximumCount', np.uint16), - ('EarthDiskMinimumCount', np.uint16), - ('EarthDiskMaximumCount', np.uint16), - ('FullImageMeanCount', np.float32), - ('FullImageStandardDeviation', np.float32), - 
('EarthDiskMeanCount', np.float32), - ('EarthDiskStandardDeviation', np.float32), - ('SpaceMeanCount', np.float32), - ('SpaceStandardDeviation', np.float32), - ('FullImageHistogram', (np.uint32, 256)), - ('EarthDiskHistogram', (np.uint32, 256)), - ('ImageCentreSquareHistogram', (np.uint32, 256)), - ('FullImageEntropy', (np.float32, 3)), - ('EarthDiskEntropy', (np.float32, 3)), - ('ImageCentreSquareEntropy', (np.float32, 3)), - ('ImageCentreSquarePSD_EW', (np.float32, 128)), - ('FullImagePSD_EW', (np.float32, 128)), - ('ImageCentreSquarePSD_NS', (np.float32, 128)), - ('FullImagePSD_NS', (np.float32, 128)), - ('SESpaceCornerL15_RMS', np.float32), - ('SESpaceCornerL15_Mean', np.float32), - ('SWSpaceCornerL15_RMS', np.float32), - ('SWSpaceCornerL15_Mean', np.float32), - ('NESpaceCornerL15_RMS', np.float32), - ('NESpaceCornerL15_Mean', np.float32), - ('NWSpaceCornerL15_RMS', np.float32), - ('NWSpaceCornerL15_Mean', np.float32) + ("FullImageMinimumCount", np.uint16), + ("FullImageMaximumCount", np.uint16), + ("EarthDiskMinimumCount", np.uint16), + ("EarthDiskMaximumCount", np.uint16), + ("FullImageMeanCount", np.float32), + ("FullImageStandardDeviation", np.float32), + ("EarthDiskMeanCount", np.float32), + ("EarthDiskStandardDeviation", np.float32), + ("SpaceMeanCount", np.float32), + ("SpaceStandardDeviation", np.float32), + ("FullImageHistogram", (np.uint32, 256)), + ("EarthDiskHistogram", (np.uint32, 256)), + ("ImageCentreSquareHistogram", (np.uint32, 256)), + ("FullImageEntropy", (np.float32, 3)), + ("EarthDiskEntropy", (np.float32, 3)), + ("ImageCentreSquareEntropy", (np.float32, 3)), + ("ImageCentreSquarePSD_EW", (np.float32, 128)), + ("FullImagePSD_EW", (np.float32, 128)), + ("ImageCentreSquarePSD_NS", (np.float32, 128)), + ("FullImagePSD_NS", (np.float32, 128)), + ("SESpaceCornerL15_RMS", np.float32), + ("SESpaceCornerL15_Mean", np.float32), + ("SWSpaceCornerL15_RMS", np.float32), + ("SWSpaceCornerL15_Mean", np.float32), + ("NESpaceCornerL15_RMS", np.float32), + ("NESpaceCornerL15_Mean", np.float32), + ("NWSpaceCornerL15_RMS", np.float32), + ("NWSpaceCornerL15_Mean", np.float32) ] record = [ - ('L10RadQuality', (l10_rad_quality, 42)), - ('L15RadQuality', (l15_rad_quality, 12)) + ("L10RadQuality", (l10_rad_quality, 42)), + ("L15RadQuality", (l15_rad_quality, 12)) ] return record @@ -928,19 +928,19 @@ def radiometric_quality(self): def geometric_quality(self): """Get geometric quality record data.""" absolute_accuracy = [ - ('QualityInfoValidity', np.uint8), - ('EastWestAccuracyRMS', np.float32), - ('NorthSouthAccuracyRMS', np.float32), - ('MagnitudeRMS', np.float32), - ('EastWestUncertaintyRMS', np.float32), - ('NorthSouthUncertaintyRMS', np.float32), - ('MagnitudeUncertaintyRMS', np.float32), - ('EastWestMaxDeviation', np.float32), - ('NorthSouthMaxDeviation', np.float32), - ('MagnitudeMaxDeviation', np.float32), - ('EastWestUncertaintyMax', np.float32), - ('NorthSouthUncertaintyMax', np.float32), - ('MagnitudeUncertaintyMax', np.float32) + ("QualityInfoValidity", np.uint8), + ("EastWestAccuracyRMS", np.float32), + ("NorthSouthAccuracyRMS", np.float32), + ("MagnitudeRMS", np.float32), + ("EastWestUncertaintyRMS", np.float32), + ("NorthSouthUncertaintyRMS", np.float32), + ("MagnitudeUncertaintyRMS", np.float32), + ("EastWestMaxDeviation", np.float32), + ("NorthSouthMaxDeviation", np.float32), + ("MagnitudeMaxDeviation", np.float32), + ("EastWestUncertaintyMax", np.float32), + ("NorthSouthUncertaintyMax", np.float32), + ("MagnitudeUncertaintyMax", np.float32) ] relative_accuracy = 
absolute_accuracy @@ -948,35 +948,35 @@ def geometric_quality(self): pixels_16_relative_accuracy = absolute_accuracy misregistration_residuals = [ - ('QualityInfoValidity', np.uint8), - ('EastWestResidual', np.float32), - ('NorthSouthResidual', np.float32), - ('EastWestUncertainty', np.float32), - ('NorthSouthUncertainty', np.float32), - ('EastWestRMS', np.float32), - ('NorthSouthRMS', np.float32), - ('EastWestMagnitude', np.float32), - ('NorthSouthMagnitude', np.float32), - ('EastWestMagnitudeUncertainty', np.float32), - ('NorthSouthMagnitudeUncertainty', np.float32) + ("QualityInfoValidity", np.uint8), + ("EastWestResidual", np.float32), + ("NorthSouthResidual", np.float32), + ("EastWestUncertainty", np.float32), + ("NorthSouthUncertainty", np.float32), + ("EastWestRMS", np.float32), + ("NorthSouthRMS", np.float32), + ("EastWestMagnitude", np.float32), + ("NorthSouthMagnitude", np.float32), + ("EastWestMagnitudeUncertainty", np.float32), + ("NorthSouthMagnitudeUncertainty", np.float32) ] geometric_quality_status = [ - ('QualityNominal', np.uint8), - ('NominalAbsolute', np.uint8), - ('NominalRelativeToPreviousImage', np.uint8), - ('NominalForREL500', np.uint8), - ('NominalForREL16', np.uint8), - ('NominalForResMisreg', np.uint8) + ("QualityNominal", np.uint8), + ("NominalAbsolute", np.uint8), + ("NominalRelativeToPreviousImage", np.uint8), + ("NominalForREL500", np.uint8), + ("NominalForREL16", np.uint8), + ("NominalForResMisreg", np.uint8) ] record = [ - ('AbsoluteAccuracy', (absolute_accuracy, 12)), - ('RelativeAccuracy', (relative_accuracy, 12)), - ('500PixelsRelativeAccuracy', (pixels_500_relative_accuracy, 12)), - ('16PixelsRelativeAccuracy', (pixels_16_relative_accuracy, 12)), - ('MisregistrationResiduals', (misregistration_residuals, 12)), - ('GeometricQualityStatus', (geometric_quality_status, 12)) + ("AbsoluteAccuracy", (absolute_accuracy, 12)), + ("RelativeAccuracy", (relative_accuracy, 12)), + ("500PixelsRelativeAccuracy", (pixels_500_relative_accuracy, 12)), + ("16PixelsRelativeAccuracy", (pixels_16_relative_accuracy, 12)), + ("MisregistrationResiduals", (misregistration_residuals, 12)), + ("GeometricQualityStatus", (geometric_quality_status, 12)) ] return record @@ -985,22 +985,22 @@ def geometric_quality(self): def timeliness_and_completeness(self): """Get time and completeness record data.""" timeliness = [ - ('MaxDelay', np.float32), - ('MinDelay', np.float32), - ('MeanDelay', np.float32) + ("MaxDelay", np.float32), + ("MinDelay", np.float32), + ("MeanDelay", np.float32) ] completeness = [ - ('PlannedL15ImageLines', np.uint16), - ('GeneratedL15ImageLines', np.uint16), - ('ValidL15ImageLines', np.uint16), - ('DummyL15ImageLines', np.uint16), - ('CorruptedL15ImageLines', np.uint16) + ("PlannedL15ImageLines", np.uint16), + ("GeneratedL15ImageLines", np.uint16), + ("ValidL15ImageLines", np.uint16), + ("DummyL15ImageLines", np.uint16), + ("CorruptedL15ImageLines", np.uint16) ] record = [ - ('Timeliness', timeliness), - ('Completeness', (completeness, 12)) + ("Timeliness", timeliness), + ("Completeness", (completeness, 12)) ] return record @@ -1013,15 +1013,15 @@ def get(self): """Get record data array.""" # X bytes record = [ - ('SatelliteStatus', self.satellite_status), - ('ImageAcquisition', self.image_acquisition), - ('CelestialEvents', self.celestial_events), - ('ImageDescription', self.image_description), - ('RadiometricProcessing', self.radiometric_processing), - ('GeometricProcessing', self.geometric_processing) + ("SatelliteStatus", self.satellite_status), + 
("ImageAcquisition", self.image_acquisition), + ("CelestialEvents", self.celestial_events), + ("ImageDescription", self.image_description), + ("RadiometricProcessing", self.radiometric_processing), + ("GeometricProcessing", self.geometric_processing) ] - return np.dtype(record).newbyteorder('>') + return np.dtype(record).newbyteorder(">") def get_native_header(with_archive_header=True): @@ -1035,22 +1035,22 @@ def get_native_header(with_archive_header=True): DEFAULT_15_SECONDARY_PRODUCT_HEADER = { - 'NorthLineSelectedRectangle': {'Value': VISIR_NUM_LINES}, - 'SouthLineSelectedRectangle': {'Value': 1}, - 'EastColumnSelectedRectangle': {'Value': 1}, - 'WestColumnSelectedRectangle': {'Value': VISIR_NUM_COLUMNS}, - 'NumberColumnsVISIR': {'Value': VISIR_NUM_COLUMNS}, - 'NumberLinesVISIR': {'Value': VISIR_NUM_LINES}, - 'NumberColumnsHRV': {'Value': HRV_NUM_COLUMNS}, - 'NumberLinesHRV': {'Value': HRV_NUM_LINES}, - 'SelectedBandIDs': {'Value': 'XXXXXXXXXXXX'} + "NorthLineSelectedRectangle": {"Value": VISIR_NUM_LINES}, + "SouthLineSelectedRectangle": {"Value": 1}, + "EastColumnSelectedRectangle": {"Value": 1}, + "WestColumnSelectedRectangle": {"Value": VISIR_NUM_COLUMNS}, + "NumberColumnsVISIR": {"Value": VISIR_NUM_COLUMNS}, + "NumberLinesVISIR": {"Value": VISIR_NUM_LINES}, + "NumberColumnsHRV": {"Value": HRV_NUM_COLUMNS}, + "NumberLinesHRV": {"Value": HRV_NUM_LINES}, + "SelectedBandIDs": {"Value": "XXXXXXXXXXXX"} } """Default secondary product header for files containing all channels.""" hrit_epilogue = np.dtype( - Msg15NativeTrailerRecord().seviri_l15_trailer).newbyteorder('>') + Msg15NativeTrailerRecord().seviri_l15_trailer).newbyteorder(">") hrit_prologue = HritPrologue().get() impf_configuration = np.dtype( - L15DataHeaderRecord().impf_configuration).newbyteorder('>') + L15DataHeaderRecord().impf_configuration).newbyteorder(">") native_trailer = Msg15NativeTrailerRecord().get() diff --git a/satpy/readers/seviri_l1b_nc.py b/satpy/readers/seviri_l1b_nc.py index ae56053114..82e3b15297 100644 --- a/satpy/readers/seviri_l1b_nc.py +++ b/satpy/readers/seviri_l1b_nc.py @@ -41,7 +41,7 @@ ) from satpy.utils import get_legacy_chunk_size -logger = logging.getLogger('nc_msg') +logger = logging.getLogger("nc_msg") CHUNK_SIZE = get_legacy_chunk_size() @@ -73,9 +73,9 @@ def __init__(self, filename, filename_info, filetype_info, @property def _repeat_cycle_duration(self): """Get repeat cycle duration from the metadata.""" - if self.nc.attrs['nominal_image_scanning'] == 'T': + if self.nc.attrs["nominal_image_scanning"] == "T": return 15 - elif self.nc.attrs['reduced_scanning'] == 'T': + elif self.nc.attrs["reduced_scanning"] == "T": return 5 @property @@ -114,55 +114,55 @@ def end_time(self): def nc(self): """Read the file.""" return open_dataset(self.filename, decode_cf=True, mask_and_scale=False, - chunks=CHUNK_SIZE).rename({'num_columns_vis_ir': 'x', - 'num_rows_vis_ir': 'y'}) + chunks=CHUNK_SIZE).rename({"num_columns_vis_ir": "x", + "num_rows_vis_ir": "y"}) def get_metadata(self): """Get metadata.""" # Obtain some area definition attributes - equatorial_radius = self.nc.attrs['equatorial_radius'] * 1000. 
- polar_radius = (self.nc.attrs['north_polar_radius'] * 1000 + self.nc.attrs['south_polar_radius'] * 1000) * 0.5 - ssp_lon = self.nc.attrs['longitude_of_SSP'] - self.mda['vis_ir_grid_origin'] = self.nc.attrs['vis_ir_grid_origin'] - self.mda['vis_ir_column_dir_grid_step'] = self.nc.attrs['vis_ir_column_dir_grid_step'] * 1000.0 - self.mda['vis_ir_line_dir_grid_step'] = self.nc.attrs['vis_ir_line_dir_grid_step'] * 1000.0 + equatorial_radius = self.nc.attrs["equatorial_radius"] * 1000. + polar_radius = (self.nc.attrs["north_polar_radius"] * 1000 + self.nc.attrs["south_polar_radius"] * 1000) * 0.5 + ssp_lon = self.nc.attrs["longitude_of_SSP"] + self.mda["vis_ir_grid_origin"] = self.nc.attrs["vis_ir_grid_origin"] + self.mda["vis_ir_column_dir_grid_step"] = self.nc.attrs["vis_ir_column_dir_grid_step"] * 1000.0 + self.mda["vis_ir_line_dir_grid_step"] = self.nc.attrs["vis_ir_line_dir_grid_step"] * 1000.0 # if FSFile is used h5netcdf engine is used which outputs arrays instead of floats for attributes if isinstance(equatorial_radius, np.ndarray): equatorial_radius = equatorial_radius.item() polar_radius = polar_radius.item() ssp_lon = ssp_lon.item() - self.mda['vis_ir_column_dir_grid_step'] = self.mda['vis_ir_column_dir_grid_step'].item() - self.mda['vis_ir_line_dir_grid_step'] = self.mda['vis_ir_line_dir_grid_step'].item() + self.mda["vis_ir_column_dir_grid_step"] = self.mda["vis_ir_column_dir_grid_step"].item() + self.mda["vis_ir_line_dir_grid_step"] = self.mda["vis_ir_line_dir_grid_step"].item() - self.mda['projection_parameters'] = {'a': equatorial_radius, - 'b': polar_radius, - 'h': 35785831.00, - 'ssp_longitude': ssp_lon} + self.mda["projection_parameters"] = {"a": equatorial_radius, + "b": polar_radius, + "h": 35785831.00, + "ssp_longitude": ssp_lon} - self.mda['number_of_lines'] = int(self.nc.dims['y']) - self.mda['number_of_columns'] = int(self.nc.dims['x']) + self.mda["number_of_lines"] = int(self.nc.dims["y"]) + self.mda["number_of_columns"] = int(self.nc.dims["x"]) # only needed for HRV channel which is not implemented yet # self.mda['hrv_number_of_lines'] = int(self.nc.dims['num_rows_hrv']) # self.mda['hrv_number_of_columns'] = int(self.nc.dims['num_columns_hrv']) self.deltaSt = self.reference + datetime.timedelta( - days=int(self.nc.attrs['true_repeat_cycle_start_day']), - milliseconds=int(self.nc.attrs['true_repeat_cycle_start_mi_sec'])) + days=int(self.nc.attrs["true_repeat_cycle_start_day"]), + milliseconds=int(self.nc.attrs["true_repeat_cycle_start_mi_sec"])) self.deltaEnd = self.reference + datetime.timedelta( - days=int(self.nc.attrs['planned_repeat_cycle_end_day']), - milliseconds=int(self.nc.attrs['planned_repeat_cycle_end_mi_sec'])) + days=int(self.nc.attrs["planned_repeat_cycle_end_day"]), + milliseconds=int(self.nc.attrs["planned_repeat_cycle_end_mi_sec"])) - self.north = int(self.nc.attrs['north_most_line']) - self.east = int(self.nc.attrs['east_most_pixel']) - self.west = int(self.nc.attrs['west_most_pixel']) - self.south = int(self.nc.attrs['south_most_line']) - self.platform_id = int(self.nc.attrs['satellite_id']) + self.north = int(self.nc.attrs["north_most_line"]) + self.east = int(self.nc.attrs["east_most_pixel"]) + self.west = int(self.nc.attrs["west_most_pixel"]) + self.south = int(self.nc.attrs["south_most_line"]) + self.platform_id = int(self.nc.attrs["satellite_id"]) def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" - dataset = self.nc[dataset_info['nc_key']] + dataset = self.nc[dataset_info["nc_key"]] # Correct for the scan line order # 
TODO: Move _add_scanline_acq_time() call to the end of the method @@ -171,7 +171,7 @@ def get_dataset(self, dataset_id, dataset_info): dataset = dataset.sel(y=slice(None, None, -1)) dataset = self.calibrate(dataset, dataset_id) - is_calibration = dataset_id['calibration'] in ['radiance', 'reflectance', 'brightness_temperature'] + is_calibration = dataset_id["calibration"] in ["radiance", "reflectance", "brightness_temperature"] if (is_calibration and self.mask_bad_quality_scan_lines): # noqa: E129 dataset = self._mask_bad_quality(dataset, dataset_info) @@ -180,17 +180,17 @@ def get_dataset(self, dataset_id, dataset_info): def calibrate(self, dataset, dataset_id): """Calibrate the data.""" - channel = dataset_id['name'] - calibration = dataset_id['calibration'] + channel = dataset_id["name"] + calibration = dataset_id["calibration"] - if dataset_id['calibration'] == 'counts': - dataset.attrs['_FillValue'] = 0 + if dataset_id["calibration"] == "counts": + dataset.attrs["_FillValue"] = 0 calib = SEVIRICalibrationHandler( platform_id=int(self.platform_id), channel_name=channel, coefs=self._get_calib_coefs(dataset, channel), - calib_mode='NOMINAL', + calib_mode="NOMINAL", scan_time=self.observation_start_time ) @@ -199,59 +199,59 @@ def calibrate(self, dataset, dataset_id): def _get_calib_coefs(self, dataset, channel): """Get coefficients for calibration from counts to radiance.""" band_idx = list(CHANNEL_NAMES.values()).index(channel) - offset = dataset.attrs['add_offset'].astype('float32') - gain = dataset.attrs['scale_factor'].astype('float32') + offset = dataset.attrs["add_offset"].astype("float32") + gain = dataset.attrs["scale_factor"].astype("float32") # Only one calibration available here return { - 'coefs': { - 'NOMINAL': { - 'gain': gain, - 'offset': offset + "coefs": { + "NOMINAL": { + "gain": gain, + "offset": offset }, - 'EXTERNAL': self.ext_calib_coefs.get(channel, {}) + "EXTERNAL": self.ext_calib_coefs.get(channel, {}) }, - 'radiance_type': self.nc['planned_chan_processing'].values[band_idx] + "radiance_type": self.nc["planned_chan_processing"].values[band_idx] } def _mask_bad_quality(self, dataset, dataset_info): """Mask scanlines with bad quality.""" - ch_number = int(dataset_info['nc_key'][2:]) - line_validity = self.nc['channel_data_visir_data_line_validity'][:, ch_number - 1].data - line_geometric_quality = self.nc['channel_data_visir_data_line_geometric_quality'][:, ch_number - 1].data - line_radiometric_quality = self.nc['channel_data_visir_data_line_radiometric_quality'][:, ch_number - 1].data + ch_number = int(dataset_info["nc_key"][2:]) + line_validity = self.nc["channel_data_visir_data_line_validity"][:, ch_number - 1].data + line_geometric_quality = self.nc["channel_data_visir_data_line_geometric_quality"][:, ch_number - 1].data + line_radiometric_quality = self.nc["channel_data_visir_data_line_radiometric_quality"][:, ch_number - 1].data return mask_bad_quality(dataset, line_validity, line_geometric_quality, line_radiometric_quality) def _update_attrs(self, dataset, dataset_info): """Update dataset attributes.""" - dataset.attrs.update(self.nc[dataset_info['nc_key']].attrs) + dataset.attrs.update(self.nc[dataset_info["nc_key"]].attrs) dataset.attrs.update(dataset_info) - dataset.attrs['platform_name'] = "Meteosat-" + SATNUM[self.platform_id] - dataset.attrs['sensor'] = 'seviri' - dataset.attrs['orbital_parameters'] = { - 'projection_longitude': self.mda['projection_parameters']['ssp_longitude'], - 'projection_latitude': 0., - 'projection_altitude': 
self.mda['projection_parameters']['h'], - 'satellite_nominal_longitude': float( - self.nc.attrs['nominal_longitude'] + dataset.attrs["platform_name"] = "Meteosat-" + SATNUM[self.platform_id] + dataset.attrs["sensor"] = "seviri" + dataset.attrs["orbital_parameters"] = { + "projection_longitude": self.mda["projection_parameters"]["ssp_longitude"], + "projection_latitude": 0., + "projection_altitude": self.mda["projection_parameters"]["h"], + "satellite_nominal_longitude": float( + self.nc.attrs["nominal_longitude"] ), - 'satellite_nominal_latitude': 0.0, + "satellite_nominal_latitude": 0.0, } - dataset.attrs['time_parameters'] = { - 'nominal_start_time': self.nominal_start_time, - 'nominal_end_time': self.nominal_end_time, - 'observation_start_time': self.observation_start_time, - 'observation_end_time': self.observation_end_time, + dataset.attrs["time_parameters"] = { + "nominal_start_time": self.nominal_start_time, + "nominal_end_time": self.nominal_end_time, + "observation_start_time": self.observation_start_time, + "observation_end_time": self.observation_end_time, } try: actual_lon, actual_lat, actual_alt = self.satpos - dataset.attrs['orbital_parameters'].update({ - 'satellite_actual_longitude': actual_lon, - 'satellite_actual_latitude': actual_lat, - 'satellite_actual_altitude': actual_alt, + dataset.attrs["orbital_parameters"].update({ + "satellite_actual_longitude": actual_lon, + "satellite_actual_latitude": actual_lat, + "satellite_actual_altitude": actual_alt, }) except NoValidOrbitParams as err: logger.warning(err) - dataset.attrs['georef_offset_corrected'] = self._get_earth_model() == 2 + dataset.attrs["georef_offset_corrected"] = self._get_earth_model() == 2 # remove attributes from original file which don't apply anymore strip_attrs = ["comment", "long_name", "nc_key", "scale_factor", "add_offset", "valid_min", "valid_max"] @@ -277,30 +277,30 @@ def get_area_def(self, dataset_id): """ pdict = {} - pdict['a'] = self.mda['projection_parameters']['a'] - pdict['b'] = self.mda['projection_parameters']['b'] - pdict['h'] = self.mda['projection_parameters']['h'] - pdict['ssp_lon'] = self.mda['projection_parameters']['ssp_longitude'] - - area_naming_input_dict = {'platform_name': 'msg', - 'instrument_name': 'seviri', - 'resolution': int(dataset_id['resolution']) + pdict["a"] = self.mda["projection_parameters"]["a"] + pdict["b"] = self.mda["projection_parameters"]["b"] + pdict["h"] = self.mda["projection_parameters"]["h"] + pdict["ssp_lon"] = self.mda["projection_parameters"]["ssp_longitude"] + + area_naming_input_dict = {"platform_name": "msg", + "instrument_name": "seviri", + "resolution": int(dataset_id["resolution"]) } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('seviri', pdict['ssp_lon'])}) - - if dataset_id['name'] == 'HRV': - pdict['nlines'] = self.mda['hrv_number_of_lines'] - pdict['ncols'] = self.mda['hrv_number_of_columns'] - pdict['a_name'] = area_naming['area_id'] - pdict['a_desc'] = area_naming['description'] - pdict['p_id'] = "" + **get_service_mode("seviri", pdict["ssp_lon"])}) + + if dataset_id["name"] == "HRV": + pdict["nlines"] = self.mda["hrv_number_of_lines"] + pdict["ncols"] = self.mda["hrv_number_of_columns"] + pdict["a_name"] = area_naming["area_id"] + pdict["a_desc"] = area_naming["description"] + pdict["p_id"] = "" else: - pdict['nlines'] = self.mda['number_of_lines'] - pdict['ncols'] = self.mda['number_of_columns'] - pdict['a_name'] = area_naming['area_id'] - pdict['a_desc'] = area_naming['description'] - pdict['p_id'] = 
"" + pdict["nlines"] = self.mda["number_of_lines"] + pdict["ncols"] = self.mda["number_of_columns"] + pdict["a_name"] = area_naming["area_id"] + pdict["a_desc"] = area_naming["description"] + pdict["p_id"] = "" area = get_area_definition(pdict, self.get_area_extent(dataset_id)) @@ -310,20 +310,20 @@ def get_area_extent(self, dsid): """Get the area extent.""" # following calculations assume grid origin is south-east corner # section 7.2.4 of MSG Level 1.5 Image Data Format Description - origins = {0: 'NW', 1: 'SW', 2: 'SE', 3: 'NE'} - grid_origin = self.mda['vis_ir_grid_origin'] + origins = {0: "NW", 1: "SW", 2: "SE", 3: "NE"} + grid_origin = self.mda["vis_ir_grid_origin"] grid_origin = int(grid_origin, 16) if grid_origin != 2: raise NotImplementedError( - 'Grid origin not supported number: {}, {} corner' + "Grid origin not supported number: {}, {} corner" .format(grid_origin, origins[grid_origin]) ) center_point = 3712 / 2 - column_step = self.mda['vis_ir_column_dir_grid_step'] + column_step = self.mda["vis_ir_column_dir_grid_step"] - line_step = self.mda['vis_ir_line_dir_grid_step'] + line_step = self.mda["vis_ir_line_dir_grid_step"] # check for Earth model as this affects the north-south and # west-east offsets @@ -337,7 +337,7 @@ def get_area_extent(self, dsid): we_offset = 0.5 # west +ve else: raise NotImplementedError( - 'unrecognised earth model: {}'.format(earth_model) + "unrecognised earth model: {}".format(earth_model) ) # section 3.1.5 of MSG Level 1.5 Image Data Format Description ll_c = (center_point - self.west - 0.5 + we_offset) * column_step @@ -349,7 +349,7 @@ def get_area_extent(self, dsid): return area_extent def _add_scanline_acq_time(self, dataset, dataset_id): - if dataset_id['name'] == 'HRV': + if dataset_id["name"] == "HRV": # TODO: Enable once HRV reading has been fixed. 
return # days, msecs = self._get_acq_time_hrv() @@ -359,16 +359,16 @@ def _add_scanline_acq_time(self, dataset, dataset_id): add_scanline_acq_time(dataset, acq_time) def _get_acq_time_hrv(self): - day_key = 'channel_data_hrv_data_l10_line_mean_acquisition_time_day' - msec_key = 'channel_data_hrv_data_l10_line_mean_acquisition_msec' + day_key = "channel_data_hrv_data_l10_line_mean_acquisition_time_day" + msec_key = "channel_data_hrv_data_l10_line_mean_acquisition_msec" days = self.nc[day_key].isel(channels_hrv_dim=0) msecs = self.nc[msec_key].isel(channels_hrv_dim=0) return days, msecs def _get_acq_time_visir(self, dataset_id): - band_idx = list(CHANNEL_NAMES.values()).index(dataset_id['name']) - day_key = 'channel_data_visir_data_l10_line_mean_acquisition_time_day' - msec_key = 'channel_data_visir_data_l10_line_mean_acquisition_msec' + band_idx = list(CHANNEL_NAMES.values()).index(dataset_id["name"]) + day_key = "channel_data_visir_data_l10_line_mean_acquisition_time_day" + msec_key = "channel_data_visir_data_l10_line_mean_acquisition_msec" days = self.nc[day_key].isel(channels_vis_ir_dim=band_idx) msecs = self.nc[msec_key].isel(channels_vis_ir_dim=band_idx) return days, msecs @@ -382,31 +382,31 @@ def satpos(self): Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ start_times_poly = get_cds_time( - days=self.nc['orbit_polynomial_start_time_day'].values, - msecs=self.nc['orbit_polynomial_start_time_msec'].values + days=self.nc["orbit_polynomial_start_time_day"].values, + msecs=self.nc["orbit_polynomial_start_time_msec"].values ) end_times_poly = get_cds_time( - days=self.nc['orbit_polynomial_end_time_day'].values, - msecs=self.nc['orbit_polynomial_end_time_msec'].values + days=self.nc["orbit_polynomial_end_time_day"].values, + msecs=self.nc["orbit_polynomial_end_time_msec"].values ) orbit_polynomials = { - 'StartTime': np.array([start_times_poly]), - 'EndTime': np.array([end_times_poly]), - 'X': self.nc['orbit_polynomial_x'].values, - 'Y': self.nc['orbit_polynomial_y'].values, - 'Z': self.nc['orbit_polynomial_z'].values, + "StartTime": np.array([start_times_poly]), + "EndTime": np.array([end_times_poly]), + "X": self.nc["orbit_polynomial_x"].values, + "Y": self.nc["orbit_polynomial_y"].values, + "Z": self.nc["orbit_polynomial_z"].values, } poly_finder = OrbitPolynomialFinder(orbit_polynomials) orbit_polynomial = poly_finder.get_orbit_polynomial(self.start_time) return get_satpos( orbit_polynomial=orbit_polynomial, time=self.start_time, - semi_major_axis=self.mda['projection_parameters']['a'], - semi_minor_axis=self.mda['projection_parameters']['b'], + semi_major_axis=self.mda["projection_parameters"]["a"], + semi_minor_axis=self.mda["projection_parameters"]["b"], ) def _get_earth_model(self): - return int(self.nc.attrs['type_of_earth_model'], 16) + return int(self.nc.attrs["type_of_earth_model"], 16) class NCSEVIRIHRVFileHandler(NCSEVIRIFileHandler, SEVIRICalibrationHandler): diff --git a/satpy/readers/seviri_l2_bufr.py b/satpy/readers/seviri_l2_bufr.py index cb38f44acf..3b7188945c 100644 --- a/satpy/readers/seviri_l2_bufr.py +++ b/satpy/readers/seviri_l2_bufr.py @@ -44,15 +44,15 @@ "Missing eccodes-python and/or eccodes C-library installation. 
Use conda to install eccodes") CHUNK_SIZE = get_legacy_chunk_size() -logger = logging.getLogger('SeviriL2Bufr') +logger = logging.getLogger("SeviriL2Bufr") -data_center_dict = {55: {'ssp': 'E0415', 'name': '08'}, 56: {'ssp': 'E0455', 'name': '09'}, - 57: {'ssp': 'E0095', 'name': '10'}, 70: {'ssp': 'E0000', 'name': '11'}} +data_center_dict = {55: {"ssp": "E0415", "name": "08"}, 56: {"ssp": "E0455", "name": "09"}, + 57: {"ssp": "E0095", "name": "10"}, 70: {"ssp": "E0000", "name": "11"}} -seg_size_dict = {'seviri_l2_bufr_asr': 16, 'seviri_l2_bufr_cla': 16, - 'seviri_l2_bufr_csr': 16, 'seviri_l2_bufr_gii': 3, - 'seviri_l2_bufr_thu': 16, 'seviri_l2_bufr_toz': 3, - 'seviri_l2_bufr_amv': 24} +seg_size_dict = {"seviri_l2_bufr_asr": 16, "seviri_l2_bufr_cla": 16, + "seviri_l2_bufr_csr": 16, "seviri_l2_bufr_gii": 3, + "seviri_l2_bufr_thu": 16, "seviri_l2_bufr_toz": 3, + "seviri_l2_bufr_amv": 24} class SeviriL2BufrFileHandler(BaseFileHandler): @@ -83,39 +83,39 @@ class SeviriL2BufrFileHandler(BaseFileHandler): """ def __init__(self, filename, filename_info, filetype_info, with_area_definition=False, - rectification_longitude='default', **kwargs): + rectification_longitude="default", **kwargs): """Initialise the file handler for SEVIRI L2 BUFR data.""" super(SeviriL2BufrFileHandler, self).__init__(filename, filename_info, filetype_info) - if ('server' in filename_info): + if ("server" in filename_info): # EUMETSAT Offline Bufr product self.mpef_header = self._read_mpef_header() else: # Product was retrieved from the EUMETSAT Data Center - timeStr = self.get_attribute('typicalDate')+self.get_attribute('typicalTime') + timeStr = self.get_attribute("typicalDate")+self.get_attribute("typicalTime") buf_start_time = datetime.strptime(timeStr, "%Y%m%d%H%M%S") - sc_id = self.get_attribute('satelliteIdentifier') + sc_id = self.get_attribute("satelliteIdentifier") self.mpef_header = {} - self.mpef_header['NominalTime'] = buf_start_time - self.mpef_header['SpacecraftName'] = data_center_dict[sc_id]['name'] - self.mpef_header['RectificationLongitude'] = data_center_dict[sc_id]['ssp'] + self.mpef_header["NominalTime"] = buf_start_time + self.mpef_header["SpacecraftName"] = data_center_dict[sc_id]["name"] + self.mpef_header["RectificationLongitude"] = data_center_dict[sc_id]["ssp"] - if rectification_longitude != 'default': - self.mpef_header['RectificationLongitude'] = f'E{int(rectification_longitude * 10):04d}' + if rectification_longitude != "default": + self.mpef_header["RectificationLongitude"] = f"E{int(rectification_longitude * 10):04d}" self.with_adef = with_area_definition - if self.with_adef and filetype_info['file_type'] == 'seviri_l2_bufr_amv': + if self.with_adef and filetype_info["file_type"] == "seviri_l2_bufr_amv": logging.warning("AMV BUFR data cannot be loaded with an area definition. Setting self.with_def = False.") self.with_adef = False - self.seg_size = seg_size_dict[filetype_info['file_type']] + self.seg_size = seg_size_dict[filetype_info["file_type"]] @property def start_time(self): """Return the repeat cycle start time.""" - return self.mpef_header['NominalTime'] + return self.mpef_header["NominalTime"] @property def end_time(self): @@ -125,13 +125,13 @@ def end_time(self): @property def platform_name(self): """Return spacecraft name.""" - return 'MET{}'.format(self.mpef_header['SpacecraftName']) + return "MET{}".format(self.mpef_header["SpacecraftName"]) @property def ssp_lon(self): """Return subsatellite point longitude.""" # e.g. 
E0415 - ssp_lon = self.mpef_header['RectificationLongitude'] + ssp_lon = self.mpef_header["RectificationLongitude"] return float(ssp_lon[1:])/10. def get_area_def(self, key): @@ -157,7 +157,7 @@ def get_attribute(self, key): bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break - ec.codes_set(bufr, 'unpack', 1) + ec.codes_set(bufr, "unpack", 1) attr = ec.codes_get(bufr, key) ec.codes_release(bufr) @@ -173,7 +173,7 @@ def get_array(self, key): if bufr is None: break - ec.codes_set(bufr, 'unpack', 1) + ec.codes_set(bufr, "unpack", 1) # if is the first message initialise our final array if (msgCount == 0): @@ -199,18 +199,18 @@ def get_dataset(self, dataset_id, dataset_info): and create the dataset with or without an AreaDefinition. """ - arr = self.get_array(dataset_info['key']) + arr = self.get_array(dataset_info["key"]) if self.with_adef: xarr = self.get_dataset_with_area_def(arr, dataset_id) # coordinates are not relevant when returning data with an AreaDefinition - if 'coordinates' in dataset_info.keys(): - del dataset_info['coordinates'] + if "coordinates" in dataset_info.keys(): + del dataset_info["coordinates"] else: xarr = xr.DataArray(arr, dims=["y"]) - if 'fill_value' in dataset_info: - xarr = xarr.where(xarr != dataset_info['fill_value']) + if "fill_value" in dataset_info: + xarr = xarr.where(xarr != dataset_info["fill_value"]) self._add_attributes(xarr, dataset_info) @@ -218,8 +218,8 @@ def get_dataset(self, dataset_id, dataset_info): def get_dataset_with_area_def(self, arr, dataset_id): """Get dataset with an AreaDefinition.""" - if dataset_id['name'] in ['latitude', 'longitude']: - self.__setattr__(dataset_id['name'], arr) + if dataset_id["name"] in ["latitude", "longitude"]: + self.__setattr__(dataset_id["name"], arr) xarr = xr.DataArray(arr, dims=["y"]) else: lons_1d, lats_1d, data_1d = da.compute(self.longitude, self.latitude, arr) @@ -231,13 +231,13 @@ def get_dataset_with_area_def(self, arr, dataset_id): data_2d[:] = np.nan data_2d[irow.compressed(), icol.compressed()] = data_1d[~irow.mask] - xarr = xr.DataArray(da.from_array(data_2d, CHUNK_SIZE), dims=('y', 'x')) + xarr = xr.DataArray(da.from_array(data_2d, CHUNK_SIZE), dims=("y", "x")) ntotal = len(icol) nvalid = len(icol.compressed()) if nvalid < ntotal: - logging.warning(f'{ntotal-nvalid} out of {ntotal} data points could not be put on ' - f'the grid {self._area_def.area_id}.') + logging.warning(f"{ntotal-nvalid} out of {ntotal} data points could not be put on " + f"the grid {self._area_def.area_id}.") return xarr @@ -248,31 +248,31 @@ def _construct_area_def(self, dataset_id): AreaDefinition: A pyresample AreaDefinition object containing the area definition. """ - res = dataset_id['resolution'] + res = dataset_id["resolution"] - area_naming_input_dict = {'platform_name': 'msg', - 'instrument_name': 'seviri', - 'resolution': res, + area_naming_input_dict = {"platform_name": "msg", + "instrument_name": "seviri", + "resolution": res, } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('seviri', self.ssp_lon)}) + **get_service_mode("seviri", self.ssp_lon)}) # Datasets with a segment size of 3 pixels extend outside the original SEVIRI 3km grid (with 1238 x 1238 # segments a 3 pixels). 
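The RectificationLongitude strings handled above encode the sub-satellite longitude in tenths of a degree east; a minimal round-trip sketch with an assumed value:

    # "E0455" corresponds to 45.5 degrees east, mirroring the ssp_lon property
    # and the f-string override shown above
    ssp_str = "E0455"
    ssp_lon = float(ssp_str[1:]) / 10.0          # 45.5
    assert f"E{int(ssp_lon * 10):04d}" == ssp_str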
Hence, we need to use corresponding area defintions in areas.yaml if self.seg_size == 3: - area_naming['area_id'] += '_ext' - area_naming['description'] += ' (extended outside original 3km grid)' + area_naming["area_id"] += "_ext" + area_naming["description"] += " (extended outside original 3km grid)" # Construct AreaDefinition from standardized area definition in areas.yaml. - stand_area_def = get_area_def(area_naming['area_id']) + stand_area_def = get_area_def(area_naming["area_id"]) return stand_area_def def _add_attributes(self, xarr, dataset_info): """Add dataset attributes to xarray.""" - xarr.attrs['sensor'] = 'SEVIRI' - xarr.attrs['platform_name'] = self.platform_name - xarr.attrs['ssp_lon'] = self.ssp_lon - xarr.attrs['seg_size'] = self.seg_size + xarr.attrs["sensor"] = "SEVIRI" + xarr.attrs["platform_name"] = self.platform_name + xarr.attrs["ssp_lon"] = self.ssp_lon + xarr.attrs["seg_size"] = self.seg_size xarr.attrs.update(dataset_info) diff --git a/satpy/readers/seviri_l2_grib.py b/satpy/readers/seviri_l2_grib.py index 214193039d..ca021a7bc0 100644 --- a/satpy/readers/seviri_l2_grib.py +++ b/satpy/readers/seviri_l2_grib.py @@ -57,7 +57,7 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Return the sensing start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): @@ -66,8 +66,8 @@ def end_time(self): def get_area_def(self, dataset_id): """Return the area definition for a dataset.""" - self._area_dict['column_step'] = dataset_id["resolution"] - self._area_dict['line_step'] = dataset_id["resolution"] + self._area_dict["column_step"] = dataset_id["resolution"] + self._area_dict["line_step"] = dataset_id["resolution"] area_extent = calculate_area_extent(self._area_dict) @@ -86,12 +86,12 @@ def get_dataset(self, dataset_id, dataset_info): dimensions within a given message if the file was only partly read (not looping over all messages) in an earlier instance. 
""" - logger.debug('Reading in file to get dataset with parameter number %d.', - dataset_info['parameter_number']) + logger.debug("Reading in file to get dataset with parameter number %d.", + dataset_info["parameter_number"]) xarr = None message_found = False - with open(self.filename, 'rb') as fh: + with open(self.filename, "rb") as fh: # Iterate over all messages and fetch data when the correct parameter number is found while True: @@ -101,19 +101,19 @@ def get_dataset(self, dataset_id, dataset_info): if not message_found: # Could not obtain a valid message ID from the grib file logger.warning("Could not find parameter_number %d in GRIB file, no valid Dataset created", - dataset_info['parameter_number']) + dataset_info["parameter_number"]) break # Check if the parameter number in the GRIB message corresponds to the required key - parameter_number = self._get_from_msg(gid, 'parameterNumber') + parameter_number = self._get_from_msg(gid, "parameterNumber") - if parameter_number == dataset_info['parameter_number']: + if parameter_number == dataset_info["parameter_number"]: self._res = dataset_id["resolution"] self._read_attributes(gid) # Read the missing value - missing_value = self._get_from_msg(gid, 'missingValue') + missing_value = self._get_from_msg(gid, "missingValue") # Retrieve values and metadata from the GRIB message, masking the values equal to missing_value xarr = self._get_xarray_from_msg(gid) @@ -137,11 +137,11 @@ def get_dataset(self, dataset_id, dataset_info): def _read_attributes(self, gid): """Read the parameter attributes from the message and create the projection and area dictionaries.""" # Read SSP and date/time - self._ssp_lon = self._get_from_msg(gid, 'longitudeOfSubSatellitePointInDegrees') + self._ssp_lon = self._get_from_msg(gid, "longitudeOfSubSatellitePointInDegrees") # Read number of points on the x and y axes - self._nrows = self._get_from_msg(gid, 'Ny') - self._ncols = self._get_from_msg(gid, 'Nx') + self._nrows = self._get_from_msg(gid, "Ny") + self._ncols = self._get_from_msg(gid, "Nx") # Creates the projection and area dictionaries self._pdict, self._area_dict = self._get_proj_area(gid) @@ -171,45 +171,45 @@ def _get_proj_area(self, gid): south: coodinate of the south limit """ # Get name of area definition - area_naming_input_dict = {'platform_name': 'msg', - 'instrument_name': 'seviri', - 'resolution': self._res, + area_naming_input_dict = {"platform_name": "msg", + "instrument_name": "seviri", + "resolution": self._res, } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('seviri', self._ssp_lon)}) + **get_service_mode("seviri", self._ssp_lon)}) # Read all projection and area parameters from the message - earth_major_axis_in_meters = self._get_from_msg(gid, 'earthMajorAxis') * 1000.0 # [m] - earth_minor_axis_in_meters = self._get_from_msg(gid, 'earthMinorAxis') * 1000.0 # [m] + earth_major_axis_in_meters = self._get_from_msg(gid, "earthMajorAxis") * 1000.0 # [m] + earth_minor_axis_in_meters = self._get_from_msg(gid, "earthMinorAxis") * 1000.0 # [m] earth_major_axis_in_meters = self._scale_earth_axis(earth_major_axis_in_meters) earth_minor_axis_in_meters = self._scale_earth_axis(earth_minor_axis_in_meters) - nr_in_radius_of_earth = self._get_from_msg(gid, 'NrInRadiusOfEarth') - xp_in_grid_lengths = self._get_from_msg(gid, 'XpInGridLengths') + nr_in_radius_of_earth = self._get_from_msg(gid, "NrInRadiusOfEarth") + xp_in_grid_lengths = self._get_from_msg(gid, "XpInGridLengths") h_in_meters = earth_major_axis_in_meters * 
(nr_in_radius_of_earth - 1.0) # [m] # Create the dictionary with the projection data pdict = { - 'a': earth_major_axis_in_meters, - 'b': earth_minor_axis_in_meters, - 'h': h_in_meters, - 'ssp_lon': self._ssp_lon, - 'nlines': self._ncols, - 'ncols': self._nrows, - 'a_name': area_naming['area_id'], - 'a_desc': area_naming['description'], - 'p_id': "", + "a": earth_major_axis_in_meters, + "b": earth_minor_axis_in_meters, + "h": h_in_meters, + "ssp_lon": self._ssp_lon, + "nlines": self._ncols, + "ncols": self._nrows, + "a_name": area_naming["area_id"], + "a_desc": area_naming["description"], + "p_id": "", } # Compute the dictionary with the area extension area_dict = { - 'center_point': xp_in_grid_lengths, - 'north': self._nrows, - 'east': 1, - 'west': self._ncols, - 'south': 1, + "center_point": xp_in_grid_lengths, + "north": self._nrows, + "east": 1, + "west": self._ncols, + "south": 1, } return pdict, area_dict @@ -237,7 +237,7 @@ def _get_xarray_from_msg(self, gid): """ # Data from GRIB message are read into an Xarray... xarr = xr.DataArray(da.from_array(ec.codes_get_values( - gid).reshape(self._nrows, self._ncols), CHUNK_SIZE), dims=('y', 'x')) + gid).reshape(self._nrows, self._ncols), CHUNK_SIZE), dims=("y", "x")) return xarr @@ -251,13 +251,13 @@ def _get_attributes(self): platform_name: name of the platform """ orbital_parameters = { - 'projection_longitude': self._ssp_lon + "projection_longitude": self._ssp_lon } attributes = { - 'orbital_parameters': orbital_parameters, - 'sensor': 'seviri', - 'platform_name': PLATFORM_DICT[self.filename_info['spacecraft']] + "orbital_parameters": orbital_parameters, + "sensor": "seviri", + "platform_name": PLATFORM_DICT[self.filename_info["spacecraft"]] } return attributes diff --git a/satpy/readers/slstr_l1b.py b/satpy/readers/slstr_l1b.py index 5024d3d9a1..02aae9f72b 100644 --- a/satpy/readers/slstr_l1b.py +++ b/satpy/readers/slstr_l1b.py @@ -34,30 +34,30 @@ CHUNK_SIZE = get_legacy_chunk_size() -PLATFORM_NAMES = {'S3A': 'Sentinel-3A', - 'S3B': 'Sentinel-3B'} +PLATFORM_NAMES = {"S3A": "Sentinel-3A", + "S3B": "Sentinel-3B"} # These are the default channel adjustment factors. 
# Defined in the product notice: S3.PN-SLSTR-L1.08 # https://sentinel.esa.int/documents/247904/2731673/Sentinel-3A-and-3B-SLSTR-Product-Notice-Level-1B-SL-1-RBT-at-NRT-and-NTC.pdf -CHANCALIB_FACTORS = {'S1_nadir': 0.97, - 'S2_nadir': 0.98, - 'S3_nadir': 0.98, - 'S4_nadir': 1.0, - 'S5_nadir': 1.11, - 'S6_nadir': 1.13, - 'S7_nadir': 1.0, - 'S8_nadir': 1.0, - 'S9_nadir': 1.0, - 'S1_oblique': 0.94, - 'S2_oblique': 0.95, - 'S3_oblique': 0.95, - 'S4_oblique': 1.0, - 'S5_oblique': 1.04, - 'S6_oblique': 1.07, - 'S7_oblique': 1.0, - 'S8_oblique': 1.0, - 'S9_oblique': 1.0, } +CHANCALIB_FACTORS = {"S1_nadir": 0.97, + "S2_nadir": 0.98, + "S3_nadir": 0.98, + "S4_nadir": 1.0, + "S5_nadir": 1.11, + "S6_nadir": 1.13, + "S7_nadir": 1.0, + "S8_nadir": 1.0, + "S9_nadir": 1.0, + "S1_oblique": 0.94, + "S2_oblique": 0.95, + "S3_oblique": 0.95, + "S4_oblique": 1.0, + "S5_oblique": 1.04, + "S6_oblique": 1.07, + "S7_oblique": 1.0, + "S8_oblique": 1.0, + "S9_oblique": 1.0, } class NCSLSTRGeo(BaseFileHandler): @@ -70,17 +70,17 @@ def __init__(self, filename, filename_info, filetype_info): self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, - chunks={'columns': CHUNK_SIZE, - 'rows': CHUNK_SIZE}) - self.nc = self.nc.rename({'columns': 'x', 'rows': 'y'}) + chunks={"columns": CHUNK_SIZE, + "rows": CHUNK_SIZE}) + self.nc = self.nc.rename({"columns": "x", "rows": "y"}) self.cache = {} def get_dataset(self, key, info): """Load a dataset.""" - logger.debug('Reading %s.', key['name']) - file_key = info['file_key'].format(view=key['view'].name[0], - stripe=key['stripe'].name) + logger.debug("Reading %s.", key["name"]) + file_key = info["file_key"].format(view=key["view"].name[0], + stripe=key["stripe"].name) try: variable = self.nc[file_key] except KeyError: @@ -95,12 +95,12 @@ def get_dataset(self, key, info): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") class NCSLSTR1B(BaseFileHandler): @@ -132,29 +132,29 @@ def __init__(self, filename, filename_info, filetype_info, self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, - chunks={'columns': CHUNK_SIZE, - 'rows': CHUNK_SIZE}) - self.nc = self.nc.rename({'columns': 'x', 'rows': 'y'}) - self.channel = filename_info['dataset_name'] - self.stripe = filename_info['stripe'] - views = {'n': 'nadir', 'o': 'oblique'} - self.view = views[filename_info['view']] - cal_file = os.path.join(os.path.dirname(self.filename), 'viscal.nc') + chunks={"columns": CHUNK_SIZE, + "rows": CHUNK_SIZE}) + self.nc = self.nc.rename({"columns": "x", "rows": "y"}) + self.channel = filename_info["dataset_name"] + self.stripe = filename_info["stripe"] + views = {"n": "nadir", "o": "oblique"} + self.view = views[filename_info["view"]] + cal_file = os.path.join(os.path.dirname(self.filename), "viscal.nc") self.cal = xr.open_dataset(cal_file, decode_cf=True, mask_and_scale=True, - chunks={'views': CHUNK_SIZE}) + chunks={"views": CHUNK_SIZE}) indices_file = os.path.join(os.path.dirname(self.filename), - 'indices_{}{}.nc'.format(self.stripe, self.view[0])) + "indices_{}{}.nc".format(self.stripe, self.view[0])) self.indices = xr.open_dataset(indices_file, decode_cf=True, 
mask_and_scale=True, - chunks={'columns': CHUNK_SIZE, - 'rows': CHUNK_SIZE}) - self.indices = self.indices.rename({'columns': 'x', 'rows': 'y'}) + chunks={"columns": CHUNK_SIZE, + "rows": CHUNK_SIZE}) + self.indices = self.indices.rename({"columns": "x", "rows": "y"}) - self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] - self.sensor = 'slstr' + self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]] + self.sensor = "slstr" if isinstance(user_calibration, dict): self.usercalib = user_calibration else: @@ -162,7 +162,7 @@ def __init__(self, filename, filename_info, filetype_info, def _apply_radiance_adjustment(self, radiances): """Adjust SLSTR radiances with default or user supplied values.""" - chan_name = self.channel + '_' + self.view + chan_name = self.channel + "_" + self.view adjust_fac = None if self.usercalib is not None: # If user supplied adjustment, use it. @@ -189,26 +189,26 @@ def _cal_rad(rad, didx, solar_flux=None): def get_dataset(self, key, info): """Load a dataset.""" - if (self.channel not in key['name'] or - self.stripe != key['stripe'].name or - self.view != key['view'].name): + if (self.channel not in key["name"] or + self.stripe != key["stripe"].name or + self.view != key["view"].name): return - logger.debug('Reading %s.', key['name']) - if key['calibration'] == 'brightness_temperature': - variable = self.nc['{}_BT_{}{}'.format(self.channel, self.stripe, self.view[0])] + logger.debug("Reading %s.", key["name"]) + if key["calibration"] == "brightness_temperature": + variable = self.nc["{}_BT_{}{}".format(self.channel, self.stripe, self.view[0])] else: - variable = self.nc['{}_radiance_{}{}'.format(self.channel, self.stripe, self.view[0])] + variable = self.nc["{}_radiance_{}{}".format(self.channel, self.stripe, self.view[0])] radiances = self._apply_radiance_adjustment(variable) - units = variable.attrs['units'] - if key['calibration'] == 'reflectance': + units = variable.attrs["units"] + if key["calibration"] == "reflectance": # TODO take into account sun-earth distance - solar_flux = self.cal[re.sub('_[^_]*$', '', key['name']) + '_solar_irradiances'] - d_index = self.indices['detector_{}{}'.format(self.stripe, self.view[0])] - idx = 0 if self.view[0] == 'n' else 1 # 0: Nadir view, 1: oblique (check). + solar_flux = self.cal[re.sub("_[^_]*$", "", key["name"]) + "_solar_irradiances"] + d_index = self.indices["detector_{}{}".format(self.stripe, self.view[0])] + idx = 0 if self.view[0] == "n" else 1 # 0: Nadir view, 1: oblique (check). 
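# The call below applies _cal_rad per block with the detector index and the band solar
# irradiance for the selected view, and the subsequent pi * 100 scaling yields percent
# reflectance. Under the usual convention reflectance = pi * L / E0, illustrative assumed
# numbers L = 50 W m-2 sr-1 um-1 and E0 = 1837 W m-2 um-1 give about 8.6 %. The sun-earth
# distance term is not applied here (see the TODO above).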
radiances.data = da.map_blocks( self._cal_rad, radiances.data, d_index.data, solar_flux=solar_flux[:, idx].values) radiances *= np.pi * 100 - units = '%' + units = "%" info = info.copy() info.update(radiances.attrs) @@ -224,12 +224,12 @@ def get_dataset(self, key, info): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") class NCSLSTRAngles(BaseFileHandler): @@ -240,8 +240,8 @@ def _loadcart(self, fname): cartf = xr.open_dataset(fname, decode_cf=True, mask_and_scale=True, - chunks={'columns': CHUNK_SIZE, - 'rows': CHUNK_SIZE}) + chunks={"columns": CHUNK_SIZE, + "rows": CHUNK_SIZE}) return cartf def __init__(self, filename, filename_info, filetype_info): @@ -252,57 +252,57 @@ def __init__(self, filename, filename_info, filetype_info): self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, - chunks={'columns': CHUNK_SIZE, - 'rows': CHUNK_SIZE}) + chunks={"columns": CHUNK_SIZE, + "rows": CHUNK_SIZE}) # TODO: get metadata from the manifest file (xfdumanifest.xml) - self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] - self.sensor = 'slstr' - self.view = filename_info['view'] - self._start_time = filename_info['start_time'] - self._end_time = filename_info['end_time'] + self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]] + self.sensor = "slstr" + self.view = filename_info["view"] + self._start_time = filename_info["start_time"] + self._end_time = filename_info["end_time"] carta_file = os.path.join( - os.path.dirname(self.filename), 'cartesian_a{}.nc'.format(self.view[0])) + os.path.dirname(self.filename), "cartesian_a{}.nc".format(self.view[0])) carti_file = os.path.join( - os.path.dirname(self.filename), 'cartesian_i{}.nc'.format(self.view[0])) + os.path.dirname(self.filename), "cartesian_i{}.nc".format(self.view[0])) cartx_file = os.path.join( - os.path.dirname(self.filename), 'cartesian_tx.nc') + os.path.dirname(self.filename), "cartesian_tx.nc") self.carta = self._loadcart(carta_file) self.carti = self._loadcart(carti_file) self.cartx = self._loadcart(cartx_file) def get_dataset(self, key, info): """Load a dataset.""" - if not key['view'].name.startswith(self.view[0]): + if not key["view"].name.startswith(self.view[0]): return - logger.debug('Reading %s.', key['name']) + logger.debug("Reading %s.", key["name"]) # Check if file_key is specified in the yaml - file_key = info['file_key'].format(view=key['view'].name[0]) + file_key = info["file_key"].format(view=key["view"].name[0]) variable = self.nc[file_key] - l_step = self.nc.attrs.get('al_subsampling_factor', 1) - c_step = self.nc.attrs.get('ac_subsampling_factor', 16) + l_step = self.nc.attrs.get("al_subsampling_factor", 1) + c_step = self.nc.attrs.get("ac_subsampling_factor", 16) - if key.get('resolution', 1000) == 500: + if key.get("resolution", 1000) == 500: l_step *= 2 c_step *= 2 if c_step != 1 or l_step != 1: - logger.debug('Interpolating %s.', key['name']) + logger.debug("Interpolating %s.", key["name"]) # TODO: do it in cartesian coordinates ! 
pbs at date line and # possible - tie_x = self.cartx['x_tx'].data[0, :][::-1] - tie_y = self.cartx['y_tx'].data[:, 0] - if key.get('resolution', 1000) == 500: - full_x = self.carta['x_a' + self.view[0]].data - full_y = self.carta['y_a' + self.view[0]].data + tie_x = self.cartx["x_tx"].data[0, :][::-1] + tie_y = self.cartx["y_tx"].data[:, 0] + if key.get("resolution", 1000) == 500: + full_x = self.carta["x_a" + self.view[0]].data + full_y = self.carta["y_a" + self.view[0]].data else: - full_x = self.carti['x_i' + self.view[0]].data - full_y = self.carti['y_i' + self.view[0]].data + full_x = self.carti["x_i" + self.view[0]].data + full_y = self.carti["y_i" + self.view[0]].data variable = variable.fillna(0) - variable.attrs['resolution'] = key.get('resolution', 1000) + variable.attrs["resolution"] = key.get("resolution", 1000) from scipy.interpolate import RectBivariateSpline spl = RectBivariateSpline( @@ -311,13 +311,13 @@ def get_dataset(self, key, info): values = spl.ev(full_y, full_x) variable = xr.DataArray(da.from_array(values, chunks=(CHUNK_SIZE, CHUNK_SIZE)), - dims=['y', 'x'], attrs=variable.attrs) + dims=["y", "x"], attrs=variable.attrs) - variable.attrs['platform_name'] = self.platform_name - variable.attrs['sensor'] = self.sensor + variable.attrs["platform_name"] = self.platform_name + variable.attrs["sensor"] = self.sensor - if 'units' not in variable.attrs: - variable.attrs['units'] = 'degrees' + if "units" not in variable.attrs: + variable.attrs["units"] = "degrees" variable.attrs.update(key.to_dict()) @@ -326,12 +326,12 @@ def get_dataset(self, key, info): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") class NCSLSTRFlag(BaseFileHandler): @@ -344,24 +344,24 @@ def __init__(self, filename, filename_info, filetype_info): self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, - chunks={'columns': CHUNK_SIZE, - 'rows': CHUNK_SIZE}) - self.nc = self.nc.rename({'columns': 'x', 'rows': 'y'}) - self.stripe = filename_info['stripe'] - views = {'n': 'nadir', 'o': 'oblique'} - self.view = views[filename_info['view']] + chunks={"columns": CHUNK_SIZE, + "rows": CHUNK_SIZE}) + self.nc = self.nc.rename({"columns": "x", "rows": "y"}) + self.stripe = filename_info["stripe"] + views = {"n": "nadir", "o": "oblique"} + self.view = views[filename_info["view"]] # TODO: get metadata from the manifest file (xfdumanifest.xml) - self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] - self.sensor = 'slstr' + self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]] + self.sensor = "slstr" def get_dataset(self, key, info): """Load a dataset.""" - if (self.stripe != key['stripe'].name or - self.view != key['view'].name): + if (self.stripe != key["stripe"].name or + self.view != key["view"].name): return - logger.debug('Reading %s.', key['name']) - file_key = info['file_key'].format(view=key['view'].name[0], - stripe=key['stripe'].name) + logger.debug("Reading %s.", key["name"]) + file_key = info["file_key"].format(view=key["view"].name[0], + stripe=key["stripe"].name) variable = self.nc[file_key] info = info.copy() @@ -376,9 +376,9 @@ def get_dataset(self, key, info): @property def 
start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") diff --git a/satpy/readers/smos_l2_wind.py b/satpy/readers/smos_l2_wind.py index 116ac39756..c982397c3c 100644 --- a/satpy/readers/smos_l2_wind.py +++ b/satpy/readers/smos_l2_wind.py @@ -41,22 +41,22 @@ class SMOSL2WINDFileHandler(NetCDF4FileHandler): @property def start_time(self): """Get start time.""" - return datetime.strptime(self['/attr/time_coverage_start'], "%Y-%m-%dT%H:%M:%S Z") + return datetime.strptime(self["/attr/time_coverage_start"], "%Y-%m-%dT%H:%M:%S Z") @property def end_time(self): """Get end time.""" - return datetime.strptime(self['/attr/time_coverage_end'], "%Y-%m-%dT%H:%M:%S Z") + return datetime.strptime(self["/attr/time_coverage_end"], "%Y-%m-%dT%H:%M:%S Z") @property def platform_shortname(self): """Get platform shortname.""" - return self.filename_info['platform_shortname'] + return self.filename_info["platform_shortname"] @property def platform_name(self): """Get platform.""" - return self['/attr/platform'] + return self["/attr/platform"] def get_metadata(self, data, ds_info): """Get metadata.""" @@ -64,12 +64,12 @@ def get_metadata(self, data, ds_info): metadata.update(data.attrs) metadata.update(ds_info) metadata.update({ - 'platform_shortname': self.platform_shortname, - 'platform_name': self.platform_name, - 'sensor': self['/attr/instrument'], - 'start_time': self.start_time, - 'end_time': self.end_time, - 'level': self['/attr/processing_level'], + "platform_shortname": self.platform_shortname, + "platform_name": self.platform_name, + "sensor": self["/attr/instrument"], + "start_time": self.start_time, + "end_time": self.end_time, + "level": self["/attr/processing_level"], }) return metadata @@ -88,16 +88,16 @@ def available_datasets(self, configured_datasets=None): continue handled_variables.add(var_name) new_info = { - 'name': var_name, - 'file_type': self.filetype_info['file_type'], + "name": var_name, + "file_type": self.filetype_info["file_type"], } yield True, new_info def _mask_dataset(self, data): """Mask out fill values.""" try: - fill = data.attrs['_FillValue'] - data.attrs['_FillValue'] = np.nan + fill = data.attrs["_FillValue"] + data.attrs["_FillValue"] = np.nan return data.where(data != fill) except KeyError: return data @@ -110,11 +110,11 @@ def _adjust_lon_coord(self, data): def _rename_coords(self, data): """Rename coords.""" rename_dict = {} - if 'lon' in data.dims: + if "lon" in data.dims: data = self._adjust_lon_coord(data) - rename_dict['lon'] = 'x' - if 'lat' in data.dims: - rename_dict['lat'] = 'y' + rename_dict["lon"] = "x" + if "lat" in data.dims: + rename_dict["lat"] = "y" # Rename the coordinates to x and y return data.rename(rename_dict) @@ -123,39 +123,39 @@ def _remove_time_coordinate(self, data): # Remove dimension where size is 1, eg. 
time data = data.squeeze() # Remove if exists time as coordinate - if 'time' in data.coords: - data = data.drop_vars('time') + if "time" in data.coords: + data = data.drop_vars("time") return data def _roll_dataset_lon_coord(self, data): """Roll dataset along the lon coordinate.""" - if 'lon' in data.dims: + if "lon" in data.dims: data = data.roll(lon=720, roll_coords=True) return data def get_dataset(self, ds_id, ds_info): """Get dataset.""" - data = self[ds_id['name']] + data = self[ds_id["name"]] data.attrs = self.get_metadata(data, ds_info) data = self._remove_time_coordinate(data) data = self._roll_dataset_lon_coord(data) data = self._rename_coords(data) data = self._mask_dataset(data) - if len(data.dims) >= 2 and all([dim in data.dims for dim in ['x', 'y']]): + if len(data.dims) >= 2 and all([dim in data.dims for dim in ["x", "y"]]): # Remove the first and last row as these values extends beyond +-90 latitude # if the dataset contains the y dimmension. # As this is data over open sea these has no values. data = data.where((data.y > -90.0) & (data.y < 90.0), drop=True) - elif len(data.dims) == 1 and 'y' in data.dims: + elif len(data.dims) == 1 and "y" in data.dims: data = data.where((data.y > 0) & (data.y < len(data.y) - 1), drop=True) return data def _create_area_extent(self, width, height): """Create area extent.""" # Creating a meshgrid, not needed actually, but makes it easy to find extremes - _lon = self._adjust_lon_coord(self['lon']) + _lon = self._adjust_lon_coord(self["lon"]) _lon = self._roll_dataset_lon_coord(_lon) - latlon = np.meshgrid(_lon, self['lat'][1:self['lat/shape'][0] - 1]) + latlon = np.meshgrid(_lon, self["lat"][1:self["lat/shape"][0] - 1]) lower_left_x = latlon[0][height - 1][0] - 0.125 lower_left_y = latlon[1][height - 1][0] + 0.125 upper_right_y = latlon[1][1][width - 1] - 0.125 @@ -164,12 +164,12 @@ def _create_area_extent(self, width, height): def get_area_def(self, dsid): """Define AreaDefintion.""" - width = self['lon/shape'][0] - height = self['lat/shape'][0] - 2 + width = self["lon/shape"][0] + height = self["lat/shape"][0] - 2 area_extent = self._create_area_extent(width, height) description = "SMOS L2 Wind Equirectangular Projection" - area_id = 'smos_eqc' - proj_id = 'equirectangular' - proj_dict = {'init': self['/attr/geospatial_bounds_vertical_crs']} + area_id = "smos_eqc" + proj_id = "equirectangular" + proj_dict = {"init": self["/attr/geospatial_bounds_vertical_crs"]} area_def = AreaDefinition(area_id, description, proj_id, proj_dict, width, height, area_extent, ) return area_def diff --git a/satpy/readers/tropomi_l2.py b/satpy/readers/tropomi_l2.py index c6dda4bd89..768ca70948 100644 --- a/satpy/readers/tropomi_l2.py +++ b/satpy/readers/tropomi_l2.py @@ -40,7 +40,7 @@ from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) -DATE_FMT = '%Y-%m-%dT%H:%M:%SZ' +DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" CHUNK_SIZE = get_legacy_chunk_size() @@ -50,32 +50,32 @@ class TROPOMIL2FileHandler(NetCDF4FileHandler): @property def start_time(self): """Get start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def platform_shortname(self): """Get platform shortname.""" - return self.filename_info['platform_shortname'] + return self.filename_info["platform_shortname"] @property def time_coverage_start(self): """Get 
time_coverage_start.""" - return datetime.strptime(self['/attr/time_coverage_start'], DATE_FMT) + return datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT) @property def time_coverage_end(self): """Get time_coverage_end.""" - return datetime.strptime(self['/attr/time_coverage_end'], DATE_FMT) + return datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT) @property def sensor(self): """Get sensor.""" - res = self['/attr/sensor'] + res = self["/attr/sensor"] if isinstance(res, np.ndarray): return str(res.astype(str)).lower() return res.lower() @@ -93,7 +93,7 @@ def available_datasets(self, configured_datasets=None): lat_shape = None for var_name, _val in self.file_content.items(): # Could probably avoid this hardcoding, will think on it - if (var_name == 'PRODUCT/latitude'): + if (var_name == "PRODUCT/latitude"): lat_shape = self[var_name + "/shape"] break @@ -102,19 +102,19 @@ def available_datasets(self, configured_datasets=None): # update previously configured datasets logger.debug("Starting previously configured variables loop...") # if bounds exists, we can assemble them later - bounds_exist = 'latitude_bounds' in self and 'longitude_bounds' in self + bounds_exist = "latitude_bounds" in self and "longitude_bounds" in self for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info - var_name = ds_info.get('file_key', ds_info['name']) + var_name = ds_info.get("file_key", ds_info["name"]) # logger.debug("Evaluating previously configured variable: %s", var_name) - matches = self.file_type_matches(ds_info['file_type']) + matches = self.file_type_matches(ds_info["file_type"]) # we can confidently say that we can provide this dataset and can # provide more info - assembled = var_name in ['assembled_lat_bounds', 'assembled_lon_bounds'] + assembled = var_name in ["assembled_lat_bounds", "assembled_lon_bounds"] if (matches and var_name in self) or (assembled and bounds_exist): logger.debug("Handling previously configured variable: %s", var_name) if not assembled: @@ -150,20 +150,20 @@ def _iterate_over_dataset_contents(self, handled_variables, shape): logger.debug("Already handled, skipping: %s", var_name) continue handled_variables.add(var_name) - last_index_separator = var_name.rindex('/') + last_index_separator = var_name.rindex("/") last_index_separator = last_index_separator + 1 var_name_no_path = var_name[last_index_separator:] logger.debug("Using short name of: %s", var_name_no_path) # Create new ds_info object - if var_name_no_path in ['latitude_bounds', 'longitude_bounds']: + if var_name_no_path in ["latitude_bounds", "longitude_bounds"]: coordinates = [] else: - coordinates = ['longitude', 'latitude'] + coordinates = ["longitude", "latitude"] new_info = { - 'name': var_name_no_path, - 'file_key': var_name, - 'coordinates': coordinates, - 'file_type': self.filetype_info['file_type'], + "name": var_name_no_path, + "file_key": var_name, + "coordinates": coordinates, + "file_type": self.filetype_info["file_type"], } yield True, new_info @@ -173,12 +173,12 @@ def get_metadata(self, data, ds_info): metadata.update(data.attrs) metadata.update(ds_info) metadata.update({ - 'platform_shortname': self.platform_shortname, - 'sensor': self.sensor, - 'start_time': self.start_time, - 'end_time': self.end_time, - 'time_coverage_start': self.time_coverage_start, - 'time_coverage_end': self.time_coverage_end, + "platform_shortname": self.platform_shortname, + "sensor": self.sensor, + 
"start_time": self.start_time, + "end_time": self.end_time, + "time_coverage_start": self.time_coverage_start, + "time_coverage_end": self.time_coverage_end, }) return metadata @@ -186,10 +186,10 @@ def get_metadata(self, data, ds_info): def _rename_dims(self, data_arr): """Normalize dimension names with the rest of Satpy.""" dims_dict = {} - if 'ground_pixel' in data_arr.dims: - dims_dict['ground_pixel'] = 'x' - if 'scanline' in data_arr.dims: - dims_dict['scanline'] = 'y' + if "ground_pixel" in data_arr.dims: + dims_dict["ground_pixel"] = "x" + if "scanline" in data_arr.dims: + dims_dict["scanline"] = "y" return data_arr.rename(dims_dict) def prepare_geo(self, bounds_data): @@ -220,18 +220,18 @@ def prepare_geo(self, bounds_data): # Convert to DataArray dask_dest = da.from_array(dest, chunks=CHUNK_SIZE) dest = xr.DataArray(dask_dest, - dims=('y_bounds', 'x_bounds'), + dims=("y_bounds", "x_bounds"), attrs=bounds_data.attrs ) return dest def get_dataset(self, ds_id, ds_info): """Get dataset.""" - logger.debug("Getting data for: %s", ds_id['name']) - file_key = ds_info.get('file_key', ds_id['name']) + logger.debug("Getting data for: %s", ds_id["name"]) + file_key = ds_info.get("file_key", ds_id["name"]) data = self[file_key] data.attrs = self.get_metadata(data, ds_info) - fill_value = data.attrs.get('_FillValue', np.float32(np.nan)) + fill_value = data.attrs.get("_FillValue", np.float32(np.nan)) data = data.squeeze() # preserve integer data types if possible @@ -239,11 +239,11 @@ def get_dataset(self, ds_id, ds_info): new_fill = fill_value else: new_fill = np.float32(np.nan) - data.attrs.pop('_FillValue', None) + data.attrs.pop("_FillValue", None) good_mask = data != fill_value - scale_factor = data.attrs.get('scale_factor') - add_offset = data.attrs.get('add_offset') + scale_factor = data.attrs.get("scale_factor") + add_offset = data.attrs.get("add_offset") if scale_factor is not None: data = data * scale_factor + add_offset @@ -251,11 +251,11 @@ def get_dataset(self, ds_id, ds_info): data = self._rename_dims(data) # drop coords whose units are not meters - drop_list = ['y', 'x', 'layer', 'vertices'] + drop_list = ["y", "x", "layer", "vertices"] coords_exist = [coord for coord in drop_list if coord in data.coords] if coords_exist: data = data.drop_vars(coords_exist) - if ds_id['name'] in ['assembled_lat_bounds', 'assembled_lon_bounds']: + if ds_id["name"] in ["assembled_lat_bounds", "assembled_lon_bounds"]: data = self.prepare_geo(data) return data diff --git a/satpy/readers/utils.py b/satpy/readers/utils.py index 31f6dea6d9..892c93acaf 100644 --- a/satpy/readers/utils.py +++ b/satpy/readers/utils.py @@ -53,7 +53,7 @@ def np2str(value): type `numpy.string_` or it is not a numpy array """ - if hasattr(value, 'dtype') and \ + if hasattr(value, "dtype") and \ issubclass(value.dtype.type, (np.str_, np.string_, np.object_)) \ and value.size == 1: value = value.item() @@ -68,13 +68,13 @@ def np2str(value): def _get_geostationary_height(geos_area): params = geos_area.crs.coordinate_operation.params - h_param = [p for p in params if 'satellite height' in p.name.lower()][0] + h_param = [p for p in params if "satellite height" in p.name.lower()][0] return h_param.value def _get_geostationary_reference_longitude(geos_area): params = geos_area.crs.coordinate_operation.params - lon_0_params = [p for p in params if 'longitude of natural origin' in p.name.lower()] + lon_0_params = [p for p in params if "longitude of natural origin" in p.name.lower()] if not lon_0_params: return 0 elif len(lon_0_params) 
!= 1: @@ -232,7 +232,7 @@ def _unzip_local_file(filename: str, prefix=None): Temporary filename path for decompressed file or None. """ - if not os.fspath(filename).endswith('bz2'): + if not os.fspath(filename).endswith("bz2"): return None fdn, tmpfilepath = tempfile.mkstemp(prefix=prefix, dir=config["tmp_dir"]) @@ -248,19 +248,19 @@ def _unzip_local_file(filename: str, prefix=None): def _unzip_with_pbzip(filename, tmpfilepath, fdn): # try pbzip2 - pbzip = which('pbzip2') + pbzip = which("pbzip2") if pbzip is None: return None # Run external pbzip2 - n_thr = os.environ.get('OMP_NUM_THREADS') + n_thr = os.environ.get("OMP_NUM_THREADS") if n_thr: runner = [pbzip, - '-dc', - '-p'+str(n_thr), + "-dc", + "-p"+str(n_thr), filename] else: runner = [pbzip, - '-dc', + "-dc", filename] p = Popen(runner, stdout=PIPE, stderr=PIPE) # nosec stdout = BytesIO(p.communicate()[0]) @@ -268,7 +268,7 @@ def _unzip_with_pbzip(filename, tmpfilepath, fdn): if status != 0: raise IOError("pbzip2 error '%s', failed, status=%d" % (filename, status)) - with closing(os.fdopen(fdn, 'wb')) as ofpt: + with closing(os.fdopen(fdn, "wb")) as ofpt: try: stdout.seek(0) shutil.copyfileobj(stdout, ofpt) @@ -291,7 +291,7 @@ def _unzip_with_bz2(filename, tmpfilepath): def _write_uncompressed_file(content, fdn, filename, tmpfilepath): - with closing(os.fdopen(fdn, 'wb')) as ofpt: + with closing(os.fdopen(fdn, "wb")) as ofpt: try: ofpt.write(content) except IOError: @@ -348,7 +348,7 @@ def generic_open(filename, *args, **kwargs): Returns a file-like object. """ - if os.fspath(filename).endswith('.bz2'): + if os.fspath(filename).endswith(".bz2"): fp = bz2.open(filename, *args, **kwargs) else: try: @@ -413,8 +413,8 @@ def get_user_calibration_factors(band_name, correction_dict): """Retrieve radiance correction factors from user-supplied dict.""" if band_name in correction_dict: try: - slope = correction_dict[band_name]['slope'] - offset = correction_dict[band_name]['offset'] + slope = correction_dict[band_name]["slope"] + offset = correction_dict[band_name]["offset"] except KeyError: raise KeyError("Incorrect correction factor dictionary. 
You must " "supply 'slope' and 'offset' keys.") @@ -440,13 +440,13 @@ def get_array_date(scn_data, utc_date=None): """Get start time from a channel data array.""" if utc_date is None: try: - utc_date = scn_data.attrs['start_time'] + utc_date = scn_data.attrs["start_time"] except KeyError: try: - utc_date = scn_data.attrs['scheduled_time'] + utc_date = scn_data.attrs["scheduled_time"] except KeyError: - raise KeyError('Scene has no start_time ' - 'or scheduled_time attribute.') + raise KeyError("Scene has no start_time " + "or scheduled_time attribute.") return utc_date @@ -456,8 +456,8 @@ def apply_earthsun_distance_correction(reflectance, utc_date=None): utc_date = get_array_date(reflectance, utc_date) sun_earth_dist = sun_earth_distance_correction(utc_date) - reflectance.attrs['sun_earth_distance_correction_applied'] = True - reflectance.attrs['sun_earth_distance_correction_factor'] = sun_earth_dist + reflectance.attrs["sun_earth_distance_correction_applied"] = True + reflectance.attrs["sun_earth_distance_correction_factor"] = sun_earth_dist with xr.set_options(keep_attrs=True): reflectance = reflectance * sun_earth_dist * sun_earth_dist return reflectance @@ -469,8 +469,8 @@ def remove_earthsun_distance_correction(reflectance, utc_date=None): utc_date = get_array_date(reflectance, utc_date) sun_earth_dist = sun_earth_distance_correction(utc_date) - reflectance.attrs['sun_earth_distance_correction_applied'] = False - reflectance.attrs['sun_earth_distance_correction_factor'] = sun_earth_dist + reflectance.attrs["sun_earth_distance_correction_applied"] = False + reflectance.attrs["sun_earth_distance_correction_factor"] = sun_earth_dist with xr.set_options(keep_attrs=True): reflectance = reflectance / (sun_earth_dist * sun_earth_dist) return reflectance diff --git a/satpy/readers/vaisala_gld360.py b/satpy/readers/vaisala_gld360.py index 735eb6b3c9..b0bde01573 100644 --- a/satpy/readers/vaisala_gld360.py +++ b/satpy/readers/vaisala_gld360.py @@ -48,11 +48,11 @@ def __init__(self, filename, filename_info, filetype_info): """Initialize VaisalaGLD360TextFileHandler.""" super(VaisalaGLD360TextFileHandler, self).__init__(filename, filename_info, filetype_info) - names = ['gld360_date', 'gld360_time', 'latitude', 'longitude', 'power', 'unit'] - types = ['str', 'str', 'float', 'float', 'float', 'str'] + names = ["gld360_date", "gld360_time", "latitude", "longitude", "power", "unit"] + types = ["str", "str", "float", "float", "float", "str"] dtypes = dict(zip(names, types)) # Combine 'date' and 'time' into a datetime object - parse_dates = {'time': ['gld360_date', 'gld360_time']} + parse_dates = {"time": ["gld360_date", "gld360_time"]} self.data = pd.read_csv(filename, delim_whitespace=True, header=None, names=names, dtype=dtypes, parse_dates=parse_dates) @@ -60,28 +60,28 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Get start time.""" - return self.data['time'].iloc[0] + return self.data["time"].iloc[0] @property def end_time(self): """Get end time.""" - return self.data['time'].iloc[-1] + return self.data["time"].iloc[-1] def get_dataset(self, dataset_id, dataset_info): """Load a dataset.""" - xarr = xr.DataArray(da.from_array(self.data[dataset_id['name']], + xarr = xr.DataArray(da.from_array(self.data[dataset_id["name"]], chunks=CHUNK_SIZE), dims=["y"]) # Add time, longitude, and latitude as non-dimensional y-coordinates - xarr['time'] = ('y', self.data['time']) - xarr['longitude'] = ('y', self.data['longitude']) - xarr['latitude'] = ('y', 
self.data['latitude']) + xarr["time"] = ("y", self.data["time"]) + xarr["longitude"] = ("y", self.data["longitude"]) + xarr["latitude"] = ("y", self.data["latitude"]) - if dataset_id['name'] == 'power': + if dataset_id["name"] == "power": # Check that units in the file match the unit specified in the # reader yaml-file - if not (self.data.unit == dataset_info['units']).all(): - raise ValueError('Inconsistent units found in file!') + if not (self.data.unit == dataset_info["units"]).all(): + raise ValueError("Inconsistent units found in file!") xarr.attrs.update(dataset_info) return xarr diff --git a/satpy/readers/vii_base_nc.py b/satpy/readers/vii_base_nc.py index e51024ba56..83056189dc 100644 --- a/satpy/readers/vii_base_nc.py +++ b/satpy/readers/vii_base_nc.py @@ -46,14 +46,14 @@ def __init__(self, filename, filename_info, filetype_info, orthorect=False): super().__init__(filename, filename_info, filetype_info, auto_maskandscale=True) # Saves the orthorectification flag - self.orthorect = orthorect and filetype_info.get('orthorect', True) + self.orthorect = orthorect and filetype_info.get("orthorect", True) # Saves the interpolation flag - self.interpolate = filetype_info.get('interpolate', True) + self.interpolate = filetype_info.get("interpolate", True) try: - longitude = self[filetype_info['cached_longitude']] - latitude = self[filetype_info['cached_latitude']] + longitude = self[filetype_info["cached_longitude"]] + latitude = self[filetype_info["cached_latitude"]] if self.interpolate: self.longitude, self.latitude = self._perform_geo_interpolation(longitude, latitude) @@ -66,22 +66,22 @@ def __init__(self, filename, filename_info, filetype_info, orthorect=False): def _standardize_dims(self, variable): """Standardize dims to y, x.""" - if 'num_pixels' in variable.dims: - variable = variable.rename({'num_pixels': 'x', 'num_lines': 'y'}) - if 'num_points_act' in variable.dims: - variable = variable.rename({'num_points_act': 'x', 'num_points_alt': 'y'}) - if variable.dims[0] == 'x': - variable = variable.transpose('y', 'x') + if "num_pixels" in variable.dims: + variable = variable.rename({"num_pixels": "x", "num_lines": "y"}) + if "num_points_act" in variable.dims: + variable = variable.rename({"num_points_act": "x", "num_points_alt": "y"}) + if variable.dims[0] == "x": + variable = variable.transpose("y", "x") return variable def get_dataset(self, dataset_id, dataset_info): """Get dataset using file_key in dataset_info.""" - var_key = dataset_info['file_key'] - logger.debug('Reading in file to get dataset with key %s.', var_key) + var_key = dataset_info["file_key"] + logger.debug("Reading in file to get dataset with key %s.", var_key) - if var_key == 'cached_longitude' and self.longitude is not None: + if var_key == "cached_longitude" and self.longitude is not None: variable = self.longitude.copy() - elif var_key == 'cached_latitude' and self.latitude is not None: + elif var_key == "cached_latitude" and self.latitude is not None: variable = self.latitude.copy() else: try: @@ -91,21 +91,21 @@ def get_dataset(self, dataset_id, dataset_info): return None # If the dataset is marked for interpolation, perform the interpolation from tie points to pixels - if dataset_info.get('interpolate', False) and self.interpolate: + if dataset_info.get("interpolate", False) and self.interpolate: variable = self._perform_interpolation(variable) # Perform the calibration if required - if dataset_info.get('calibration') is not None: + if dataset_info.get("calibration") is not None: variable = 
self._perform_calibration(variable, dataset_info) # Perform the orthorectification if required if self.orthorect: - orthorect_data_name = dataset_info.get('orthorect_data', None) + orthorect_data_name = dataset_info.get("orthorect_data", None) if orthorect_data_name is not None: variable = self._perform_orthorectification(variable, orthorect_data_name) # Manage the attributes of the dataset - variable.attrs.setdefault('units', None) + variable.attrs.setdefault("units", None) variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) @@ -130,8 +130,8 @@ def _perform_interpolation(variable): TIE_POINTS_FACTOR )[0] new_variable = interpolated_values.rename( - num_tie_points_act='num_pixels', - num_tie_points_alt='num_lines' + num_tie_points_act="num_pixels", + num_tie_points_alt="num_lines" ) new_variable.name = variable.name new_variable.attrs = variable.attrs @@ -157,14 +157,14 @@ def _perform_geo_interpolation(longitude, latitude): TIE_POINTS_FACTOR ) new_longitude = interpolated_longitude.rename( - num_tie_points_act='num_pixels', - num_tie_points_alt='num_lines' + num_tie_points_act="num_pixels", + num_tie_points_alt="num_lines" ) new_longitude.name = longitude.name new_longitude.attrs = longitude.attrs new_latitude = interpolated_latitude.rename( - num_tie_points_act='num_pixels', - num_tie_points_alt='num_lines' + num_tie_points_act="num_pixels", + num_tie_points_alt="num_lines" ) new_latitude.name = latitude.name new_latitude.attrs = latitude.attrs @@ -181,20 +181,20 @@ def _perform_calibration(self, variable, dataset_info): def _get_global_attributes(self): """Create a dictionary of global attributes to be added to all datasets.""" attributes = { - 'filename': self.filename, - 'start_time': self.start_time, - 'end_time': self.end_time, - 'spacecraft_name': self.spacecraft_name, - 'ssp_lon': self.ssp_lon, - 'sensor': self.sensor, - 'filename_start_time': self.filename_info['sensing_start_time'], - 'filename_end_time': self.filename_info['sensing_end_time'], - 'platform_name': self.spacecraft_name, + "filename": self.filename, + "start_time": self.start_time, + "end_time": self.end_time, + "spacecraft_name": self.spacecraft_name, + "ssp_lon": self.ssp_lon, + "sensor": self.sensor, + "filename_start_time": self.filename_info["sensing_start_time"], + "filename_end_time": self.filename_info["sensing_end_time"], + "platform_name": self.spacecraft_name, } # Add a "quality_group" item to the dictionary with all the variables and attributes # which are found in the 'quality' group of the VII product - quality_group = self['quality'] + quality_group = self["quality"] quality_dict = {} for key in quality_group: # Add the values (as Numpy array) of each variable in the group where possible @@ -205,7 +205,7 @@ def _get_global_attributes(self): # Add the attributes of the quality group quality_dict.update(quality_group.attrs) - attributes['quality_group'] = quality_dict + attributes["quality_group"] = quality_dict return attributes @@ -213,29 +213,29 @@ def _get_global_attributes(self): def start_time(self): """Get observation start time.""" try: - start_time = datetime.strptime(self['/attr/sensing_start_time_utc'], '%Y%m%d%H%M%S.%f') + start_time = datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y%m%d%H%M%S.%f") except ValueError: - start_time = datetime.strptime(self['/attr/sensing_start_time_utc'], '%Y-%m-%d %H:%M:%S.%f') + start_time = datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y-%m-%d %H:%M:%S.%f") return start_time @property def 
end_time(self): """Get observation end time.""" try: - end_time = datetime.strptime(self['/attr/sensing_end_time_utc'], '%Y%m%d%H%M%S.%f') + end_time = datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y%m%d%H%M%S.%f") except ValueError: - end_time = datetime.strptime(self['/attr/sensing_end_time_utc'], '%Y-%m-%d %H:%M:%S.%f') + end_time = datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y-%m-%d %H:%M:%S.%f") return end_time @property def spacecraft_name(self): """Return spacecraft name.""" - return self['/attr/spacecraft'] + return self["/attr/spacecraft"] @property def sensor(self): """Return sensor.""" - return self['/attr/instrument'] + return self["/attr/instrument"] @property def ssp_lon(self): diff --git a/satpy/readers/vii_l1b_nc.py b/satpy/readers/vii_l1b_nc.py index 2e66c3deb0..2dbcb63eda 100644 --- a/satpy/readers/vii_l1b_nc.py +++ b/satpy/readers/vii_l1b_nc.py @@ -44,14 +44,14 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): super().__init__(filename, filename_info, filetype_info, **kwargs) # Read the variables which are required for the calibration - self._bt_conversion_a = self['data/calibration_data/bt_conversion_a'].values - self._bt_conversion_b = self['data/calibration_data/bt_conversion_b'].values - self._channel_cw_thermal = self['data/calibration_data/channel_cw_thermal'].values - self._integrated_solar_irradiance = self['data/calibration_data/Band_averaged_solar_irradiance'].values + self._bt_conversion_a = self["data/calibration_data/bt_conversion_a"].values + self._bt_conversion_b = self["data/calibration_data/bt_conversion_b"].values + self._channel_cw_thermal = self["data/calibration_data/channel_cw_thermal"].values + self._integrated_solar_irradiance = self["data/calibration_data/Band_averaged_solar_irradiance"].values # Computes the angle factor for reflectance calibration as inverse of cosine of solar zenith angle # (the values in the product file are on tie points and in degrees, # therefore interpolation and conversion to radians are required) - solar_zenith_angle = self['data/measurement_data/solar_zenith'] + solar_zenith_angle = self["data/measurement_data/solar_zenith"] solar_zenith_angle_on_pixels = self._perform_interpolation(solar_zenith_angle) solar_zenith_angle_on_pixels_radians = np.radians(solar_zenith_angle_on_pixels) self.angle_factor = 1.0 / (np.cos(solar_zenith_angle_on_pixels_radians)) @@ -67,27 +67,27 @@ def _perform_calibration(self, variable, dataset_info): DataArray: array containing the calibrated values and all the original metadata. 
""" - calibration_name = dataset_info['calibration'] - if calibration_name == 'brightness_temperature': + calibration_name = dataset_info["calibration"] + if calibration_name == "brightness_temperature": # Extract the values of calibration coefficients for the current channel - chan_index = dataset_info['chan_thermal_index'] + chan_index = dataset_info["chan_thermal_index"] cw = self._channel_cw_thermal[chan_index] a = self._bt_conversion_a[chan_index] b = self._bt_conversion_b[chan_index] # Perform the calibration calibrated_variable = self._calibrate_bt(variable, cw, a, b) calibrated_variable.attrs = variable.attrs - elif calibration_name == 'reflectance': + elif calibration_name == "reflectance": # Extract the values of calibration coefficients for the current channel - chan_index = dataset_info['chan_solar_index'] + chan_index = dataset_info["chan_solar_index"] isi = self._integrated_solar_irradiance[chan_index] # Perform the calibration calibrated_variable = self._calibrate_refl(variable, self.angle_factor.data, isi) calibrated_variable.attrs = variable.attrs - elif calibration_name == 'radiance': + elif calibration_name == "radiance": calibrated_variable = variable else: - raise ValueError("Unknown calibration %s for dataset %s" % (calibration_name, dataset_info['name'])) + raise ValueError("Unknown calibration %s for dataset %s" % (calibration_name, dataset_info["name"])) return calibrated_variable @@ -108,7 +108,7 @@ def _perform_orthorectification(self, variable, orthorect_data_name): # based on the simplified formula using mean Earth radius variable += np.degrees(orthorect_data / MEAN_EARTH_RADIUS) except KeyError: - logger.warning('Required dataset %s for orthorectification not available, skipping', orthorect_data_name) + logger.warning("Required dataset %s for orthorectification not available, skipping", orthorect_data_name) return variable @staticmethod diff --git a/satpy/readers/vii_l2_nc.py b/satpy/readers/vii_l2_nc.py index 3ce3926674..276d77f668 100644 --- a/satpy/readers/vii_l2_nc.py +++ b/satpy/readers/vii_l2_nc.py @@ -43,5 +43,5 @@ def _perform_orthorectification(self, variable, orthorect_data_name): orthorect_data = self[orthorect_data_name] variable += orthorect_data except KeyError: - logger.warning('Required dataset %s for orthorectification not available, skipping', orthorect_data_name) + logger.warning("Required dataset %s for orthorectification not available, skipping", orthorect_data_name) return variable diff --git a/satpy/readers/viirs_atms_sdr_base.py b/satpy/readers/viirs_atms_sdr_base.py index 048c601f84..be0a7a0d65 100644 --- a/satpy/readers/viirs_atms_sdr_base.py +++ b/satpy/readers/viirs_atms_sdr_base.py @@ -32,37 +32,37 @@ LOG = logging.getLogger(__name__) -VIIRS_DATASET_KEYS = {'GDNBO': 'VIIRS-DNB-GEO', - 'SVDNB': 'VIIRS-DNB-SDR', - 'GITCO': 'VIIRS-IMG-GEO-TC', - 'GIMGO': 'VIIRS-IMG-GEO', - 'SVI01': 'VIIRS-I1-SDR', - 'SVI02': 'VIIRS-I2-SDR', - 'SVI03': 'VIIRS-I3-SDR', - 'SVI04': 'VIIRS-I4-SDR', - 'SVI05': 'VIIRS-I5-SDR', - 'GMTCO': 'VIIRS-MOD-GEO-TC', - 'GMODO': 'VIIRS-MOD-GEO', - 'SVM01': 'VIIRS-M1-SDR', - 'SVM02': 'VIIRS-M2-SDR', - 'SVM03': 'VIIRS-M3-SDR', - 'SVM04': 'VIIRS-M4-SDR', - 'SVM05': 'VIIRS-M5-SDR', - 'SVM06': 'VIIRS-M6-SDR', - 'SVM07': 'VIIRS-M7-SDR', - 'SVM08': 'VIIRS-M8-SDR', - 'SVM09': 'VIIRS-M9-SDR', - 'SVM10': 'VIIRS-M10-SDR', - 'SVM11': 'VIIRS-M11-SDR', - 'SVM12': 'VIIRS-M12-SDR', - 'SVM13': 'VIIRS-M13-SDR', - 'SVM14': 'VIIRS-M14-SDR', - 'SVM15': 'VIIRS-M15-SDR', - 'SVM16': 'VIIRS-M16-SDR', - 'IVCDB': 'VIIRS-DualGain-Cal-IP'} 
-ATMS_DATASET_KEYS = {'SATMS': 'ATMS-SDR', - 'GATMO': 'ATMS-SDR-GEO', - 'TATMS': 'ATMS-TDR'} +VIIRS_DATASET_KEYS = {"GDNBO": "VIIRS-DNB-GEO", + "SVDNB": "VIIRS-DNB-SDR", + "GITCO": "VIIRS-IMG-GEO-TC", + "GIMGO": "VIIRS-IMG-GEO", + "SVI01": "VIIRS-I1-SDR", + "SVI02": "VIIRS-I2-SDR", + "SVI03": "VIIRS-I3-SDR", + "SVI04": "VIIRS-I4-SDR", + "SVI05": "VIIRS-I5-SDR", + "GMTCO": "VIIRS-MOD-GEO-TC", + "GMODO": "VIIRS-MOD-GEO", + "SVM01": "VIIRS-M1-SDR", + "SVM02": "VIIRS-M2-SDR", + "SVM03": "VIIRS-M3-SDR", + "SVM04": "VIIRS-M4-SDR", + "SVM05": "VIIRS-M5-SDR", + "SVM06": "VIIRS-M6-SDR", + "SVM07": "VIIRS-M7-SDR", + "SVM08": "VIIRS-M8-SDR", + "SVM09": "VIIRS-M9-SDR", + "SVM10": "VIIRS-M10-SDR", + "SVM11": "VIIRS-M11-SDR", + "SVM12": "VIIRS-M12-SDR", + "SVM13": "VIIRS-M13-SDR", + "SVM14": "VIIRS-M14-SDR", + "SVM15": "VIIRS-M15-SDR", + "SVM16": "VIIRS-M16-SDR", + "IVCDB": "VIIRS-DualGain-Cal-IP"} +ATMS_DATASET_KEYS = {"SATMS": "ATMS-SDR", + "GATMO": "ATMS-SDR-GEO", + "TATMS": "ATMS-TDR"} DATASET_KEYS = {} DATASET_KEYS.update(VIIRS_DATASET_KEYS) @@ -106,7 +106,7 @@ def _parse_datetime(self, datestr, timestr): datetime_str = (str(datestr.data.compute().astype(str)) + str(timestr.data.compute().astype(str))) - time_val = datetime.strptime(datetime_str, '%Y%m%d%H%M%S.%fZ') + time_val = datetime.strptime(datetime_str, "%Y%m%d%H%M%S.%fZ") if abs(time_val - NO_DATE) < EPSILON_TIME: # catch rare case when SDR files have incorrect date raise ValueError("Datetime invalid {}".format(time_val)) @@ -140,29 +140,29 @@ def end_orbit_number(self): def _get_aggr_path(self, fileinfo_key, aggr_default): dataset_group = DATASET_KEYS[self.datasets[0]] - default = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/' + aggr_default + default = "Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/" + aggr_default return self.filetype_info.get(fileinfo_key, default).format(dataset_group=dataset_group) @property def platform_name(self): """Get platform name.""" - default = '/attr/Platform_Short_Name' + default = "/attr/Platform_Short_Name" platform_path = self.filetype_info.get( - 'platform_name', default).format(**self.filetype_info) - platform_dict = {'NPP': 'Suomi-NPP', - 'JPSS-1': 'NOAA-20', - 'J01': 'NOAA-20', - 'JPSS-2': 'NOAA-21', - 'J02': 'NOAA-21'} + "platform_name", default).format(**self.filetype_info) + platform_dict = {"NPP": "Suomi-NPP", + "JPSS-1": "NOAA-20", + "J01": "NOAA-20", + "JPSS-2": "NOAA-21", + "J02": "NOAA-21"} return platform_dict.get(self[platform_path], self[platform_path]) @property def sensor_name(self): """Get sensor name.""" dataset_group = DATASET_KEYS[self.datasets[0]] - default = 'Data_Products/{dataset_group}/attr/Instrument_Short_Name' + default = "Data_Products/{dataset_group}/attr/Instrument_Short_Name" sensor_path = self.filetype_info.get( - 'sensor_name', default).format(dataset_group=dataset_group) + "sensor_name", default).format(dataset_group=dataset_group) return self[sensor_path].lower() def scale_swath_data(self, data, scaling_factors, dataset_group): @@ -178,7 +178,7 @@ def scale_swath_data(self, data, scaling_factors, dataset_group): def scale_data_to_specified_unit(self, data, dataset_id, ds_info): """Get sscale and offset factors and convert/scale data to given physical unit.""" var_path = self._generate_file_key(dataset_id, ds_info) - dataset_group = ds_info['dataset_group'] + dataset_group = ds_info["dataset_group"] file_units = _get_file_units(dataset_id, ds_info) output_units = ds_info.get("units", file_units) @@ -241,38 +241,38 @@ def expand_single_values(var, 
scans): else: expanded = np.repeat(var, scans) expanded.attrs = var.attrs - expanded.rename({expanded.dims[0]: 'y'}) + expanded.rename({expanded.dims[0]: "y"}) return expanded def _scan_size(self, dataset_group_name): """Get how many rows of data constitute one scanline.""" - if 'ATM' in dataset_group_name: + if "ATM" in dataset_group_name: scan_size = 1 - elif 'I' in dataset_group_name: + elif "I" in dataset_group_name: scan_size = 32 else: scan_size = 16 return scan_size def _generate_file_key(self, ds_id, ds_info, factors=False): - var_path = ds_info.get('file_key', 'All_Data/{dataset_group}_All/{calibration}') + var_path = ds_info.get("file_key", "All_Data/{dataset_group}_All/{calibration}") calibration = { - 'radiance': 'Radiance', - 'reflectance': 'Reflectance', - 'brightness_temperature': 'BrightnessTemperature', - }.get(ds_id.get('calibration')) - var_path = var_path.format(calibration=calibration, dataset_group=DATASET_KEYS[ds_info['dataset_group']]) - if ds_id['name'] in ['dnb_longitude', 'dnb_latitude']: + "radiance": "Radiance", + "reflectance": "Reflectance", + "brightness_temperature": "BrightnessTemperature", + }.get(ds_id.get("calibration")) + var_path = var_path.format(calibration=calibration, dataset_group=DATASET_KEYS[ds_info["dataset_group"]]) + if ds_id["name"] in ["dnb_longitude", "dnb_latitude"]: if self.use_tc is True: - return var_path + '_TC' - if self.use_tc is None and var_path + '_TC' in self.file_content: - return var_path + '_TC' + return var_path + "_TC" + if self.use_tc is None and var_path + "_TC" in self.file_content: + return var_path + "_TC" return var_path def _update_data_attributes(self, data, dataset_id, ds_info): file_units = _get_file_units(dataset_id, ds_info) output_units = ds_info.get("units", file_units) - i = getattr(data, 'attrs', {}) + i = getattr(data, "attrs", {}) i.update(ds_info) i.update({ "platform_name": self.platform_name, @@ -280,7 +280,7 @@ def _update_data_attributes(self, data, dataset_id, ds_info): "start_orbit": self.start_orbit_number, "end_orbit": self.end_orbit_number, "units": output_units, - "rows_per_scan": self._scan_size(ds_info['dataset_group']), + "rows_per_scan": self._scan_size(ds_info["dataset_group"]), }) i.update(dataset_id.to_dict()) data.attrs.update(i) @@ -304,7 +304,7 @@ def concatenate_dataset(self, dataset_group, var_path, **kwargs): data_chunks.append(variable.isel(y=slice(start_scan, start_scan + gscans * scan_size))) start_scan += gscans * scan_size - return xr.concat(data_chunks, 'y') + return xr.concat(data_chunks, "y") else: # This is not tested - Not sure this code is ever going to be used? A. 
Dybbroe # Mon Jan 2 13:31:21 2023 @@ -316,11 +316,11 @@ def _get_rows_per_granule(self, dataset_group): return [scan_size * gran_scans for gran_scans in scans_per_gran] def _get_scans_per_granule(self, dataset_group): - number_of_granules_path = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules' + number_of_granules_path = "Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules" nb_granules_path = number_of_granules_path.format(dataset_group=DATASET_KEYS[dataset_group]) scans = [] for granule in range(self[nb_granules_path]): - scans_path = 'Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans' + scans_path = "Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans" scans_path = scans_path.format(dataset_group=DATASET_KEYS[dataset_group], granule=granule) scans.append(self[scans_path]) return scans @@ -350,7 +350,7 @@ def available_datasets(self, configured_datasets=None): if is_avail is not None: yield is_avail, ds_info continue - dataset_group = [ds_group for ds_group in ds_info['dataset_groups'] if ds_group in self.datasets] + dataset_group = [ds_group for ds_group in ds_info["dataset_groups"] if ds_group in self.datasets] if dataset_group: yield True, ds_info elif is_avail is None: diff --git a/satpy/readers/viirs_compact.py b/satpy/readers/viirs_compact.py index b9f83e4287..af3a4ce766 100644 --- a/satpy/readers/viirs_compact.py +++ b/satpy/readers/viirs_compact.py @@ -67,9 +67,9 @@ h = 6.6260755e-34 # m2kg.s-1 k = 1.380658e-23 # m2kg.s-2.K-1 -short_names = {'NPP': 'Suomi-NPP', - 'J01': 'NOAA-20', - 'J02': 'NOAA-21'} +short_names = {"NPP": "Suomi-NPP", + "J01": "NOAA-20", + "J02": "NOAA-21"} class VIIRSCompactFileHandler(BaseFileHandler): @@ -83,28 +83,28 @@ def __init__(self, filename, filename_info, filetype_info): self.finfo = filename_info self.lons = None self.lats = None - if filetype_info['file_type'] == 'compact_m': - self.ch_type = 'MOD' - elif filetype_info['file_type'] == 'compact_dnb': - self.ch_type = 'DNB' + if filetype_info["file_type"] == "compact_m": + self.ch_type = "MOD" + elif filetype_info["file_type"] == "compact_dnb": + self.ch_type = "DNB" else: - raise IOError('Compact Viirs file type not recognized.') + raise IOError("Compact Viirs file type not recognized.") geo_data = self.h5f["Data_Products"]["VIIRS-%s-GEO" % self.ch_type]["VIIRS-%s-GEO_Gran_0" % self.ch_type] - self.min_lat = geo_data.attrs['South_Bounding_Coordinate'].item() - self.max_lat = geo_data.attrs['North_Bounding_Coordinate'].item() - self.min_lon = geo_data.attrs['West_Bounding_Coordinate'].item() - self.max_lon = geo_data.attrs['East_Bounding_Coordinate'].item() + self.min_lat = geo_data.attrs["South_Bounding_Coordinate"].item() + self.max_lat = geo_data.attrs["North_Bounding_Coordinate"].item() + self.min_lon = geo_data.attrs["West_Bounding_Coordinate"].item() + self.max_lon = geo_data.attrs["East_Bounding_Coordinate"].item() self.switch_to_cart = ((abs(self.max_lon - self.min_lon) > 90) or (max(abs(self.min_lat), abs(self.max_lat)) > 60)) self.scans = self.h5f["All_Data"]["NumberOfScans"][0] - self.geography = self.h5f["All_Data"]['VIIRS-%s-GEO_All' % self.ch_type] + self.geography = self.h5f["All_Data"]["VIIRS-%s-GEO_All" % self.ch_type] for key in self.h5f["All_Data"].keys(): if key.startswith("VIIRS") and key.endswith("SDR_All"): - channel = key.split('-')[1] + channel = key.split("-")[1] break # This supposes there is only one tiepoint zone in the track direction. 
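To make the granule handling above easier to follow, here is a minimal, self-contained sketch of the slicing-and-concatenation pattern that concatenate_dataset() applies: granules are sliced along-track using a per-granule scan count times the rows-per-scan value returned by _scan_size(), then re-joined along "y". The array shape and scan counts below are invented for illustration; only the pattern mirrors the reader code.

import numpy as np
import xarray as xr

# Hypothetical swath: three granules of 48, 48 and 32 scans, 16 rows per scan (M-band style).
scans_per_granule = [48, 48, 32]
rows_per_scan = 16
total_rows = sum(scans_per_granule) * rows_per_scan
variable = xr.DataArray(np.zeros((total_rows, 3200), dtype=np.float32), dims=("y", "x"))

# Slice out each granule along-track and stitch the pieces back together along "y",
# as concatenate_dataset() does above (it additionally skips zero-scan granules).
data_chunks = []
start_scan = 0
for gscans in scans_per_granule:
    nrows = gscans * rows_per_scan
    data_chunks.append(variable.isel(y=slice(start_scan, start_scan + nrows)))
    start_scan += nrows
stitched = xr.concat(data_chunks, "y")
assert stitched.sizes["y"] == total_rows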
@@ -134,9 +134,9 @@ def __init__(self, filename, filename_info, filetype_info): self.cache = {} self.mda = {} - short_name = np2str(self.h5f.attrs['Platform_Short_Name']) - self.mda['platform_name'] = short_names.get(short_name, short_name) - self.mda['sensor'] = 'viirs' + short_name = np2str(self.h5f.attrs["Platform_Short_Name"]) + self.mda["platform_name"] = short_names.get(short_name, short_name) + self.mda["sensor"] = "viirs" def __del__(self): """Close file handlers when we are done.""" @@ -145,75 +145,75 @@ def __del__(self): def get_dataset(self, key, info): """Load a dataset.""" - logger.debug('Reading %s.', key['name']) - if key['name'] in _channels_dict: + logger.debug("Reading %s.", key["name"]) + if key["name"] in _channels_dict: m_data = self.read_dataset(key, info) else: m_data = self.read_geo(key, info) m_data.attrs.update(info) - m_data.attrs['rows_per_scan'] = self.scan_size + m_data.attrs["rows_per_scan"] = self.scan_size return m_data def get_bounding_box(self): """Get the bounding box of the data.""" for key in self.h5f["Data_Products"].keys(): if key.startswith("VIIRS") and key.endswith("GEO"): - lats = self.h5f["Data_Products"][key][key + '_Gran_0'].attrs['G-Ring_Latitude'][()] - lons = self.h5f["Data_Products"][key][key + '_Gran_0'].attrs['G-Ring_Longitude'][()] + lats = self.h5f["Data_Products"][key][key + "_Gran_0"].attrs["G-Ring_Latitude"][()] + lons = self.h5f["Data_Products"][key][key + "_Gran_0"].attrs["G-Ring_Longitude"][()] break else: - raise KeyError('Cannot find bounding coordinates!') + raise KeyError("Cannot find bounding coordinates!") return lons.ravel(), lats.ravel() @property def start_time(self): """Get the start time.""" - return self.finfo['start_time'] + return self.finfo["start_time"] @property def end_time(self): """Get the end time.""" end_time = datetime.combine(self.start_time.date(), - self.finfo['end_time'].time()) + self.finfo["end_time"].time()) if end_time < self.start_time: end_time += timedelta(days=1) return end_time def read_geo(self, key, info): """Read angles.""" - pairs = {('satellite_azimuth_angle', 'satellite_zenith_angle'): + pairs = {("satellite_azimuth_angle", "satellite_zenith_angle"): ("SatelliteAzimuthAngle", "SatelliteZenithAngle"), - ('solar_azimuth_angle', 'solar_zenith_angle'): + ("solar_azimuth_angle", "solar_zenith_angle"): ("SolarAzimuthAngle", "SolarZenithAngle"), - ('dnb_solar_azimuth_angle', 'dnb_solar_zenith_angle'): + ("dnb_solar_azimuth_angle", "dnb_solar_zenith_angle"): ("SolarAzimuthAngle", "SolarZenithAngle"), - ('dnb_lunar_azimuth_angle', 'dnb_lunar_zenith_angle'): + ("dnb_lunar_azimuth_angle", "dnb_lunar_zenith_angle"): ("LunarAzimuthAngle", "LunarZenithAngle"), } if self.lons is None or self.lats is None: self.lons, self.lats = self.navigate() for pair, fkeys in pairs.items(): - if key['name'] in pair: + if key["name"] in pair: if (self.cache.get(pair[0]) is None or self.cache.get(pair[1]) is None): angles = self.angles(*fkeys) self.cache[pair[0]], self.cache[pair[1]] = angles - if key['name'] == pair[0]: - return xr.DataArray(self.cache[pair[0]], name=key['name'], - attrs=self.mda, dims=('y', 'x')) + if key["name"] == pair[0]: + return xr.DataArray(self.cache[pair[0]], name=key["name"], + attrs=self.mda, dims=("y", "x")) else: - return xr.DataArray(self.cache[pair[1]], name=key['name'], - attrs=self.mda, dims=('y', 'x')) + return xr.DataArray(self.cache[pair[1]], name=key["name"], + attrs=self.mda, dims=("y", "x")) - if info.get('standard_name') in ['latitude', 'longitude']: + if 
info.get("standard_name") in ["latitude", "longitude"]: mda = self.mda.copy() mda.update(info) - if info['standard_name'] == 'longitude': - return xr.DataArray(self.lons, attrs=mda, dims=('y', 'x')) + if info["standard_name"] == "longitude": + return xr.DataArray(self.lons, attrs=mda, dims=("y", "x")) else: - return xr.DataArray(self.lats, attrs=mda, dims=('y', 'x')) + return xr.DataArray(self.lats, attrs=mda, dims=("y", "x")) - if key['name'] == 'dnb_moon_illumination_fraction': + if key["name"] == "dnb_moon_illumination_fraction": mda = self.mda.copy() mda.update(info) return xr.DataArray(da.from_array(self.geography["MoonIllumFraction"]), @@ -222,7 +222,7 @@ def read_geo(self, key, info): def read_dataset(self, dataset_key, info): """Read a dataset.""" h5f = self.h5f - channel = _channels_dict[dataset_key['name']] + channel = _channels_dict[dataset_key["name"]] chan_dict = dict([(key.split("-")[1], key) for key in h5f["All_Data"].keys() if key.startswith("VIIRS")]) @@ -230,39 +230,39 @@ def read_dataset(self, dataset_key, info): h5rads = h5f["All_Data"][chan_dict[channel]]["Radiance"] chunks = h5rads.chunks or CHUNK_SIZE rads = xr.DataArray(da.from_array(h5rads, chunks=chunks), - name=dataset_key['name'], - dims=['y', 'x']).astype(np.float32) + name=dataset_key["name"], + dims=["y", "x"]).astype(np.float32) h5attrs = h5rads.attrs scans = h5f["All_Data"]["NumberOfScans"][0] rads = rads[:scans * 16, :] rads = rads.where(rads <= 65526) try: - rads = xr.where(rads <= h5attrs['Threshold'], - rads * h5attrs['RadianceScaleLow'] + - h5attrs['RadianceOffsetLow'], - rads * h5attrs['RadianceScaleHigh'] + - h5attrs['RadianceOffsetHigh']) + rads = xr.where(rads <= h5attrs["Threshold"], + rads * h5attrs["RadianceScaleLow"] + + h5attrs["RadianceOffsetLow"], + rads * h5attrs["RadianceScaleHigh"] + + h5attrs["RadianceOffsetHigh"]) except (KeyError, AttributeError): logger.info("Missing attribute for scaling of %s.", channel) pass unit = "W m-2 sr-1 μm-1" - if dataset_key['calibration'] == 'counts': + if dataset_key["calibration"] == "counts": raise NotImplementedError("Can't get counts from this data") - if dataset_key['calibration'] in ['reflectance', 'brightness_temperature']: + if dataset_key["calibration"] in ["reflectance", "brightness_temperature"]: # do calibrate try: # First guess: VIS or NIR data - a_vis = h5attrs['EquivalentWidth'] - b_vis = h5attrs['IntegratedSolarIrradiance'] - dse = h5attrs['EarthSunDistanceNormalised'] + a_vis = h5attrs["EquivalentWidth"] + b_vis = h5attrs["IntegratedSolarIrradiance"] + dse = h5attrs["EarthSunDistanceNormalised"] rads *= 100 * np.pi * a_vis / b_vis * (dse**2) unit = "%" except KeyError: # Maybe it's IR data? 
try: - a_ir = h5attrs['BandCorrectionCoefficientA'] - b_ir = h5attrs['BandCorrectionCoefficientB'] - lambda_c = h5attrs['CentralWaveLength'] + a_ir = h5attrs["BandCorrectionCoefficientA"] + b_ir = h5attrs["BandCorrectionCoefficientB"] + lambda_c = h5attrs["CentralWaveLength"] rads *= 1e6 rads = (h * c) / (k * lambda_c * np.log(1 + @@ -274,12 +274,12 @@ def read_dataset(self, dataset_key, info): except KeyError: logger.warning("Calibration failed.") - elif dataset_key['calibration'] != 'radiance': + elif dataset_key["calibration"] != "radiance": raise ValueError("Calibration parameter should be radiance, " "reflectance or brightness_temperature") rads = rads.clip(min=0) rads.attrs = self.mda - rads.attrs['units'] = unit + rads.attrs["units"] = unit return rads def expand_angle_and_nav(self, arrays): @@ -326,7 +326,7 @@ def navigate(self): return expanded def _get_geographical_chunks(self): - shape = self.geography['Longitude'].shape + shape = self.geography["Longitude"].shape horizontal_chunks = (self.nb_tiepoint_zones + 1).compute() chunks = (shape[0], tuple(horizontal_chunks)) return chunks diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index a8c6c934b2..646d7e0d17 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -86,28 +86,28 @@ def __init__(self, filename, filename_info, filetype_info): decode_cf=True, mask_and_scale=True, chunks={ - 'Columns': -1, - 'Rows': row_chunks_m, - 'Along_Scan_375m': -1, - 'Along_Track_375m': row_chunks_i, - 'Along_Scan_750m': -1, - 'Along_Track_750m': row_chunks_m, + "Columns": -1, + "Rows": row_chunks_m, + "Along_Scan_375m": -1, + "Along_Track_375m": row_chunks_i, + "Along_Scan_750m": -1, + "Along_Track_750m": row_chunks_m, }) - if 'Columns' in self.nc.dims: - self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) - elif 'Along_Track_375m' in self.nc.dims: - self.nc = self.nc.rename({'Along_Scan_375m': 'x', 'Along_Track_375m': 'y'}) - self.nc = self.nc.rename({'Along_Scan_750m': 'x', 'Along_Track_750m': 'y'}) + if "Columns" in self.nc.dims: + self.nc = self.nc.rename({"Columns": "x", "Rows": "y"}) + elif "Along_Track_375m" in self.nc.dims: + self.nc = self.nc.rename({"Along_Scan_375m": "x", "Along_Track_375m": "y"}) + self.nc = self.nc.rename({"Along_Scan_750m": "x", "Along_Track_750m": "y"}) # For some reason, no 'standard_name' is defined in some netCDF files, so # here we manually make the definitions. 
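The IR branch of read_dataset() in viirs_compact.py above converts radiance to brightness temperature by inverting the Planck function at the channel central wavelength, then applies the band-correction coefficients from the file. A minimal sketch of the Planck inversion follows; the radiance value, wavelength and unit-conversion factor are invented for illustration, and exactly how the BandCorrectionCoefficientA/B terms enter is deliberately left out since that depends on the product definition.

import numpy as np

# Planck constant h and Boltzmann constant k as defined near the top of viirs_compact.py;
# c is the speed of light.
h = 6.6260755e-34  # J s
c = 2.99792458e8   # m s-1
k = 1.380658e-23   # J K-1

def bt_from_radiance(radiance, wavelength):
    """Invert the Planck function: spectral radiance in W m-2 sr-1 m-1 -> brightness temperature in K."""
    return (h * c) / (k * wavelength * np.log(1 + (2 * h * c**2) / (wavelength**5 * radiance)))

# Hypothetical 10.76 um channel value, given per micrometre and rescaled to per metre
# (factor 1e6), analogous to the "rads *= 1e6" step in the reader.
rad_per_um = 9.0  # W m-2 sr-1 um-1 (invented value)
print(bt_from_radiance(rad_per_um * 1e6, 10.76e-6))  # roughly 295 K for this input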
- if 'Latitude' in self.nc: - self.nc['Latitude'].attrs.update({'standard_name': 'latitude'}) - if 'Longitude' in self.nc: - self.nc['Longitude'].attrs.update({'standard_name': 'longitude'}) + if "Latitude" in self.nc: + self.nc["Latitude"].attrs.update({"standard_name": "latitude"}) + if "Longitude" in self.nc: + self.nc["Longitude"].attrs.update({"standard_name": "longitude"}) - self.algorithm_version = filename_info['platform_shortname'] - self.sensor_name = 'viirs' + self.algorithm_version = filename_info["platform_shortname"] + self.sensor_name = "viirs" def rows_per_scans(self, data_arr: xr.DataArray) -> int: """Get number of array rows per instrument scan based on data resolution.""" @@ -115,7 +115,7 @@ def rows_per_scans(self, data_arr: xr.DataArray) -> int: def get_dataset(self, dataset_id: DataID, info: dict) -> xr.DataArray: """Get the dataset.""" - data_arr = self.nc[info['file_key']] + data_arr = self.nc[info["file_key"]] data_arr = self._mask_invalid(data_arr, info) units = info.get("units", data_arr.attrs.get("units")) if units is None or units == "unitless": @@ -150,27 +150,27 @@ def _decode_flag_meanings(data_arr: xr.DataArray): flag_meanings = data_arr.attrs.get("flag_meanings", None) if isinstance(flag_meanings, str) and "\n" not in flag_meanings: # only handle CF-standard flag meanings - data_arr.attrs['flag_meanings'] = [flag for flag in data_arr.attrs['flag_meanings'].split(' ')] + data_arr.attrs["flag_meanings"] = [flag for flag in data_arr.attrs["flag_meanings"].split(" ")] @property def start_time(self): """Get first date/time when observations were recorded.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get last date/time when observations were recorded.""" - return self.filename_info['end_time'] + return self.filename_info["end_time"] @property def platform_name(self): """Get platform name.""" - platform_path = self.filename_info['platform_shortname'] - platform_dict = {'NPP': 'Suomi-NPP', - 'JPSS-1': 'NOAA-20', - 'J01': 'NOAA-20', - 'JPSS-2': 'NOAA-21', - 'J02': 'NOAA-21'} + platform_path = self.filename_info["platform_shortname"] + platform_dict = {"NPP": "Suomi-NPP", + "JPSS-1": "NOAA-20", + "J01": "NOAA-20", + "JPSS-2": "NOAA-21", + "J02": "NOAA-21"} return platform_dict[platform_path.upper()] def available_datasets(self, configured_datasets=None): @@ -212,7 +212,7 @@ def available_datasets(self, configured_datasets=None): # file handler so let's yield early yield is_avail, ds_info continue - if self.file_type_matches(ds_info['file_type']) is None: + if self.file_type_matches(ds_info["file_type"]) is None: # this is not the file type for this dataset yield None, ds_info yield file_key in self.nc, ds_info @@ -278,18 +278,18 @@ def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: def _get_veg_index_good_mask(self) -> xr.DataArray: # each mask array should be TRUE when pixels are UNACCEPTABLE - qf1 = self.nc['QF1 Surface Reflectance'] + qf1 = self.nc["QF1 Surface Reflectance"] has_sun_glint = (qf1 & 0b11000000) > 0 is_cloudy = (qf1 & 0b00001100) > 0 # mask everything but "confident clear" cloud_quality = (qf1 & 0b00000011) < 0b10 - qf2 = self.nc['QF2 Surface Reflectance'] + qf2 = self.nc["QF2 Surface Reflectance"] has_snow_or_ice = (qf2 & 0b00100000) > 0 has_cloud_shadow = (qf2 & 0b00001000) > 0 water_mask = (qf2 & 0b00000111) has_water = (water_mask <= 0b010) | (water_mask == 0b101) # shallow water, deep ocean, arctic - qf7 = self.nc['QF7 Surface Reflectance'] + 
qf7 = self.nc["QF7 Surface Reflectance"] has_aerosols = (qf7 & 0b00001100) > 0b1000 # high aerosol quantity adjacent_to_cloud = (qf7 & 0b00000010) > 0 diff --git a/satpy/readers/viirs_edr_active_fires.py b/satpy/readers/viirs_edr_active_fires.py index f1bcf4d3cc..bd8f3f6d69 100644 --- a/satpy/readers/viirs_edr_active_fires.py +++ b/satpy/readers/viirs_edr_active_fires.py @@ -44,7 +44,7 @@ def __init__(self, filename, filename_info, filetype_info, super(VIIRSActiveFiresFileHandler, self).__init__( filename, filename_info, filetype_info, auto_maskandscale=auto_maskandscale, xarray_kwargs=xarray_kwargs) - self.prefix = filetype_info.get('variable_prefix') + self.prefix = filetype_info.get("variable_prefix") def get_dataset(self, dsid, dsinfo): """Get requested data as DataArray. @@ -57,24 +57,24 @@ def get_dataset(self, dsid, dsinfo): Dask DataArray: Data """ - key = dsinfo.get('file_key', dsid['name']).format(variable_prefix=self.prefix) + key = dsinfo.get("file_key", dsid["name"]).format(variable_prefix=self.prefix) data = self[key] # rename "phoney dims" - data = data.rename(dict(zip(data.dims, ['y', 'x']))) + data = data.rename(dict(zip(data.dims, ["y", "x"]))) # handle attributes from YAML - for key in ('units', 'standard_name', 'flag_meanings', 'flag_values', '_FillValue'): + for key in ("units", "standard_name", "flag_meanings", "flag_values", "_FillValue"): # we only want to add information that isn't present already if key in dsinfo and key not in data.attrs: data.attrs[key] = dsinfo[key] - if isinstance(data.attrs.get('flag_meanings'), str): - data.attrs['flag_meanings'] = data.attrs['flag_meanings'].split(' ') + if isinstance(data.attrs.get("flag_meanings"), str): + data.attrs["flag_meanings"] = data.attrs["flag_meanings"].split(" ") # use more common CF standard units - if data.attrs.get('units') == 'kelvins': - data.attrs['units'] = 'K' + if data.attrs.get("units") == "kelvins": + data.attrs["units"] = "K" - data.attrs["platform_name"] = PLATFORM_MAP.get(self.filename_info['satellite_name'].upper(), "unknown") + data.attrs["platform_name"] = PLATFORM_MAP.get(self.filename_info["satellite_name"].upper(), "unknown") data.attrs["sensor"] = self.sensor_name return data @@ -82,12 +82,12 @@ def get_dataset(self, dsid, dsinfo): @property def start_time(self): """Get first date/time when observations were recorded.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get last date/time when observations were recorded.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def sensor_name(self): @@ -112,33 +112,33 @@ def __init__(self, filename, filename_info, filetype_info): filetype_info: Filetype information """ - skip_rows = filetype_info.get('skip_rows', 15) - columns = filetype_info['columns'] + skip_rows = filetype_info.get("skip_rows", 15) + columns = filetype_info["columns"] self.file_content = dd.read_csv(filename, skiprows=skip_rows, header=None, names=columns) super(VIIRSActiveFiresTextFileHandler, self).__init__(filename, filename_info, filetype_info) - self.platform_name = PLATFORM_MAP.get(self.filename_info['satellite_name'].upper(), "unknown") + self.platform_name = PLATFORM_MAP.get(self.filename_info["satellite_name"].upper(), "unknown") def get_dataset(self, dsid, dsinfo): """Get requested data as DataArray.""" - ds = self[dsid['name']].to_dask_array(lengths=True) + ds = self[dsid["name"]].to_dask_array(lengths=True) data = 
xr.DataArray(ds, dims=("y",), attrs={"platform_name": self.platform_name, "sensor": "VIIRS"}) - for key in ('units', 'standard_name', 'flag_meanings', 'flag_values', '_FillValue'): + for key in ("units", "standard_name", "flag_meanings", "flag_values", "_FillValue"): # we only want to add information that isn't present already if key in dsinfo and key not in data.attrs: data.attrs[key] = dsinfo[key] - if isinstance(data.attrs.get('flag_meanings'), str): - data.attrs['flag_meanings'] = data.attrs['flag_meanings'].split(' ') + if isinstance(data.attrs.get("flag_meanings"), str): + data.attrs["flag_meanings"] = data.attrs["flag_meanings"].split(" ") return data @property def start_time(self): """Get first date/time when observations were recorded.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get last date/time when observations were recorded.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) def __getitem__(self, key): """Get file content for 'key'.""" diff --git a/satpy/readers/viirs_edr_flood.py b/satpy/readers/viirs_edr_flood.py index 2d9c319656..2625d6d8fc 100644 --- a/satpy/readers/viirs_edr_flood.py +++ b/satpy/readers/viirs_edr_flood.py @@ -29,17 +29,17 @@ class VIIRSEDRFlood(HDF4FileHandler): @property def start_time(self): """Get start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def sensor_name(self): """Get sensor name.""" - sensor = self['/attr/SensorIdentifyCode'] + sensor = self["/attr/SensorIdentifyCode"] if isinstance(sensor, np.ndarray): return str(sensor.astype(str)).lower() return sensor.lower() @@ -47,7 +47,7 @@ def sensor_name(self): @property def platform_name(self): """Get platform name.""" - platform_name = self['/attr/Satellitename'] + platform_name = self["/attr/Satellitename"] if isinstance(platform_name, np.ndarray): return str(platform_name.astype(str)).lower() return platform_name.lower() @@ -58,23 +58,23 @@ def get_metadata(self, data, ds_info): metadata.update(data.attrs) metadata.update(ds_info) metadata.update({ - 'sensor': self.sensor_name, - 'platform_name': self.platform_name, - 'start_time': self.start_time, - 'end_time': self.end_time, + "sensor": self.sensor_name, + "platform_name": self.platform_name, + "start_time": self.start_time, + "end_time": self.end_time, }) return metadata def get_dataset(self, ds_id, ds_info): """Get dataset.""" - data = self[ds_id['name']] + data = self[ds_id["name"]] data.attrs = self.get_metadata(data, ds_info) - fill = data.attrs.pop('_Fillvalue') - offset = data.attrs.get('add_offset') - scale_factor = data.attrs.get('scale_factor') + fill = data.attrs.pop("_Fillvalue") + offset = data.attrs.get("add_offset") + scale_factor = data.attrs.get("scale_factor") data = data.where(data != fill) if scale_factor is not None and offset is not None: @@ -85,25 +85,25 @@ def get_dataset(self, ds_id, ds_info): def get_area_def(self, ds_id): """Get area definition.""" - data = self[ds_id['name']] + data = self[ds_id["name"]] proj_dict = { - 'proj': 'latlong', - 'datum': 'WGS84', - 'ellps': 'WGS84', - 'no_defs': True + "proj": "latlong", + "datum": "WGS84", + "ellps": "WGS84", + "no_defs": True } - area_extent = [data.attrs.get('ProjectionMinLongitude'), 
data.attrs.get('ProjectionMinLatitude'), - data.attrs.get('ProjectionMaxLongitude'), data.attrs.get('ProjectionMaxLatitude')] + area_extent = [data.attrs.get("ProjectionMinLongitude"), data.attrs.get("ProjectionMinLatitude"), + data.attrs.get("ProjectionMaxLongitude"), data.attrs.get("ProjectionMaxLatitude")] area = geometry.AreaDefinition( - 'viirs_flood_area', - 'name_of_proj', - 'id_of_proj', + "viirs_flood_area", + "name_of_proj", + "id_of_proj", proj_dict, - int(self.filename_info['dim0']), - int(self.filename_info['dim1']), + int(self.filename_info["dim0"]), + int(self.filename_info["dim1"]), np.asarray(area_extent) ) diff --git a/satpy/readers/viirs_l1b.py b/satpy/readers/viirs_l1b.py index a265bb1f82..510a37165d 100644 --- a/satpy/readers/viirs_l1b.py +++ b/satpy/readers/viirs_l1b.py @@ -38,39 +38,39 @@ def _parse_datetime(self, datestr): def start_orbit_number(self): """Get start orbit number.""" try: - return int(self['/attr/orbit_number']) + return int(self["/attr/orbit_number"]) except KeyError: - return int(self['/attr/OrbitNumber']) + return int(self["/attr/OrbitNumber"]) @property def end_orbit_number(self): """Get end orbit number.""" try: - return int(self['/attr/orbit_number']) + return int(self["/attr/orbit_number"]) except KeyError: - return int(self['/attr/OrbitNumber']) + return int(self["/attr/OrbitNumber"]) @property def platform_name(self): """Get platform name.""" try: - res = self.get('/attr/platform', - self.filename_info['platform_shortname']) + res = self.get("/attr/platform", + self.filename_info["platform_shortname"]) except KeyError: - res = 'Unknown' + res = "Unknown" return { - 'JPSS-1': 'NOAA-20', - 'NP': 'Suomi-NPP', - 'J1': 'NOAA-20', - 'J2': 'NOAA-21', - 'JPSS-2': 'NOAA-21', + "JPSS-1": "NOAA-20", + "NP": "Suomi-NPP", + "J1": "NOAA-20", + "J2": "NOAA-21", + "JPSS-2": "NOAA-21", }.get(res, res) @property def sensor_name(self): """Get sensor name.""" - return self['/attr/instrument'].lower() + return self["/attr/instrument"].lower() def adjust_scaling_factors(self, factors, file_units, output_units): """Adjust scaling factors.""" @@ -96,35 +96,35 @@ def adjust_scaling_factors(self, factors, file_units, output_units): def get_shape(self, ds_id, ds_info): """Get shape.""" - var_path = self._dataset_name_to_var_path(ds_id['name'], ds_info) - return self.get(var_path + '/shape', 1) + var_path = self._dataset_name_to_var_path(ds_id["name"], ds_info) + return self.get(var_path + "/shape", 1) @property def start_time(self): """Get start time.""" - return self._parse_datetime(self['/attr/time_coverage_start']) + return self._parse_datetime(self["/attr/time_coverage_start"]) @property def end_time(self): """Get end time.""" - return self._parse_datetime(self['/attr/time_coverage_end']) + return self._parse_datetime(self["/attr/time_coverage_end"]) def _get_dataset_file_units(self, dataset_id, ds_info, var_path): - file_units = ds_info.get('file_units') + file_units = ds_info.get("file_units") if file_units is None: - file_units = self.get(var_path + '/attr/units') + file_units = self.get(var_path + "/attr/units") # they were almost completely CF compliant... 
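The get_area_def() method of the VIIRS EDR flood reader shown above wraps the file's bounding longitude/latitude attributes into a geographic (lat/lon) pyresample AreaDefinition. As a rough, self-contained sketch of that construction, the snippet below builds a comparable area from invented values; the grid size, extent and identifiers are placeholders, not values from any real product.

from pyresample import geometry

# Hypothetical 0.01-degree lat/lon grid; the real reader takes the extent from the
# Projection{Min,Max}{Longitude,Latitude} attributes and the shape from the filename.
proj_dict = {"proj": "latlong", "datum": "WGS84", "ellps": "WGS84", "no_defs": True}
width, height = 1000, 800
area_extent = (10.0, 40.0, 20.0, 48.0)  # (lon_min, lat_min, lon_max, lat_max)

area = geometry.AreaDefinition(
    "example_flood_area",      # area_id
    "example lat/lon grid",    # description
    "example_flood_area",      # proj_id
    proj_dict,
    width,
    height,
    area_extent,
)
print(area.pixel_size_x, area.pixel_size_y)  # 0.01 0.01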
if file_units == "none": file_units = "1" - if dataset_id.get('calibration') == 'radiance' and ds_info['units'] == 'W m-2 um-1 sr-1': - rad_units_path = var_path + '/attr/radiance_units' + if dataset_id.get("calibration") == "radiance" and ds_info["units"] == "W m-2 um-1 sr-1": + rad_units_path = var_path + "/attr/radiance_units" if rad_units_path in self: if file_units is None: - file_units = self[var_path + '/attr/radiance_units'] - if file_units == 'Watts/meter^2/steradian/micrometer': - file_units = 'W m-2 um-1 sr-1' - elif ds_info.get('units') == '%' and file_units is None: + file_units = self[var_path + "/attr/radiance_units"] + if file_units == "Watts/meter^2/steradian/micrometer": + file_units = "W m-2 um-1 sr-1" + elif ds_info.get("units") == "%" and file_units is None: # v1.1 and above of level 1 processing removed 'units' attribute # for all reflectance channels file_units = "1" @@ -132,54 +132,54 @@ def _get_dataset_file_units(self, dataset_id, ds_info, var_path): return file_units def _get_dataset_valid_range(self, dataset_id, ds_info, var_path): - if dataset_id.get('calibration') == 'radiance' and ds_info['units'] == 'W m-2 um-1 sr-1': - rad_units_path = var_path + '/attr/radiance_units' + if dataset_id.get("calibration") == "radiance" and ds_info["units"] == "W m-2 um-1 sr-1": + rad_units_path = var_path + "/attr/radiance_units" if rad_units_path in self: # we are getting a reflectance band but we want the radiance values # special scaling parameters - scale_factor = self[var_path + '/attr/radiance_scale_factor'] - scale_offset = self[var_path + '/attr/radiance_add_offset'] + scale_factor = self[var_path + "/attr/radiance_scale_factor"] + scale_offset = self[var_path + "/attr/radiance_add_offset"] else: # we are getting a btemp band but we want the radiance values # these are stored directly in the primary variable - scale_factor = self[var_path + '/attr/scale_factor'] - scale_offset = self[var_path + '/attr/add_offset'] - valid_min = self[var_path + '/attr/valid_min'] - valid_max = self[var_path + '/attr/valid_max'] - elif ds_info.get('units') == '%': + scale_factor = self[var_path + "/attr/scale_factor"] + scale_offset = self[var_path + "/attr/add_offset"] + valid_min = self[var_path + "/attr/valid_min"] + valid_max = self[var_path + "/attr/valid_max"] + elif ds_info.get("units") == "%": # normal reflectance - valid_min = self[var_path + '/attr/valid_min'] - valid_max = self[var_path + '/attr/valid_max'] - scale_factor = self[var_path + '/attr/scale_factor'] - scale_offset = self[var_path + '/attr/add_offset'] - elif ds_info.get('units') == 'K': + valid_min = self[var_path + "/attr/valid_min"] + valid_max = self[var_path + "/attr/valid_max"] + scale_factor = self[var_path + "/attr/scale_factor"] + scale_offset = self[var_path + "/attr/add_offset"] + elif ds_info.get("units") == "K": # normal brightness temperature # use a special LUT to get the actual values - lut_var_path = ds_info.get('lut', var_path + '_brightness_temperature_lut') + lut_var_path = ds_info.get("lut", var_path + "_brightness_temperature_lut") # we get the BT values from a look up table using the scaled radiance integers - valid_min = self[lut_var_path + '/attr/valid_min'] - valid_max = self[lut_var_path + '/attr/valid_max'] + valid_min = self[lut_var_path + "/attr/valid_min"] + valid_max = self[lut_var_path + "/attr/valid_max"] scale_factor = scale_offset = None else: - valid_min = self.get(var_path + '/attr/valid_min') - valid_max = self.get(var_path + '/attr/valid_max') - scale_factor = 
self.get(var_path + '/attr/scale_factor') - scale_offset = self.get(var_path + '/attr/add_offset') + valid_min = self.get(var_path + "/attr/valid_min") + valid_max = self.get(var_path + "/attr/valid_max") + scale_factor = self.get(var_path + "/attr/scale_factor") + scale_offset = self.get(var_path + "/attr/add_offset") return valid_min, valid_max, scale_factor, scale_offset def get_metadata(self, dataset_id, ds_info): """Get metadata.""" - var_path = self._dataset_name_to_var_path(dataset_id['name'], ds_info) + var_path = self._dataset_name_to_var_path(dataset_id["name"], ds_info) shape = self.get_shape(dataset_id, ds_info) file_units = self._get_dataset_file_units(dataset_id, ds_info, var_path) # Get extra metadata if self._is_scan_based_array(shape): - rows_per_scan = int(shape[0] / self['/dimension/number_of_scans']) - ds_info.setdefault('rows_per_scan', rows_per_scan) + rows_per_scan = int(shape[0] / self["/dimension/number_of_scans"]) + ds_info.setdefault("rows_per_scan", rows_per_scan) - i = getattr(self[var_path], 'attrs', {}) + i = getattr(self[var_path], "attrs", {}) i.update(ds_info) i.update(dataset_id.to_dict()) i.update({ @@ -195,22 +195,22 @@ def get_metadata(self, dataset_id, ds_info): return i def _is_scan_based_array(self, shape): - return '/dimension/number_of_scans' in self and isinstance(shape, tuple) and shape + return "/dimension/number_of_scans" in self and isinstance(shape, tuple) and shape def get_dataset(self, dataset_id, ds_info): """Get dataset.""" - var_path = self._dataset_name_to_var_path(dataset_id['name'], ds_info) + var_path = self._dataset_name_to_var_path(dataset_id["name"], ds_info) metadata = self.get_metadata(dataset_id, ds_info) valid_min, valid_max, scale_factor, scale_offset = self._get_dataset_valid_range(dataset_id, ds_info, var_path) - if dataset_id.get('calibration') == 'radiance' and ds_info['units'] == 'W m-2 um-1 sr-1': + if dataset_id.get("calibration") == "radiance" and ds_info["units"] == "W m-2 um-1 sr-1": data = self[var_path] - elif ds_info.get('units') == '%': + elif ds_info.get("units") == "%": data = self[var_path] - elif ds_info.get('units') == 'K': + elif ds_info.get("units") == "K": # normal brightness temperature # use a special LUT to get the actual values - lut_var_path = ds_info.get('lut', var_path + '_brightness_temperature_lut') + lut_var_path = ds_info.get("lut", var_path + "_brightness_temperature_lut") data = self[var_path] # we get the BT values from a look up table using the scaled radiance integers index_arr = data.data.astype(int) @@ -223,21 +223,21 @@ def get_dataset(self, dataset_id, ds_info): if valid_min is not None and valid_max is not None: data = data.where((data >= valid_min) & (data <= valid_max)) - if data.attrs.get('units') in ['%', 'K', '1', 'W m-2 um-1 sr-1'] and \ - 'flag_meanings' in data.attrs: + if data.attrs.get("units") in ["%", "K", "1", "W m-2 um-1 sr-1"] and \ + "flag_meanings" in data.attrs: # flag meanings don't mean anything anymore for these variables # these aren't category products - data.attrs.pop('flag_meanings', None) - data.attrs.pop('flag_values', None) + data.attrs.pop("flag_meanings", None) + data.attrs.pop("flag_values", None) factors = (scale_factor, scale_offset) - factors = self.adjust_scaling_factors(factors, metadata['file_units'], ds_info.get("units")) + factors = self.adjust_scaling_factors(factors, metadata["file_units"], ds_info.get("units")) if factors[0] != 1 or factors[1] != 0: data *= factors[0] data += factors[1] # rename dimensions to correspond to satpy's 'y' and 
'x' standard - if 'number_of_lines' in data.dims: - data = data.rename({'number_of_lines': 'y', 'number_of_pixels': 'x'}) + if "number_of_lines" in data.dims: + data = data.rename({"number_of_lines": "y", "number_of_pixels": "x"}) return data def available_datasets(self, configured_datasets=None): @@ -255,11 +255,11 @@ def available_datasets(self, configured_datasets=None): # file handler so let's yield early yield is_avail, ds_info continue - ft_matches = self.file_type_matches(ds_info['file_type']) - var_path = self._dataset_name_to_var_path(ds_info['name'], ds_info) + ft_matches = self.file_type_matches(ds_info["file_type"]) + var_path = self._dataset_name_to_var_path(ds_info["name"], ds_info) is_in_file = var_path in self yield ft_matches and is_in_file, ds_info @staticmethod def _dataset_name_to_var_path(dataset_name: str, ds_info: dict) -> str: - return ds_info.get('file_key', 'observation_data/{}'.format(dataset_name)) + return ds_info.get("file_key", "observation_data/{}".format(dataset_name)) diff --git a/satpy/readers/viirs_sdr.py b/satpy/readers/viirs_sdr.py index 71379b2066..db9ba9ba10 100644 --- a/satpy/readers/viirs_sdr.py +++ b/satpy/readers/viirs_sdr.py @@ -83,18 +83,18 @@ class VIIRSSDRFileHandler(JPSS_SDR_FileHandler): def __init__(self, filename, filename_info, filetype_info, use_tc=None, **kwargs): """Initialize file handler.""" - self.datasets = filename_info['datasets'].split('-') + self.datasets = filename_info["datasets"].split("-") self.use_tc = use_tc super().__init__(filename, filename_info, filetype_info, **kwargs) def __getitem__(self, item): """Get item.""" - if '*' in item: + if "*" in item: # this is an aggregated field that can't easily be loaded, need to # join things together idx = 0 base_item = item - item = base_item.replace('*', str(idx)) + item = base_item.replace("*", str(idx)) result = [] while True: try: @@ -106,7 +106,7 @@ def __getitem__(self, item): break idx += 1 - item = base_item.replace('*', str(idx)) + item = base_item.replace("*", str(idx)) return result else: return super().__getitem__(item) @@ -120,11 +120,11 @@ def get_dataset(self, dataset_id, ds_info): scans for each granule is read from: ``Data_Products/...Gran_x/N_Number_Of_Scans``. 
""" - dataset_group = [ds_group for ds_group in ds_info['dataset_groups'] if ds_group in self.datasets] + dataset_group = [ds_group for ds_group in ds_info["dataset_groups"] if ds_group in self.datasets] if not dataset_group: return dataset_group = dataset_group[0] - ds_info['dataset_group'] = dataset_group + ds_info["dataset_group"] = dataset_group var_path = self._generate_file_key(dataset_id, ds_info) data = self.concatenate_dataset(dataset_group, var_path) @@ -138,17 +138,17 @@ def get_dataset(self, dataset_id, ds_info): def get_bounding_box(self): """Get the bounding box of this file.""" from pyproj import Geod - geod = Geod(ellps='WGS84') + geod = Geod(ellps="WGS84") dataset_group = DATASET_KEYS[self.datasets[0]] idx = 0 lons_ring = None lats_ring = None while True: - path = 'Data_Products/{dataset_group}/{dataset_group}_Gran_{idx}/attr/' + path = "Data_Products/{dataset_group}/{dataset_group}_Gran_{idx}/attr/" prefix = path.format(dataset_group=dataset_group, idx=idx) try: - lats = self.file_content[prefix + 'G-Ring_Latitude'] - lons = self.file_content[prefix + 'G-Ring_Longitude'] + lats = self.file_content[prefix + "G-Ring_Latitude"] + lons = self.file_content[prefix + "G-Ring_Longitude"] if lons_ring is None: lons_ring = lons lats_ring = lats @@ -215,16 +215,16 @@ def filter_filenames_by_info(self, filename_items): geo_del = [] viirs_del = [] for filename, filename_info in filename_items: - datasets = filename_info['datasets'].split('-') + datasets = filename_info["datasets"].split("-") if not self._is_viirs_dataset(datasets): viirs_del.append(filename) - if ('GITCO' in datasets) or ('GMTCO' in datasets): + if ("GITCO" in datasets) or ("GMTCO" in datasets): if self.use_tc is False: geo_del.append(filename) else: geo_keep.append(filename) - elif ('GIMGO' in datasets) or ('GMODO' in datasets): + elif ("GIMGO" in datasets) or ("GMODO" in datasets): if self.use_tc is True: geo_del.append(filename) else: @@ -240,20 +240,20 @@ def _remove_non_viirs_datasets_from_files(self, filename_items, files_to_edit): return self._remove_datasets_from_files(filename_items, files_to_edit, no_viirs) def _remove_geo_datasets_from_files(self, filename_items, files_to_edit): - datasets_to_consider = ['GITCO', 'GMTCO', 'GIMGO', 'GMODO'] + datasets_to_consider = ["GITCO", "GMTCO", "GIMGO", "GMODO"] return self._remove_datasets_from_files(filename_items, files_to_edit, datasets_to_consider) def _remove_datasets_from_files(self, filename_items, files_to_edit, considered_datasets): fdict = dict(filename_items) for to_del in files_to_edit: - fdict[to_del]['datasets'] = fdict[to_del]['datasets'].split('-') + fdict[to_del]["datasets"] = fdict[to_del]["datasets"].split("-") for dataset in considered_datasets: with suppress(ValueError): - fdict[to_del]['datasets'].remove(dataset) - if not fdict[to_del]['datasets']: + fdict[to_del]["datasets"].remove(dataset) + if not fdict[to_del]["datasets"]: del fdict[to_del] else: - fdict[to_del]['datasets'] = "-".join(fdict[to_del]['datasets']) + fdict[to_del]["datasets"] = "-".join(fdict[to_del]["datasets"]) filename_items = fdict.items() return filename_items @@ -269,15 +269,15 @@ def _load_filenames_from_geo_ref(self, dsid): try: # get the filename and remove the creation time # which is often wrong - fn = fh['/attr/N_GEO_Ref'][:46] + '*.h5' + fn = fh["/attr/N_GEO_Ref"][:46] + "*.h5" fns.extend(glob(os.path.join(base_dir, fn))) # usually is non-terrain corrected file, add the terrain # corrected file too - if fn[:5] == 'GIMGO': - fn = 'GITCO' + fn[5:] - elif fn[:5] == 
'GMODO': - fn = 'GMTCO' + fn[5:] + if fn[:5] == "GIMGO": + fn = "GITCO" + fn[5:] + elif fn[:5] == "GMODO": + fn = "GMTCO" + fn[5:] else: continue fns.extend(glob(os.path.join(base_dir, fn))) @@ -288,22 +288,22 @@ def _load_filenames_from_geo_ref(self, dsid): def _get_primary_secondary_geo_groups(self, ds_info): """Find out which geolocation files are needed.""" - if ds_info['dataset_groups'][0].startswith('GM'): + if ds_info["dataset_groups"][0].startswith("GM"): if self.use_tc is False: - prime_geo = 'GMODO' - second_geo = 'GMTCO' + prime_geo = "GMODO" + second_geo = "GMTCO" else: - prime_geo = 'GMTCO' - second_geo = 'GMODO' - elif ds_info['dataset_groups'][0].startswith('GI'): + prime_geo = "GMTCO" + second_geo = "GMODO" + elif ds_info["dataset_groups"][0].startswith("GI"): if self.use_tc is False: - prime_geo = 'GIMGO' - second_geo = 'GITCO' + prime_geo = "GIMGO" + second_geo = "GITCO" else: - prime_geo = 'GITCO' - second_geo = 'GIMGO' + prime_geo = "GITCO" + second_geo = "GIMGO" else: - raise ValueError('Unknown dataset group %s' % ds_info['dataset_groups'][0]) + raise ValueError("Unknown dataset group %s" % ds_info["dataset_groups"][0]) return prime_geo, second_geo def get_right_geo_fhs(self, dsid, fhs): @@ -313,7 +313,7 @@ def get_right_geo_fhs(self, dsid, fhs): desired, other = split_desired_other(fhs, prime_geo, second_geo) if desired: try: - ds_info['dataset_groups'].remove(second_geo) + ds_info["dataset_groups"].remove(second_geo) except ValueError: pass return desired @@ -324,13 +324,13 @@ def _get_file_handlers(self, dsid): """Get the file handler to load this dataset.""" ds_info = self.all_ids[dsid] - fhs = [fh for fh in self.file_handlers['generic_file'] - if set(fh.datasets) & set(ds_info['dataset_groups'])] + fhs = [fh for fh in self.file_handlers["generic_file"] + if set(fh.datasets) & set(ds_info["dataset_groups"])] if not fhs: LOG.warning("Required file type '%s' not found or loaded for " - "'%s'", ds_info['file_type'], dsid['name']) + "'%s'", ds_info["file_type"], dsid["name"]) else: - if len(set(ds_info['dataset_groups']) & {'GITCO', 'GIMGO', 'GMTCO', 'GMODO'}) > 1: + if len(set(ds_info["dataset_groups"]) & {"GITCO", "GIMGO", "GMTCO", "GMODO"}) > 1: fhs = self.get_right_geo_fhs(dsid, fhs) return fhs @@ -351,12 +351,12 @@ def _get_coordinates_for_dataset_key(self, dsid): # check the dataset file for the geolocation filename geo_filenames = self._load_filenames_from_geo_ref(dsid) self._create_new_geo_file_handlers(geo_filenames) - self._remove_not_loaded_geo_dataset_group(c_info['dataset_groups'], prime_geo, second_geo) + self._remove_not_loaded_geo_dataset_group(c_info["dataset_groups"], prime_geo, second_geo) return coords def _geo_dataset_groups(self, c_info): - if len(c_info['dataset_groups']) == 1: # filtering already done + if len(c_info["dataset_groups"]) == 1: # filtering already done return None, None try: prime_geo, second_geo = self._get_primary_secondary_geo_groups(c_info) @@ -365,12 +365,12 @@ def _geo_dataset_groups(self, c_info): return None, None def _create_new_geo_file_handlers(self, geo_filenames): - existing_filenames = set([fh.filename for fh in self.file_handlers['generic_file']]) + existing_filenames = set([fh.filename for fh in self.file_handlers["generic_file"]]) geo_filenames = set(geo_filenames) - existing_filenames self.create_filehandlers(geo_filenames) def _remove_not_loaded_geo_dataset_group(self, c_dataset_groups, prime_geo, second_geo): - all_fhs = self.file_handlers['generic_file'] + all_fhs = self.file_handlers["generic_file"] desired, 
other = split_desired_other(all_fhs, prime_geo, second_geo) group_to_remove = second_geo if desired else prime_geo c_dataset_groups.remove(group_to_remove) diff --git a/satpy/readers/viirs_vgac_l1c_nc.py b/satpy/readers/viirs_vgac_l1c_nc.py index e4a29c27f1..0fa8ddf782 100644 --- a/satpy/readers/viirs_vgac_l1c_nc.py +++ b/satpy/readers/viirs_vgac_l1c_nc.py @@ -37,9 +37,9 @@ def __init__(self, filename, filename_info, filetype_info): filename, filename_info, filetype_info) self.engine = "h5netcdf" - self._start_time = filename_info['start_time'] + self._start_time = filename_info["start_time"] self._end_time = None - self.sensor = 'viirs' + self.sensor = "viirs" self.filename_info = filename_info def calibrate(self, data, yaml_info, file_key, nc): @@ -75,11 +75,11 @@ def set_time_attrs(self, data): def get_dataset(self, key, yaml_info): """Get dataset.""" - logger.debug("Getting data for: %s", yaml_info['name']) + logger.debug("Getting data for: %s", yaml_info["name"]) nc = xr.open_dataset(self.filename, engine=self.engine, decode_times=False, - chunks={'y': CHUNK_SIZE, 'x': 800}) - name = yaml_info.get('nc_store_name', yaml_info['name']) - file_key = yaml_info.get('nc_key', name) + chunks={"y": CHUNK_SIZE, "x": 800}) + name = yaml_info.get("nc_store_name", yaml_info["name"]) + file_key = yaml_info.get("nc_key", name) data = nc[file_key] data = self.calibrate(data, yaml_info, file_key, nc) data.attrs.update(nc.attrs) # For now add global attributes to all datasets diff --git a/satpy/readers/virr_l1b.py b/satpy/readers/virr_l1b.py index 0ffe7251cb..260666ff8b 100644 --- a/satpy/readers/virr_l1b.py +++ b/satpy/readers/virr_l1b.py @@ -77,24 +77,24 @@ class VIRR_L1B(HDF5FileHandler): def __init__(self, filename, filename_info, filetype_info): """Open file and perform initial setup.""" super(VIRR_L1B, self).__init__(filename, filename_info, filetype_info) - LOG.debug('day/night flag for {0}: {1}'.format(filename, self['/attr/Day Or Night Flag'])) - self.geolocation_prefix = filetype_info['geolocation_prefix'] - self.platform_id = filename_info['platform_id'] - self.l1b_prefix = 'Data/' - self.wave_number = 'Emissive_Centroid_Wave_Number' + LOG.debug("day/night flag for {0}: {1}".format(filename, self["/attr/Day Or Night Flag"])) + self.geolocation_prefix = filetype_info["geolocation_prefix"] + self.platform_id = filename_info["platform_id"] + self.l1b_prefix = "Data/" + self.wave_number = "Emissive_Centroid_Wave_Number" # Else filename_info['platform_id'] == FY3C. 
- if filename_info['platform_id'] == 'FY3B': - self.l1b_prefix = '' - self.wave_number = 'Emmisive_Centroid_Wave_Number' + if filename_info["platform_id"] == "FY3B": + self.l1b_prefix = "" + self.wave_number = "Emmisive_Centroid_Wave_Number" def get_dataset(self, dataset_id, ds_info): """Create DataArray from file content for `dataset_id`.""" - file_key = self.geolocation_prefix + ds_info.get('file_key', dataset_id['name']) - if self.platform_id == 'FY3B': - file_key = file_key.replace('Data/', '') + file_key = self.geolocation_prefix + ds_info.get("file_key", dataset_id["name"]) + if self.platform_id == "FY3B": + file_key = file_key.replace("Data/", "") data = self[file_key] - band_index = ds_info.get('band_index') - valid_range = data.attrs.pop('valid_range', None) + band_index = ds_info.get("band_index") + valid_range = data.attrs.pop("valid_range", None) if isinstance(valid_range, np.ndarray): valid_range = valid_range.tolist() if band_index is not None: @@ -102,50 +102,50 @@ def get_dataset(self, dataset_id, ds_info): if valid_range: data = data.where((data >= valid_range[0]) & (data <= valid_range[1])) - if 'Emissive' in file_key: + if "Emissive" in file_key: self._calibrate_emissive(data, band_index) - elif 'RefSB' in file_key: + elif "RefSB" in file_key: data = self._calibrate_reflective(data, band_index) else: - slope = self._correct_slope(self[file_key + '/attr/Slope']) - intercept = self[file_key + '/attr/Intercept'] + slope = self._correct_slope(self[file_key + "/attr/Slope"]) + intercept = self[file_key + "/attr/Intercept"] if valid_range: data = data.where((data >= valid_range[0]) & (data <= valid_range[1])) data = data * slope + intercept - new_dims = {old: new for old, new in zip(data.dims, ('y', 'x'))} + new_dims = {old: new for old, new in zip(data.dims, ("y", "x"))} data = data.rename(new_dims) # use lowercase sensor name to be consistent with the rest of satpy - data.attrs.update({'platform_name': self['/attr/Satellite Name'], - 'sensor': self['/attr/Sensor Identification Code'].lower()}) + data.attrs.update({"platform_name": self["/attr/Satellite Name"], + "sensor": self["/attr/Sensor Identification Code"].lower()}) data.attrs.update(ds_info) - units = self.get(file_key + '/attr/units') - if units is not None and str(units).lower() != 'none': - data.attrs.update({'units': self.get(file_key + '/attr/units')}) - elif data.attrs.get('calibration') == 'reflectance': - data.attrs.update({'units': '%'}) + units = self.get(file_key + "/attr/units") + if units is not None and str(units).lower() != "none": + data.attrs.update({"units": self.get(file_key + "/attr/units")}) + elif data.attrs.get("calibration") == "reflectance": + data.attrs.update({"units": "%"}) else: - data.attrs.update({'units': '1'}) + data.attrs.update({"units": "1"}) return data def _calibrate_reflective(self, data, band_index): - if self.platform_id == 'FY3B': + if self.platform_id == "FY3B": coeffs = da.from_array(FY3B_REF_COEFFS, chunks=-1) else: - coeffs = self['/attr/RefSB_Cal_Coefficients'] + coeffs = self["/attr/RefSB_Cal_Coefficients"] slope = self._correct_slope(coeffs[0::2]) intercept = coeffs[1::2] data = data * slope[band_index] + intercept[band_index] return data def _calibrate_emissive(self, data, band_index): - slope = self._correct_slope(self[self.l1b_prefix + 'Emissive_Radiance_Scales']. + slope = self._correct_slope(self[self.l1b_prefix + "Emissive_Radiance_Scales"]. 
data[:, band_index][:, np.newaxis]) - intercept = self[self.l1b_prefix + 'Emissive_Radiance_Offsets'].data[:, band_index][:, np.newaxis] + intercept = self[self.l1b_prefix + "Emissive_Radiance_Offsets"].data[:, band_index][:, np.newaxis] # Converts cm^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. - wave_number = self['/attr/' + self.wave_number][band_index] * 100 + wave_number = self["/attr/" + self.wave_number][band_index] * 100 bt_data = rad2temp(wave_number, (data.data * slope + intercept) * 1e-5) if isinstance(bt_data, np.ndarray): # old versions of pyspectral produce numpy arrays @@ -161,11 +161,11 @@ def _correct_slope(self, slope): @property def start_time(self): """Get starting observation time.""" - start_time = self['/attr/Observing Beginning Date'] + 'T' + self['/attr/Observing Beginning Time'] + 'Z' - return datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%S.%fZ') + start_time = self["/attr/Observing Beginning Date"] + "T" + self["/attr/Observing Beginning Time"] + "Z" + return datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get ending observation time.""" - end_time = self['/attr/Observing Ending Date'] + 'T' + self['/attr/Observing Ending Time'] + 'Z' - return datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%S.%fZ') + end_time = self["/attr/Observing Ending Date"] + "T" + self["/attr/Observing Ending Time"] + "Z" + return datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ") diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index 1c4e68d621..ff3599052a 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -87,13 +87,13 @@ def _match_filenames(filenames, pattern): def _verify_reader_info_assign_config_files(config, config_files): try: - reader_info = config['reader'] + reader_info = config["reader"] except KeyError: raise KeyError( "Malformed config file {}: missing reader 'reader'".format( config_files)) else: - reader_info['config_files'] = config_files + reader_info["config_files"] = config_files def load_yaml_configs(*config_files, loader=Loader): @@ -113,9 +113,9 @@ def load_yaml_configs(*config_files, loader=Loader): """ config = {} - logger.debug('Reading %s', str(config_files)) + logger.debug("Reading %s", str(config_files)) for config_file in config_files: - with open(config_file, 'r', encoding='utf-8') as fd: + with open(config_file, "r", encoding="utf-8") as fd: config = recursive_dict_update(config, yaml.load(fd, Loader=loader)) _verify_reader_info_assign_config_files(config, config_files) return config @@ -136,23 +136,23 @@ def __init__(self, config_dict): "deprecated. 
Use ReaderClass.from_config_files " "instead.") self.config = config_dict - self.info = self.config['reader'] - self.name = self.info['name'] + self.info = self.config["reader"] + self.name = self.info["name"] self.file_patterns = [] - for file_type, filetype_info in self.config['file_types'].items(): - filetype_info.setdefault('file_type', file_type) + for file_type, filetype_info in self.config["file_types"].items(): + filetype_info.setdefault("file_type", file_type) # correct separator if needed - file_patterns = [os.path.join(*pattern.split('/')) - for pattern in filetype_info['file_patterns']] - filetype_info['file_patterns'] = file_patterns + file_patterns = [os.path.join(*pattern.split("/")) + for pattern in filetype_info["file_patterns"]] + filetype_info["file_patterns"] = file_patterns self.file_patterns.extend(file_patterns) - if 'sensors' in self.info and not isinstance(self.info['sensors'], (list, tuple)): - self.info['sensors'] = [self.info['sensors']] - self.datasets = self.config.get('datasets', {}) - self._id_keys = self.info.get('data_identification_keys', default_id_keys_config) - self._co_keys = self.info.get('coord_identification_keys', default_co_keys_config) - self.info['filenames'] = [] + if "sensors" in self.info and not isinstance(self.info["sensors"], (list, tuple)): + self.info["sensors"] = [self.info["sensors"]] + self.datasets = self.config.get("datasets", {}) + self._id_keys = self.info.get("data_identification_keys", default_id_keys_config) + self._co_keys = self.info.get("coord_identification_keys", default_co_keys_config) + self.info["filenames"] = [] self.all_ids = {} self.load_ds_ids_from_config() @@ -160,12 +160,12 @@ def __init__(self, config_dict): def from_config_files(cls, *config_files, **reader_kwargs): """Create a reader instance from one or more YAML configuration files.""" config_dict = load_yaml_configs(*config_files) - return config_dict['reader']['reader'](config_dict, **reader_kwargs) + return config_dict["reader"]["reader"](config_dict, **reader_kwargs) @property def sensor_names(self): """Names of sensors whose data is being loaded by this reader.""" - return self.info['sensors'] or [] + return self.info["sensors"] or [] @property def all_dataset_ids(self): @@ -176,7 +176,7 @@ def all_dataset_ids(self): def all_dataset_names(self): """Get names of all datasets known to this reader.""" # remove the duplicates from various calibration and resolutions - return set(ds_id['name'] for ds_id in self.all_dataset_ids) + return set(ds_id["name"] for ds_id in self.all_dataset_ids) @property def available_dataset_ids(self): @@ -188,7 +188,7 @@ def available_dataset_ids(self): @property def available_dataset_names(self): """Get names of datasets that are loadable by this reader.""" - return (ds_id['name'] for ds_id in self.available_dataset_ids) + return (ds_id["name"] for ds_id in self.available_dataset_ids) @property @abstractmethod @@ -243,7 +243,7 @@ def select_files_from_directory( """ filenames = set() if directory is None: - directory = '' + directory = "" # all the glob patterns that we are going to look at all_globs = {os.path.join(directory, globify(pattern)) for pattern in self.file_patterns} @@ -286,9 +286,9 @@ def load_ds_ids_from_config(self): for dataset in self.datasets.values(): # xarray doesn't like concatenating attributes that are lists # https://github.com/pydata/xarray/issues/2060 - if 'coordinates' in dataset and \ - isinstance(dataset['coordinates'], list): - dataset['coordinates'] = tuple(dataset['coordinates']) + if 
"coordinates" in dataset and \ + isinstance(dataset["coordinates"], list): + dataset["coordinates"] = tuple(dataset["coordinates"]) id_keys = get_keys_from_config(self._id_keys, dataset) # Build each permutation/product of the dataset @@ -316,10 +316,10 @@ def _build_id_permutations(self, dataset, id_keys): """Build each permutation/product of the dataset.""" id_kwargs = [] for key, idval in id_keys.items(): - val = dataset.get(key, idval.get('default') if idval is not None else None) + val = dataset.get(key, idval.get("default") if idval is not None else None) val_type = None if idval is not None: - val_type = idval.get('type') + val_type = idval.get("type") if val_type is not None and issubclass(val_type, tuple): # special case: wavelength can be [min, nominal, max] # but is still considered 1 option @@ -363,7 +363,7 @@ def __init__(self, self.file_handlers = {} self.available_ids = {} - self.filter_filenames = self.info.get('filter_filenames', filter_filenames) + self.filter_filenames = self.info.get("filter_filenames", filter_filenames) self.filter_parameters = filter_parameters or {} self.register_data_files() @@ -371,7 +371,7 @@ def __init__(self, def sensor_names(self): """Names of sensors whose data is being loaded by this reader.""" if not self.file_handlers: - return self.info['sensors'] + return self.info["sensors"] file_handlers = (handlers[0] for handlers in self.file_handlers.values()) @@ -382,7 +382,7 @@ def sensor_names(self): except NotImplementedError: continue if not sensor_names: - return self.info['sensors'] + return self.info["sensors"] return sorted(sensor_names) @property @@ -453,11 +453,11 @@ def find_required_filehandlers(self, requirements, filename_info): def sorted_filetype_items(self): """Sort the instance's filetypes in using order.""" processed_types = [] - file_type_items = deque(self.config['file_types'].items()) + file_type_items = deque(self.config["file_types"].items()) while len(file_type_items): filetype, filetype_info = file_type_items.popleft() - requirements = filetype_info.get('requires') + requirements = filetype_info.get("requires") if requirements is not None: # requirements have not been processed yet -> wait missing = [req for req in requirements @@ -475,7 +475,7 @@ def filename_items_for_filetype(filenames, filetype_info): if not isinstance(filenames, set): # we perform set operations later on to improve performance filenames = set(filenames) - for pattern in filetype_info['file_patterns']: + for pattern in filetype_info["file_patterns"]: matched_files = set() matches = _match_filenames(filenames, pattern) for filename in matches: @@ -491,8 +491,8 @@ def filename_items_for_filetype(filenames, filetype_info): def _new_filehandler_instances(self, filetype_info, filename_items, fh_kwargs=None): """Generate new filehandler instances.""" - requirements = filetype_info.get('requires') - filetype_cls = filetype_info['file_reader'] + requirements = filetype_info.get("requires") + filetype_cls = filetype_info["file_reader"] if fh_kwargs is None: fh_kwargs = {} @@ -507,15 +507,15 @@ def _new_filehandler_instances(self, filetype_info, filename_items, fh_kwargs=No warnings.warn(msg, stacklevel=4) continue except RuntimeError as err: - warnings.warn(str(err) + ' for {}'.format(filename), stacklevel=4) + warnings.warn(str(err) + " for {}".format(filename), stacklevel=4) continue yield filetype_cls(filename, filename_info, filetype_info, *req_fh, **fh_kwargs) def time_matches(self, fstart, fend): """Check that a file's start and end time mtach 
filter_parameters of this reader.""" - start_time = self.filter_parameters.get('start_time') - end_time = self.filter_parameters.get('end_time') + start_time = self.filter_parameters.get("start_time") + end_time = self.filter_parameters.get("end_time") fend = fend or fstart if start_time and fend and fend < start_time: return False @@ -527,17 +527,17 @@ def metadata_matches(self, sample_dict, file_handler=None): """Check that file metadata matches filter_parameters of this reader.""" # special handling of start/end times if not self.time_matches( - sample_dict.get('start_time'), sample_dict.get('end_time')): + sample_dict.get("start_time"), sample_dict.get("end_time")): return False for key, val in self.filter_parameters.items(): - if key != 'area' and key not in sample_dict: + if key != "area" and key not in sample_dict: continue - if key in ['start_time', 'end_time']: + if key in ["start_time", "end_time"]: continue - elif key == 'area' and file_handler: + elif key == "area" and file_handler: if not self.check_file_covers_area(file_handler, val): - logger.info('Filtering out %s based on area', + logger.info("Filtering out %s based on area", file_handler.filename) break elif key in sample_dict and val != sample_dict[key]: @@ -556,22 +556,22 @@ def filter_filenames_by_info(self, filename_items): the requested end time. """ for filename, filename_info in filename_items: - fend = filename_info.get('end_time') - fstart = filename_info.setdefault('start_time', fend) + fend = filename_info.get("end_time") + fstart = filename_info.setdefault("start_time", fend) if fend and fend < fstart: # correct for filenames with 1 date and 2 times fend = fend.replace(year=fstart.year, month=fstart.month, day=fstart.day) - filename_info['end_time'] = fend + filename_info["end_time"] = fend if self.metadata_matches(filename_info): yield filename, filename_info def filter_fh_by_metadata(self, filehandlers): """Filter out filehandlers using provide filter parameters.""" for filehandler in filehandlers: - filehandler.metadata['start_time'] = filehandler.start_time - filehandler.metadata['end_time'] = filehandler.end_time + filehandler.metadata["start_time"] = filehandler.start_time + filehandler.metadata["end_time"] = filehandler.end_time if self.metadata_matches(filehandler.metadata, filehandler): yield filehandler @@ -606,9 +606,9 @@ def _new_filehandlers_for_filetype(self, filetype_info, filenames, fh_kwargs=Non def create_filehandlers(self, filenames, fh_kwargs=None): """Organize the filenames into file types and create file handlers.""" filenames = list(OrderedDict.fromkeys(filenames)) - logger.debug("Assigning to %s: %s", self.info['name'], filenames) + logger.debug("Assigning to %s: %s", self.info["name"], filenames) - self.info.setdefault('filenames', []).extend(filenames) + self.info.setdefault("filenames", []).extend(filenames) filename_set = set(filenames) created_fhs = {} # load files that we know about by creating the file handlers @@ -670,13 +670,13 @@ def update_ds_ids_from_file_handlers(self): new_ids = {} for is_avail, ds_info in avail_datasets: # especially from the yaml config - coordinates = ds_info.get('coordinates') + coordinates = ds_info.get("coordinates") if isinstance(coordinates, list): # xarray doesn't like concatenating attributes that are # lists: https://github.com/pydata/xarray/issues/2060 - ds_info['coordinates'] = tuple(ds_info['coordinates']) + ds_info["coordinates"] = tuple(ds_info["coordinates"]) - ds_info.setdefault('modifiers', tuple()) # default to no mods + 
ds_info.setdefault("modifiers", tuple()) # default to no mods # Create DataID for this dataset ds_id = DataID(self._id_keys, **ds_info) @@ -690,7 +690,7 @@ def update_ds_ids_from_file_handlers(self): self.all_ids = new_ids @staticmethod - def _load_dataset(dsid, ds_info, file_handlers, dim='y', **kwargs): + def _load_dataset(dsid, ds_info, file_handlers, dim="y", **kwargs): """Load only a piece of the dataset.""" slice_list = [] failure = True @@ -723,9 +723,9 @@ def _load_dataset_data(self, file_handlers, dsid, **kwargs): proj = self._load_dataset(dsid, ds_info, file_handlers, **kwargs) # FIXME: areas could be concatenated here # Update the metadata - proj.attrs['start_time'] = file_handlers[0].start_time - proj.attrs['end_time'] = file_handlers[-1].end_time - proj.attrs['reader'] = self.name + proj.attrs["start_time"] = file_handlers[0].start_time + proj.attrs["end_time"] = file_handlers[-1].end_time + proj.attrs["reader"] = self.name return proj def _preferred_filetype(self, filetypes): @@ -750,10 +750,10 @@ def _get_file_handlers(self, dsid): """Get the file handler to load this dataset.""" ds_info = self.all_ids[dsid] - filetype = self._preferred_filetype(ds_info['file_type']) + filetype = self._preferred_filetype(ds_info["file_type"]) if filetype is None: logger.warning("Required file type '%s' not found or loaded for " - "'%s'", ds_info['file_type'], dsid['name']) + "'%s'", ds_info["file_type"], dsid["name"]) else: return self.file_handlers[filetype] @@ -786,12 +786,12 @@ def _get_lons_lats_from_coords(self, coords): """Get lons and lats from the coords list.""" lons, lats = None, None for coord in coords: - if coord.attrs.get('standard_name') == 'longitude': + if coord.attrs.get("standard_name") == "longitude": lons = coord - elif coord.attrs.get('standard_name') == 'latitude': + elif coord.attrs.get("standard_name") == "latitude": lats = coord if lons is None or lats is None: - raise ValueError('Missing longitude or latitude coordinate: ' + str(coords)) + raise ValueError("Missing longitude or latitude coordinate: " + str(coords)) return lons, lats def _make_swath_definition_from_lons_lats(self, lons, lats): @@ -804,11 +804,11 @@ def _make_swath_definition_from_lons_lats(self, lons, lats): sdef = None if sdef is None: sdef = SwathDefinition(lons, lats) - sensor_str = '_'.join(self.info['sensors']) - shape_str = '_'.join(map(str, lons.shape)) + sensor_str = "_".join(self.info["sensors"]) + shape_str = "_".join(map(str, lons.shape)) sdef.name = "{}_{}_{}_{}".format(sensor_str, shape_str, - lons.attrs.get('name', lons.name), - lats.attrs.get('name', lats.name)) + lons.attrs.get("name", lons.name), + lats.attrs.get("name", lats.name)) if key is not None: FileYAMLReader._coords_cache[key] = sdef return sdef @@ -830,7 +830,7 @@ def _load_dataset_with_area(self, dsid, coords, **kwargs): area = self._load_dataset_area(dsid, file_handlers, coords, **kwargs) if area is not None: - ds.attrs['area'] = area + ds.attrs["area"] = area ds = add_crs_xy_coords(ds, area) return ds @@ -840,7 +840,7 @@ def _assign_coords_from_dataarray(coords, ds): if not coords: coords = [] for coord in ds.coords.values(): - if coord.attrs.get('standard_name') in ['longitude', 'latitude']: + if coord.attrs.get("standard_name") in ["longitude", "latitude"]: coords.append(coord) return coords @@ -855,12 +855,12 @@ def _load_ancillary_variables(self, datasets, **kwargs): for dataset in datasets.values(): new_vars = [] - for av_id in dataset.attrs.get('ancillary_variables', []): + for av_id in 
dataset.attrs.get("ancillary_variables", []): if isinstance(av_id, DataID): new_vars.append(datasets[av_id]) else: new_vars.append(av_id) - dataset.attrs['ancillary_variables'] = new_vars + dataset.attrs["ancillary_variables"] = new_vars def _gather_ancillary_variables_ids(self, datasets): """Gather ancillary variables' ids. @@ -869,9 +869,9 @@ def _gather_ancillary_variables_ids(self, datasets): """ all_av_ids = set() for dataset in datasets.values(): - ancillary_variables = dataset.attrs.get('ancillary_variables', []) + ancillary_variables = dataset.attrs.get("ancillary_variables", []) if not isinstance(ancillary_variables, (list, tuple, set)): - ancillary_variables = ancillary_variables.split(' ') + ancillary_variables = ancillary_variables.split(" ") av_ids = [] for key in ancillary_variables: try: @@ -880,7 +880,7 @@ def _gather_ancillary_variables_ids(self, datasets): logger.warning("Can't load ancillary dataset %s", str(key)) all_av_ids |= set(av_ids) - dataset.attrs['ancillary_variables'] = av_ids + dataset.attrs["ancillary_variables"] = av_ids return all_av_ids def get_dataset_key(self, key, available_only=False, **kwargs): @@ -953,12 +953,12 @@ def _get_coordinates_for_dataset_key(self, dsid): """Get the coordinate dataset keys for *dsid*.""" ds_info = self.all_ids[dsid] cids = [] - for cinfo in ds_info.get('coordinates', []): + for cinfo in ds_info.get("coordinates", []): if not isinstance(cinfo, dict): - cinfo = {'name': cinfo} + cinfo = {"name": cinfo} for key in self._co_keys: - if key == 'name': + if key == "name": continue if key in ds_info: if ds_info[key] is not None: @@ -995,52 +995,52 @@ def _set_orientation(dataset, upper_right_corner): """ # do some checks and early returns - if upper_right_corner == 'native': + if upper_right_corner == "native": logger.debug("Requested orientation for Dataset {} is 'native' (default). " - "No flipping is applied.".format(dataset.attrs.get('name'))) + "No flipping is applied.".format(dataset.attrs.get("name"))) return dataset - if upper_right_corner not in ['NW', 'NE', 'SE', 'SW', 'native']: + if upper_right_corner not in ["NW", "NE", "SE", "SW", "native"]: raise ValueError("Target orientation for Dataset {} not recognized. 
" "Kwarg upper_right_corner should be " - "'NW', 'NE', 'SW', 'SE' or 'native'.".format(dataset.attrs.get('name', 'unknown_name'))) + "'NW', 'NE', 'SW', 'SE' or 'native'.".format(dataset.attrs.get("name", "unknown_name"))) - if 'area' not in dataset.attrs: + if "area" not in dataset.attrs: logger.info("Dataset {} is missing the area attribute " - "and will not be flipped.".format(dataset.attrs.get('name', 'unknown_name'))) + "and will not be flipped.".format(dataset.attrs.get("name", "unknown_name"))) return dataset - if isinstance(dataset.attrs['area'], SwathDefinition): + if isinstance(dataset.attrs["area"], SwathDefinition): logger.info("Dataset {} is in a SwathDefinition " - "and will not be flipped.".format(dataset.attrs.get('name', 'unknown_name'))) + "and will not be flipped.".format(dataset.attrs.get("name", "unknown_name"))) return dataset - projection_type = _get_projection_type(dataset.attrs['area']) - accepted_geos_proj_types = ['Geostationary Satellite (Sweep Y)', 'Geostationary Satellite (Sweep X)'] + projection_type = _get_projection_type(dataset.attrs["area"]) + accepted_geos_proj_types = ["Geostationary Satellite (Sweep Y)", "Geostationary Satellite (Sweep X)"] if projection_type not in accepted_geos_proj_types: logger.info("Dataset {} is not in one of the known geostationary projections {} " - "and cannot be flipped.".format(dataset.attrs.get('name', 'unknown_name'), + "and cannot be flipped.".format(dataset.attrs.get("name", "unknown_name"), accepted_geos_proj_types)) return dataset target_eastright, target_northup = _get_target_scene_orientation(upper_right_corner) - area_extents_to_update = _get_dataset_area_extents_array(dataset.attrs['area']) + area_extents_to_update = _get_dataset_area_extents_array(dataset.attrs["area"]) current_eastright, current_northup = _get_current_scene_orientation(area_extents_to_update) if target_northup == current_northup and target_eastright == current_eastright: logger.info("Dataset {} is already in the target orientation " - "and will not be flipped.".format(dataset.attrs.get('name', 'unknown_name'))) + "and will not be flipped.".format(dataset.attrs.get("name", "unknown_name"))) return dataset if target_northup != current_northup: dataset, area_extents_to_update = _flip_dataset_data_and_area_extents(dataset, area_extents_to_update, - 'upsidedown') + "upsidedown") if target_eastright != current_eastright: dataset, area_extents_to_update = _flip_dataset_data_and_area_extents(dataset, area_extents_to_update, - 'leftright') + "leftright") - dataset.attrs['area'] = _get_new_flipped_area_definition(dataset.attrs['area'], area_extents_to_update, + dataset.attrs["area"] = _get_new_flipped_area_definition(dataset.attrs["area"], area_extents_to_update, flip_areadef_stacking=target_northup != current_northup) return dataset @@ -1062,9 +1062,9 @@ def _get_target_scene_orientation(upper_right_corner): 'NE' corresponds to target_eastright and target_northup being True. 
""" - target_northup = upper_right_corner in ['NW', 'NE'] + target_northup = upper_right_corner in ["NW", "NE"] - target_eastright = upper_right_corner in ['NE', 'SE'] + target_eastright = upper_right_corner in ["NE", "SE"] return target_eastright, target_northup @@ -1091,11 +1091,11 @@ def _get_current_scene_orientation(area_extents_to_update): def _flip_dataset_data_and_area_extents(dataset, area_extents_to_update, flip_direction): """Flip the data and area extents array for a dataset.""" - logger.info("Flipping Dataset {} {}.".format(dataset.attrs.get('name', 'unknown_name'), flip_direction)) - if flip_direction == 'upsidedown': + logger.info("Flipping Dataset {} {}.".format(dataset.attrs.get("name", "unknown_name"), flip_direction)) + if flip_direction == "upsidedown": dataset = dataset[::-1, :] area_extents_to_update[:, [1, 3]] = area_extents_to_update[:, [3, 1]] - elif flip_direction == 'leftright': + elif flip_direction == "leftright": dataset = dataset[:, ::-1] area_extents_to_update[:, [0, 2]] = area_extents_to_update[:, [2, 0]] else: @@ -1128,7 +1128,7 @@ def _get_new_flipped_area_definition(dataset_area_attr, area_extents_to_update, class GEOFlippableFileYAMLReader(FileYAMLReader): """Reader for flippable geostationary data.""" - def _load_dataset_with_area(self, dsid, coords, upper_right_corner='native', **kwargs): + def _load_dataset_with_area(self, dsid, coords, upper_right_corner="native", **kwargs): ds = super(GEOFlippableFileYAMLReader, self)._load_dataset_with_area(dsid, coords, **kwargs) if ds is not None: @@ -1165,15 +1165,15 @@ def create_filehandlers(self, filenames, fh_kwargs=None): for fhs in created_fhs.values(): for fh in fhs: # check the filename for total_segments parameter as a fallback - ts = fh.filename_info.get('total_segments', 1) + ts = fh.filename_info.get("total_segments", 1) # if the YAML has segments explicitly specified then use that - fh.filetype_info.setdefault('expected_segments', ts) + fh.filetype_info.setdefault("expected_segments", ts) # add segment key-values for FCI filehandlers - if 'segment' not in fh.filename_info: - fh.filename_info['segment'] = fh.filename_info.get('count_in_repeat_cycle', 1) + if "segment" not in fh.filename_info: + fh.filename_info["segment"] = fh.filename_info.get("count_in_repeat_cycle", 1) return created_fhs - def _load_dataset(self, dsid, ds_info, file_handlers, dim='y', pad_data=True): + def _load_dataset(self, dsid, ds_info, file_handlers, dim="y", pad_data=True): """Load only a piece of the dataset.""" if not pad_data: return FileYAMLReader._load_dataset(dsid, ds_info, @@ -1186,7 +1186,7 @@ def _load_dataset(self, dsid, ds_info, file_handlers, dim='y', pad_data=True): raise KeyError( "Could not load {} from any provided files".format(dsid)) - filetype = file_handlers[0].filetype_info['file_type'] + filetype = file_handlers[0].filetype_info["file_type"] self.empty_segment = xr.full_like(projectable, np.nan) for i, sli in enumerate(slice_list): if sli is None: @@ -1230,9 +1230,9 @@ def _load_area_def_with_padding(self, dsid, file_handlers): def _pad_later_segments_area(self, file_handlers, dsid): """Pad area definitions for missing segments that are later in sequence than the first available.""" - expected_segments = file_handlers[0].filetype_info['expected_segments'] - filetype = file_handlers[0].filetype_info['file_type'] - available_segments = [int(fh.filename_info.get('segment', 1)) for + expected_segments = file_handlers[0].filetype_info["expected_segments"] + filetype = 
file_handlers[0].filetype_info["file_type"] + available_segments = [int(fh.filename_info.get("segment", 1)) for fh in file_handlers] area_defs = self._get_segments_areadef_with_later_padded(file_handlers, filetype, dsid, available_segments, @@ -1250,7 +1250,7 @@ def _get_segments_areadef_with_later_padded(self, file_handlers, filetype, dsid, fh = file_handlers[idx] area = fh.get_area_def(dsid) except ValueError: - area = self._get_new_areadef_for_padded_segment(area, filetype, seg_size, segment, padding_type='later') + area = self._get_new_areadef_for_padded_segment(area, filetype, seg_size, segment, padding_type="later") area_defs[segment] = area seg_size = area.shape @@ -1258,14 +1258,14 @@ def _get_segments_areadef_with_later_padded(self, file_handlers, filetype, dsid, def _pad_earlier_segments_area(self, file_handlers, dsid, area_defs): """Pad area definitions for missing segments that are earlier in sequence than the first available.""" - available_segments = [int(fh.filename_info.get('segment', 1)) for + available_segments = [int(fh.filename_info.get("segment", 1)) for fh in file_handlers] area = file_handlers[0].get_area_def(dsid) seg_size = area.shape - filetype = file_handlers[0].filetype_info['file_type'] + filetype = file_handlers[0].filetype_info["file_type"] for segment in range(available_segments[0] - 1, 0, -1): - area = self._get_new_areadef_for_padded_segment(area, filetype, seg_size, segment, padding_type='earlier') + area = self._get_new_areadef_for_padded_segment(area, filetype, seg_size, segment, padding_type="earlier") area_defs[segment] = area seg_size = area.shape @@ -1278,7 +1278,7 @@ def _get_new_areadef_for_padded_segment(self, area, filetype, seg_size, segment, fill_extent = (area.area_extent[0], new_ll_y, area.area_extent[2], new_ur_y) - area = AreaDefinition('fill', 'fill', 'fill', area.crs, + area = AreaDefinition("fill", "fill", "fill", area.crs, seg_size[1], new_height_px, fill_extent) return area @@ -1287,10 +1287,10 @@ def _get_y_area_extents_for_padded_segment(self, area, filetype, padding_type, s new_height_proj_coord, new_height_px = self._get_new_areadef_heights(area, seg_size, segment_n=segment, filetype=filetype) - if padding_type == 'later': + if padding_type == "later": new_ll_y = area.area_extent[1] + new_height_proj_coord new_ur_y = area.area_extent[1] - elif padding_type == 'earlier': + elif padding_type == "earlier": new_ll_y = area.area_extent[3] new_ur_y = area.area_extent[3] - new_height_proj_coord else: @@ -1324,13 +1324,13 @@ def _find_missing_segments(file_handlers, ds_info, dsid): expected_segments = 1 # get list of file handlers in segment order # (ex. 
first segment, second segment, etc) - handlers = sorted(file_handlers, key=lambda x: x.filename_info.get('segment', 1)) + handlers = sorted(file_handlers, key=lambda x: x.filename_info.get("segment", 1)) projectable = None for fh in handlers: - if fh.filetype_info['file_type'] in ds_info['file_type']: - expected_segments = fh.filetype_info['expected_segments'] + if fh.filetype_info["file_type"] in ds_info["file_type"]: + expected_segments = fh.filetype_info["expected_segments"] - while int(fh.filename_info.get('segment', 1)) > counter: + while int(fh.filename_info.get("segment", 1)) > counter: slice_list.append(None) counter += 1 try: @@ -1396,17 +1396,17 @@ def _collect_segment_position_infos(self, filetype): # collect the segment positioning infos for all available segments for fh in self.file_handlers[filetype]: chk_infos = fh.get_segment_position_info() - chk_infos.update({'segment_nr': fh.filename_info['segment'] - 1}) - self.segment_infos[filetype]['available_segment_infos'].append(chk_infos) + chk_infos.update({"segment_nr": fh.filename_info["segment"] - 1}) + self.segment_infos[filetype]["available_segment_infos"].append(chk_infos) def _initialise_segment_infos(self, filetype): # initialise the segment info for this filetype filetype_fhs_sample = self.file_handlers[filetype][0] - exp_segment_nr = filetype_fhs_sample.filetype_info['expected_segments'] + exp_segment_nr = filetype_fhs_sample.filetype_info["expected_segments"] grid_width_to_grid_type = _get_grid_width_to_grid_type(filetype_fhs_sample.get_segment_position_info()) - self.segment_infos.update({filetype: {'available_segment_infos': [], - 'expected_segments': exp_segment_nr, - 'grid_width_to_grid_type': grid_width_to_grid_type}}) + self.segment_infos.update({filetype: {"available_segment_infos": [], + "expected_segments": exp_segment_nr, + "grid_width_to_grid_type": grid_width_to_grid_type}}) def _get_empty_segment(self, dim=None, idx=None, filetype=None): grid_width = self.empty_segment.shape[1] @@ -1416,7 +1416,7 @@ def _get_empty_segment(self, dim=None, idx=None, filetype=None): def _segment_heights(self, filetype, grid_width): """Compute optimal padded segment heights (in number of pixels) based on the location of available segments.""" self._extract_segment_location_dicts(filetype) - grid_type = self.segment_infos[filetype]['grid_width_to_grid_type'][grid_width] + grid_type = self.segment_infos[filetype]["grid_width_to_grid_type"][grid_width] segment_heights = _compute_optimal_missing_segment_heights(self.segment_infos[filetype], grid_type, grid_width) return segment_heights @@ -1434,18 +1434,18 @@ def _get_new_areadef_heights(self, previous_area, previous_seg_size, segment_n=N def _get_grid_width_to_grid_type(seg_info): grid_width_to_grid_type = dict() for grid_type, grid_type_seg_info in seg_info.items(): - grid_width_to_grid_type.update({grid_type_seg_info['grid_width']: grid_type}) + grid_width_to_grid_type.update({grid_type_seg_info["grid_width"]: grid_type}) return grid_width_to_grid_type def _compute_optimal_missing_segment_heights(seg_infos, grid_type, expected_vertical_size): # initialise positioning arrays segment_start_rows, segment_end_rows, segment_heights = _init_positioning_arrays_for_variable_padding( - seg_infos['available_segment_infos'], grid_type, seg_infos['expected_segments']) + seg_infos["available_segment_infos"], grid_type, seg_infos["expected_segments"]) # populate start row of first segment and end row of last segment with known values segment_start_rows[0] = 1 - 
segment_end_rows[seg_infos['expected_segments'] - 1] = expected_vertical_size + segment_end_rows[seg_infos["expected_segments"] - 1] = expected_vertical_size # find missing segments and group contiguous missing segments together missing_segments = np.where(segment_heights == 0)[0] @@ -1454,7 +1454,7 @@ def _compute_optimal_missing_segment_heights(seg_infos, grid_type, expected_vert for group in groups_missing_segments: _compute_positioning_data_for_missing_group(segment_start_rows, segment_end_rows, segment_heights, group) - return segment_heights.astype('int') + return segment_heights.astype("int") def _compute_positioning_data_for_missing_group(segment_start_rows, segment_end_rows, segment_heights, group): @@ -1513,20 +1513,20 @@ def _init_positioning_arrays_for_variable_padding(chk_infos, grid_type, exp_segm def _populate_positioning_arrays_with_available_segment_info(chk_infos, grid_type, segment_start_rows, segment_end_rows, segment_heights): for chk_info in chk_infos: - current_fh_segment_nr = chk_info['segment_nr'] - segment_heights[current_fh_segment_nr] = chk_info[grid_type]['segment_height'] - segment_start_rows[current_fh_segment_nr] = chk_info[grid_type]['start_position_row'] - segment_end_rows[current_fh_segment_nr] = chk_info[grid_type]['end_position_row'] + current_fh_segment_nr = chk_info["segment_nr"] + segment_heights[current_fh_segment_nr] = chk_info[grid_type]["segment_height"] + segment_start_rows[current_fh_segment_nr] = chk_info[grid_type]["start_position_row"] + segment_end_rows[current_fh_segment_nr] = chk_info[grid_type]["end_position_row"] def split_integer_in_most_equal_parts(x, n): """Split an integer number x in n parts that are as equally-sizes as possible.""" if x % n == 0: - return np.repeat(x // n, n).astype('int') + return np.repeat(x // n, n).astype("int") else: # split the remainder amount over the last remainder parts remainder = int(x % n) mod = int(x // n) ar = np.repeat(mod, n) ar[-remainder:] = mod + 1 - return ar.astype('int') + return ar.astype("int") diff --git a/satpy/resample.py b/satpy/resample.py index b124c84933..ebf5776267 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -184,16 +184,16 @@ def lcm(a, b): CHUNK_SIZE = get_legacy_chunk_size() CACHE_SIZE = 10 -NN_COORDINATES = {'valid_input_index': ('y1', 'x1'), - 'valid_output_index': ('y2', 'x2'), - 'index_array': ('y2', 'x2', 'z2')} -BIL_COORDINATES = {'bilinear_s': ('x1', ), - 'bilinear_t': ('x1', ), - 'slices_x': ('x1', 'n'), - 'slices_y': ('x1', 'n'), - 'mask_slices': ('x1', 'n'), - 'out_coords_x': ('x2', ), - 'out_coords_y': ('y2', )} +NN_COORDINATES = {"valid_input_index": ("y1", "x1"), + "valid_output_index": ("y2", "x2"), + "index_array": ("y2", "x2", "z2")} +BIL_COORDINATES = {"bilinear_s": ("x1", ), + "bilinear_t": ("x1", ), + "slices_x": ("x1", "n"), + "slices_y": ("x1", "n"), + "mask_slices": ("x1", "n"), + "out_coords_x": ("x2", ), + "out_coords_y": ("y2", )} resamplers_cache: "WeakValueDictionary[tuple, object]" = WeakValueDictionary() @@ -204,7 +204,7 @@ def hash_dict(the_dict, the_hash=None): """Calculate a hash for a dictionary.""" if the_hash is None: the_hash = hashlib.sha1() # nosec - the_hash.update(json.dumps(the_dict, sort_keys=True).encode('utf-8')) + the_hash.update(json.dumps(the_dict, sort_keys=True).encode("utf-8")) return the_hash @@ -213,11 +213,11 @@ def get_area_file(): The files are to be named `areas.yaml` or `areas.def`. 
""" - paths = config_search_paths('areas.yaml') + paths = config_search_paths("areas.yaml") if paths: return paths else: - return get_config_path('areas.def') + return get_config_path("areas.def") def get_area_def(area_name): @@ -249,13 +249,13 @@ def add_xy_coords(data_arr, area, crs=None): Returns (xarray.DataArray): Updated DataArray object """ - if 'x' in data_arr.coords and 'y' in data_arr.coords: + if "x" in data_arr.coords and "y" in data_arr.coords: # x/y coords already provided return data_arr - if 'x' not in data_arr.dims or 'y' not in data_arr.dims: + if "x" not in data_arr.dims or "y" not in data_arr.dims: # no defined x and y dimensions return data_arr - if not hasattr(area, 'get_proj_vectors'): + if not hasattr(area, "get_proj_vectors"): return data_arr x, y = area.get_proj_vectors() @@ -265,15 +265,15 @@ def add_xy_coords(data_arr, area, crs=None): if crs is not None: units = crs.axis_info[0].unit_name # fix udunits/CF standard units - units = units.replace('metre', 'meter') - if units == 'degree': - y_attrs['units'] = 'degrees_north' - x_attrs['units'] = 'degrees_east' + units = units.replace("metre", "meter") + if units == "degree": + y_attrs["units"] = "degrees_north" + x_attrs["units"] = "degrees_east" else: - y_attrs['units'] = units - x_attrs['units'] = units - y = xr.DataArray(y, dims=('y',), attrs=y_attrs) - x = xr.DataArray(x, dims=('x',), attrs=x_attrs) + y_attrs["units"] = units + x_attrs["units"] = units + y = xr.DataArray(y, dims=("y",), attrs=y_attrs) + x = xr.DataArray(x, dims=("x",), attrs=x_attrs) return data_arr.assign_coords(y=y, x=x) @@ -303,10 +303,10 @@ def add_crs_xy_coords(data_arr, area): # default lat/lon projection latlon_proj = "+proj=latlong +datum=WGS84 +ellps=WGS84" # otherwise get it from the area definition - if hasattr(area, 'crs'): + if hasattr(area, "crs"): crs = area.crs else: - proj_str = getattr(area, 'proj_str', latlon_proj) + proj_str = getattr(area, "proj_str", latlon_proj) crs = CRS.from_string(proj_str) data_arr = data_arr.assign_coords(crs=crs) @@ -319,12 +319,12 @@ def add_crs_xy_coords(data_arr, area): # array). 
lons = area.lons lats = area.lats - lons.attrs.setdefault('standard_name', 'longitude') - lons.attrs.setdefault('long_name', 'longitude') - lons.attrs.setdefault('units', 'degrees_east') - lats.attrs.setdefault('standard_name', 'latitude') - lats.attrs.setdefault('long_name', 'latitude') - lats.attrs.setdefault('units', 'degrees_north') + lons.attrs.setdefault("standard_name", "longitude") + lons.attrs.setdefault("long_name", "longitude") + lons.attrs.setdefault("units", "degrees_east") + lats.attrs.setdefault("standard_name", "latitude") + lats.attrs.setdefault("long_name", "latitude") + lats.attrs.setdefault("units", "degrees_north") # See https://github.com/pydata/xarray/issues/3068 # data_arr = data_arr.assign_coords(longitude=lons, latitude=lats) else: @@ -347,7 +347,7 @@ def update_resampled_coords(old_data, new_data, new_area): # this *MUST* happen before we set 'crs' below otherwise any 'crs' # coordinate in the coordinate variables we are copying will overwrite the # 'crs' coordinate we just assigned to the data - ignore_coords = ('y', 'x', 'crs') + ignore_coords = ("y", "x", "crs") new_coords = {} for cname, cval in old_data.coords.items(): # we don't want coordinates that depended on the old x/y dimensions @@ -437,19 +437,19 @@ def resample(self, data, cache_dir=None, mask_area=None, **kwargs): if isinstance(self.source_geo_def, SwathDefinition): geo_dims = self.source_geo_def.lons.dims else: - geo_dims = ('y', 'x') + geo_dims = ("y", "x") flat_dims = [dim for dim in data.dims if dim not in geo_dims] if np.issubdtype(data.dtype, np.integer): - kwargs['mask'] = data == data.attrs.get('_FillValue', np.iinfo(data.dtype.type).max) + kwargs["mask"] = data == data.attrs.get("_FillValue", np.iinfo(data.dtype.type).max) else: - kwargs['mask'] = data.isnull() - kwargs['mask'] = kwargs['mask'].all(dim=flat_dims) + kwargs["mask"] = data.isnull() + kwargs["mask"] = kwargs["mask"].all(dim=flat_dims) cache_id = self.precompute(cache_dir=cache_dir, **kwargs) return self.compute(data, cache_id=cache_id, **kwargs) - def _create_cache_filename(self, cache_dir, prefix='', - fmt='.zarr', **kwargs): + def _create_cache_filename(self, cache_dir, prefix="", + fmt=".zarr", **kwargs): """Create filename for the cached resampling parameters.""" hash_str = self.get_hash(**kwargs) return os.path.join(cache_dir, prefix + hash_str + fmt) @@ -500,7 +500,7 @@ def precompute(self, mask=None, radius_of_influence=None, epsilon=0, "masked pixels. 
Will not cache results.") cache_dir = None - if radius_of_influence is None and not hasattr(self.source_geo_def, 'geocentric_resolution'): + if radius_of_influence is None and not hasattr(self.source_geo_def, "geocentric_resolution"): radius_of_influence = self._adjust_radius_of_influence(radius_of_influence) kwargs = dict(source_geo_def=self.source_geo_def, @@ -555,11 +555,11 @@ def _check_numpy_cache(self, cache_dir, mask=None, if cache_dir is None: return fname_np = self._create_cache_filename(cache_dir, - prefix='resample_lut-', - mask=mask, fmt='.npz', + prefix="resample_lut-", + mask=mask, fmt=".npz", **kwargs) - fname_zarr = self._create_cache_filename(cache_dir, prefix='nn_lut-', - mask=mask, fmt='.zarr', + fname_zarr = self._create_cache_filename(cache_dir, prefix="nn_lut-", + mask=mask, fmt=".zarr", **kwargs) LOG.debug("Check if %s exists", fname_np) if os.path.exists(fname_np) and not os.path.exists(fname_zarr): @@ -570,7 +570,7 @@ def _check_numpy_cache(self, cache_dir, mask=None, ) LOG.warning("Converting resampling LUT from .npz to .zarr") zarr_out = xr.Dataset() - with np.load(fname_np, 'r') as fid: + with np.load(fname_np, "r") as fid: for idx_name, coord in NN_COORDINATES.items(): zarr_out[idx_name] = (coord, fid[idx_name]) @@ -580,7 +580,7 @@ def _check_numpy_cache(self, cache_dir, mask=None, def load_neighbour_info(self, cache_dir, mask=None, **kwargs): """Read index arrays from either the in-memory or disk cache.""" - mask_name = getattr(mask, 'name', None) + mask_name = getattr(mask, "name", None) cached = {} self._check_numpy_cache(cache_dir, mask=mask_name, **kwargs) @@ -591,11 +591,11 @@ def load_neighbour_info(self, cache_dir, mask=None, **kwargs): elif cache_dir: try: filename = self._create_cache_filename( - cache_dir, prefix='nn_lut-', + cache_dir, prefix="nn_lut-", mask=mask_name, **kwargs) - fid = zarr.open(filename, 'r') + fid = zarr.open(filename, "r") cache = np.array(fid[idx_name]) - if idx_name == 'valid_input_index': + if idx_name == "valid_input_index": # valid input index array needs to be boolean cache = cache.astype(bool) except ValueError: @@ -609,11 +609,11 @@ def load_neighbour_info(self, cache_dir, mask=None, **kwargs): def save_neighbour_info(self, cache_dir, mask=None, **kwargs): """Cache resampler's index arrays if there is a cache dir.""" if cache_dir: - mask_name = getattr(mask, 'name', None) + mask_name = getattr(mask, "name", None) cache = self._read_resampler_attrs() filename = self._create_cache_filename( - cache_dir, prefix='nn_lut-', mask=mask_name, **kwargs) - LOG.info('Saving kd_tree neighbour info to %s', filename) + cache_dir, prefix="nn_lut-", mask=mask_name, **kwargs) + LOG.info("Saving kd_tree neighbour info to %s", filename) zarr_out = xr.Dataset() for idx_name, coord in NN_COORDINATES.items(): # update the cache in place with persisted dask arrays @@ -707,7 +707,7 @@ def resample(self, *args, **kwargs): not needed in EWA resampling currently. 
""" - kwargs.setdefault('mask_area', False) + kwargs.setdefault("mask_area", False) return super(_LegacySatpyEWAResampler, self).resample(*args, **kwargs) def _call_ll2cr(self, lons, lats, target_geo_def, swath_usage=0): @@ -739,7 +739,7 @@ def precompute(self, cache_dir=None, swath_usage=0, **kwargs): # no need to recompute ll2cr output again return None - if kwargs.get('mask') is not None: + if kwargs.get("mask") is not None: LOG.warning("'mask' parameter has no affect during EWA " "resampling") @@ -808,13 +808,13 @@ def compute(self, data, cache_id=None, fill_value=0, weight_count=10000, # if the data is scan based then check its metadata or the passed # kwargs otherwise assume the entire input swath is one large # "scanline" - rows_per_scan = kwargs.get('rows_per_scan', + rows_per_scan = kwargs.get("rows_per_scan", data.attrs.get("rows_per_scan", data.shape[0])) - if data.ndim == 3 and 'bands' in data.dims: + if data.ndim == 3 and "bands" in data.dims: data_in = tuple(data.sel(bands=band).data - for band in data['bands']) + for band in data["bands"]) elif data.ndim == 2: data_in = data.data else: @@ -834,10 +834,10 @@ def compute(self, data, cache_id=None, fill_value=0, weight_count=10000, data_arr = da.from_delayed(res, new_shape, data.dtype) # from delayed creates one large chunk, break it up a bit if we can data_arr = data_arr.rechunk([CHUNK_SIZE] * data_arr.ndim) - if data.ndim == 3 and data.dims[0] == 'bands': - dims = ('bands', 'y', 'x') + if data.ndim == 3 and data.dims[0] == "bands": + dims = ("bands", "y", "x") elif data.ndim == 2: - dims = ('y', 'x') + dims = ("y", "x") else: dims = data.dims @@ -900,7 +900,7 @@ def load_bil_info(self, cache_dir, **kwargs): """Load bilinear resampling info from cache directory.""" if cache_dir: filename = self._create_cache_filename(cache_dir, - prefix='bil_lut-', + prefix="bil_lut-", **kwargs) try: self.resampler.load_resampling_info(filename) @@ -918,12 +918,12 @@ def save_bil_info(self, cache_dir, **kwargs): """Save bilinear resampling info to cache directory.""" if cache_dir: filename = self._create_cache_filename(cache_dir, - prefix='bil_lut-', + prefix="bil_lut-", **kwargs) # There are some old caches, move them out of the way if os.path.exists(filename): _move_existing_caches(cache_dir, filename) - LOG.info('Saving BIL neighbour info to %s', filename) + LOG.info("Saving BIL neighbour info to %s", filename) try: self.resampler.save_resampling_info(filename) except AttributeError: @@ -938,7 +938,7 @@ def compute(self, data, fill_value=None, **kwargs): del kwargs if fill_value is None: - fill_value = data.attrs.get('_FillValue') + fill_value = data.attrs.get("_FillValue") target_shape = self.target_geo_def.shape res = self.resampler.get_sample_from_bil_info(data, @@ -952,7 +952,7 @@ def _move_existing_caches(cache_dir, filename): """Move existing cache files out of the way.""" import os import shutil - old_cache_dir = os.path.join(cache_dir, 'moved_by_satpy') + old_cache_dir = os.path.join(cache_dir, "moved_by_satpy") try: os.makedirs(old_cache_dir) except FileExistsError: @@ -977,7 +977,7 @@ def _mean(data, y_size, x_size): def _repeat_by_factor(data, block_info=None): if block_info is None: return data - out_shape = block_info[None]['chunk-shape'] + out_shape = block_info[None]["chunk-shape"] out_data = data for axis, axis_size in enumerate(out_shape): in_size = data.shape[axis] @@ -1035,15 +1035,15 @@ def compute(self, data, expand=True, **kwargs): target_geo_def = self.target_geo_def # convert xarray backed with numpy array to dask 
array - if 'x' not in data.dims or 'y' not in data.dims: + if "x" not in data.dims or "y" not in data.dims: if data.ndim not in [2, 3]: raise ValueError("Can only handle 2D or 3D arrays without dimensions.") # assume rows is the second to last axis y_axis = data.ndim - 2 x_axis = data.ndim - 1 else: - y_axis = data.dims.index('y') - x_axis = data.dims.index('x') + y_axis = data.dims.index("y") + x_axis = data.dims.index("x") out_shape = target_geo_def.shape in_shape = data.shape @@ -1124,24 +1124,24 @@ def _get_arg_to_pass_for_skipna_handling(**kwargs): # FIXME this can be removed once Pyresample 1.18.0 is a Satpy requirement if PR_USE_SKIPNA: - if 'mask_all_nan' in kwargs: + if "mask_all_nan" in kwargs: warnings.warn( - 'Argument mask_all_nan is deprecated. Please use skipna for missing values handling. ' - 'Continuing with default skipna=True, if not provided differently.', + "Argument mask_all_nan is deprecated. Please use skipna for missing values handling. " + "Continuing with default skipna=True, if not provided differently.", DeprecationWarning, stacklevel=3 ) - kwargs.pop('mask_all_nan') + kwargs.pop("mask_all_nan") else: - if 'mask_all_nan' in kwargs: + if "mask_all_nan" in kwargs: warnings.warn( - 'Argument mask_all_nan is deprecated.' - 'Please update Pyresample and use skipna for missing values handling.', + "Argument mask_all_nan is deprecated." + "Please update Pyresample and use skipna for missing values handling.", DeprecationWarning, stacklevel=3 ) - kwargs.setdefault('mask_all_nan', False) - kwargs.pop('skipna') + kwargs.setdefault("mask_all_nan", False) + kwargs.pop("skipna") return kwargs @@ -1178,32 +1178,32 @@ def resample(self, data, **kwargs): Returns (xarray.DataArray): Data resampled to the target area """ - if not PR_USE_SKIPNA and 'skipna' in kwargs: - raise ValueError('You are trying to set the skipna argument but you are using an old version of' - ' Pyresample that does not support it.' - 'Please update Pyresample to 1.18.0 or higher to be able to use this argument.') + if not PR_USE_SKIPNA and "skipna" in kwargs: + raise ValueError("You are trying to set the skipna argument but you are using an old version of" + " Pyresample that does not support it." 
+ "Please update Pyresample to 1.18.0 or higher to be able to use this argument.") self.precompute(**kwargs) attrs = data.attrs.copy() data_arr = data.data - if data.ndim == 3 and data.dims[0] == 'bands': - dims = ('bands', 'y', 'x') + if data.ndim == 3 and data.dims[0] == "bands": + dims = ("bands", "y", "x") # Both one and two dimensional input data results in 2D output elif data.ndim in (1, 2): - dims = ('y', 'x') + dims = ("y", "x") else: dims = data.dims - LOG.debug("Resampling %s", str(data.attrs.get('_satpy_id', 'unknown'))) + LOG.debug("Resampling %s", str(data.attrs.get("_satpy_id", "unknown"))) result = self.compute(data_arr, **kwargs) coords = {} - if 'bands' in data.coords: - coords['bands'] = data.coords['bands'] + if "bands" in data.coords: + coords["bands"] = data.coords["bands"] # Fractions are returned in a dict elif isinstance(result, dict): - coords['categories'] = sorted(result.keys()) - dims = ('categories', 'y', 'x') + coords["categories"] = sorted(result.keys()) + dims = ("categories", "y", "x") new_result = [] - for cat in coords['categories']: + for cat in coords["categories"]: new_result.append(result[cat]) result = da.stack(new_result) if result.ndim > len(dims): @@ -1211,13 +1211,13 @@ def resample(self, data, **kwargs): # Adjust some attributes if "BucketFraction" in str(self): - attrs['units'] = '' - attrs['calibration'] = '' - attrs['standard_name'] = 'area_fraction' + attrs["units"] = "" + attrs["calibration"] = "" + attrs["standard_name"] = "area_fraction" elif "BucketCount" in str(self): - attrs['units'] = '' - attrs['calibration'] = '' - attrs['standard_name'] = 'number_of_observations' + attrs["units"] = "" + attrs["calibration"] = "" + attrs["standard_name"] = "number_of_observations" result = xr.DataArray(result, dims=dims, coords=coords, attrs=attrs) @@ -1362,10 +1362,10 @@ def compute(self, data, fill_value=np.nan, categories=None, **kwargs): "bucket_fraction": BucketFraction, } if DaskEWAResampler is not None: - RESAMPLERS['ewa'] = DaskEWAResampler - RESAMPLERS['ewa_legacy'] = LegacyDaskEWAResampler + RESAMPLERS["ewa"] = DaskEWAResampler + RESAMPLERS["ewa_legacy"] = LegacyDaskEWAResampler else: - RESAMPLERS['ewa'] = _LegacySatpyEWAResampler + RESAMPLERS["ewa"] = _LegacySatpyEWAResampler # deepcode ignore PythonSameEvalBinaryExpressiontrue: PRBaseResampler is None only on import errors @@ -1378,7 +1378,7 @@ def prepare_resampler(source_area, destination_area, resampler=None, **resample_ """Instantiate and return a resampler.""" if resampler is None: LOG.info("Using default KDTree resampler") - resampler = 'kd_tree' + resampler = "kd_tree" if isinstance(resampler, (BaseResampler, PRBaseResampler)): raise ValueError("Trying to create a resampler when one already " @@ -1388,7 +1388,7 @@ def prepare_resampler(source_area, destination_area, resampler=None, **resample_ if resampler_class is None: if resampler == "gradient_search": warnings.warn( - 'Gradient search resampler not available. Maybe missing `shapely`?', + "Gradient search resampler not available. 
Maybe missing `shapely`?", stacklevel=2 ) raise KeyError("Resampler '%s' not available" % resampler) @@ -1429,7 +1429,7 @@ def resample(source_area, data, destination_area, def get_fill_value(dataset): """Get the fill value of the *dataset*, defaulting to np.nan.""" if np.issubdtype(dataset.dtype, np.integer): - return dataset.attrs.get('_FillValue', np.nan) + return dataset.attrs.get("_FillValue", np.nan) return np.nan @@ -1453,11 +1453,11 @@ def resample_dataset(dataset, destination_area, **kwargs): source_area = dataset.attrs["area"] except KeyError: LOG.info("Cannot reproject dataset %s, missing area info", - dataset.attrs['name']) + dataset.attrs["name"]) return dataset - fill_value = kwargs.pop('fill_value', get_fill_value(dataset)) + fill_value = kwargs.pop("fill_value", get_fill_value(dataset)) new_data = resample(source_area, dataset, destination_area, fill_value=fill_value, **kwargs) new_attrs = new_data.attrs new_data.attrs = dataset.attrs.copy() diff --git a/satpy/scene.py b/satpy/scene.py index e3e71811e9..92a7c9d623 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -141,7 +141,7 @@ def __init__(self, filenames=None, reader=None, filter_parameters=None, cleaned_reader_kwargs = {} else: cleaned_reader_kwargs = cleaned_reader_kwargs.copy() - cleaned_reader_kwargs.setdefault('filter_parameters', {}).update(filter_parameters) + cleaned_reader_kwargs.setdefault("filter_parameters", {}).update(filter_parameters) if filenames and isinstance(filenames, str): raise ValueError("'filenames' must be a list of files: Scene(filenames=[filename])") @@ -163,7 +163,7 @@ def wishlist(self): return self._wishlist.copy() def _ipython_key_completions_(self): - return [x['name'] for x in self._datasets.keys()] + return [x["name"] for x in self._datasets.keys()] def _create_reader_instances(self, filenames=None, @@ -210,10 +210,10 @@ def start_time(self): will be consulted. """ - start_times = [data_arr.attrs['start_time'] for data_arr in self.values() - if 'start_time' in data_arr.attrs] + start_times = [data_arr.attrs["start_time"] for data_arr in self.values() + if "start_time" in data_arr.attrs] if not start_times: - start_times = self._reader_times('start_time') + start_times = self._reader_times("start_time") if not start_times: return None return min(start_times) @@ -227,10 +227,10 @@ def end_time(self): :attr:`Scene.start_time` is returned. 
""" - end_times = [data_arr.attrs['end_time'] for data_arr in self.values() - if 'end_time' in data_arr.attrs] + end_times = [data_arr.attrs["end_time"] for data_arr in self.values() + if "end_time" in data_arr.attrs] if not end_times: - end_times = self._reader_times('end_time') + end_times = self._reader_times("end_time") if not end_times: return self.start_time return max(end_times) @@ -309,7 +309,7 @@ def _gather_all_areas(self, datasets): continue elif not isinstance(ds, DataArray): ds = self[ds] - area = ds.attrs.get('area') + area = ds.attrs.get("area") areas.append(area) areas = [x for x in areas if x is not None] if not areas: @@ -439,7 +439,7 @@ def available_dataset_names(self, reader_name=None, composites=False): Returns: list of available dataset names """ - return sorted(set(x['name'] for x in self.available_dataset_ids( + return sorted(set(x["name"] for x in self.available_dataset_ids( reader_name=reader_name, composites=composites))) def all_dataset_ids(self, reader_name=None, composites=False): @@ -495,7 +495,7 @@ def all_dataset_names(self, reader_name=None, composites=False): Returns: list of all dataset names """ - return sorted(set(x['name'] for x in self.all_dataset_ids( + return sorted(set(x["name"] for x in self.all_dataset_ids( reader_name=reader_name, composites=composites))) def _check_known_composites(self, available_only=False): @@ -508,7 +508,7 @@ def _check_known_composites(self, available_only=False): dep_tree = DependencyTree(self._readers, sensor_comps, mods, available_only=available_only) # ignore inline compositor dependencies starting with '_' comps = (comp for comp_dict in sensor_comps.values() - for comp in comp_dict.keys() if not comp['name'].startswith('_')) + for comp in comp_dict.keys() if not comp["name"].startswith("_")) # make sure that these composites are even create-able by these readers all_comps = set(comps) # find_dependencies will update the all_comps set with DataIDs @@ -526,7 +526,7 @@ def available_composite_ids(self): def available_composite_names(self): """Names of all configured composites known to this Scene.""" - return sorted(set(x['name'] for x in self.available_composite_ids())) + return sorted(set(x["name"] for x in self.available_composite_ids())) def all_composite_ids(self): """Get all IDs for configured composites.""" @@ -534,7 +534,7 @@ def all_composite_ids(self): def all_composite_names(self): """Get all names for all configured composites.""" - return sorted(set(x['name'] for x in self.all_composite_ids())) + return sorted(set(x["name"] for x in self.all_composite_ids())) def all_modifier_names(self): """Get names of configured modifier objects.""" @@ -557,7 +557,7 @@ def iter_by_area(self): """ datasets_by_area = {} for ds in self: - a = ds.attrs.get('area') + a = ds.attrs.get("area") dsid = DataID.from_dataarray(ds) datasets_by_area.setdefault(a, []).append(dsid) @@ -597,14 +597,14 @@ def copy(self, datasets=None): @property def all_same_area(self): """All contained data arrays are on the same area.""" - all_areas = [x.attrs.get('area', None) for x in self.values()] + all_areas = [x.attrs.get("area", None) for x in self.values()] all_areas = [x for x in all_areas if x is not None] return all(all_areas[0] == x for x in all_areas[1:]) @property def all_same_proj(self): """All contained data array are in the same projection.""" - all_areas = [x.attrs.get('area', None) for x in self.values()] + all_areas = [x.attrs.get("area", None) for x in self.values()] all_areas = [x for x in all_areas if x is not None] return 
all(all_areas[0].crs == x.crs for x in all_areas[1:]) @@ -614,11 +614,11 @@ def _slice_area_from_bbox(src_area, dst_area, ll_bbox=None, """Slice the provided area using the bounds provided.""" if ll_bbox is not None: dst_area = AreaDefinition( - 'crop_area', 'crop_area', 'crop_latlong', - {'proj': 'latlong'}, 100, 100, ll_bbox) + "crop_area", "crop_area", "crop_latlong", + {"proj": "latlong"}, 100, 100, ll_bbox) elif xy_bbox is not None: dst_area = AreaDefinition( - 'crop_area', 'crop_area', 'crop_xy', + "crop_area", "crop_area", "crop_xy", src_area.crs, src_area.width, src_area.height, xy_bbox) x_slice, y_slice = src_area.get_area_slices(dst_area) @@ -638,7 +638,7 @@ def _slice_datasets(self, dataset_ids, slice_key, new_area, area_only=True): if ds_id in new_datasets: replace_anc(ds, pres) continue - if area_only and ds.attrs.get('area') is None: + if area_only and ds.attrs.get("area") is None: new_datasets[ds_id] = ds replace_anc(ds, pres) continue @@ -650,7 +650,7 @@ def _slice_datasets(self, dataset_ids, slice_key, new_area, area_only=True): key = slice_key new_ds = ds.isel(**key) if new_area is not None: - new_ds.attrs['area'] = new_area + new_ds.attrs["area"] = new_area new_datasets[ds_id] = new_ds if parent_ds is None: @@ -679,7 +679,7 @@ def slice(self, key): if area is not None: # assume dimensions for area are y and x one_ds = self[dataset_ids[0]] - area_key = tuple(sl for dim, sl in zip(one_ds.dims, key) if dim in ['y', 'x']) + area_key = tuple(sl for dim, sl in zip(one_ds.dims, key) if dim in ["y", "x"]) new_area = area[area_key] else: new_area = None @@ -759,7 +759,7 @@ def crop(self, area=None, ll_bbox=None, xy_bbox=None, dataset_ids=None): x_slice = slice(min_x_slice.start * x_factor, min_x_slice.stop * x_factor) new_area = src_area[y_slice, x_slice] - slice_key = {'y': y_slice, 'x': x_slice} + slice_key = {"y": y_slice, "x": x_slice} new_scn._slice_datasets(dataset_ids, slice_key, new_area) else: new_target_areas[src_area] = self._slice_area_from_bbox( @@ -768,7 +768,7 @@ def crop(self, area=None, ll_bbox=None, xy_bbox=None, dataset_ids=None): return new_scn - def aggregate(self, dataset_ids=None, boundary='trim', side='left', func='mean', **dim_kwargs): + def aggregate(self, dataset_ids=None, boundary="trim", side="left", func="mean", **dim_kwargs): """Create an aggregated version of the Scene. 
Args: @@ -810,8 +810,8 @@ def aggregate(self, dataset_ids=None, boundary='trim', side='left', func='mean', side=side, **dim_kwargs) new_scn._datasets[ds_id].attrs = self[ds_id].attrs.copy() - new_scn._datasets[ds_id].attrs['area'] = target_area - new_scn._datasets[ds_id].attrs['resolution'] = resolution + new_scn._datasets[ds_id].attrs["area"] = target_area + new_scn._datasets[ds_id].attrs["resolution"] = resolution return new_scn def get(self, key, default=None): @@ -846,11 +846,11 @@ def _slice_data(self, source_area, slices, dataset): """Slice the data to reduce it.""" slice_x, slice_y = slices dataset = dataset.isel(x=slice_x, y=slice_y) - if ('x', source_area.width) not in dataset.sizes.items(): + if ("x", source_area.width) not in dataset.sizes.items(): raise RuntimeError - if ('y', source_area.height) not in dataset.sizes.items(): + if ("y", source_area.height) not in dataset.sizes.items(): raise RuntimeError - dataset.attrs['area'] = source_area + dataset.attrs["area"] = source_area return dataset @@ -877,19 +877,19 @@ def _resampled_scene(self, new_scn, destination_area, reduce_data=True, if ds_id in new_scn._datasets: new_scn._datasets[ds_id] = new_datasets[ds_id] continue - if dataset.attrs.get('area') is None: + if dataset.attrs.get("area") is None: if parent_dataset is None: new_scn._datasets[ds_id] = dataset else: replace_anc(dataset, pres) continue LOG.debug("Resampling %s", ds_id) - source_area = dataset.attrs['area'] + source_area = dataset.attrs["area"] dataset, source_area = self._reduce_data(dataset, source_area, destination_area, reduce_data, reductions, resample_kwargs) self._prepare_resampler(source_area, destination_area, resamplers, resample_kwargs) kwargs = resample_kwargs.copy() - kwargs['resampler'] = resamplers[source_area] + kwargs["resampler"] = resamplers[source_area] res = resample_dataset(dataset, destination_area, **kwargs) new_datasets[ds_id] = res if ds_id in new_scn._datasets: @@ -900,7 +900,7 @@ def _resampled_scene(self, new_scn, destination_area, reduce_data=True, def _get_finalized_destination_area(self, destination_area, new_scn): if isinstance(destination_area, str): destination_area = get_area_def(destination_area) - if hasattr(destination_area, 'freeze'): + if hasattr(destination_area, "freeze"): try: finest_area = new_scn.finest_area() destination_area = destination_area.freeze(finest_area) @@ -923,8 +923,8 @@ def _reduce_data(self, dataset, source_area, destination_area, reduce_data, redu try: (slice_x, slice_y), source_area = reductions[key] except KeyError: - if resample_kwargs.get('resampler') == 'gradient_search': - factor = resample_kwargs.get('shape_divisible_by', 2) + if resample_kwargs.get("resampler") == "gradient_search": + factor = resample_kwargs.get("shape_divisible_by", 2) else: factor = None try: @@ -1051,7 +1051,7 @@ def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynami # by default select first data variable as display variable vdims = ds.data_vars[list(ds.data_vars.keys())[0]].name - if hasattr(ds, "area") and hasattr(ds.area, 'to_cartopy_crs'): + if hasattr(ds, "area") and hasattr(ds.area, "to_cartopy_crs"): dscrs = ds.area.to_cartopy_crs() gvds = gv.Dataset(ds, crs=dscrs) else: @@ -1083,17 +1083,17 @@ def to_xarray_dataset(self, datasets=None): if len(dataarrays) == 0: return xr.Dataset() - ds_dict = {i.attrs['name']: i.rename(i.attrs['name']) for i in dataarrays if i.attrs.get('area') is not None} + ds_dict = {i.attrs["name"]: i.rename(i.attrs["name"]) for i in dataarrays if i.attrs.get("area") 
is not None} mdata = combine_metadata(*tuple(i.attrs for i in dataarrays)) - if mdata.get('area') is None or not isinstance(mdata['area'], SwathDefinition): + if mdata.get("area") is None or not isinstance(mdata["area"], SwathDefinition): # either don't know what the area is or we have an AreaDefinition ds = xr.merge(ds_dict.values()) else: # we have a swath definition and should use lon/lat values - lons, lats = mdata['area'].get_lonlats() + lons, lats = mdata["area"].get_lonlats() if not isinstance(lons, DataArray): - lons = DataArray(lons, dims=('y', 'x')) - lats = DataArray(lats, dims=('y', 'x')) + lons = DataArray(lons, dims=("y", "x")) + lats = DataArray(lats, dims=("y", "x")) ds = xr.Dataset(ds_dict, coords={"latitude": lats, "longitude": lons}) @@ -1109,7 +1109,7 @@ def to_xarray(self, include_lonlats=True, epoch=None, include_orig_name=True, - numeric_name_prefix='CHANNEL_'): + numeric_name_prefix="CHANNEL_"): """Merge all xr.DataArray(s) of a satpy.Scene to a CF-compliant xarray object. If all Scene DataArrays are on the same area, it returns an xr.Dataset. @@ -1211,7 +1211,7 @@ def save_dataset(self, dataset_id, filename=None, writer=None, """ if writer is None and filename is None: - writer = 'geotiff' + writer = "geotiff" elif writer is None: writer = self._get_writer_by_ext(os.path.splitext(filename)[1]) @@ -1274,7 +1274,7 @@ def save_datasets(self, writer=None, filename=None, datasets=None, compute=True, "dimensions (eg. through resampling).") if writer is None: if filename is None: - writer = 'geotiff' + writer = "geotiff" else: writer = self._get_writer_by_ext(os.path.splitext(filename)[1]) writer, save_kwargs = load_writer(writer, @@ -1346,7 +1346,7 @@ def _get_writer_by_ext(extension): """ mapping = {".tiff": "geotiff", ".tif": "geotiff", ".nc": "cf", ".mitiff": "mitiff"} - return mapping.get(extension.lower(), 'simple_image') + return mapping.get(extension.lower(), "simple_image") def _remove_failed_datasets(self, keepables): """Remove the datasets that we couldn't create.""" @@ -1384,8 +1384,8 @@ def unload(self, keepables=None): LOG.debug("Unloading dataset: %r", ds_id) del self._datasets[ds_id] - def load(self, wishlist, calibration='*', resolution='*', - polarization='*', level='*', modifiers='*', generate=True, unload=True, + def load(self, wishlist, calibration="*", resolution="*", + polarization="*", level="*", modifiers="*", generate=True, unload=True, **kwargs): """Read and generate requested datasets. 
diff --git a/satpy/tests/compositor_tests/test_abi.py b/satpy/tests/compositor_tests/test_abi.py index 93df810cf5..79c5ae99ed 100644 --- a/satpy/tests/compositor_tests/test_abi.py +++ b/satpy/tests/compositor_tests/test_abi.py @@ -26,7 +26,7 @@ class TestABIComposites(unittest.TestCase): def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors - load_compositor_configs_for_sensors(['abi']) + load_compositor_configs_for_sensors(["abi"]) def test_simulated_green(self): """Test creating a fake 'green' band.""" @@ -39,28 +39,28 @@ def test_simulated_green(self): rows = 5 cols = 10 area = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, + "test", "test", "test", + {"proj": "eqc", "lon_0": 0.0, + "lat_0": 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) - comp = SimulatedGreen('green', prerequisites=('C01', 'C02', 'C03'), - standard_name='toa_bidirectional_reflectance') + comp = SimulatedGreen("green", prerequisites=("C01", "C02", "C03"), + standard_name="toa_bidirectional_reflectance") c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.25, - dims=('y', 'x'), - attrs={'name': 'C01', 'area': area}) + dims=("y", "x"), + attrs={"name": "C01", "area": area}) c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30, - dims=('y', 'x'), - attrs={'name': 'C02', 'area': area}) + dims=("y", "x"), + attrs={"name": "C02", "area": area}) c03 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.35, - dims=('y', 'x'), - attrs={'name': 'C03', 'area': area}) + dims=("y", "x"), + attrs={"name": "C03", "area": area}) res = comp((c01, c02, c03)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs['name'], 'green') - self.assertEqual(res.attrs['standard_name'], - 'toa_bidirectional_reflectance') + self.assertEqual(res.attrs["name"], "green") + self.assertEqual(res.attrs["standard_name"], + "toa_bidirectional_reflectance") data = res.compute() np.testing.assert_allclose(data, 0.28025) diff --git a/satpy/tests/compositor_tests/test_agri.py b/satpy/tests/compositor_tests/test_agri.py index 32fcc72c61..27a566a82c 100644 --- a/satpy/tests/compositor_tests/test_agri.py +++ b/satpy/tests/compositor_tests/test_agri.py @@ -26,7 +26,7 @@ class TestAGRIComposites(unittest.TestCase): def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors - load_compositor_configs_for_sensors(['agri']) + load_compositor_configs_for_sensors(["agri"]) def test_simulated_red(self): """Test creating a fake 'red' band.""" @@ -39,25 +39,25 @@ def test_simulated_red(self): rows = 5 cols = 10 area = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, + "test", "test", "test", + {"proj": "eqc", "lon_0": 0.0, + "lat_0": 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) - comp = SimulatedRed('red', prerequisites=('C01', 'C02', 'C03'), - standard_name='toa_bidirectional_reflectance') + comp = SimulatedRed("red", prerequisites=("C01", "C02", "C03"), + standard_name="toa_bidirectional_reflectance") c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.25, - dims=('y', 'x'), - attrs={'name': 'C01', 'area': area}) + dims=("y", "x"), + attrs={"name": "C01", "area": area}) c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30, - dims=('y', 'x'), - 
attrs={'name': 'C02', 'area': area}) + dims=("y", "x"), + attrs={"name": "C02", "area": area}) res = comp((c01, c02)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs['name'], 'red') - self.assertEqual(res.attrs['standard_name'], - 'toa_bidirectional_reflectance') + self.assertEqual(res.attrs["name"], "red") + self.assertEqual(res.attrs["standard_name"], + "toa_bidirectional_reflectance") data = res.compute() np.testing.assert_allclose(data, 0.24252874) diff --git a/satpy/tests/compositor_tests/test_ahi.py b/satpy/tests/compositor_tests/test_ahi.py index ed485bd924..980f5a746b 100644 --- a/satpy/tests/compositor_tests/test_ahi.py +++ b/satpy/tests/compositor_tests/test_ahi.py @@ -26,4 +26,4 @@ class TestAHIComposites(unittest.TestCase): def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors - load_compositor_configs_for_sensors(['ahi']) + load_compositor_configs_for_sensors(["ahi"]) diff --git a/satpy/tests/compositor_tests/test_glm.py b/satpy/tests/compositor_tests/test_glm.py index 6b79f96678..13783905da 100644 --- a/satpy/tests/compositor_tests/test_glm.py +++ b/satpy/tests/compositor_tests/test_glm.py @@ -24,7 +24,7 @@ class TestGLMComposites: def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors - load_compositor_configs_for_sensors(['glm']) + load_compositor_configs_for_sensors(["glm"]) def test_highlight_compositor(self): """Test creating a highlight composite.""" @@ -37,34 +37,34 @@ def test_highlight_compositor(self): rows = 5 cols = 10 area = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, + "test", "test", "test", + {"proj": "eqc", "lon_0": 0.0, + "lat_0": 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = HighlightCompositor( - 'c14_highlight', - prerequisites=('flash_extent_density', 'C14'), + "c14_highlight", + prerequisites=("flash_extent_density", "C14"), min_hightlight=0.0, max_hightlight=1.0, ) flash_extent_density = xr.DataArray( da.zeros((rows, cols), chunks=25) + 0.5, - dims=('y', 'x'), - attrs={'name': 'flash_extent_density', 'area': area}) + dims=("y", "x"), + attrs={"name": "flash_extent_density", "area": area}) c14_data = np.repeat(np.arange(cols, dtype=np.float64)[None, :], rows, axis=0) c14 = xr.DataArray(da.from_array(c14_data, chunks=25) + 303.15, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'name': 'C14', - 'area': area, - 'standard_name': 'toa_brightness_temperature', + "name": "C14", + "area": area, + "standard_name": "toa_brightness_temperature", }) res = comp((flash_extent_density, c14)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'c14_highlight' + assert res.attrs["name"] == "c14_highlight" data = res.compute() np.testing.assert_almost_equal(data.values.min(), -0.04) np.testing.assert_almost_equal(data.values.max(), 1.04) diff --git a/satpy/tests/compositor_tests/test_sar.py b/satpy/tests/compositor_tests/test_sar.py index ed71e22730..d7cd2a9c80 100644 --- a/satpy/tests/compositor_tests/test_sar.py +++ b/satpy/tests/compositor_tests/test_sar.py @@ -33,25 +33,25 @@ def test_sar_ice(self): rows = 2 cols = 2 - comp = SARIce('sar_ice', prerequisites=('hh', 'hv'), - standard_name='sar-ice') + comp = SARIce("sar_ice", prerequisites=("hh", "hv"), + 
standard_name="sar-ice") hh = xr.DataArray(da.zeros((rows, cols), chunks=25) + 2000, - dims=('y', 'x'), - attrs={'name': 'hh'}) + dims=("y", "x"), + attrs={"name": "hh"}) hv = xr.DataArray(da.zeros((rows, cols), chunks=25) + 1000, - dims=('y', 'x'), - attrs={'name': 'hv'}) + dims=("y", "x"), + attrs={"name": "hv"}) res = comp((hh, hv)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs['name'], 'sar_ice') - self.assertEqual(res.attrs['standard_name'], - 'sar-ice') + self.assertEqual(res.attrs["name"], "sar_ice") + self.assertEqual(res.attrs["standard_name"], + "sar-ice") data = res.compute() - np.testing.assert_allclose(data.sel(bands='R'), 31.58280822) - np.testing.assert_allclose(data.sel(bands='G'), 159869.56789876) - np.testing.assert_allclose(data.sel(bands='B'), 44.68138191) + np.testing.assert_allclose(data.sel(bands="R"), 31.58280822) + np.testing.assert_allclose(data.sel(bands="G"), 159869.56789876) + np.testing.assert_allclose(data.sel(bands="B"), 44.68138191) def test_sar_ice_log(self): """Test creating a the sar_ice_log composite.""" @@ -63,22 +63,22 @@ def test_sar_ice_log(self): rows = 2 cols = 2 - comp = SARIceLog('sar_ice_log', prerequisites=('hh', 'hv'), - standard_name='sar-ice-log') + comp = SARIceLog("sar_ice_log", prerequisites=("hh", "hv"), + standard_name="sar-ice-log") hh = xr.DataArray(da.zeros((rows, cols), chunks=25) - 10, - dims=('y', 'x'), - attrs={'name': 'hh'}) + dims=("y", "x"), + attrs={"name": "hh"}) hv = xr.DataArray(da.zeros((rows, cols), chunks=25) - 20, - dims=('y', 'x'), - attrs={'name': 'hv'}) + dims=("y", "x"), + attrs={"name": "hv"}) res = comp((hh, hv)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs['name'], 'sar_ice_log') - self.assertEqual(res.attrs['standard_name'], - 'sar-ice-log') + self.assertEqual(res.attrs["name"], "sar_ice_log") + self.assertEqual(res.attrs["standard_name"], + "sar-ice-log") data = res.compute() - np.testing.assert_allclose(data.sel(bands='R'), -20) - np.testing.assert_allclose(data.sel(bands='G'), -4.6) - np.testing.assert_allclose(data.sel(bands='B'), -10) + np.testing.assert_allclose(data.sel(bands="R"), -20) + np.testing.assert_allclose(data.sel(bands="G"), -4.6) + np.testing.assert_allclose(data.sel(bands="B"), -10) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 467adf119b..4800f12a7b 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -31,71 +31,71 @@ def setup_method(self): """Initialize channels.""" rows = 5 cols = 10 - self.c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.20, dims=('y', 'x'), attrs={'name': 'C02'}) - self.c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30, dims=('y', 'x'), attrs={'name': 'C03'}) - self.c03 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.40, dims=('y', 'x'), attrs={'name': 'C04'}) + self.c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.20, dims=("y", "x"), attrs={"name": "C02"}) + self.c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30, dims=("y", "x"), attrs={"name": "C03"}) + self.c03 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.40, dims=("y", "x"), attrs={"name": "C04"}) def test_bad_lengths(self): """Test that error is raised if the amount of channels to blend does not match the number of weights.""" - comp = SpectralBlender('blended_channel', fractions=(0.3, 0.7), 
prerequisites=(0.51, 0.85), - standard_name='toa_bidirectional_reflectance') + comp = SpectralBlender("blended_channel", fractions=(0.3, 0.7), prerequisites=(0.51, 0.85), + standard_name="toa_bidirectional_reflectance") with pytest.raises(ValueError): comp((self.c01, self.c02, self.c03)) def test_spectral_blender(self): """Test the base class for spectral blending of channels.""" - comp = SpectralBlender('blended_channel', fractions=(0.3, 0.4, 0.3), prerequisites=(0.51, 0.65, 0.85), - standard_name='toa_bidirectional_reflectance') + comp = SpectralBlender("blended_channel", fractions=(0.3, 0.4, 0.3), prerequisites=(0.51, 0.65, 0.85), + standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c02, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'blended_channel' - assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' + assert res.attrs["name"] == "blended_channel" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.3) def test_hybrid_green(self): """Test hybrid green correction of the 'green' band.""" - comp = HybridGreen('hybrid_green', fraction=0.15, prerequisites=(0.51, 0.85), - standard_name='toa_bidirectional_reflectance') + comp = HybridGreen("hybrid_green", fraction=0.15, prerequisites=(0.51, 0.85), + standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'hybrid_green' - assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' + assert res.attrs["name"] == "hybrid_green" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.23) def test_ndvi_hybrid_green(self): """Test NDVI-scaled hybrid green correction of 'green' band.""" self.c01 = xr.DataArray(da.from_array([[0.25, 0.30], [0.20, 0.30]], chunks=25), - dims=('y', 'x'), attrs={'name': 'C02'}) + dims=("y", "x"), attrs={"name": "C02"}) self.c02 = xr.DataArray(da.from_array([[0.25, 0.30], [0.25, 0.35]], chunks=25), - dims=('y', 'x'), attrs={'name': 'C03'}) + dims=("y", "x"), attrs={"name": "C03"}) self.c03 = xr.DataArray(da.from_array([[0.35, 0.35], [0.28, 0.65]], chunks=25), - dims=('y', 'x'), attrs={'name': 'C04'}) + dims=("y", "x"), attrs={"name": "C04"}) - comp = NDVIHybridGreen('ndvi_hybrid_green', limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), - standard_name='toa_bidirectional_reflectance') + comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), + standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c02, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'ndvi_hybrid_green' - assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' + assert res.attrs["name"] == "ndvi_hybrid_green" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.values np.testing.assert_array_almost_equal(data, np.array([[0.2633, 0.3071], [0.2115, 0.3420]]), decimal=4) def test_green_corrector(self): """Test the deprecated class for green corrections.""" with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=UserWarning, message=r'.*deprecated.*') - comp = GreenCorrector('blended_channel', fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), - 
standard_name='toa_bidirectional_reflectance') + warnings.filterwarnings("ignore", category=UserWarning, message=r".*deprecated.*") + comp = GreenCorrector("blended_channel", fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), + standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'blended_channel' - assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' + assert res.attrs["name"] == "blended_channel" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.23) diff --git a/satpy/tests/compositor_tests/test_viirs.py b/satpy/tests/compositor_tests/test_viirs.py index 66c5e8c426..969f4579ef 100644 --- a/satpy/tests/compositor_tests/test_viirs.py +++ b/satpy/tests/compositor_tests/test_viirs.py @@ -35,9 +35,9 @@ def area(self): rows = 5 cols = 10 area = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, + "test", "test", "test", + {"proj": "eqc", "lon_0": 0.0, + "lat_0": 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) return area @@ -50,9 +50,9 @@ def dnb(self, area): dnb[4:, :] += 0.5 dnb = da.from_array(dnb, chunks=25) c01 = xr.DataArray(dnb, - dims=('y', 'x'), - attrs={'name': 'DNB', 'area': area, - 'start_time': datetime(2020, 1, 1, 12, 0, 0)}) + dims=("y", "x"), + attrs={"name": "DNB", "area": area, + "start_time": datetime(2020, 1, 1, 12, 0, 0)}) return c01 @pytest.fixture @@ -64,9 +64,9 @@ def sza(self, area): sza[:, 4:] += 45.0 sza = da.from_array(sza, chunks=25) c02 = xr.DataArray(sza, - dims=('y', 'x'), - attrs={'name': 'solar_zenith_angle', 'area': area, - 'start_time': datetime(2020, 1, 1, 12, 0, 0)}) + dims=("y", "x"), + attrs={"name": "solar_zenith_angle", "area": area, + "start_time": datetime(2020, 1, 1, 12, 0, 0)}) return c02 @pytest.fixture @@ -77,29 +77,29 @@ def lza(self, area): lza[:, 4:] += 45.0 lza = da.from_array(lza, chunks=25) c03 = xr.DataArray(lza, - dims=('y', 'x'), - attrs={'name': 'lunar_zenith_angle', 'area': area, - 'start_time': datetime(2020, 1, 1, 12, 0, 0) + dims=("y", "x"), + attrs={"name": "lunar_zenith_angle", "area": area, + "start_time": datetime(2020, 1, 1, 12, 0, 0) }) return c03 def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors - load_compositor_configs_for_sensors(['viirs']) + load_compositor_configs_for_sensors(["viirs"]) def test_histogram_dnb(self, dnb, sza): """Test the 'histogram_dnb' compositor.""" from satpy.composites.viirs import HistogramDNB - comp = HistogramDNB('histogram_dnb', prerequisites=('dnb',), - standard_name='toa_outgoing_radiance_per_' - 'unit_wavelength') + comp = HistogramDNB("histogram_dnb", prerequisites=("dnb",), + standard_name="toa_outgoing_radiance_per_" + "unit_wavelength") res = comp((dnb, sza)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'histogram_dnb' - assert res.attrs['standard_name'] == 'equalized_radiance' + assert res.attrs["name"] == "histogram_dnb" + assert res.attrs["standard_name"] == "equalized_radiance" data = res.compute() unique_values = np.unique(data) np.testing.assert_allclose(unique_values, [0.5994, 0.7992, 0.999], rtol=1e-3) @@ -108,14 +108,14 @@ def test_adaptive_dnb(self, dnb, sza): """Test the 'adaptive_dnb' compositor.""" from 
satpy.composites.viirs import AdaptiveDNB - comp = AdaptiveDNB('adaptive_dnb', prerequisites=('dnb',), - standard_name='toa_outgoing_radiance_per_' - 'unit_wavelength') + comp = AdaptiveDNB("adaptive_dnb", prerequisites=("dnb",), + standard_name="toa_outgoing_radiance_per_" + "unit_wavelength") res = comp((dnb, sza)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'adaptive_dnb' - assert res.attrs['standard_name'] == 'equalized_radiance' + assert res.attrs["name"] == "adaptive_dnb" + assert res.attrs["standard_name"] == "equalized_radiance" data = res.compute() np.testing.assert_allclose(data.data, 0.999, rtol=1e-4) @@ -123,17 +123,17 @@ def test_hncc_dnb(self, area, dnb, sza, lza): """Test the 'hncc_dnb' compositor.""" from satpy.composites.viirs import NCCZinke - comp = NCCZinke('hncc_dnb', prerequisites=('dnb',), - standard_name='toa_outgoing_radiance_per_' - 'unit_wavelength') + comp = NCCZinke("hncc_dnb", prerequisites=("dnb",), + standard_name="toa_outgoing_radiance_per_" + "unit_wavelength") mif = xr.DataArray(da.zeros((5,), chunks=5) + 0.1, - dims=('y',), - attrs={'name': 'moon_illumination_fraction', 'area': area}) + dims=("y",), + attrs={"name": "moon_illumination_fraction", "area": area}) res = comp((dnb, sza, lza, mif)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'hncc_dnb' - assert res.attrs['standard_name'] == 'ncc_radiance' + assert res.attrs["name"] == "hncc_dnb" + assert res.attrs["standard_name"] == "ncc_radiance" data = res.compute() unique = np.unique(data) np.testing.assert_allclose( @@ -148,14 +148,14 @@ def test_hncc_dnb_nomoonpha(self, area, dnb, sza, lza): """Test the 'hncc_dnb' compositor when no moon phase data is provided.""" from satpy.composites.viirs import NCCZinke - comp = NCCZinke('hncc_dnb', prerequisites=('dnb',), - standard_name='toa_outgoing_radiance_per_' - 'unit_wavelength') + comp = NCCZinke("hncc_dnb", prerequisites=("dnb",), + standard_name="toa_outgoing_radiance_per_" + "unit_wavelength") res = comp((dnb, sza, lza)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'hncc_dnb' - assert res.attrs['standard_name'] == 'ncc_radiance' + assert res.attrs["name"] == "hncc_dnb" + assert res.attrs["standard_name"] == "ncc_radiance" data = res.compute() unique = np.unique(data) np.testing.assert_allclose( @@ -169,10 +169,10 @@ def test_erf_dnb(self, dnb_units, saturation_correction, area, sza, lza): """Test the 'dynamic_dnb' or ERF DNB compositor.""" from satpy.composites.viirs import ERFDNB - comp = ERFDNB('dynamic_dnb', prerequisites=('dnb',), + comp = ERFDNB("dynamic_dnb", prerequisites=("dnb",), saturation_correction=saturation_correction, - standard_name='toa_outgoing_radiance_per_' - 'unit_wavelength') + standard_name="toa_outgoing_radiance_per_" + "unit_wavelength") # dnb is different from in the other tests, so don't use the fixture # here dnb = np.zeros(area.shape) + 0.25 @@ -184,16 +184,16 @@ def test_erf_dnb(self, dnb_units, saturation_correction, area, sza, lza): dnb /= 10000.0 dnb = da.from_array(dnb, chunks=25) c01 = xr.DataArray(dnb, - dims=('y', 'x'), - attrs={'name': 'DNB', 'area': area, 'units': dnb_units}) + dims=("y", "x"), + attrs={"name": "DNB", "area": area, "units": dnb_units}) mif = xr.DataArray(da.zeros((5,), chunks=5) + 0.1, - dims=('y',), - attrs={'name': 'moon_illumination_fraction', 'area': area}) + dims=("y",), + attrs={"name": "moon_illumination_fraction", 
"area": area}) res = comp((c01, sza, lza, mif)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'dynamic_dnb' - assert res.attrs['standard_name'] == 'equalized_radiance' + assert res.attrs["name"] == "dynamic_dnb" + assert res.attrs["standard_name"] == "equalized_radiance" data = res.compute() unique = np.unique(data) assert np.isnan(unique).any() diff --git a/satpy/tests/conftest.py b/satpy/tests/conftest.py index 8bcbea2093..842dade04e 100644 --- a/satpy/tests/conftest.py +++ b/satpy/tests/conftest.py @@ -26,7 +26,7 @@ import satpy -TEST_ETC_DIR = os.path.join(os.path.dirname(__file__), 'etc') +TEST_ETC_DIR = os.path.join(os.path.dirname(__file__), "etc") @pytest.fixture(autouse=True) diff --git a/satpy/tests/enhancement_tests/test_abi.py b/satpy/tests/enhancement_tests/test_abi.py index f7ebb853b4..4a878ce96c 100644 --- a/satpy/tests/enhancement_tests/test_abi.py +++ b/satpy/tests/enhancement_tests/test_abi.py @@ -30,7 +30,7 @@ class TestABIEnhancement(unittest.TestCase): def setUp(self): """Create fake data for the tests.""" data = da.linspace(0, 1, 16).reshape((4, 4)) - self.da = xr.DataArray(data, dims=('y', 'x'), attrs={'test': 'test'}) + self.da = xr.DataArray(data, dims=("y", "x"), attrs={"test": "test"}) def test_cimss_true_color_contrast(self): """Test the cimss_true_color_contrast enhancement.""" diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index e95c55a362..b518cc3f39 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -70,12 +70,12 @@ def setup_method(self): crefl_data /= 5.605 crefl_data[0, 0] = np.nan # one bad value for testing crefl_data[0, 1] = 0. 
- self.ch1 = xr.DataArray(da.from_array(data, chunks=2), dims=('y', 'x'), attrs={'test': 'test'}) - self.ch2 = xr.DataArray(da.from_array(crefl_data, chunks=2), dims=('y', 'x'), attrs={'test': 'test'}) + self.ch1 = xr.DataArray(da.from_array(data, chunks=2), dims=("y", "x"), attrs={"test": "test"}) + self.ch2 = xr.DataArray(da.from_array(crefl_data, chunks=2), dims=("y", "x"), attrs={"test": "test"}) rgb_data = np.stack([data, data, data]) self.rgb = xr.DataArray(da.from_array(rgb_data, chunks=(3, 2, 2)), - dims=('bands', 'y', 'x'), - coords={'bands': ['R', 'G', 'B']}) + dims=("bands", "y", "x"), + coords={"bands": ["R", "G", "B"]}) @pytest.mark.parametrize( ("decorator", "exp_call_cls"), @@ -208,20 +208,20 @@ def test_merge_colormaps(self): create_colormap_mock = mock.Mock(wraps=create_colormap) cmap1 = Colormap((1, (1., 1., 1.))) - kwargs = {'palettes': cmap1} + kwargs = {"palettes": cmap1} - with mock.patch('satpy.enhancements.create_colormap', create_colormap_mock): + with mock.patch("satpy.enhancements.create_colormap", create_colormap_mock): res = mcp(kwargs) assert res is cmap1 create_colormap_mock.assert_not_called() create_colormap_mock.reset_mock() ret_map.reset_mock() - cmap1 = {'colors': 'blues', 'min_value': 0, - 'max_value': 1} - kwargs = {'palettes': [cmap1]} - with mock.patch('satpy.enhancements.create_colormap', create_colormap_mock), \ - mock.patch('trollimage.colormap.blues', ret_map): + cmap1 = {"colors": "blues", "min_value": 0, + "max_value": 1} + kwargs = {"palettes": [cmap1]} + with mock.patch("satpy.enhancements.create_colormap", create_colormap_mock), \ + mock.patch("trollimage.colormap.blues", ret_map): _ = mcp(kwargs) create_colormap_mock.assert_called_once() ret_map.reverse.assert_not_called() @@ -229,18 +229,18 @@ def test_merge_colormaps(self): create_colormap_mock.reset_mock() ret_map.reset_mock() - cmap2 = {'colors': 'blues', 'min_value': 2, - 'max_value': 3, 'reverse': True} - kwargs = {'palettes': [cmap2]} - with mock.patch('trollimage.colormap.blues', ret_map): + cmap2 = {"colors": "blues", "min_value": 2, + "max_value": 3, "reverse": True} + kwargs = {"palettes": [cmap2]} + with mock.patch("trollimage.colormap.blues", ret_map): _ = mcp(kwargs) ret_map.reverse.assert_called_once() ret_map.set_range.assert_called_with(2, 3) create_colormap_mock.reset_mock() ret_map.reset_mock() - kwargs = {'palettes': [cmap1, cmap2]} - with mock.patch('trollimage.colormap.blues', ret_map): + kwargs = {"palettes": [cmap1, cmap2]} + with mock.patch("trollimage.colormap.blues", ret_map): _ = mcp(kwargs) ret_map.__add__.assert_called_once() @@ -342,7 +342,7 @@ def test_cmap_vrgb_as_rgba(self): with closed_named_temp_file(suffix=".npy") as cmap_filename: cmap_data = _generate_cmap_test_data(None, "VRGB") np.save(cmap_filename, cmap_data) - cmap = create_colormap({'filename': cmap_filename, 'colormap_mode': "RGBA"}) + cmap = create_colormap({"filename": cmap_filename, "colormap_mode": "RGBA"}) assert cmap.colors.shape[0] == 4 assert cmap.colors.shape[1] == 4 # RGBA np.testing.assert_equal(cmap.colors[0], [128 / 255., 1.0, 0, 0]) @@ -366,14 +366,14 @@ def test_cmap_bad_mode(self, real_mode, forced_mode, filename_suffix): _write_cmap_to_file(cmap_filename, cmap_data) # Force colormap_mode VRGBA to RGBA and we should see an exception with pytest.raises(ValueError): - create_colormap({'filename': cmap_filename, 'colormap_mode': forced_mode}) + create_colormap({"filename": cmap_filename, "colormap_mode": forced_mode}) def test_cmap_from_file_bad_shape(self): """Test that unknown 
array shape causes an error.""" from satpy.enhancements import create_colormap # create the colormap file on disk - with closed_named_temp_file(suffix='.npy') as cmap_filename: + with closed_named_temp_file(suffix=".npy") as cmap_filename: np.save(cmap_filename, np.array([ [0], [64], @@ -382,7 +382,7 @@ def test_cmap_from_file_bad_shape(self): ])) with pytest.raises(ValueError): - create_colormap({'filename': cmap_filename}) + create_colormap({"filename": cmap_filename}) def test_cmap_from_config_path(self, tmp_path): """Test loading a colormap relative to a config path.""" @@ -396,7 +396,7 @@ def test_cmap_from_config_path(self, tmp_path): np.save(cmap_filename, cmap_data) with satpy.config.set(config_path=[tmp_path]): rel_cmap_filename = os.path.join("colormaps", "my_colormap.npy") - cmap = create_colormap({'filename': rel_cmap_filename, 'colormap_mode': "RGBA"}) + cmap = create_colormap({"filename": rel_cmap_filename, "colormap_mode": "RGBA"}) assert cmap.colors.shape[0] == 4 assert cmap.colors.shape[1] == 4 # RGBA np.testing.assert_equal(cmap.colors[0], [128 / 255., 1.0, 0, 0]) @@ -407,7 +407,7 @@ def test_cmap_from_config_path(self, tmp_path): def test_cmap_from_trollimage(self): """Test that colormaps in trollimage can be loaded.""" from satpy.enhancements import create_colormap - cmap = create_colormap({'colors': 'pubu'}) + cmap = create_colormap({"colors": "pubu"}) from trollimage.colormap import pubu np.testing.assert_equal(cmap.colors, pubu.colors) np.testing.assert_equal(cmap.values, pubu.values) @@ -428,14 +428,14 @@ def test_cmap_list(self): [1, 1, 1], ] values = [2, 4, 6, 8] - cmap = create_colormap({'colors': colors, 'color_scale': 1}) + cmap = create_colormap({"colors": colors, "color_scale": 1}) assert cmap.colors.shape[0] == 4 np.testing.assert_equal(cmap.colors[0], [0.0, 0.0, 1.0]) assert cmap.values.shape[0] == 4 assert cmap.values[0] == 0 assert cmap.values[-1] == 1.0 - cmap = create_colormap({'colors': colors, 'color_scale': 1, 'values': values}) + cmap = create_colormap({"colors": colors, "color_scale": 1, "values": values}) assert cmap.colors.shape[0] == 4 np.testing.assert_equal(cmap.colors[0], [0.0, 0.0, 1.0]) assert cmap.values.shape[0] == 4 @@ -451,7 +451,7 @@ def func(array, index, gain=2): coords=array.coords, dims=array.dims, attrs=array.attrs) separate_func = on_separate_bands(func) - arr = xr.DataArray(np.zeros((3, 10, 10)), dims=['bands', 'y', 'x'], coords={"bands": ["R", "G", "B"]}) + arr = xr.DataArray(np.zeros((3, 10, 10)), dims=["bands", "y", "x"], coords={"bands": ["R", "G", "B"]}) assert separate_func(arr).shape == arr.shape assert all(separate_func(arr, gain=1).values[:, 0, 0] == [0, 1, 2]) @@ -460,11 +460,11 @@ def test_using_map_blocks(): """Test the `using_map_blocks` decorator.""" def func(np_array, block_info=None): - value = block_info[0]['chunk-location'][-1] + value = block_info[0]["chunk-location"][-1] return np.ones(np_array.shape) * value map_blocked_func = using_map_blocks(func) - arr = xr.DataArray(da.zeros((3, 10, 10), dtype=int, chunks=5), dims=['bands', 'y', 'x']) + arr = xr.DataArray(da.zeros((3, 10, 10), dtype=int, chunks=5), dims=["bands", "y", "x"]) res = map_blocked_func(arr) assert res.shape == arr.shape assert res[0, 0, 0].compute() != res[0, 9, 9].compute() @@ -479,7 +479,7 @@ def func(dask_array): return dask_array dask_func = on_dask_array(func) - arr = xr.DataArray(da.zeros((3, 10, 10), dtype=int, chunks=5), dims=['bands', 'y', 'x']) + arr = xr.DataArray(da.zeros((3, 10, 10), dtype=int, chunks=5), dims=["bands", "y", "x"]) 
res = dask_func(arr) assert res.shape == arr.shape @@ -492,82 +492,82 @@ def fake_area(): _nwcsaf_geo_props = { - 'cma_geo': ("geo", "cma", None, 'cma_pal', None, 'cloudmask', 'CMA', "uint8"), - 'cma_pps': ("pps", "cma", None, 'cma_pal', None, 'cloudmask', 'CMA', "uint8"), - 'cma_extended_pps': ("pps", "cma_extended", None, 'cma_extended_pal', None, - 'cloudmask_extended', 'CMA', "uint8"), - 'cmaprob_pps': ("pps", "cmaprob", None, 'cmaprob_pal', None, 'cloudmask_probability', - 'CMAPROB', "uint8"), - 'ct_geo': ("geo", "ct", None, 'ct_pal', None, 'cloudtype', 'CT', "uint8"), - 'ct_pps': ("pps", "ct", None, 'ct_pal', None, 'cloudtype', 'CT', "uint8"), - 'ctth_alti_geo': ("geo", "ctth_alti", None, 'ctth_alti_pal', None, 'cloud_top_height', - 'CTTH', "float64"), - 'ctth_alti_pps': ("pps", "ctth_alti", None, 'ctth_alti_pal', "ctth_status_flag", - 'cloud_top_height', 'CTTH', "float64"), - 'ctth_pres_geo': ("geo", "ctth_pres", None, 'ctth_pres_pal', None, 'cloud_top_pressure', - 'CTTH', "float64"), - 'ctth_pres_pps': ("pps", "ctth_pres", None, 'ctth_pres_pal', None, 'cloud_top_pressure', - 'CTTH', "float64"), - 'ctth_tempe_geo': ("geo", "ctth_tempe", None, 'ctth_tempe_pal', None, 'cloud_top_temperature', - 'CTTH', "float64"), - 'ctth_tempe_pps': ("pps", "ctth_tempe", None, 'ctth_tempe_pal', None, 'cloud_top_temperature', - 'CTTH', "float64"), - 'cmic_phase_geo': ("geo", "cmic_phase", None, 'cmic_phase_pal', None, 'cloud_top_phase', - 'CMIC', "uint8"), - 'cmic_phase_pps': ("pps", "cmic_phase", None, 'cmic_phase_pal', "cmic_status_flag", 'cloud_top_phase', - 'CMIC', "uint8"), - 'cmic_reff_geo': ("geo", "cmic_reff", None, 'cmic_reff_pal', None, 'cloud_drop_effective_radius', - 'CMIC', "float64"), - 'cmic_reff_pps': ("pps", "cmic_reff", "cmic_cre", 'cmic_cre_pal', "cmic_status_flag", - 'cloud_drop_effective_radius', 'CMIC', "float64"), - 'cmic_cot_geo': ("geo", "cmic_cot", None, 'cmic_cot_pal', None, 'cloud_optical_thickness', - 'CMIC', "float64"), - 'cmic_cot_pps': ("pps", "cmic_cot", None, 'cmic_cot_pal', None, 'cloud_optical_thickness', - 'CMIC', "float64"), - 'cmic_cwp_pps': ("pps", "cmic_cwp", None, 'cmic_cwp_pal', None, 'cloud_water_path', - 'CMIC', "float64"), - 'cmic_lwp_geo': ("geo", "cmic_lwp", None, 'cmic_lwp_pal', None, 'cloud_liquid_water_path', - 'CMIC', "float64"), - 'cmic_lwp_pps': ("pps", "cmic_lwp", None, 'cmic_lwp_pal', None, 'liquid_water_path', - 'CMIC', "float64"), - 'cmic_iwp_geo': ("geo", "cmic_iwp", None, 'cmic_iwp_pal', None, 'cloud_ice_water_path', - 'CMIC', "float64"), - 'cmic_iwp_pps': ("pps", "cmic_iwp", None, 'cmic_iwp_pal', None, 'ice_water_path', - 'CMIC', "float64"), - 'pc': ("geo", "pc", None, 'pc_pal', None, 'precipitation_probability', 'PC', "uint8"), - 'crr': ("geo", "crr", None, 'crr_pal', None, 'convective_rain_rate', 'CRR', "uint8"), - 'crr_accum': ("geo", "crr_accum", None, 'crr_pal', None, - 'convective_precipitation_hourly_accumulation', 'CRR', "uint8"), - 'ishai_tpw': ("geo", "ishai_tpw", None, 'ishai_tpw_pal', None, 'total_precipitable_water', - 'iSHAI', "float64"), - 'ishai_shw': ("geo", "ishai_shw", None, 'ishai_shw_pal', None, 'showalter_index', - 'iSHAI', "float64"), - 'ishai_li': ("geo", "ishai_li", None, 'ishai_li_pal', None, 'lifted_index', - 'iSHAI', "float64"), - 'ci_prob30': ("geo", "ci_prob30", None, 'ci_pal', None, 'convection_initiation_prob30', - 'CI', "float64"), - 'ci_prob60': ("geo", "ci_prob60", None, 'ci_pal', None, 'convection_initiation_prob60', - 'CI', "float64"), - 'ci_prob90': ("geo", "ci_prob90", None, 'ci_pal', None, 
'convection_initiation_prob90', - 'CI', "float64"), - 'asii_turb_trop_prob': ("geo", "asii_turb_trop_prob", None, 'asii_turb_prob_pal', None, - 'asii_prob', 'ASII-NG', "float64"), - 'MapCellCatType': ("geo", "MapCellCatType", None, 'MapCellCatType_pal', None, - 'rdt_cell_type', 'RDT-CW', "uint8"), + "cma_geo": ("geo", "cma", None, "cma_pal", None, "cloudmask", "CMA", "uint8"), + "cma_pps": ("pps", "cma", None, "cma_pal", None, "cloudmask", "CMA", "uint8"), + "cma_extended_pps": ("pps", "cma_extended", None, "cma_extended_pal", None, + "cloudmask_extended", "CMA", "uint8"), + "cmaprob_pps": ("pps", "cmaprob", None, "cmaprob_pal", None, "cloudmask_probability", + "CMAPROB", "uint8"), + "ct_geo": ("geo", "ct", None, "ct_pal", None, "cloudtype", "CT", "uint8"), + "ct_pps": ("pps", "ct", None, "ct_pal", None, "cloudtype", "CT", "uint8"), + "ctth_alti_geo": ("geo", "ctth_alti", None, "ctth_alti_pal", None, "cloud_top_height", + "CTTH", "float64"), + "ctth_alti_pps": ("pps", "ctth_alti", None, "ctth_alti_pal", "ctth_status_flag", + "cloud_top_height", "CTTH", "float64"), + "ctth_pres_geo": ("geo", "ctth_pres", None, "ctth_pres_pal", None, "cloud_top_pressure", + "CTTH", "float64"), + "ctth_pres_pps": ("pps", "ctth_pres", None, "ctth_pres_pal", None, "cloud_top_pressure", + "CTTH", "float64"), + "ctth_tempe_geo": ("geo", "ctth_tempe", None, "ctth_tempe_pal", None, "cloud_top_temperature", + "CTTH", "float64"), + "ctth_tempe_pps": ("pps", "ctth_tempe", None, "ctth_tempe_pal", None, "cloud_top_temperature", + "CTTH", "float64"), + "cmic_phase_geo": ("geo", "cmic_phase", None, "cmic_phase_pal", None, "cloud_top_phase", + "CMIC", "uint8"), + "cmic_phase_pps": ("pps", "cmic_phase", None, "cmic_phase_pal", "cmic_status_flag", "cloud_top_phase", + "CMIC", "uint8"), + "cmic_reff_geo": ("geo", "cmic_reff", None, "cmic_reff_pal", None, "cloud_drop_effective_radius", + "CMIC", "float64"), + "cmic_reff_pps": ("pps", "cmic_reff", "cmic_cre", "cmic_cre_pal", "cmic_status_flag", + "cloud_drop_effective_radius", "CMIC", "float64"), + "cmic_cot_geo": ("geo", "cmic_cot", None, "cmic_cot_pal", None, "cloud_optical_thickness", + "CMIC", "float64"), + "cmic_cot_pps": ("pps", "cmic_cot", None, "cmic_cot_pal", None, "cloud_optical_thickness", + "CMIC", "float64"), + "cmic_cwp_pps": ("pps", "cmic_cwp", None, "cmic_cwp_pal", None, "cloud_water_path", + "CMIC", "float64"), + "cmic_lwp_geo": ("geo", "cmic_lwp", None, "cmic_lwp_pal", None, "cloud_liquid_water_path", + "CMIC", "float64"), + "cmic_lwp_pps": ("pps", "cmic_lwp", None, "cmic_lwp_pal", None, "liquid_water_path", + "CMIC", "float64"), + "cmic_iwp_geo": ("geo", "cmic_iwp", None, "cmic_iwp_pal", None, "cloud_ice_water_path", + "CMIC", "float64"), + "cmic_iwp_pps": ("pps", "cmic_iwp", None, "cmic_iwp_pal", None, "ice_water_path", + "CMIC", "float64"), + "pc": ("geo", "pc", None, "pc_pal", None, "precipitation_probability", "PC", "uint8"), + "crr": ("geo", "crr", None, "crr_pal", None, "convective_rain_rate", "CRR", "uint8"), + "crr_accum": ("geo", "crr_accum", None, "crr_pal", None, + "convective_precipitation_hourly_accumulation", "CRR", "uint8"), + "ishai_tpw": ("geo", "ishai_tpw", None, "ishai_tpw_pal", None, "total_precipitable_water", + "iSHAI", "float64"), + "ishai_shw": ("geo", "ishai_shw", None, "ishai_shw_pal", None, "showalter_index", + "iSHAI", "float64"), + "ishai_li": ("geo", "ishai_li", None, "ishai_li_pal", None, "lifted_index", + "iSHAI", "float64"), + "ci_prob30": ("geo", "ci_prob30", None, "ci_pal", None, "convection_initiation_prob30", + "CI", 
"float64"), + "ci_prob60": ("geo", "ci_prob60", None, "ci_pal", None, "convection_initiation_prob60", + "CI", "float64"), + "ci_prob90": ("geo", "ci_prob90", None, "ci_pal", None, "convection_initiation_prob90", + "CI", "float64"), + "asii_turb_trop_prob": ("geo", "asii_turb_trop_prob", None, "asii_turb_prob_pal", None, + "asii_prob", "ASII-NG", "float64"), + "MapCellCatType": ("geo", "MapCellCatType", None, "MapCellCatType_pal", None, + "rdt_cell_type", "RDT-CW", "uint8"), } @pytest.mark.parametrize( "data", - ['cma_geo', 'cma_pps', 'cma_extended_pps', 'cmaprob_pps', 'ct_geo', - 'ct_pps', 'ctth_alti_geo', 'ctth_alti_pps', 'ctth_pres_geo', - 'ctth_pres_pps', 'ctth_tempe_geo', 'ctth_tempe_pps', - 'cmic_phase_geo', 'cmic_phase_pps', 'cmic_reff_geo', - 'cmic_reff_pps', 'cmic_cot_geo', 'cmic_cot_pps', 'cmic_cwp_pps', - 'cmic_lwp_geo', 'cmic_lwp_pps', 'cmic_iwp_geo', 'cmic_iwp_pps', - 'pc', 'crr', 'crr_accum', 'ishai_tpw', 'ishai_shw', 'ishai_li', - 'ci_prob30', 'ci_prob60', 'ci_prob90', 'asii_turb_trop_prob', - 'MapCellCatType'] + ["cma_geo", "cma_pps", "cma_extended_pps", "cmaprob_pps", "ct_geo", + "ct_pps", "ctth_alti_geo", "ctth_alti_pps", "ctth_pres_geo", + "ctth_pres_pps", "ctth_tempe_geo", "ctth_tempe_pps", + "cmic_phase_geo", "cmic_phase_pps", "cmic_reff_geo", + "cmic_reff_pps", "cmic_cot_geo", "cmic_cot_pps", "cmic_cwp_pps", + "cmic_lwp_geo", "cmic_lwp_pps", "cmic_iwp_geo", "cmic_iwp_pps", + "pc", "crr", "crr_accum", "ishai_tpw", "ishai_shw", "ishai_li", + "ci_prob30", "ci_prob60", "ci_prob90", "asii_turb_trop_prob", + "MapCellCatType"] ) def test_nwcsaf_comps(fake_area, tmp_path, data): """Test loading NWCSAF composites.""" @@ -645,9 +645,9 @@ def setup_method(self): """Create test data.""" data = da.arange(-100, 1000, 110).reshape(2, 5) rgb_data = np.stack([data, data, data]) - self.rgb = xr.DataArray(rgb_data, dims=('bands', 'y', 'x'), - coords={'bands': ['R', 'G', 'B']}, - attrs={'platform_name': 'Himawari-8'}) + self.rgb = xr.DataArray(rgb_data, dims=("bands", "y", "x"), + coords={"bands": ["R", "G", "B"]}, + attrs={"platform_name": "Himawari-8"}) def test_jma_true_color_reproduction(self): """Test the jma_true_color_reproduction enhancement.""" @@ -669,12 +669,12 @@ def test_jma_true_color_reproduction(self): np.testing.assert_almost_equal(img.data.compute(), expected) - self.rgb.attrs['platform_name'] = None + self.rgb.attrs["platform_name"] = None img = XRImage(self.rgb) with pytest.raises(ValueError, match="Missing platform name."): jma_true_color_reproduction(img) - self.rgb.attrs['platform_name'] = 'Fakesat' + self.rgb.attrs["platform_name"] = "Fakesat" img = XRImage(self.rgb) with pytest.raises(KeyError, match="No conversion matrix found for platform Fakesat"): jma_true_color_reproduction(img) diff --git a/satpy/tests/enhancement_tests/test_viirs.py b/satpy/tests/enhancement_tests/test_viirs.py index 5595266034..b73e5fc700 100644 --- a/satpy/tests/enhancement_tests/test_viirs.py +++ b/satpy/tests/enhancement_tests/test_viirs.py @@ -33,8 +33,8 @@ def setUp(self): """Create test data.""" data = np.arange(15, 301, 15).reshape(2, 10) data = da.from_array(data, chunks=(2, 10)) - self.da = xr.DataArray(data, dims=('y', 'x'), attrs={'test': 'test'}) - self.palette = {'colors': + self.da = xr.DataArray(data, dims=("y", "x"), attrs={"test": "test"}) + self.palette = {"colors": [[14, [0.0, 0.0, 0.0]], [15, [0.0, 0.0, 0.39215]], [16, [0.76862, 0.63529, 0.44705]], @@ -64,8 +64,8 @@ def setUp(self): [191, [1.0, 0.0, 0.0]], [200, [1.0, 0.0, 0.0]], [201, [0.0, 0.0, 0.0]]], - 'min_value': 
0, - 'max_value': 201} + "min_value": 0, + "max_value": 201} def test_viirs(self): """Test VIIRS flood enhancement.""" diff --git a/satpy/tests/features/steps/steps-load.py b/satpy/tests/features/steps/steps-load.py index 9dfe9eb9cc..7e2d1829a2 100644 --- a/satpy/tests/features/steps/steps-load.py +++ b/satpy/tests/features/steps/steps-load.py @@ -25,31 +25,31 @@ use_step_matcher("re") -@given(u'data is available') +@given(u"data is available") def step_impl_data_available(context): """Make data available.""" - if not os.path.exists('/tmp/SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5'): - response = urlopen('https://zenodo.org/record/16355/files/' - 'SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5') - with open('/tmp/SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5', + if not os.path.exists("/tmp/SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5"): + response = urlopen("https://zenodo.org/record/16355/files/" + "SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5") + with open("/tmp/SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5", mode="w") as fp: fp.write(response.read()) - if not os.path.exists('/tmp/GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5'): - response = urlopen('https://zenodo.org/record/16355/files/' - 'GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5') - with open('/tmp/GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5', + if not os.path.exists("/tmp/GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5"): + response = urlopen("https://zenodo.org/record/16355/files/" + "GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5") + with open("/tmp/GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5", mode="w") as fp: fp.write(response.read()) -@when(u'user loads the data without providing a config file') +@when(u"user loads the data without providing a config file") def step_impl_user_loads_no_config(context): """Load the data without a config.""" from datetime import datetime from satpy import Scene, find_files_and_readers os.chdir("/tmp/") - readers_files = find_files_and_readers(sensor='viirs', + readers_files = find_files_and_readers(sensor="viirs", start_time=datetime(2015, 3, 11, 11, 20), end_time=datetime(2015, 3, 11, 11, 26)) scn = Scene(filenames=readers_files) @@ -57,20 +57,20 @@ def step_impl_user_loads_no_config(context): context.scene = scn -@then(u'the data is available in a scene object') +@then(u"the data is available in a scene object") def step_impl_data_available_in_scene(context): """Check that the data is available in the scene.""" assert context.scene["M02"] is not None assert context.scene.get("M01") is None -@when(u'some items are not available') +@when(u"some items are not available") def step_impl_items_not_available(context): """Load some data.""" context.scene.load(["M01"]) -@when(u'user wants to know what data is available') +@when(u"user wants to know what data is available") def step_impl_user_checks_availability(context): """Check availability.""" from datetime import datetime @@ -84,7 +84,7 @@ def step_impl_user_checks_availability(context): context.available_dataset_ids = scn.available_dataset_ids() -@then(u'available datasets are returned') +@then(u"available datasets are returned") def 
step_impl_available_datasets_are_returned(context): """Check that datasets are returned.""" assert (len(context.available_dataset_ids) >= 5) @@ -98,13 +98,13 @@ def step_impl_datasets_with_same_name(context): from satpy import Scene from satpy.tests.utils import make_dataid scn = Scene() - scn[make_dataid(name='ds1', calibration='radiance')] = DataArray([[1, 2], [3, 4]]) - scn[make_dataid(name='ds1', resolution=500, calibration='reflectance')] = DataArray([[5, 6], [7, 8]]) - scn[make_dataid(name='ds1', resolution=250, calibration='reflectance')] = DataArray([[5, 6], [7, 8]]) - scn[make_dataid(name='ds1', resolution=1000, calibration='reflectance')] = DataArray([[5, 6], [7, 8]]) - scn[make_dataid(name='ds1', resolution=500, calibration='radiance', modifiers=('mod1',))] = \ + scn[make_dataid(name="ds1", calibration="radiance")] = DataArray([[1, 2], [3, 4]]) + scn[make_dataid(name="ds1", resolution=500, calibration="reflectance")] = DataArray([[5, 6], [7, 8]]) + scn[make_dataid(name="ds1", resolution=250, calibration="reflectance")] = DataArray([[5, 6], [7, 8]]) + scn[make_dataid(name="ds1", resolution=1000, calibration="reflectance")] = DataArray([[5, 6], [7, 8]]) + scn[make_dataid(name="ds1", resolution=500, calibration="radiance", modifiers=("mod1",))] = \ DataArray([[5, 6], [7, 8]]) - ds_id = make_dataid(name='ds1', resolution=1000, calibration='radiance', modifiers=('mod1', 'mod2')) + ds_id = make_dataid(name="ds1", resolution=1000, calibration="radiance", modifiers=("mod1", "mod2")) scn[ds_id] = DataArray([[5, 6], [7, 8]]) context.scene = scn @@ -112,10 +112,10 @@ def step_impl_datasets_with_same_name(context): @when("a dataset is retrieved by name") def step_impl_dataset_retrieved_by_name(context): """Use the Scene's getitem method to get a dataset.""" - context.returned_dataset = context.scene['ds1'] + context.returned_dataset = context.scene["ds1"] @then("the least modified version of the dataset is returned") def step_impl_least_modified_dataset_returned(context): """Check that the dataset should be one of the least modified datasets.""" - assert len(context.returned_dataset.attrs['modifiers']) == 0 + assert len(context.returned_dataset.attrs["modifiers"]) == 0 diff --git a/satpy/tests/features/steps/steps-real-load-process-write.py b/satpy/tests/features/steps/steps-real-load-process-write.py index d719d397e4..d99b167b97 100644 --- a/satpy/tests/features/steps/steps-real-load-process-write.py +++ b/satpy/tests/features/steps/steps-real-load-process-write.py @@ -80,10 +80,10 @@ def before_all(context): debug_on() -@given(u'{dformat} data is available') +@given(u"{dformat} data is available") def step_impl_input_files_exists(context, dformat): """Check that input data exists on disk.""" - data_path = os.path.join('test_data', dformat) + data_path = os.path.join("test_data", dformat) data_available = os.path.exists(data_path) if not data_available: context.scenario.skip(reason="No test data available for " + dformat) @@ -92,40 +92,40 @@ def step_impl_input_files_exists(context, dformat): context.data_path = data_path -@when(u'the user loads the {composite} composite') +@when(u"the user loads the {composite} composite") def step_impl_create_scene_and_load_single(context, composite): """Create a Scene and load a single composite.""" from satpy import Scene scn = Scene(reader=context.dformat, - filenames=get_all_files(os.path.join(context.data_path, 'data'), - '*')) + filenames=get_all_files(os.path.join(context.data_path, "data"), + "*")) scn.load([composite]) context.scn = scn 
context.composite = composite -@when(u'the user resamples the data to {area}') +@when(u"the user resamples the data to {area}") def step_impl_resample_scene(context, area): """Resample the scene to an area or use the native resampler.""" - if area != '-': + if area != "-": context.lscn = context.scn.resample(area) else: - context.lscn = context.scn.resample(resampler='native') + context.lscn = context.scn.resample(resampler="native") context.area = area -@when(u'the user saves the composite to disk') +@when(u"the user saves the composite to disk") def step_impl_save_to_png(context): """Call Scene.save_dataset to write a PNG image.""" - with NamedTemporaryFile(suffix='.png', delete=False) as tmp_file: + with NamedTemporaryFile(suffix=".png", delete=False) as tmp_file: context.lscn.save_dataset(context.composite, filename=tmp_file.name) context.new_filename = tmp_file.name -@then(u'the resulting image should match the reference image') +@then(u"the resulting image should match the reference image") def step_impl_compare_two_png_images(context): """Compare two PNG image files.""" - if context.area == '-': + if context.area == "-": ref_filename = context.composite + ".png" else: ref_filename = context.composite + "_" + context.area + ".png" diff --git a/satpy/tests/features/steps/steps-save.py b/satpy/tests/features/steps/steps-save.py index b42d8751a2..dbfd8040c0 100644 --- a/satpy/tests/features/steps/steps-save.py +++ b/satpy/tests/features/steps/steps-save.py @@ -36,7 +36,7 @@ def step_impl_create_scene_one_dataset(context): from satpy import Scene scn = Scene() - scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=['y', 'x']) + scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=["y", "x"]) context.scene = scn @@ -48,7 +48,7 @@ def step_impl_scene_show(context): context (behave.runner.Context): Test context """ - with patch('trollimage.xrimage.XRImage.show') as mock_show: + with patch("trollimage.xrimage.XRImage.show") as mock_show: context.scene.show("MyDataset") mock_show.assert_called_once_with() @@ -100,8 +100,8 @@ def step_impl_create_scene_two_datasets(context): from satpy import Scene scn = Scene() - scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=['y', 'x']) - scn["MyDataset2"] = DataArray([[5, 6], [7, 8]], dims=['y', 'x']) + scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=["y", "x"]) + scn["MyDataset2"] = DataArray([[5, 6], [7, 8]], dims=["y", "x"]) context.scene = scn diff --git a/satpy/tests/modifier_tests/test_angles.py b/satpy/tests/modifier_tests/test_angles.py index cd5082a5b7..7bea78b7d1 100644 --- a/satpy/tests/modifier_tests/test_angles.py +++ b/satpy/tests/modifier_tests/test_angles.py @@ -79,9 +79,9 @@ def _get_angle_test_data(area_def: Optional[Union[AreaDefinition, StackedAreaDef vis = xr.DataArray(data, dims=dims, attrs={ - 'area': area_def, - 'start_time': stime, - 'orbital_parameters': orb_params, + "area": area_def, + "start_time": stime, + "orbital_parameters": orb_params, }) return vis diff --git a/satpy/tests/modifier_tests/test_crefl.py b/satpy/tests/modifier_tests/test_crefl.py index c842df701e..e43d7bc3fa 100644 --- a/satpy/tests/modifier_tests/test_crefl.py +++ b/satpy/tests/modifier_tests/test_crefl.py @@ -57,7 +57,7 @@ def _mock_and_create_dem_file(tmpdir, url, var_name, fill_value=None): def _mock_dem_retrieve(tmpdir, url): - rmock_obj = mock.patch('satpy.modifiers._crefl.retrieve') + rmock_obj = mock.patch("satpy.modifiers._crefl.retrieve") rmock = rmock_obj.start() dem_fn = str(tmpdir.join(url)) rmock.return_value = dem_fn @@ -74,17 +74,17 @@ 
def _create_fake_dem_file(dem_fn, var_name, fill_value): h.end() -def _make_viirs_xarray(data, area, name, standard_name, wavelength=None, units='degrees', calibration=None): - return xr.DataArray(data, dims=('y', 'x'), +def _make_viirs_xarray(data, area, name, standard_name, wavelength=None, units="degrees", calibration=None): + return xr.DataArray(data, dims=("y", "x"), attrs={ - 'start_orbit': 1708, 'end_orbit': 1708, 'wavelength': wavelength, - 'modifiers': None, 'calibration': calibration, - 'resolution': 371, 'name': name, - 'standard_name': standard_name, 'platform_name': 'Suomi-NPP', - 'polarization': None, 'sensor': 'viirs', 'units': units, - 'start_time': datetime(2012, 2, 25, 18, 1, 24, 570942), - 'end_time': datetime(2012, 2, 25, 18, 11, 21, 175760), 'area': area, - 'ancillary_variables': [] + "start_orbit": 1708, "end_orbit": 1708, "wavelength": wavelength, + "modifiers": None, "calibration": calibration, + "resolution": 371, "name": name, + "standard_name": standard_name, "platform_name": "Suomi-NPP", + "polarization": None, "sensor": "viirs", "units": units, + "start_time": datetime(2012, 2, 25, 18, 1, 24, 570942), + "end_time": datetime(2012, 2, 25, 18, 11, 21, 175760), "area": area, + "ancillary_variables": [] }) @@ -97,9 +97,9 @@ def data_area_ref_corrector(): rows = 3 cols = 5 area = AreaDefinition( - 'some_area_name', 'On-the-fly area', 'geosabii', - {'a': '6378137.0', 'b': '6356752.31414', 'h': '35786023.0', 'lon_0': '-89.5', 'proj': 'geos', 'sweep': 'x', - 'units': 'm'}, + "some_area_name", "On-the-fly area", "geosabii", + {"a": "6378137.0", "b": "6356752.31414", "h": "35786023.0", "lon_0": "-89.5", "proj": "geos", "sweep": "x", + "units": "m"}, cols, rows, (-5434894.954752679, -5434894.964451744, 5434894.964451744, 5434894.954752679)) @@ -135,39 +135,39 @@ def test_reflectance_corrector_abi(self, name, wavelength, resolution, exp_mean, from satpy.modifiers._crefl import ReflectanceCorrector from satpy.tests.utils import make_dsq ref_cor = ReflectanceCorrector(optional_prerequisites=[ - make_dsq(name='satellite_azimuth_angle'), - make_dsq(name='satellite_zenith_angle'), - make_dsq(name='solar_azimuth_angle'), - make_dsq(name='solar_zenith_angle')], name=name, prerequisites=[], - wavelength=wavelength, resolution=resolution, calibration='reflectance', - modifiers=('sunz_corrected', 'rayleigh_corrected_crefl',), sensor='abi') - - assert ref_cor.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl') - assert ref_cor.attrs['calibration'] == 'reflectance' - assert ref_cor.attrs['wavelength'] == wavelength - assert ref_cor.attrs['name'] == name - assert ref_cor.attrs['resolution'] == resolution - assert ref_cor.attrs['sensor'] == 'abi' - assert ref_cor.attrs['prerequisites'] == [] - assert ref_cor.attrs['optional_prerequisites'] == [ - make_dsq(name='satellite_azimuth_angle'), - make_dsq(name='satellite_zenith_angle'), - make_dsq(name='solar_azimuth_angle'), - make_dsq(name='solar_zenith_angle')] + make_dsq(name="satellite_azimuth_angle"), + make_dsq(name="satellite_zenith_angle"), + make_dsq(name="solar_azimuth_angle"), + make_dsq(name="solar_zenith_angle")], name=name, prerequisites=[], + wavelength=wavelength, resolution=resolution, calibration="reflectance", + modifiers=("sunz_corrected", "rayleigh_corrected_crefl",), sensor="abi") + + assert ref_cor.attrs["modifiers"] == ("sunz_corrected", "rayleigh_corrected_crefl") + assert ref_cor.attrs["calibration"] == "reflectance" + assert ref_cor.attrs["wavelength"] == wavelength + assert ref_cor.attrs["name"] == 
name + assert ref_cor.attrs["resolution"] == resolution + assert ref_cor.attrs["sensor"] == "abi" + assert ref_cor.attrs["prerequisites"] == [] + assert ref_cor.attrs["optional_prerequisites"] == [ + make_dsq(name="satellite_azimuth_angle"), + make_dsq(name="satellite_zenith_angle"), + make_dsq(name="solar_azimuth_angle"), + make_dsq(name="solar_zenith_angle")] area, dnb = self.data_area_ref_corrector() c01 = xr.DataArray(dnb, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'platform_name': 'GOES-16', - 'calibration': 'reflectance', 'units': '%', 'wavelength': wavelength, - 'name': name, 'resolution': resolution, 'sensor': 'abi', - 'start_time': '2017-09-20 17:30:40.800000', 'end_time': '2017-09-20 17:41:17.500000', - 'area': area, 'ancillary_variables': [], - 'orbital_parameters': { - 'satellite_nominal_longitude': -89.5, - 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_altitude': 35786023.4375, + "platform_name": "GOES-16", + "calibration": "reflectance", "units": "%", "wavelength": wavelength, + "name": name, "resolution": resolution, "sensor": "abi", + "start_time": "2017-09-20 17:30:40.800000", "end_time": "2017-09-20 17:41:17.500000", + "area": area, "ancillary_variables": [], + "orbital_parameters": { + "satellite_nominal_longitude": -89.5, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_altitude": 35786023.4375, }, }) with assert_maximum_dask_computes(0): @@ -175,18 +175,18 @@ def test_reflectance_corrector_abi(self, name, wavelength, resolution, exp_mean, assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl') - assert res.attrs['platform_name'] == 'GOES-16' - assert res.attrs['calibration'] == 'reflectance' - assert res.attrs['units'] == '%' - assert res.attrs['wavelength'] == wavelength - assert res.attrs['name'] == name - assert res.attrs['resolution'] == resolution - assert res.attrs['sensor'] == 'abi' - assert res.attrs['start_time'] == '2017-09-20 17:30:40.800000' - assert res.attrs['end_time'] == '2017-09-20 17:41:17.500000' - assert res.attrs['area'] == area - assert res.attrs['ancillary_variables'] == [] + assert res.attrs["modifiers"] == ("sunz_corrected", "rayleigh_corrected_crefl") + assert res.attrs["platform_name"] == "GOES-16" + assert res.attrs["calibration"] == "reflectance" + assert res.attrs["units"] == "%" + assert res.attrs["wavelength"] == wavelength + assert res.attrs["name"] == name + assert res.attrs["resolution"] == resolution + assert res.attrs["sensor"] == "abi" + assert res.attrs["start_time"] == "2017-09-20 17:30:40.800000" + assert res.attrs["end_time"] == "2017-09-20 17:41:17.500000" + assert res.attrs["area"] == area + assert res.attrs["ancillary_variables"] == [] data = res.values unique = np.unique(data[~np.isnan(data)]) np.testing.assert_allclose(np.nanmean(data), exp_mean, rtol=1e-5) @@ -194,7 +194,7 @@ def test_reflectance_corrector_abi(self, name, wavelength, resolution, exp_mean, np.testing.assert_allclose(unique, exp_unique, rtol=1e-5) @pytest.mark.parametrize( - 'url,dem_mock_cm,dem_sds', + "url,dem_mock_cm,dem_sds", [ (None, mock_cmgdem, "average elevation"), ("CMGDEM.hdf", mock_cmgdem, "averaged elevation"), @@ -207,62 +207,62 @@ def test_reflectance_corrector_viirs(self, tmpdir, url, dem_mock_cm, dem_sds): ref_cor = ReflectanceCorrector( optional_prerequisites=[ - make_dsq(name='satellite_azimuth_angle'), - make_dsq(name='satellite_zenith_angle'), - make_dsq(name='solar_azimuth_angle'), - 
make_dsq(name='solar_zenith_angle') + make_dsq(name="satellite_azimuth_angle"), + make_dsq(name="satellite_zenith_angle"), + make_dsq(name="solar_azimuth_angle"), + make_dsq(name="solar_zenith_angle") ], - name='I01', + name="I01", prerequisites=[], wavelength=(0.6, 0.64, 0.68), resolution=371, - calibration='reflectance', - modifiers=('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband'), - sensor='viirs', + calibration="reflectance", + modifiers=("sunz_corrected_iband", "rayleigh_corrected_crefl_iband"), + sensor="viirs", url=url, dem_sds=dem_sds, ) - assert ref_cor.attrs['modifiers'] == ('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband') - assert ref_cor.attrs['calibration'] == 'reflectance' - assert ref_cor.attrs['wavelength'] == (0.6, 0.64, 0.68) - assert ref_cor.attrs['name'] == 'I01' - assert ref_cor.attrs['resolution'] == 371 - assert ref_cor.attrs['sensor'] == 'viirs' - assert ref_cor.attrs['prerequisites'] == [] - assert ref_cor.attrs['optional_prerequisites'] == [ - make_dsq(name='satellite_azimuth_angle'), - make_dsq(name='satellite_zenith_angle'), - make_dsq(name='solar_azimuth_angle'), - make_dsq(name='solar_zenith_angle')] + assert ref_cor.attrs["modifiers"] == ("sunz_corrected_iband", "rayleigh_corrected_crefl_iband") + assert ref_cor.attrs["calibration"] == "reflectance" + assert ref_cor.attrs["wavelength"] == (0.6, 0.64, 0.68) + assert ref_cor.attrs["name"] == "I01" + assert ref_cor.attrs["resolution"] == 371 + assert ref_cor.attrs["sensor"] == "viirs" + assert ref_cor.attrs["prerequisites"] == [] + assert ref_cor.attrs["optional_prerequisites"] == [ + make_dsq(name="satellite_azimuth_angle"), + make_dsq(name="satellite_zenith_angle"), + make_dsq(name="solar_azimuth_angle"), + make_dsq(name="solar_zenith_angle")] area, data = self.data_area_ref_corrector() - c01 = _make_viirs_xarray(data, area, 'I01', 'toa_bidirectional_reflectance', - wavelength=(0.6, 0.64, 0.68), units='%', - calibration='reflectance') - c02 = _make_viirs_xarray(data, area, 'satellite_azimuth_angle', 'sensor_azimuth_angle') - c03 = _make_viirs_xarray(data, area, 'satellite_zenith_angle', 'sensor_zenith_angle') - c04 = _make_viirs_xarray(data, area, 'solar_azimuth_angle', 'solar_azimuth_angle') - c05 = _make_viirs_xarray(data, area, 'solar_zenith_angle', 'solar_zenith_angle') + c01 = _make_viirs_xarray(data, area, "I01", "toa_bidirectional_reflectance", + wavelength=(0.6, 0.64, 0.68), units="%", + calibration="reflectance") + c02 = _make_viirs_xarray(data, area, "satellite_azimuth_angle", "sensor_azimuth_angle") + c03 = _make_viirs_xarray(data, area, "satellite_zenith_angle", "sensor_zenith_angle") + c04 = _make_viirs_xarray(data, area, "solar_azimuth_angle", "solar_azimuth_angle") + c05 = _make_viirs_xarray(data, area, "solar_zenith_angle", "solar_zenith_angle") with dem_mock_cm(tmpdir, url), assert_maximum_dask_computes(0): res = ref_cor([c01], [c02, c03, c04, c05]) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['wavelength'] == (0.6, 0.64, 0.68) - assert res.attrs['modifiers'] == ('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband') - assert res.attrs['calibration'] == 'reflectance' - assert res.attrs['resolution'] == 371 - assert res.attrs['name'] == 'I01' - assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' - assert res.attrs['platform_name'] == 'Suomi-NPP' - assert res.attrs['sensor'] == 'viirs' - assert res.attrs['units'] == '%' - assert res.attrs['start_time'] == datetime(2012, 2, 25, 18, 1, 24, 570942) - assert 
res.attrs['end_time'] == datetime(2012, 2, 25, 18, 11, 21, 175760) - assert res.attrs['area'] == area - assert res.attrs['ancillary_variables'] == [] + assert res.attrs["wavelength"] == (0.6, 0.64, 0.68) + assert res.attrs["modifiers"] == ("sunz_corrected_iband", "rayleigh_corrected_crefl_iband") + assert res.attrs["calibration"] == "reflectance" + assert res.attrs["resolution"] == 371 + assert res.attrs["name"] == "I01" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" + assert res.attrs["platform_name"] == "Suomi-NPP" + assert res.attrs["sensor"] == "viirs" + assert res.attrs["units"] == "%" + assert res.attrs["start_time"] == datetime(2012, 2, 25, 18, 1, 24, 570942) + assert res.attrs["end_time"] == datetime(2012, 2, 25, 18, 11, 21, 175760) + assert res.attrs["area"] == area + assert res.attrs["ancillary_variables"] == [] data = res.values assert abs(np.mean(data) - 51.12750267805715) < 1e-6 assert data.shape == (3, 5) @@ -273,64 +273,64 @@ def test_reflectance_corrector_modis(self): """Test ReflectanceCorrector modifier with MODIS data.""" from satpy.modifiers._crefl import ReflectanceCorrector from satpy.tests.utils import make_dsq - sataa_did = make_dsq(name='satellite_azimuth_angle') - satza_did = make_dsq(name='satellite_zenith_angle') - solaa_did = make_dsq(name='solar_azimuth_angle') - solza_did = make_dsq(name='solar_zenith_angle') + sataa_did = make_dsq(name="satellite_azimuth_angle") + satza_did = make_dsq(name="satellite_zenith_angle") + solaa_did = make_dsq(name="solar_azimuth_angle") + solza_did = make_dsq(name="solar_zenith_angle") ref_cor = ReflectanceCorrector( - optional_prerequisites=[sataa_did, satza_did, solaa_did, solza_did], name='1', - prerequisites=[], wavelength=(0.62, 0.645, 0.67), resolution=250, calibration='reflectance', - modifiers=('sunz_corrected', 'rayleigh_corrected_crefl'), sensor='modis') - assert ref_cor.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl') - assert ref_cor.attrs['calibration'] == 'reflectance' - assert ref_cor.attrs['wavelength'] == (0.62, 0.645, 0.67) - assert ref_cor.attrs['name'] == '1' - assert ref_cor.attrs['resolution'] == 250 - assert ref_cor.attrs['sensor'] == 'modis' - assert ref_cor.attrs['prerequisites'] == [] - assert ref_cor.attrs['optional_prerequisites'] == [ - make_dsq(name='satellite_azimuth_angle'), - make_dsq(name='satellite_zenith_angle'), - make_dsq(name='solar_azimuth_angle'), - make_dsq(name='solar_zenith_angle')] + optional_prerequisites=[sataa_did, satza_did, solaa_did, solza_did], name="1", + prerequisites=[], wavelength=(0.62, 0.645, 0.67), resolution=250, calibration="reflectance", + modifiers=("sunz_corrected", "rayleigh_corrected_crefl"), sensor="modis") + assert ref_cor.attrs["modifiers"] == ("sunz_corrected", "rayleigh_corrected_crefl") + assert ref_cor.attrs["calibration"] == "reflectance" + assert ref_cor.attrs["wavelength"] == (0.62, 0.645, 0.67) + assert ref_cor.attrs["name"] == "1" + assert ref_cor.attrs["resolution"] == 250 + assert ref_cor.attrs["sensor"] == "modis" + assert ref_cor.attrs["prerequisites"] == [] + assert ref_cor.attrs["optional_prerequisites"] == [ + make_dsq(name="satellite_azimuth_angle"), + make_dsq(name="satellite_zenith_angle"), + make_dsq(name="solar_azimuth_angle"), + make_dsq(name="solar_zenith_angle")] area, dnb = self.data_area_ref_corrector() def make_xarray(name, calibration, wavelength=None, modifiers=None, resolution=1000): return xr.DataArray(dnb, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'wavelength': wavelength, 'level': 
None, 'modifiers': modifiers, - 'calibration': calibration, 'resolution': resolution, - 'name': name, 'coordinates': ['longitude', 'latitude'], - 'platform_name': 'EOS-Aqua', 'polarization': None, 'sensor': 'modis', - 'units': '%', 'start_time': datetime(2012, 8, 13, 18, 46, 1, 439838), - 'end_time': datetime(2012, 8, 13, 18, 57, 47, 746296), 'area': area, - 'ancillary_variables': [] + "wavelength": wavelength, "level": None, "modifiers": modifiers, + "calibration": calibration, "resolution": resolution, + "name": name, "coordinates": ["longitude", "latitude"], + "platform_name": "EOS-Aqua", "polarization": None, "sensor": "modis", + "units": "%", "start_time": datetime(2012, 8, 13, 18, 46, 1, 439838), + "end_time": datetime(2012, 8, 13, 18, 57, 47, 746296), "area": area, + "ancillary_variables": [] }) - c01 = make_xarray('1', 'reflectance', wavelength=(0.62, 0.645, 0.67), modifiers='sunz_corrected', + c01 = make_xarray("1", "reflectance", wavelength=(0.62, 0.645, 0.67), modifiers="sunz_corrected", resolution=500) - c02 = make_xarray('satellite_azimuth_angle', None) - c03 = make_xarray('satellite_zenith_angle', None) - c04 = make_xarray('solar_azimuth_angle', None) - c05 = make_xarray('solar_zenith_angle', None) + c02 = make_xarray("satellite_azimuth_angle", None) + c03 = make_xarray("satellite_zenith_angle", None) + c04 = make_xarray("solar_azimuth_angle", None) + c05 = make_xarray("solar_zenith_angle", None) res = ref_cor([c01], [c02, c03, c04, c05]) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['wavelength'] == (0.62, 0.645, 0.67) - assert res.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl',) - assert res.attrs['calibration'] == 'reflectance' - assert res.attrs['resolution'] == 500 - assert res.attrs['name'] == '1' - assert res.attrs['platform_name'] == 'EOS-Aqua' - assert res.attrs['sensor'] == 'modis' - assert res.attrs['units'] == '%' - assert res.attrs['start_time'] == datetime(2012, 8, 13, 18, 46, 1, 439838) - assert res.attrs['end_time'] == datetime(2012, 8, 13, 18, 57, 47, 746296) - assert res.attrs['area'] == area - assert res.attrs['ancillary_variables'] == [] + assert res.attrs["wavelength"] == (0.62, 0.645, 0.67) + assert res.attrs["modifiers"] == ("sunz_corrected", "rayleigh_corrected_crefl",) + assert res.attrs["calibration"] == "reflectance" + assert res.attrs["resolution"] == 500 + assert res.attrs["name"] == "1" + assert res.attrs["platform_name"] == "EOS-Aqua" + assert res.attrs["sensor"] == "modis" + assert res.attrs["units"] == "%" + assert res.attrs["start_time"] == datetime(2012, 8, 13, 18, 46, 1, 439838) + assert res.attrs["end_time"] == datetime(2012, 8, 13, 18, 57, 47, 746296) + assert res.attrs["area"] == area + assert res.attrs["ancillary_variables"] == [] data = res.values assert abs(np.mean(data) - 52.09372623964498) < 1e-6 assert data.shape == (3, 5) @@ -346,7 +346,7 @@ def test_reflectance_corrector_bad_prereqs(self): pytest.raises(ValueError, ref_cor, [], [1, 2, 3, 4]) @pytest.mark.parametrize( - 'url,dem_mock_cm,dem_sds', + "url,dem_mock_cm,dem_sds", [ (None, mock_cmgdem, "average elevation"), ("CMGDEM.hdf", mock_cmgdem, "averaged elevation"), @@ -364,31 +364,31 @@ def test_reflectance_corrector_different_chunks(self, tmpdir, url, dem_mock_cm, ref_cor = ReflectanceCorrector( optional_prerequisites=[ - make_dsq(name='satellite_azimuth_angle'), - make_dsq(name='satellite_zenith_angle'), - make_dsq(name='solar_azimuth_angle'), - make_dsq(name='solar_zenith_angle') + 
make_dsq(name="satellite_azimuth_angle"), + make_dsq(name="satellite_zenith_angle"), + make_dsq(name="solar_azimuth_angle"), + make_dsq(name="solar_zenith_angle") ], - name='I01', + name="I01", prerequisites=[], wavelength=(0.6, 0.64, 0.68), resolution=371, - calibration='reflectance', - modifiers=('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband'), - sensor='viirs', + calibration="reflectance", + modifiers=("sunz_corrected_iband", "rayleigh_corrected_crefl_iband"), + sensor="viirs", url=url, dem_sds=dem_sds, ) area, data = self.data_area_ref_corrector() - c01 = _make_viirs_xarray(data, area, 'I01', 'toa_bidirectional_reflectance', - wavelength=(0.6, 0.64, 0.68), units='%', - calibration='reflectance') - c02 = _make_viirs_xarray(data, area, 'satellite_azimuth_angle', 'sensor_azimuth_angle') + c01 = _make_viirs_xarray(data, area, "I01", "toa_bidirectional_reflectance", + wavelength=(0.6, 0.64, 0.68), units="%", + calibration="reflectance") + c02 = _make_viirs_xarray(data, area, "satellite_azimuth_angle", "sensor_azimuth_angle") c02.data = c02.data.rechunk((1, -1)) - c03 = _make_viirs_xarray(data, area, 'satellite_zenith_angle', 'sensor_zenith_angle') - c04 = _make_viirs_xarray(data, area, 'solar_azimuth_angle', 'solar_azimuth_angle') - c05 = _make_viirs_xarray(data, area, 'solar_zenith_angle', 'solar_zenith_angle') + c03 = _make_viirs_xarray(data, area, "satellite_zenith_angle", "sensor_zenith_angle") + c04 = _make_viirs_xarray(data, area, "solar_azimuth_angle", "solar_azimuth_angle") + c05 = _make_viirs_xarray(data, area, "solar_zenith_angle", "solar_zenith_angle") with dem_mock_cm(tmpdir, url): res = ref_cor([c01], [c02, c03, c04, c05]) diff --git a/satpy/tests/modifier_tests/test_parallax.py b/satpy/tests/modifier_tests/test_parallax.py index 316192421c..04af43981f 100644 --- a/satpy/tests/modifier_tests/test_parallax.py +++ b/satpy/tests/modifier_tests/test_parallax.py @@ -516,7 +516,7 @@ def test_parallax_modifier_interface_with_cloud(self): w_cth = 25 h_cth = 15 - proj_dict = {'a': '6378137', 'h': '35785863', 'proj': 'geos', 'units': 'm'} + proj_dict = {"a": "6378137", "h": "35785863", "proj": "geos", "units": "m"} fake_area_cth = pyresample.create_area_def( area_id="test-area", projection=proj_dict, diff --git a/satpy/tests/multiscene_tests/test_blend.py b/satpy/tests/multiscene_tests/test_blend.py index 6b85dd9d79..c6e65d4615 100644 --- a/satpy/tests/multiscene_tests/test_blend.py +++ b/satpy/tests/multiscene_tests/test_blend.py @@ -42,15 +42,15 @@ def _get_expected_stack_select(scene1: Scene, scene2: Scene) -> xr.DataArray: - expected = scene2['polar-ct'] - expected[..., NUM_TEST_ROWS, :] = scene1['geo-ct'][..., NUM_TEST_ROWS, :] - expected[..., :, NUM_TEST_COLS] = scene1['geo-ct'][..., :, NUM_TEST_COLS] - expected[..., -1, :] = scene1['geo-ct'][..., -1, :] + expected = scene2["polar-ct"] + expected[..., NUM_TEST_ROWS, :] = scene1["geo-ct"][..., NUM_TEST_ROWS, :] + expected[..., :, NUM_TEST_COLS] = scene1["geo-ct"][..., :, NUM_TEST_COLS] + expected[..., -1, :] = scene1["geo-ct"][..., -1, :] return expected.compute() def _get_expected_stack_blend(scene1: Scene, scene2: Scene) -> xr.DataArray: - expected = scene2['polar-ct'].copy().compute().astype(np.float64) + expected = scene2["polar-ct"].copy().compute().astype(np.float64) expected[..., NUM_TEST_ROWS, :] = 5 / 3 # (1*2 + 3*1) / (2 + 1) expected[..., :, NUM_TEST_COLS] = 5 / 3 expected[..., -1, :] = np.nan # (1*0 + 0*1) / (0 + 1) @@ -88,21 +88,21 @@ def cloud_type_data_array1(test_area, data_type, image_mode): shape = 
DEFAULT_SHAPE if len(image_mode) == 0 else (len(image_mode),) + DEFAULT_SHAPE dims = ("y", "x") if len(image_mode) == 0 else ("bands", "y", "x") if data_type is np.int8: - data_arr = _create_test_int8_dataset(name='geo-ct', shape=shape, area=test_area, values=1, dims=dims) + data_arr = _create_test_int8_dataset(name="geo-ct", shape=shape, area=test_area, values=1, dims=dims) else: - data_arr = _create_test_dataset(name='geo-ct', shape=shape, area=test_area, values=1.0, dims=dims) - - data_arr.attrs['platform_name'] = 'Meteosat-11' - data_arr.attrs['sensor'] = {'seviri'} - data_arr.attrs['units'] = '1' - data_arr.attrs['long_name'] = 'NWC GEO CT Cloud Type' - data_arr.attrs['orbital_parameters'] = { - 'satellite_nominal_altitude': 35785863.0, - 'satellite_nominal_longitude': 0.0, - 'satellite_nominal_latitude': 0, + data_arr = _create_test_dataset(name="geo-ct", shape=shape, area=test_area, values=1.0, dims=dims) + + data_arr.attrs["platform_name"] = "Meteosat-11" + data_arr.attrs["sensor"] = {"seviri"} + data_arr.attrs["units"] = "1" + data_arr.attrs["long_name"] = "NWC GEO CT Cloud Type" + data_arr.attrs["orbital_parameters"] = { + "satellite_nominal_altitude": 35785863.0, + "satellite_nominal_longitude": 0.0, + "satellite_nominal_latitude": 0, } - data_arr.attrs['start_time'] = datetime(2023, 1, 16, 11, 9, 17) - data_arr.attrs['end_time'] = datetime(2023, 1, 16, 11, 12, 22) + data_arr.attrs["start_time"] = datetime(2023, 1, 16, 11, 9, 17) + data_arr.attrs["end_time"] = datetime(2023, 1, 16, 11, 12, 22) data_arr.attrs["_satpy_id"] = dsid1 return data_arr @@ -118,17 +118,17 @@ def cloud_type_data_array2(test_area, data_type, image_mode): shape = DEFAULT_SHAPE if len(image_mode) == 0 else (len(image_mode),) + DEFAULT_SHAPE dims = ("y", "x") if len(image_mode) == 0 else ("bands", "y", "x") if data_type is np.int8: - data_arr = _create_test_int8_dataset(name='polar-ct', shape=shape, area=test_area, values=3, dims=dims) - data_arr[..., -1, :] = data_arr.attrs['_FillValue'] + data_arr = _create_test_int8_dataset(name="polar-ct", shape=shape, area=test_area, values=3, dims=dims) + data_arr[..., -1, :] = data_arr.attrs["_FillValue"] else: - data_arr = _create_test_dataset(name='polar-ct', shape=shape, area=test_area, values=3.0, dims=dims) + data_arr = _create_test_dataset(name="polar-ct", shape=shape, area=test_area, values=3.0, dims=dims) data_arr[..., -1, :] = np.nan - data_arr.attrs['platform_name'] = 'NOAA-18' - data_arr.attrs['sensor'] = {'avhrr-3'} - data_arr.attrs['units'] = '1' - data_arr.attrs['long_name'] = 'SAFNWC PPS CT Cloud Type' - data_arr.attrs['start_time'] = datetime(2023, 1, 16, 11, 12, 57, 500000) - data_arr.attrs['end_time'] = datetime(2023, 1, 16, 11, 28, 1, 900000) + data_arr.attrs["platform_name"] = "NOAA-18" + data_arr.attrs["sensor"] = {"avhrr-3"} + data_arr.attrs["units"] = "1" + data_arr.attrs["long_name"] = "SAFNWC PPS CT Cloud Type" + data_arr.attrs["start_time"] = datetime(2023, 1, 16, 11, 12, 57, 500000) + data_arr.attrs["end_time"] = datetime(2023, 1, 16, 11, 28, 1, 900000) data_arr.attrs["_satpy_id"] = dsid1 return data_arr @@ -141,7 +141,7 @@ def scene1_with_weights(cloud_type_data_array1, test_area): scene = Scene() scene[cloud_type_data_array1.attrs["_satpy_id"]] = cloud_type_data_array1 - wgt1 = _create_test_dataset(name='geo-ct-wgt', area=test_area, values=0) + wgt1 = _create_test_dataset(name="geo-ct-wgt", area=test_area, values=0) wgt1[NUM_TEST_ROWS, :] = 2 wgt1[:, NUM_TEST_COLS] = 2 @@ -151,11 +151,11 @@ def scene1_with_weights(cloud_type_data_array1, 
test_area): resolution=3000, modifiers=() ) - scene[dsid2] = _create_test_int8_dataset(name='geo-cma', area=test_area, values=2) - scene[dsid2].attrs['start_time'] = datetime(2023, 1, 16, 11, 9, 17) - scene[dsid2].attrs['end_time'] = datetime(2023, 1, 16, 11, 12, 22) + scene[dsid2] = _create_test_int8_dataset(name="geo-cma", area=test_area, values=2) + scene[dsid2].attrs["start_time"] = datetime(2023, 1, 16, 11, 9, 17) + scene[dsid2].attrs["end_time"] = datetime(2023, 1, 16, 11, 12, 22) - wgt2 = _create_test_dataset(name='geo-cma-wgt', area=test_area, values=0) + wgt2 = _create_test_dataset(name="geo-cma-wgt", area=test_area, values=0) return scene, [wgt1, wgt2] @@ -168,18 +168,18 @@ def scene2_with_weights(cloud_type_data_array2, test_area): scene = Scene() scene[cloud_type_data_array2.attrs["_satpy_id"]] = cloud_type_data_array2 - wgt1 = _create_test_dataset(name='polar-ct-wgt', area=test_area, values=1) + wgt1 = _create_test_dataset(name="polar-ct-wgt", area=test_area, values=1) dsid2 = make_dataid( name="polar-cma", resolution=1000, modifiers=() ) - scene[dsid2] = _create_test_int8_dataset(name='polar-cma', area=test_area, values=4) - scene[dsid2].attrs['start_time'] = datetime(2023, 1, 16, 11, 12, 57, 500000) - scene[dsid2].attrs['end_time'] = datetime(2023, 1, 16, 11, 28, 1, 900000) + scene[dsid2] = _create_test_int8_dataset(name="polar-cma", area=test_area, values=4) + scene[dsid2].attrs["start_time"] = datetime(2023, 1, 16, 11, 12, 57, 500000) + scene[dsid2].attrs["end_time"] = datetime(2023, 1, 16, 11, 28, 1, 900000) - wgt2 = _create_test_dataset(name='polar-cma-wgt', area=test_area, values=1) + wgt2 = _create_test_dataset(name="polar-cma-wgt", area=test_area, values=1) return scene, [wgt1, wgt2] @@ -197,8 +197,8 @@ def multi_scene_and_weights(scene1_with_weights, scene2_with_weights): def groups(): """Get group definitions for the MultiScene.""" return { - DataQuery(name='CloudType'): ['geo-ct', 'polar-ct'], - DataQuery(name='CloudMask'): ['geo-cma', 'polar-cma'] + DataQuery(name="CloudType"): ["geo-ct", "polar-ct"], + DataQuery(name="CloudMask"): ["geo-cma", "polar-cma"] } @@ -216,15 +216,15 @@ def test_blend_two_scenes_using_stack(self, multi_scene_and_weights, groups, resampled = multi_scene stacked = resampled.blend(blend_function=stack) - result = stacked['CloudType'].compute() + result = stacked["CloudType"].compute() - expected = scene2['polar-ct'].copy() - expected[..., -1, :] = scene1['geo-ct'][..., -1, :] + expected = scene2["polar-ct"].copy() + expected[..., -1, :] = scene1["geo-ct"][..., -1, :] xr.testing.assert_equal(result, expected.compute()) _check_stacked_metadata(result, "CloudType") - assert result.attrs['start_time'] == datetime(2023, 1, 16, 11, 9, 17) - assert result.attrs['end_time'] == datetime(2023, 1, 16, 11, 28, 1, 900000) + assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 9, 17) + assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 28, 1, 900000) def test_blend_two_scenes_bad_blend_type(self, multi_scene_and_weights, groups): """Test exception is raised when bad 'blend_type' is used.""" @@ -232,7 +232,7 @@ def test_blend_two_scenes_bad_blend_type(self, multi_scene_and_weights, groups): multi_scene, weights = multi_scene_and_weights - simple_groups = {DataQuery(name='CloudType'): groups[DataQuery(name='CloudType')]} + simple_groups = {DataQuery(name="CloudType"): groups[DataQuery(name="CloudType")]} multi_scene.group(simple_groups) weights = [weights[0][0], weights[1][0]] @@ -263,7 +263,7 @@ def 
test_blend_two_scenes_using_stack_weighted(self, multi_scene_and_weights, gr scene1, weights1 = scene1_with_weights scene2, weights2 = scene2_with_weights - simple_groups = {DataQuery(name='CloudType'): groups[DataQuery(name='CloudType')]} + simple_groups = {DataQuery(name="CloudType"): groups[DataQuery(name="CloudType")]} multi_scene.group(simple_groups) weights = [weights[0][0], weights[1][0]] @@ -271,52 +271,52 @@ def test_blend_two_scenes_using_stack_weighted(self, multi_scene_and_weights, gr weighted_blend = multi_scene.blend(blend_function=stack_func) expected = exp_result_func(scene1, scene2) - result = weighted_blend['CloudType'].compute() + result = weighted_blend["CloudType"].compute() # result has NaNs and xarray's xr.testing.assert_equal doesn't support NaN comparison np.testing.assert_allclose(result.data, expected.data) _check_stacked_metadata(result, "CloudType") if combine_times: - assert result.attrs['start_time'] == datetime(2023, 1, 16, 11, 9, 17) - assert result.attrs['end_time'] == datetime(2023, 1, 16, 11, 28, 1, 900000) + assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 9, 17) + assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 28, 1, 900000) else: - assert result.attrs['start_time'] == datetime(2023, 1, 16, 11, 11, 7, 250000) - assert result.attrs['end_time'] == datetime(2023, 1, 16, 11, 20, 11, 950000) + assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 11, 7, 250000) + assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 20, 11, 950000) @pytest.fixture def datasets_and_weights(self): """X-Array datasets with area definition plus weights for input to tests.""" shape = (8, 12) - area = AreaDefinition('test', 'test', 'test', - {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, + area = AreaDefinition("test", "test", "test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, shape[1], shape[0], [-200, -200, 200, 200]) - ds1 = xr.DataArray(da.ones(shape, chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1, 0, 0, 0), 'area': area}) - ds2 = xr.DataArray(da.ones(shape, chunks=-1) * 2, dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1, 1, 0, 0), 'area': area}) - ds3 = xr.DataArray(da.ones(shape, chunks=-1) * 3, dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1, 1, 0, 0), 'area': area}) - - ds4 = xr.DataArray(da.zeros(shape, chunks=-1), dims=('y', 'time'), - attrs={'start_time': datetime(2018, 1, 1, 0, 0, 0), 'area': area}) - ds5 = xr.DataArray(da.zeros(shape, chunks=-1), dims=('y', 'time'), - attrs={'start_time': datetime(2018, 1, 1, 1, 0, 0), 'area': area}) - - wgt1 = xr.DataArray(da.ones(shape, chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1, 0, 0, 0), 'area': area}) - wgt2 = xr.DataArray(da.zeros(shape, chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1, 0, 0, 0), 'area': area}) - wgt3 = xr.DataArray(da.zeros(shape, chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1, 0, 0, 0), 'area': area}) - - datastruct = {'shape': shape, - 'area': area, - 'datasets': [ds1, ds2, ds3, ds4, ds5], - 'weights': [wgt1, wgt2, wgt3]} + ds1 = xr.DataArray(da.ones(shape, chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + ds2 = xr.DataArray(da.ones(shape, chunks=-1) * 2, dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1, 1, 0, 0), "area": area}) + ds3 = xr.DataArray(da.ones(shape, chunks=-1) * 3, dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1, 1, 0, 0), "area": area}) + + 
ds4 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "time"), + attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + ds5 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "time"), + attrs={"start_time": datetime(2018, 1, 1, 1, 0, 0), "area": area}) + + wgt1 = xr.DataArray(da.ones(shape, chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + wgt2 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + wgt3 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + + datastruct = {"shape": shape, + "area": area, + "datasets": [ds1, ds2, ds3, ds4, ds5], + "weights": [wgt1, wgt2, wgt3]} return datastruct - @pytest.mark.parametrize(('line', 'column',), + @pytest.mark.parametrize(("line", "column",), [(2, 3), (4, 5)] ) def test_blend_function_stack_weighted(self, datasets_and_weights, line, column): @@ -327,19 +327,19 @@ def test_blend_function_stack_weighted(self, datasets_and_weights, line, column) input_data = datasets_and_weights - input_data['weights'][1][line, :] = 2 - input_data['weights'][2][:, column] = 2 + input_data["weights"][1][line, :] = 2 + input_data["weights"][2][:, column] = 2 - stack_with_weights = partial(stack, weights=input_data['weights'], combine_times=False) - blend_result = stack_with_weights(input_data['datasets'][0:3]) + stack_with_weights = partial(stack, weights=input_data["weights"], combine_times=False) + blend_result = stack_with_weights(input_data["datasets"][0:3]) - ds1 = input_data['datasets'][0] - ds2 = input_data['datasets'][1] - ds3 = input_data['datasets'][2] + ds1 = input_data["datasets"][0] + ds2 = input_data["datasets"][1] + ds3 = input_data["datasets"][2] expected = ds1.copy() expected[:, column] = ds3[:, column] expected[line, :] = ds2[line, :] - expected.attrs = combine_metadata(*[x.attrs for x in input_data['datasets'][0:3]]) + expected.attrs = combine_metadata(*[x.attrs for x in input_data["datasets"][0:3]]) xr.testing.assert_equal(blend_result.compute(), expected.compute()) assert expected.attrs == blend_result.attrs @@ -348,8 +348,8 @@ def test_blend_function_stack(self, datasets_and_weights): """Test the 'stack' function.""" input_data = datasets_and_weights - ds1 = input_data['datasets'][0] - ds2 = input_data['datasets'][1] + ds1 = input_data["datasets"][0] + ds2 = input_data["datasets"][1] res = stack([ds1, ds2]) expected = ds2.copy() @@ -362,11 +362,11 @@ def test_timeseries(self, datasets_and_weights): """Test the 'timeseries' function.""" input_data = datasets_and_weights - ds1 = input_data['datasets'][0] - ds2 = input_data['datasets'][1] - ds4 = input_data['datasets'][2] - ds4 = input_data['datasets'][3] - ds5 = input_data['datasets'][4] + ds1 = input_data["datasets"][0] + ds2 = input_data["datasets"][1] + ds4 = input_data["datasets"][2] + ds4 = input_data["datasets"][3] + ds5 = input_data["datasets"][4] res = timeseries([ds1, ds2]) res2 = timeseries([ds4, ds5]) @@ -377,16 +377,16 @@ def test_timeseries(self, datasets_and_weights): def _check_stacked_metadata(data_arr: xr.DataArray, exp_name: str) -> None: - assert data_arr.attrs['units'] == '1' - assert data_arr.attrs['name'] == exp_name + assert data_arr.attrs["units"] == "1" + assert data_arr.attrs["name"] == exp_name if "_FillValue" in data_arr.attrs: - assert data_arr.attrs['_FillValue'] == 255 - assert data_arr.attrs['valid_range'] == [1, 15] + assert 
data_arr.attrs["_FillValue"] == 255 + assert data_arr.attrs["valid_range"] == [1, 15] expected_area = _create_test_area() - assert data_arr.attrs['area'] == expected_area + assert data_arr.attrs["area"] == expected_area # these metadata items don't match between all inputs - assert 'sensor' not in data_arr.attrs - assert 'platform_name' not in data_arr.attrs - assert 'long_name' not in data_arr.attrs + assert "sensor" not in data_arr.attrs + assert "platform_name" not in data_arr.attrs + assert "long_name" not in data_arr.attrs diff --git a/satpy/tests/multiscene_tests/test_misc.py b/satpy/tests/multiscene_tests/test_misc.py index 0cfedf226f..190045dad0 100644 --- a/satpy/tests/multiscene_tests/test_misc.py +++ b/satpy/tests/multiscene_tests/test_misc.py @@ -49,13 +49,13 @@ def test_properties(self): area = _create_test_area() scenes = _create_test_scenes(area=area) - ds1_id = make_dataid(name='ds1') - ds2_id = make_dataid(name='ds2') - ds3_id = make_dataid(name='ds3') - ds4_id = make_dataid(name='ds4') + ds1_id = make_dataid(name="ds1") + ds2_id = make_dataid(name="ds2") + ds3_id = make_dataid(name="ds3") + ds4_id = make_dataid(name="ds4") # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") mscn = MultiScene(scenes) self.assertSetEqual(mscn.loaded_dataset_ids, @@ -64,7 +64,7 @@ def test_properties(self): self.assertTrue(mscn.all_same_area) bigger_area = _create_test_area(shape=(20, 40)) - scenes[0]['ds4'] = _create_test_dataset('ds4', shape=(20, 40), + scenes[0]["ds4"] = _create_test_dataset("ds4", shape=(20, 40), area=bigger_area) self.assertSetEqual(mscn.loaded_dataset_ids, @@ -93,14 +93,14 @@ def test_from_files(self): "OR_GLM-L2-GLMC-M3_G16_s20171171506000_e20171171507000_c20380190314080.nc", "OR_GLM-L2-GLMC-M3_G16_s20171171507000_e20171171508000_c20380190314080.nc", ] - with mock.patch('satpy.multiscene._multiscene.Scene') as scn_mock: + with mock.patch("satpy.multiscene._multiscene.Scene") as scn_mock: mscn = MultiScene.from_files( input_files_abi, - reader='abi_l1b', + reader="abi_l1b", scene_kwargs={"reader_kwargs": {}}) assert len(mscn.scenes) == 6 calls = [mock.call( - filenames={'abi_l1b': [in_file_abi]}, + filenames={"abi_l1b": [in_file_abi]}, reader_kwargs={}) for in_file_abi in input_files_abi] scn_mock.assert_has_calls(calls) @@ -109,13 +109,13 @@ def test_from_files(self): with pytest.warns(DeprecationWarning): mscn = MultiScene.from_files( input_files_abi + input_files_glm, - reader=('abi_l1b', "glm_l2"), + reader=("abi_l1b", "glm_l2"), group_keys=["start_time"], ensure_all_readers=True, time_threshold=30) assert len(mscn.scenes) == 2 calls = [mock.call( - filenames={'abi_l1b': [in_file_abi], 'glm_l2': [in_file_glm]}) + filenames={"abi_l1b": [in_file_abi], "glm_l2": [in_file_glm]}) for (in_file_abi, in_file_glm) in zip(input_files_abi[0:2], [input_files_glm[2]] + [input_files_glm[7]])] @@ -123,7 +123,7 @@ def test_from_files(self): scn_mock.reset_mock() mscn = MultiScene.from_files( input_files_abi + input_files_glm, - reader=('abi_l1b', "glm_l2"), + reader=("abi_l1b", "glm_l2"), group_keys=["start_time"], ensure_all_readers=False, time_threshold=30) @@ -144,14 +144,14 @@ def scene1(self): wavelength=(1, 2, 3), polarization="H" ) - scene[dsid1] = _create_test_dataset(name='ds1') + scene[dsid1] = _create_test_dataset(name="ds1") dsid2 = make_dataid( name="ds2", resolution=456, wavelength=(4, 5, 6), polarization="V" ) - scene[dsid2] = _create_test_dataset(name='ds2') + scene[dsid2] = 
_create_test_dataset(name="ds2") return scene @pytest.fixture @@ -165,14 +165,14 @@ def scene2(self): wavelength=(1.1, 2.1, 3.1), polarization="H" ) - scene[dsid1] = _create_test_dataset(name='ds3') + scene[dsid1] = _create_test_dataset(name="ds3") dsid2 = make_dataid( name="ds4", resolution=456.1, wavelength=(4.1, 5.1, 6.1), polarization="V" ) - scene[dsid2] = _create_test_dataset(name='ds4') + scene[dsid2] = _create_test_dataset(name="ds4") return scene @pytest.fixture @@ -185,8 +185,8 @@ def multi_scene(self, scene1, scene2): def groups(self): """Get group definitions for the MultiScene.""" return { - DataQuery(name='odd'): ['ds1', 'ds3'], - DataQuery(name='even'): ['ds2', 'ds4'] + DataQuery(name="odd"): ["ds1", "ds3"], + DataQuery(name="even"): ["ds2", "ds4"] } def test_multi_scene_grouping(self, multi_scene, groups, scene1): @@ -194,12 +194,12 @@ def test_multi_scene_grouping(self, multi_scene, groups, scene1): multi_scene.group(groups) shared_ids_exp = {make_dataid(name="odd"), make_dataid(name="even")} assert multi_scene.shared_dataset_ids == shared_ids_exp - assert DataQuery(name='odd') not in scene1 + assert DataQuery(name="odd") not in scene1 xr.testing.assert_allclose(multi_scene.scenes[0]["ds1"], scene1["ds1"]) def test_fails_to_add_multiple_datasets_from_the_same_scene_to_a_group(self, multi_scene): """Test that multiple datasets from the same scene in one group fails.""" - groups = {DataQuery(name='mygroup'): ['ds1', 'ds2']} + groups = {DataQuery(name="mygroup"): ["ds1", "ds2"]} multi_scene.group(groups) with pytest.raises(ValueError): next(multi_scene.scenes) diff --git a/satpy/tests/multiscene_tests/test_save_animation.py b/satpy/tests/multiscene_tests/test_save_animation.py index 5e5b4a1d63..2ea41f18f4 100644 --- a/satpy/tests/multiscene_tests/test_save_animation.py +++ b/satpy/tests/multiscene_tests/test_save_animation.py @@ -50,7 +50,7 @@ def tearDown(self): except OSError: pass - @mock.patch('satpy.multiscene._multiscene.get_enhanced_image', _fake_get_enhanced_image) + @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_mp4_distributed(self): """Save a series of fake scenes to an mp4 video.""" from satpy import MultiScene @@ -58,61 +58,61 @@ def test_save_mp4_distributed(self): scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time - for ds_id in ['ds1', 'ds2', 'ds3']: - scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) - if ds_id == 'ds3': + for ds_id in ["ds1", "ds2", "ds3"]: + scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + if ds_id == "ds3": continue - scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = os.path.join( self.base_dir, - 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') + "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v.compute() for v in x) client_mock.gather.side_effect = lambda x: x - with 
mock.patch('satpy.multiscene._multiscene.imageio.get_writer') as get_writer: + with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer: get_writer.return_value = writer_mock # force order of datasets by specifying them - mscn.save_animation(fn, client=client_mock, datasets=['ds1', 'ds2', 'ds3']) + mscn.save_animation(fn, client=client_mock, datasets=["ds1", "ds2", "ds3"]) # 2 saves for the first scene + 1 black frame # 3 for the second scene self.assertEqual(writer_mock.append_data.call_count, 3 + 3) filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] - self.assertEqual(filenames[0], 'test_save_mp4_ds1_20180101_00_20180102_12.mp4') - self.assertEqual(filenames[1], 'test_save_mp4_ds2_20180101_00_20180102_12.mp4') - self.assertEqual(filenames[2], 'test_save_mp4_ds3_20180102_00_20180102_12.mp4') + self.assertEqual(filenames[0], "test_save_mp4_ds1_20180101_00_20180102_12.mp4") + self.assertEqual(filenames[1], "test_save_mp4_ds2_20180101_00_20180102_12.mp4") + self.assertEqual(filenames[2], "test_save_mp4_ds3_20180102_00_20180102_12.mp4") # Test no distributed client found mscn = MultiScene(scenes) fn = os.path.join( self.base_dir, - 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') + "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v.compute() for v in x) client_mock.gather.side_effect = lambda x: x - with mock.patch('satpy.multiscene._multiscene.imageio.get_writer') as get_writer, \ - mock.patch('satpy.multiscene._multiscene.get_client', mock.Mock(side_effect=ValueError("No client"))): + with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer, \ + mock.patch("satpy.multiscene._multiscene.get_client", mock.Mock(side_effect=ValueError("No client"))): get_writer.return_value = writer_mock # force order of datasets by specifying them - mscn.save_animation(fn, datasets=['ds1', 'ds2', 'ds3']) + mscn.save_animation(fn, datasets=["ds1", "ds2", "ds3"]) # 2 saves for the first scene + 1 black frame # 3 for the second scene self.assertEqual(writer_mock.append_data.call_count, 3 + 3) filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] - self.assertEqual(filenames[0], 'test_save_mp4_ds1_20180101_00_20180102_12.mp4') - self.assertEqual(filenames[1], 'test_save_mp4_ds2_20180101_00_20180102_12.mp4') - self.assertEqual(filenames[2], 'test_save_mp4_ds3_20180102_00_20180102_12.mp4') + self.assertEqual(filenames[0], "test_save_mp4_ds1_20180101_00_20180102_12.mp4") + self.assertEqual(filenames[1], "test_save_mp4_ds2_20180101_00_20180102_12.mp4") + self.assertEqual(filenames[2], "test_save_mp4_ds3_20180102_00_20180102_12.mp4") - @mock.patch('satpy.multiscene._multiscene.get_enhanced_image', _fake_get_enhanced_image) + @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_mp4_no_distributed(self): """Save a series of fake scenes to an mp4 video when distributed isn't available.""" from satpy import MultiScene @@ -120,39 +120,39 @@ def test_save_mp4_no_distributed(self): scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time - for ds_id in ['ds1', 'ds2', 'ds3']: - scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs['end_time'] 
= datetime(2018, 1, 2, 12) - if ds_id == 'ds3': + for ds_id in ["ds1", "ds2", "ds3"]: + scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + if ds_id == "ds3": continue - scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = os.path.join( self.base_dir, - 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') + "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v.compute() for v in x) client_mock.gather.side_effect = lambda x: x - with mock.patch('satpy.multiscene._multiscene.imageio.get_writer') as get_writer, \ - mock.patch('satpy.multiscene._multiscene.get_client', None): + with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer, \ + mock.patch("satpy.multiscene._multiscene.get_client", None): get_writer.return_value = writer_mock # force order of datasets by specifying them - mscn.save_animation(fn, datasets=['ds1', 'ds2', 'ds3']) + mscn.save_animation(fn, datasets=["ds1", "ds2", "ds3"]) # 2 saves for the first scene + 1 black frame # 3 for the second scene self.assertEqual(writer_mock.append_data.call_count, 3 + 3) filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] - self.assertEqual(filenames[0], 'test_save_mp4_ds1_20180101_00_20180102_12.mp4') - self.assertEqual(filenames[1], 'test_save_mp4_ds2_20180101_00_20180102_12.mp4') - self.assertEqual(filenames[2], 'test_save_mp4_ds3_20180102_00_20180102_12.mp4') + self.assertEqual(filenames[0], "test_save_mp4_ds1_20180101_00_20180102_12.mp4") + self.assertEqual(filenames[1], "test_save_mp4_ds2_20180101_00_20180102_12.mp4") + self.assertEqual(filenames[2], "test_save_mp4_ds3_20180102_00_20180102_12.mp4") - @mock.patch('satpy.multiscene._multiscene.get_enhanced_image', _fake_get_enhanced_image) + @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_datasets_simple(self): """Save a series of fake scenes to an PNG images.""" from satpy import MultiScene @@ -160,30 +160,30 @@ def test_save_datasets_simple(self): scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time - for ds_id in ['ds1', 'ds2', 'ds3']: - scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) - if ds_id == 'ds3': + for ds_id in ["ds1", "ds2", "ds3"]: + scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + if ds_id == "ds3": continue - scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v for v in x) client_mock.gather.side_effect = lambda x: x - with mock.patch('satpy.multiscene._multiscene.Scene.save_datasets') as save_datasets: + with 
mock.patch("satpy.multiscene._multiscene.Scene.save_datasets") as save_datasets: save_datasets.return_value = [True] # some arbitrary return value # force order of datasets by specifying them - mscn.save_datasets(base_dir=self.base_dir, client=False, datasets=['ds1', 'ds2', 'ds3'], - writer='simple_image') + mscn.save_datasets(base_dir=self.base_dir, client=False, datasets=["ds1", "ds2", "ds3"], + writer="simple_image") # 2 for each scene self.assertEqual(save_datasets.call_count, 2) - @mock.patch('satpy.multiscene._multiscene.get_enhanced_image', _fake_get_enhanced_image) + @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_datasets_distributed_delayed(self): """Test distributed save for writers returning delayed obejcts e.g. simple_image.""" from dask.delayed import Delayed @@ -193,15 +193,15 @@ def test_save_datasets_distributed_delayed(self): scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time - for ds_id in ['ds1', 'ds2', 'ds3']: - scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) - if ds_id == 'ds3': + for ds_id in ["ds1", "ds2", "ds3"]: + scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + if ds_id == "ds3": continue - scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() @@ -209,16 +209,16 @@ def test_save_datasets_distributed_delayed(self): client_mock.gather.side_effect = lambda x: x future_mock = mock.MagicMock() future_mock.__class__ = Delayed - with mock.patch('satpy.multiscene._multiscene.Scene.save_datasets') as save_datasets: + with mock.patch("satpy.multiscene._multiscene.Scene.save_datasets") as save_datasets: save_datasets.return_value = [future_mock] # some arbitrary return value # force order of datasets by specifying them - mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=['ds1', 'ds2', 'ds3'], - writer='simple_image') + mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=["ds1", "ds2", "ds3"], + writer="simple_image") # 2 for each scene self.assertEqual(save_datasets.call_count, 2) - @mock.patch('satpy.multiscene._multiscene.get_enhanced_image', _fake_get_enhanced_image) + @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_datasets_distributed_source_target(self): """Test distributed save for writers returning sources and targets e.g. 
geotiff writer.""" import dask.array as da @@ -228,15 +228,15 @@ def test_save_datasets_distributed_source_target(self): scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time - for ds_id in ['ds1', 'ds2', 'ds3']: - scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) - if ds_id == 'ds3': + for ds_id in ["ds1", "ds2", "ds3"]: + scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + if ds_id == "ds3": continue - scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() @@ -245,12 +245,12 @@ def test_save_datasets_distributed_source_target(self): source_mock = mock.MagicMock() source_mock.__class__ = da.Array target_mock = mock.MagicMock() - with mock.patch('satpy.multiscene._multiscene.Scene.save_datasets') as save_datasets: + with mock.patch("satpy.multiscene._multiscene.Scene.save_datasets") as save_datasets: save_datasets.return_value = [(source_mock, target_mock)] # some arbitrary return value # force order of datasets by specifying them with self.assertRaises(NotImplementedError): - mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=['ds1', 'ds2', 'ds3'], - writer='geotiff') + mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=["ds1", "ds2", "ds3"], + writer="geotiff") def test_crop(self): """Test the crop method.""" @@ -262,44 +262,44 @@ def test_crop(self): scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) - proj_dict = {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, - 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} + proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, + "lon_0": 0.0, "proj": "geos", "units": "m"} x_size = 3712 y_size = 3712 area_def = AreaDefinition( - 'test', 'test', 'test', + "test", "test", "test", proj_dict, x_size, y_size, area_extent, ) area_def2 = AreaDefinition( - 'test2', 'test2', 'test2', proj_dict, + "test2", "test2", "test2", proj_dict, x_size // 2, y_size // 2, area_extent, ) scene1["1"] = DataArray(np.zeros((y_size, x_size))) - scene1["2"] = DataArray(np.zeros((y_size, x_size)), dims=('y', 'x')) - scene1["3"] = DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'), - attrs={'area': area_def}) - scene1["4"] = DataArray(np.zeros((y_size // 2, x_size // 2)), dims=('y', 'x'), - attrs={'area': area_def2}) + scene1["2"] = DataArray(np.zeros((y_size, x_size)), dims=("y", "x")) + scene1["3"] = DataArray(np.zeros((y_size, x_size)), dims=("y", "x"), + attrs={"area": area_def}) + scene1["4"] = DataArray(np.zeros((y_size // 2, x_size // 2)), dims=("y", "x"), + attrs={"area": area_def2}) mscn = MultiScene([scene1]) # by lon/lat bbox new_mscn = mscn.crop(ll_bbox=(-20., -5., 0, 0)) new_scn1 = list(new_mscn.scenes)[0] - self.assertIn('1', new_scn1) - self.assertIn('2', new_scn1) - self.assertIn('3', new_scn1) - self.assertTupleEqual(new_scn1['1'].shape, (y_size, x_size)) - self.assertTupleEqual(new_scn1['2'].shape, (y_size, x_size)) - self.assertTupleEqual(new_scn1['3'].shape, (184, 714)) - 
self.assertTupleEqual(new_scn1['4'].shape, (92, 357)) + self.assertIn("1", new_scn1) + self.assertIn("2", new_scn1) + self.assertIn("3", new_scn1) + self.assertTupleEqual(new_scn1["1"].shape, (y_size, x_size)) + self.assertTupleEqual(new_scn1["2"].shape, (y_size, x_size)) + self.assertTupleEqual(new_scn1["3"].shape, (184, 714)) + self.assertTupleEqual(new_scn1["4"].shape, (92, 357)) -@mock.patch('satpy.multiscene._multiscene.get_enhanced_image') +@mock.patch("satpy.multiscene._multiscene.get_enhanced_image") def test_save_mp4(smg, tmp_path): """Save a series of fake scenes to an mp4 video.""" from satpy import MultiScene @@ -308,38 +308,38 @@ def test_save_mp4(smg, tmp_path): smg.side_effect = _fake_get_enhanced_image # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time - for ds_id in ['ds1', 'ds2', 'ds3']: - scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) - if ds_id == 'ds3': + for ds_id in ["ds1", "ds2", "ds3"]: + scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + if ds_id == "ds3": continue - scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = str(tmp_path / - 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') + "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() - with mock.patch('satpy.multiscene._multiscene.imageio.get_writer') as get_writer: + with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer: get_writer.return_value = writer_mock # force order of datasets by specifying them - mscn.save_animation(fn, datasets=['ds1', 'ds2', 'ds3'], client=False) + mscn.save_animation(fn, datasets=["ds1", "ds2", "ds3"], client=False) # 2 saves for the first scene + 1 black frame # 3 for the second scene assert writer_mock.append_data.call_count == 3 + 3 filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] - assert filenames[0] == 'test_save_mp4_ds1_20180101_00_20180102_12.mp4' - assert filenames[1] == 'test_save_mp4_ds2_20180101_00_20180102_12.mp4' - assert filenames[2] == 'test_save_mp4_ds3_20180102_00_20180102_12.mp4' + assert filenames[0] == "test_save_mp4_ds1_20180101_00_20180102_12.mp4" + assert filenames[1] == "test_save_mp4_ds2_20180101_00_20180102_12.mp4" + assert filenames[2] == "test_save_mp4_ds3_20180102_00_20180102_12.mp4" # make sure that not specifying datasets still saves all of them fn = str(tmp_path / - 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') + "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() - with mock.patch('satpy.multiscene._multiscene.imageio.get_writer') as get_writer: + with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer: get_writer.return_value = writer_mock # force order of datasets by specifying them mscn.save_animation(fn, client=False) @@ -353,9 +353,9 @@ def test_save_mp4(smg, tmp_path): # test decorating and enhancing fn = str(tmp_path / - 'test-{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}-rich.mp4') + 
"test-{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}-rich.mp4") writer_mock = mock.MagicMock() - with mock.patch('satpy.multiscene._multiscene.imageio.get_writer') as get_writer: + with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer: get_writer.return_value = writer_mock mscn.save_animation( fn, client=False, diff --git a/satpy/tests/multiscene_tests/test_utils.py b/satpy/tests/multiscene_tests/test_utils.py index 409eb9cf86..310d68c215 100644 --- a/satpy/tests/multiscene_tests/test_utils.py +++ b/satpy/tests/multiscene_tests/test_utils.py @@ -30,27 +30,27 @@ DEFAULT_SHAPE = (5, 10) -local_id_keys_config = {'name': { - 'required': True, +local_id_keys_config = {"name": { + "required": True, }, - 'wavelength': { - 'type': WavelengthRange, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'counts' + "resolution": None, + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "counts" ] }, - 'polarization': None, - 'level': None, - 'modifiers': { - 'required': True, - 'default': ModifierTuple(), - 'type': ModifierTuple, + "polarization": None, + "level": None, + "modifiers": { + "required": True, + "default": ModifierTuple(), + "type": ModifierTuple, }, } @@ -63,14 +63,14 @@ def _fake_get_enhanced_image(img, enhance=None, overlay=None, decorate=None): def _create_test_area(proj_str=None, shape=DEFAULT_SHAPE, extents=None): """Create a test area definition.""" if proj_str is None: - proj_str = '+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. ' \ - '+lat_0=25 +lat_1=25 +units=m +no_defs' + proj_str = "+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. " \ + "+lat_0=25 +lat_1=25 +units=m +no_defs" extents = extents or (-1000., -1500., 1000., 1500.) 
return AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_str, shape[1], shape[0], @@ -82,9 +82,9 @@ def _create_test_int8_dataset(name, shape=DEFAULT_SHAPE, area=None, values=None, """Create a test DataArray object.""" return xr.DataArray( da.ones(shape, dtype=np.uint8, chunks=shape) * values, dims=dims, - attrs={'_FillValue': 255, - 'valid_range': [1, 15], - 'name': name, 'area': area, '_satpy_id_keys': local_id_keys_config}) + attrs={"_FillValue": 255, + "valid_range": [1, 15], + "name": name, "area": area, "_satpy_id_keys": local_id_keys_config}) def _create_test_dataset(name, shape=DEFAULT_SHAPE, area=None, values=None, dims=("y", "x")): @@ -92,22 +92,22 @@ def _create_test_dataset(name, shape=DEFAULT_SHAPE, area=None, values=None, dims if values: return xr.DataArray( da.ones(shape, dtype=np.float32, chunks=shape) * values, dims=dims, - attrs={'name': name, 'area': area, '_satpy_id_keys': local_id_keys_config}) + attrs={"name": name, "area": area, "_satpy_id_keys": local_id_keys_config}) return xr.DataArray( da.zeros(shape, dtype=np.float32, chunks=shape), dims=dims, - attrs={'name': name, 'area': area, '_satpy_id_keys': local_id_keys_config}) + attrs={"name": name, "area": area, "_satpy_id_keys": local_id_keys_config}) def _create_test_scenes(num_scenes=2, shape=DEFAULT_SHAPE, area=None): """Create some test scenes for various test cases.""" from satpy import Scene - ds1 = _create_test_dataset('ds1', shape=shape, area=area) - ds2 = _create_test_dataset('ds2', shape=shape, area=area) + ds1 = _create_test_dataset("ds1", shape=shape, area=area) + ds2 = _create_test_dataset("ds2", shape=shape, area=area) scenes = [] for _ in range(num_scenes): scn = Scene() - scn['ds1'] = ds1.copy() - scn['ds2'] = ds2.copy() + scn["ds1"] = ds1.copy() + scn["ds2"] = ds2.copy() scenes.append(scn) return scenes diff --git a/satpy/tests/reader_tests/_li_test_utils.py b/satpy/tests/reader_tests/_li_test_utils.py index 837e653cc3..d6a32253f5 100644 --- a/satpy/tests/reader_tests/_li_test_utils.py +++ b/satpy/tests/reader_tests/_li_test_utils.py @@ -23,16 +23,16 @@ # mapping of netcdf type code to numpy data type: TYPE_MAP = { - 'i1': np.int8, - 'i2': np.int16, - 'i4': np.int32, - 'i8': np.int64, - 'u1': np.uint8, - 'u2': np.uint16, - 'u4': np.uint32, - 'u8': np.uint64, - 'f4': np.float32, - 'f8': np.float64, + "i1": np.int8, + "i2": np.int16, + "i4": np.int32, + "i8": np.int64, + "u1": np.uint8, + "u2": np.uint16, + "u4": np.uint32, + "u8": np.uint64, + "f4": np.float32, + "f8": np.float64, } @@ -47,55 +47,55 @@ def rand_u16(num): return np.random.randint(low=0, high=np.iinfo(np.uint16).max - 1, size=num, dtype=np.uint16) return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', 'data/'), - 'dimensions': { - 'unfiltered_events': nobs, - 'l1b_chunks': nchunks, - 'l1b_offsets': nchunks, - 'filters': nfilters, - 'scalar': 1, + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", "data/"), + "dimensions": { + "unfiltered_events": nobs, + "l1b_chunks": nchunks, + "l1b_offsets": nchunks, + "filters": nfilters, + "scalar": 1, }, - 'variables': {}, - 'sector_variables': { + "variables": {}, + "sector_variables": { "event_id": { "format": "u2", - "shape": ('unfiltered_events',), + "shape": ("unfiltered_events",), "fill_value": 65535, "long_name": "ID of LI L2 Event", "default_data": lambda: rand_u16(nobs) }, "group_id": { "format": "u2", - "shape": ('unfiltered_events',), + "shape": 
("unfiltered_events",), "fill_value": 65535, "long_name": "ID of associated LI L2 Group object", "default_data": lambda: rand_u16(nobs) }, "l1b_chunk_ids": { "format": "u4", - "shape": ('l1b_chunks',), + "shape": ("l1b_chunks",), "fill_value": 4294967295, "long_name": "Array of L1b event chunk IDs", "default_data": lambda: np.arange(nchunks) + 10000 }, "l1b_chunk_offsets": { "format": "u4", - "shape": ('l1b_offsets',), + "shape": ("l1b_offsets",), "fill_value": 4294967295, "long_name": "Array offset for L1b event chunk boundaries", "default_data": lambda: np.arange(nchunks) }, "l1b_window": { "format": "u4", - "shape": ('unfiltered_events',), + "shape": ("unfiltered_events",), "fill_value": 4294967295, "long_name": "window index of associated L1b event", "default_data": lambda: (np.arange(nobs) + 10000) }, "filter_values": { "format": "u1", - "shape": ('unfiltered_events', 'filters',), + "shape": ("unfiltered_events", "filters",), "fill_value": 255, "scale_factor": 0.004, "add_offset": 0.0, @@ -104,22 +104,22 @@ def rand_u16(num): }, "epoch_time": { "format": "f8", - "shape": ('scalar',), + "shape": ("scalar",), "fill_value": 9.96920996886869e36, "long_name": "Start time of integration frame", "default_data": lambda: 1.234, - 'precision': '1 millisecond', - 'time_standard': 'UTC', - 'standard_name': 'time', - 'units': 'seconds since 2000-01-01 00:00:00.0', + "precision": "1 millisecond", + "time_standard": "UTC", + "standard_name": "time", + "units": "seconds since 2000-01-01 00:00:00.0", }, "time_offset": { "format": "f4", - "shape": ('unfiltered_events',), + "shape": ("unfiltered_events",), "fill_value": 9.96921e36, "long_name": "Time offset from epoch time", "default_data": lambda: np.linspace(0.0, 1000.0, nobs), - 'units': 'seconds', + "units": "seconds", }, } } @@ -136,13 +136,13 @@ def l2_lef_schema(settings=None): nobs = settings.get("num_obs", 123) return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', 'data/'), - 'dimensions': { - 'events': nobs, - 'scalar': 1, + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", "data/"), + "dimensions": { + "events": nobs, + "scalar": 1, }, - 'variables': { + "variables": { "l1b_geolocation_warning": { "format": "i1", "shape": (), # test explicitly the scalar case @@ -150,47 +150,47 @@ def l2_lef_schema(settings=None): }, "l1b_missing_warning": { "format": "i1", - "shape": ('scalar',), + "shape": ("scalar",), "default_data": lambda: 0 }, "l1b_radiometric_warning": { "format": "i1", - "shape": ('scalar',), + "shape": ("scalar",), "default_data": lambda: 0 }, }, - 'sector_variables': { + "sector_variables": { "event_id": { "format": "u4", - "shape": ('events',), + "shape": ("events",), "fill_value": 65535, "long_name": "ID of LI L2 Event", "default_data": lambda: np.arange(1, nobs + 1) }, "group_id": { "format": "u4", - "shape": ('events',), + "shape": ("events",), "fill_value": 65535, "long_name": "ID of associated LI L2 Group object", "default_data": lambda: np.arange(1, nobs + 1) }, "flash_id": { "format": "u4", - "shape": ('events',), + "shape": ("events",), "fill_value": 65535, "long_name": "ID of associated LI L2 Flash object", "default_data": lambda: np.arange(1, nobs + 1) }, "detector": { "format": "u4", - "shape": ('scalar',), + "shape": ("scalar",), "fill_value": 65535, "long_name": "ID of detector for this group", "default_data": lambda: 1 }, "latitude": { "format": "i2", - "shape": ('events',), + "shape": ("events",), "fill_value": -32767, 
"long_name": "Latitude of group", "units": "degrees_north", @@ -199,7 +199,7 @@ def l2_lef_schema(settings=None): }, "longitude": { "format": "i2", - "shape": ('events',), + "shape": ("events",), "fill_value": -32767, "long_name": "Longitude of group", "units": "degrees_east", @@ -208,7 +208,7 @@ def l2_lef_schema(settings=None): }, "radiance": { "format": "u2", - "shape": ('events',), + "shape": ("events",), "long_name": "Radiance of Flash", "standard_name": "radiance", "units": "mW.m-2.sr-1", @@ -216,34 +216,34 @@ def l2_lef_schema(settings=None): }, "event_filter_qa": { "format": "u1", - "shape": ('events',), + "shape": ("events",), "long_name": "L2 event pre-filtering quality assurance value", "default_data": lambda: np.random.randint(1, 2 ** 8 - 1, nobs) }, "epoch_time": { "format": "f8", - "shape": ('scalar',), + "shape": ("scalar",), "long_name": "Start time of integration frame", "units": "seconds since 2000-01-01 00:00:00.0", "default_data": lambda: start_ts }, "time_offset": { "format": "f4", - "shape": ('events',), + "shape": ("events",), "long_name": "Time offset from epoch time", "units": "seconds", "default_data": lambda: np.random.uniform(1, 2 ** 31 - 1, nobs) }, "detector_row": { "format": "u2", - "shape": ('events',), + "shape": ("events",), "long_name": "Detector row position of event pixel", "units": "1", "default_data": lambda: np.random.randint(1, 1000, nobs) }, "detector_column": { "format": "u2", - "shape": ('events',), + "shape": ("events",), "long_name": "Detector column position of event pixel", "units": "1", "default_data": lambda: np.random.randint(1, 1000, nobs) @@ -258,22 +258,22 @@ def l2_lgr_schema(settings=None): ngrps = settings.get("num_groups", 120) return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', ''), - 'dimensions': { - 'groups': ngrps, + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", ""), + "dimensions": { + "groups": ngrps, }, - 'variables': { + "variables": { "latitude": { "format": "f4", - "shape": ('groups',), + "shape": ("groups",), "long_name": "Latitude of group", "units": "degrees_north", "default_data": lambda: np.linspace(-90, 90, ngrps) }, "longitude": { "format": "f4", - "shape": ('groups',), + "shape": ("groups",), "long_name": "Longitude of group", "units": "degrees_east", "default_data": lambda: np.linspace(-180, 80, ngrps) @@ -292,15 +292,15 @@ def l2_lfl_schema(settings=None): etime = (datetime(2019, 1, 2) - epoch).total_seconds() return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', ''), - 'dimensions': { - 'flashes': nobs, + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", ""), + "dimensions": { + "flashes": nobs, }, - 'variables': { + "variables": { "latitude": { "format": "i2", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Latitude of Flash", "standard_name": "latitude", "units": "degrees_north", @@ -312,7 +312,7 @@ def l2_lfl_schema(settings=None): }, "longitude": { "format": "i2", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Longitude of Flash", "standard_name": "longitude", "units": "degrees_east", @@ -324,7 +324,7 @@ def l2_lfl_schema(settings=None): }, "radiance": { "format": "u2", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Radiance of Flash", "standard_name": "radiance", "units": "mW.m-2.sr-1", @@ -332,7 +332,7 @@ def l2_lfl_schema(settings=None): }, "flash_duration": { 
"format": "u2", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Flash duration", "standard_name": "flash_duration", "units": "ms", @@ -340,56 +340,56 @@ def l2_lfl_schema(settings=None): }, "flash_filter_confidence": { "format": "i1", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "L2 filtered flash confidence", "standard_name": "flash_filter_confidence", "default_data": lambda: np.clip(np.round(np.random.normal(20, 10, nobs)), 1, 2 ** 7 - 1) }, "flash_footprint": { "format": "u2", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Flash footprint size", - "standard_name": 'flash_footprint', + "standard_name": "flash_footprint", "units": "L1 grid pixels", "default_data": lambda: np.maximum(1, np.round(np.random.normal(5, 3, nobs))) }, "flash_id": { "format": "u4", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Flash footprint size", - "standard_name": 'flash_id', + "standard_name": "flash_id", "default_data": lambda: np.arange(1, nobs + 1) }, "flash_time": { "format": "f8", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Nominal flash time", "units": "seconds since 2000-01-01 00:00:00.0", - "standard_name": 'time', + "standard_name": "time", "precision": "1 millisecond", "default_data": lambda: np.random.uniform(stime, etime, nobs) }, "l1b_geolocation_warning": { "format": "u8", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "L1b geolocation warning", "default_data": lambda: -127 }, "l1b_radiometric_warning": { "format": "u8", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "L1b radiometric warning", "default_data": lambda: -127 }, "number_of_events": { "format": "u2", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Number of events in each flash", "default_data": lambda: 1 }, "number_of_groups": { "format": "u4", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Number of flashes in each flash", "default_data": lambda: 1 }, @@ -403,45 +403,45 @@ def l2_af_schema(settings=None): nobs = settings.get("num_obs", 1234) return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', ''), - 'dimensions': accumulation_dimensions(1, nobs), - 'variables': { + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", ""), + "dimensions": accumulation_dimensions(1, nobs), + "variables": { "accumulation_offsets": { "format": "u4", - "shape": ('accumulations',), + "shape": ("accumulations",), "default_data": lambda: 0 }, "accumulation_start_times": { "format": "f8", - "shape": ('accumulations',), + "shape": ("accumulations",), "default_data": lambda: 4.25055600161e8 }, "l1b_geolocation_warning": { "format": "i1", - "shape": ('accumulations',), + "shape": ("accumulations",), "long_name": "L1b geolocation warning", "default_data": lambda: -127 }, "l1b_radiometric_warning": { "format": "i1", - "shape": ('accumulations',), + "shape": ("accumulations",), "long_name": "L1b radiometric warning", "default_data": lambda: -127 }, "average_flash_qa": { "format": "i1", - "shape": ('accumulations',), + "shape": ("accumulations",), "default_data": lambda: 23 }, "flash_accumulation": { "format": "u2", - "shape": ('pixels',), + "shape": ("pixels",), "default_data": lambda: np.clip(np.round(np.random.normal(1, 2, nobs)), 1, 2 ** 16 - 1) }, "mtg_geos_projection": mtg_geos_projection(), - "x": fci_grid_definition('X', nobs), - "y": fci_grid_definition('Y', nobs), + "x": fci_grid_definition("X", nobs), + "y": 
fci_grid_definition("Y", nobs), } } @@ -453,27 +453,27 @@ def l2_afa_schema(settings=None): nacc = settings.get("num_accumulations", 20) return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', ''), - 'dimensions': accumulation_dimensions(nacc, npix), - 'variables': { + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", ""), + "dimensions": accumulation_dimensions(nacc, npix), + "variables": { "accumulation_start_times": { "format": "f4", - "shape": ('accumulations',), + "shape": ("accumulations",), "long_name": "Accumulation start time", "units": "seconds since 2000-01-01 00:00:00.0", "default_data": lambda: np.linspace(0.0, 1.0, nacc) }, "accumulated_flash_area": { "format": "u4", - "shape": ('pixels',), + "shape": ("pixels",), "fill_value": 4294967295, "long_name": "Number of contributing unique flashes to each pixel", "default_data": lambda: np.mod(np.arange(npix), 10) + 1 }, "mtg_geos_projection": mtg_geos_projection(), - "x": fci_grid_definition('X', npix), - "y": fci_grid_definition('Y', npix), + "x": fci_grid_definition("X", npix), + "y": fci_grid_definition("Y", npix), } } @@ -485,13 +485,13 @@ def l2_afr_schema(settings=None): nacc = settings.get("num_accumulations", 20) return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', ''), - 'dimensions': accumulation_dimensions(nacc, nobs), - 'variables': { + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", ""), + "dimensions": accumulation_dimensions(nacc, nobs), + "variables": { "flash_radiance": { "format": "f4", - "shape": ('pixels',), + "shape": ("pixels",), "long_name": "Area averaged flash radiance accumulation", "grid_mapping": "mtg_geos_projection", "coordinate": "sparse: x y", @@ -499,14 +499,14 @@ def l2_afr_schema(settings=None): }, "accumulation_start_times": { "format": "f4", - "shape": ('accumulations',), + "shape": ("accumulations",), "long_name": "Accumulation start time", "units": "seconds since 2000-01-01 00:00:00.0", "default_data": lambda: 0 }, "mtg_geos_projection": mtg_geos_projection(), - "x": fci_grid_definition('X', nobs), - "y": fci_grid_definition('Y', nobs), + "x": fci_grid_definition("X", nobs), + "y": fci_grid_definition("Y", nobs), } } @@ -514,29 +514,29 @@ def l2_afr_schema(settings=None): def accumulation_dimensions(nacc, nobs): """Set dimensions for the accumulated products.""" return { - 'accumulations': nacc, - 'pixels': nobs, + "accumulations": nacc, + "pixels": nobs, } def fci_grid_definition(axis, nobs): """FCI grid definition on X or Y axis.""" - if axis == 'X': - long_name = 'azimuth angle encoded as column' - standard_name = 'projection_x_coordinate' + if axis == "X": + long_name = "azimuth angle encoded as column" + standard_name = "projection_x_coordinate" else: - long_name = 'zenith angle encoded as row' - standard_name = 'projection_y_coordinate' + long_name = "zenith angle encoded as row" + standard_name = "projection_y_coordinate" return { "format": "i2", - "shape": ('pixels',), + "shape": ("pixels",), "add_offset": -0.155619516, "axis": axis, "long_name": long_name, "scale_factor": 5.58878e-5, "standard_name": standard_name, - "units": 'radian', + "units": "radian", "valid_range": np.asarray([1, 5568]), "default_data": lambda: np.clip(np.round(np.random.normal(2000, 500, nobs)), 1, 2 ** 16 - 1) } @@ -546,49 +546,49 @@ def mtg_geos_projection(): """MTG geos projection definition.""" return { "format": "i4", - 
"shape": ('accumulations',), - "grid_mapping_name": 'geostationary', + "shape": ("accumulations",), + "grid_mapping_name": "geostationary", "inverse_flattening": 298.2572221, "latitude_of_projection_origin": 0, "longitude_of_projection_origin": 0, "perspective_point_height": 42164000, "semi_major_axis": 6378169, "semi_minor_axis": 6356583.8, - "sweep_angle_axis": 'y', - "long_name": 'MTG geostationary projection', + "sweep_angle_axis": "y", + "long_name": "MTG geostationary projection", "default_data": lambda: -2147483647 } products_dict = { - '2-LE': {'ftype': 'li_l2_le_nc', 'schema': l2_le_schema}, - '2-LEF': {'ftype': 'li_l2_lef_nc', 'schema': l2_lef_schema}, - '2-LGR': {'ftype': 'li_l2_lgr_nc', 'schema': l2_lgr_schema}, - '2-LFL': {'ftype': 'li_l2_lfl_nc', 'schema': l2_lfl_schema}, - '2-AF': {'ftype': 'li_l2_af_nc', 'schema': l2_af_schema}, - '2-AFA': {'ftype': 'li_l2_afa_nc', 'schema': l2_afa_schema}, - '2-AFR': {'ftype': 'li_l2_afr_nc', 'schema': l2_afr_schema}, + "2-LE": {"ftype": "li_l2_le_nc", "schema": l2_le_schema}, + "2-LEF": {"ftype": "li_l2_lef_nc", "schema": l2_lef_schema}, + "2-LGR": {"ftype": "li_l2_lgr_nc", "schema": l2_lgr_schema}, + "2-LFL": {"ftype": "li_l2_lfl_nc", "schema": l2_lfl_schema}, + "2-AF": {"ftype": "li_l2_af_nc", "schema": l2_af_schema}, + "2-AFA": {"ftype": "li_l2_afa_nc", "schema": l2_afa_schema}, + "2-AFR": {"ftype": "li_l2_afr_nc", "schema": l2_afr_schema}, } def get_product_schema(pname, settings=None): """Retrieve an LI product schema given its name.""" - return products_dict[pname]['schema'](settings) + return products_dict[pname]["schema"](settings) def extract_filetype_info(filetype_infos, filetype): """Extract Satpy-conform filetype_info from filetype_infos fixture.""" ftype_info = filetype_infos[filetype] - ftype_info['file_type'] = filetype + ftype_info["file_type"] = filetype return ftype_info def set_variable_path(var_path, desc, sname): """Replace variable default path if applicable and ensure trailing separator.""" - vpath = desc.get('path', var_path) + vpath = desc.get("path", var_path) # Ensure we have a trailing separator: - if vpath != "" and vpath[-1] != '/': - vpath += '/' + if vpath != "" and vpath[-1] != "/": + vpath += "/" if sname != "": vpath += sname + "/" return vpath @@ -606,9 +606,9 @@ def populate_dummy_data(data, names, details): # Otherwise we write the default data: if data.shape == (): # scalar case - data = desc['default_data']() + data = desc["default_data"]() else: - data[:] = desc['default_data']() + data[:] = desc["default_data"]() def add_attributes(attribs, ignored_attrs, desc): @@ -634,22 +634,22 @@ def get_variable_writer(self, dset, settings): var_path = settings.get("variable_path", "") # Also keep track of the potential providers: - providers = settings.get('providers', {}) + providers = settings.get("providers", {}) # list of ignored attribute names: ignored_attrs = ["path", "format", "shape", "default_data", "fill_value"] # dictionary of dimensions: - dims = settings.get('dimensions', {}) + dims = settings.get("dimensions", {}) def write_variable(vname, desc, sname=""): """Write a variable in our dataset.""" # get numeric shape: - shape_str = desc['shape'] + shape_str = desc["shape"] shape = tuple([dims[dname] for dname in shape_str]) # Get the desired data type: - dtype = TYPE_MAP[desc['format']] + dtype = TYPE_MAP[desc["format"]] # Prepare a numpy array with the appropriate shape and type: data = np.zeros(shape, dtype=dtype) @@ -665,8 +665,8 @@ def write_variable(vname, desc, sname=""): 
add_attributes(attribs, ignored_attrs, desc) # Rename the fill value attribute: - if 'fill_value' in desc: - attribs['_FillValue'] = desc['fill_value'] + if "fill_value" in desc: + attribs["_FillValue"] = desc["fill_value"] names = [vname, sname] details = [desc, providers, settings] @@ -692,7 +692,7 @@ def get_test_content(self, filename, filename_info, filetype_info): # Note: params *IS* callable below: params = params(filename, filename_info, filetype_info) # pylint: disable=not-callable - settings = get_product_schema(filetype_info['file_desc']['product_type'], params) + settings = get_product_schema(filetype_info["file_desc"]["product_type"], params) # Resulting dataset: dset = {} @@ -713,16 +713,16 @@ def get_test_content(self, filename, filename_info, filetype_info): def write_variables(self, settings, write_variable): """Write raw (i.e. not in sectors) variables.""" - if 'variables' in settings: - variables = settings.get('variables') + if "variables" in settings: + variables = settings.get("variables") for vname, desc in variables.items(): write_variable(vname, desc) def write_sector_variables(self, settings, write_variable): """Write the sector variables.""" - if 'sector_variables' in settings: - sector_vars = settings.get('sector_variables') - sectors = settings.get('sectors', ['north', 'east', 'south', 'west']) + if "sector_variables" in settings: + sector_vars = settings.get("sector_variables") + sectors = settings.get("sectors", ["north", "east", "south", "west"]) for sname in sectors: for vname, desc in sector_vars.items(): diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py index f4908c0a2b..486eba370b 100644 --- a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py @@ -227,7 +227,7 @@ def coord_conv(self): cpix["IR1"] = 0.5 # instead of 1672.5 cpix["VIS"] = 0.5 # instead of 6688.5 - conv['scheduled_observation_time'] = 50130.979089568464 + conv["scheduled_observation_time"] = 50130.979089568464 nsensors = conv["number_of_sensor_elements"] nsensors["IR1"] = 1 diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py index 144139a07a..5b3c6117d4 100644 --- a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py @@ -19,9 +19,9 @@ IR_NAVIGATION_REFERENCE = [ { "pixel": nav.Pixel(line=686, pixel=1680), - 'lon': 139.990380, - 'lat': 35.047056, - 'nav_params': nav.PixelNavigationParameters( + "lon": 139.990380, + "lat": 35.047056, + "nav_params": nav.PixelNavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.997397917902958, angle_between_sat_spin_and_z_axis=3.149118633034304, @@ -67,9 +67,9 @@ }, { "pixel": nav.Pixel(line=2089, pixel=1793), - 'lon': 144.996967, - 'lat': -34.959853, - 'nav_params': nav.PixelNavigationParameters( + "lon": 144.996967, + "lat": -34.959853, + "nav_params": nav.PixelNavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.935707944355762, angle_between_sat_spin_and_z_axis=3.149118633034304, @@ -119,9 +119,9 @@ VIS_NAVIGATION_REFERENCE = [ { "pixel": nav.Pixel(line=2744, pixel=6720), - 'lon': 139.975527, - 'lat': 35.078028, - 'nav_params': nav.PixelNavigationParameters( + "lon": 139.975527, + "lat": 35.078028, + "nav_params": nav.PixelNavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.997397918405798, 
angle_between_sat_spin_and_z_axis=3.149118633034304, @@ -167,9 +167,9 @@ }, { "pixel": nav.Pixel(line=8356, pixel=7172), - 'lon': 144.980104, - 'lat': -34.929123, - 'nav_params': nav.PixelNavigationParameters( + "lon": 144.980104, + "lat": -34.929123, + "nav_params": nav.PixelNavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.935707944858620, angle_between_sat_spin_and_z_axis=3.149118633034304, diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index dfc8f0aec6..efecd1aa53 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -32,12 +32,12 @@ # Level 1 Fixtures AVAILABLE_1KM_VIS_PRODUCT_NAMES = [str(x) for x in range(8, 13)] -AVAILABLE_1KM_VIS_PRODUCT_NAMES += ['13lo', '13hi', '14lo', '14hi'] +AVAILABLE_1KM_VIS_PRODUCT_NAMES += ["13lo", "13hi", "14lo", "14hi"] AVAILABLE_1KM_VIS_PRODUCT_NAMES += [str(x) for x in range(15, 20)] AVAILABLE_1KM_IR_PRODUCT_NAMES = [str(x) for x in range(20, 37)] AVAILABLE_1KM_PRODUCT_NAMES = AVAILABLE_1KM_VIS_PRODUCT_NAMES + AVAILABLE_1KM_IR_PRODUCT_NAMES AVAILABLE_HKM_PRODUCT_NAMES = [str(x) for x in range(3, 8)] -AVAILABLE_QKM_PRODUCT_NAMES = ['1', '2'] +AVAILABLE_QKM_PRODUCT_NAMES = ["1", "2"] SCAN_LEN_5KM = 6 # 3 scans of 5km data SCAN_WIDTH_5KM = 270 SCALE_FACTOR = 0.5 @@ -101,14 +101,14 @@ def _generate_visible_uncertainty_data(shape: tuple) -> np.ndarray: def _get_lonlat_variable_info(resolution: int) -> dict: lon_5km, lat_5km = _generate_lonlat_data(resolution) return { - 'Latitude': {'data': lat_5km, - 'type': SDC.FLOAT32, - 'fill_value': -999, - 'attrs': {'dim_labels': ['Cell_Along_Swath_5km:mod35', 'Cell_Across_Swath_5km:mod35']}}, - 'Longitude': {'data': lon_5km, - 'type': SDC.FLOAT32, - 'fill_value': -999, - 'attrs': {'dim_labels': ['Cell_Along_Swath_5km:mod35', 'Cell_Across_Swath_5km:mod35']}}, + "Latitude": {"data": lat_5km, + "type": SDC.FLOAT32, + "fill_value": -999, + "attrs": {"dim_labels": ["Cell_Along_Swath_5km:mod35", "Cell_Across_Swath_5km:mod35"]}}, + "Longitude": {"data": lon_5km, + "type": SDC.FLOAT32, + "fill_value": -999, + "attrs": {"dim_labels": ["Cell_Along_Swath_5km:mod35", "Cell_Across_Swath_5km:mod35"]}}, } @@ -116,19 +116,19 @@ def _get_angles_variable_info(resolution: int) -> dict: angle_data = _generate_angle_data(resolution) dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2 angle_info = { - 'data': angle_data, - 'type': SDC.INT16, - 'fill_value': -32767, - 'attrs': { - 'dim_labels': [ - f'{dim_factor}*nscans:MODIS_SWATH_Type_L1B', - '1KM_geo_dim:MODIS_SWATH_Type_L1B'], - 'scale_factor': 0.01, - 'add_offset': -0.01, + "data": angle_data, + "type": SDC.INT16, + "fill_value": -32767, + "attrs": { + "dim_labels": [ + f"{dim_factor}*nscans:MODIS_SWATH_Type_L1B", + "1KM_geo_dim:MODIS_SWATH_Type_L1B"], + "scale_factor": 0.01, + "add_offset": -0.01, }, } angles_info = {} - for var_name in ('SensorAzimuth', 'SensorZenith', 'SolarAzimuth', 'SolarZenith'): + for var_name in ("SensorAzimuth", "SensorZenith", "SolarAzimuth", "SolarZenith"): angles_info[var_name] = angle_info return angles_info @@ -139,30 +139,30 @@ def _get_visible_variable_info(var_name: str, resolution: int, bands: list[str]) uncertainty = _generate_visible_uncertainty_data(data.shape) dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2 band_dim_name = f"Band_{resolution}_{num_bands}_RefSB:MODIS_SWATH_Type_L1B" - row_dim_name = f'{dim_factor}*nscans:MODIS_SWATH_Type_L1B' - col_dim_name = 
'Max_EV_frames:MODIS_SWATH_Type_L1B' + row_dim_name = f"{dim_factor}*nscans:MODIS_SWATH_Type_L1B" + col_dim_name = "Max_EV_frames:MODIS_SWATH_Type_L1B" return { var_name: { - 'data': data, - 'type': SDC.UINT16, - 'fill_value': 0, - 'attrs': { + "data": data, + "type": SDC.UINT16, + "fill_value": 0, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [band_dim_name, + "dim_labels": [band_dim_name, row_dim_name, col_dim_name], - 'valid_range': (0, 32767), - 'reflectance_scales': (2.0,) * num_bands, - 'reflectance_offsets': (-0.5,) * num_bands, - 'band_names': ",".join(bands), + "valid_range": (0, 32767), + "reflectance_scales": (2.0,) * num_bands, + "reflectance_offsets": (-0.5,) * num_bands, + "band_names": ",".join(bands), }, }, - var_name + '_Uncert_Indexes': { - 'data': uncertainty, - 'type': SDC.UINT8, - 'fill_value': 255, - 'attrs': { - 'dim_labels': [band_dim_name, + var_name + "_Uncert_Indexes": { + "data": uncertainty, + "type": SDC.UINT8, + "fill_value": 255, + "attrs": { + "dim_labels": [band_dim_name, row_dim_name, col_dim_name], }, @@ -175,27 +175,27 @@ def _get_emissive_variable_info(var_name: str, resolution: int, bands: list[str] data = _generate_visible_data(resolution, len(bands)) dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2 band_dim_name = f"Band_{resolution}_{num_bands}_Emissive:MODIS_SWATH_Type_L1B" - row_dim_name = f'{dim_factor}*nscans:MODIS_SWATH_Type_L1B' - col_dim_name = 'Max_EV_frames:MODIS_SWATH_Type_L1B' + row_dim_name = f"{dim_factor}*nscans:MODIS_SWATH_Type_L1B" + col_dim_name = "Max_EV_frames:MODIS_SWATH_Type_L1B" return { var_name: { - 'data': data, - 'type': SDC.UINT16, - 'fill_value': 0, - 'attrs': { - 'dim_labels': [band_dim_name, + "data": data, + "type": SDC.UINT16, + "fill_value": 0, + "attrs": { + "dim_labels": [band_dim_name, row_dim_name, col_dim_name], - 'valid_range': (0, 32767), - 'band_names': ",".join(bands), + "valid_range": (0, 32767), + "band_names": ",".join(bands), }, }, - var_name + '_Uncert_Indexes': { - 'data': np.zeros(data.shape, dtype=np.uint8), - 'type': SDC.UINT8, - 'fill_value': 255, - 'attrs': { - 'dim_labels': [band_dim_name, + var_name + "_Uncert_Indexes": { + "data": np.zeros(data.shape, dtype=np.uint8), + "type": SDC.UINT8, + "fill_value": 255, + "attrs": { + "dim_labels": [band_dim_name, row_dim_name, col_dim_name], }, @@ -217,13 +217,13 @@ def _get_l1b_geo_variable_info(filename: str, def generate_nasa_l1b_filename(prefix): """Generate a filename that follows NASA MODIS L1b convention.""" now = datetime.now() - return f'{prefix}_A{now:%y%j_%H%M%S}_{now:%Y%j%H%M%S}.hdf' + return f"{prefix}_A{now:%y%j_%H%M%S}_{now:%Y%j%H%M%S}.hdf" def generate_imapp_filename(suffix): """Generate a filename that follows IMAPP MODIS L1b convention.""" now = datetime.now() - return f't1.{now:%y%j.%H%M}.{suffix}.hdf' + return f"t1.{now:%y%j.%H%M}.{suffix}.hdf" def create_hdfeos_test_file(filename: str, @@ -262,17 +262,17 @@ def create_hdfeos_test_file(filename: str, def _add_variable_to_file(h, var_name, var_info): - v = h.create(var_name, var_info['type'], var_info['data'].shape) - v[:] = var_info['data'] + v = h.create(var_name, var_info["type"], var_info["data"].shape) + v[:] = var_info["data"] dim_count = 0 - for dimension_name in var_info['attrs']['dim_labels']: + for dimension_name in var_info["attrs"]["dim_labels"]: v.dim(dim_count).setname(dimension_name) dim_count += 1 - v.setfillvalue(var_info['fill_value']) - v.scale_factor = var_info['attrs'].get('scale_factor', 
SCALE_FACTOR) - v.add_offset = var_info['attrs'].get('add_offset', ADD_OFFSET) - for attr_key, attr_val in var_info['attrs'].items(): - if attr_key == 'dim_labels': + v.setfillvalue(var_info["fill_value"]) + v.scale_factor = var_info["attrs"].get("scale_factor", SCALE_FACTOR) + v.add_offset = var_info["attrs"].get("add_offset", ADD_OFFSET) + for attr_key, attr_val in var_info["attrs"].items(): + if attr_key == "dim_labels": continue setattr(v, attr_key, attr_val) @@ -281,12 +281,12 @@ def _create_core_metadata(file_shortname: str) -> str: beginning_date = datetime.now() ending_date = beginning_date + timedelta(minutes=5) core_metadata_header = "GROUP = INVENTORYMETADATA\nGROUPTYPE = MASTERGROUP\n\n" \ - "GROUP = RANGEDATETIME\n\nOBJECT = RANGEBEGINNINGDATE\nNUM_VAL = 1\nVALUE = \"{}\"\n" \ + 'GROUP = RANGEDATETIME\n\nOBJECT = RANGEBEGINNINGDATE\nNUM_VAL = 1\nVALUE = "{}"\n' \ "END_OBJECT = RANGEBEGINNINGDATE\n\nOBJECT = RANGEBEGINNINGTIME\n" \ - "NUM_VAL = 1\nVALUE = \"{}\"\n" \ + 'NUM_VAL = 1\nVALUE = "{}"\n' \ "END_OBJECT = RANGEBEGINNINGTIME\n\nOBJECT = RANGEENDINGDATE\n" \ - "NUM_VAL = 1\nVALUE = \"{}\"\n" \ - "END_OBJECT = RANGEENDINGDATE\n\nOBJECT = RANGEENDINGTIME\nNUM_VAL = 1\nVALUE = \"{}\"\n" \ + 'NUM_VAL = 1\nVALUE = "{}"\n' \ + 'END_OBJECT = RANGEENDINGDATE\n\nOBJECT = RANGEENDINGTIME\nNUM_VAL = 1\nVALUE = "{}"\n' \ "END_OBJECT = RANGEENDINGTIME\nEND_GROUP = RANGEDATETIME" core_metadata_header = core_metadata_header.format( beginning_date.strftime("%Y-%m-%d"), @@ -295,13 +295,13 @@ def _create_core_metadata(file_shortname: str) -> str: ending_date.strftime("%H:%M:%S.%f") ) inst_metadata = "GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR\n\n" \ - "OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\nCLASS = \"1\"\n\n" \ - "OBJECT = ASSOCIATEDSENSORSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \ - "VALUE = \"MODIS\"\nEND_OBJECT = ASSOCIATEDSENSORSHORTNAME\n\n" \ - "OBJECT = ASSOCIATEDPLATFORMSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \ - "VALUE = \"Terra\"\nEND_OBJECT = ASSOCIATEDPLATFORMSHORTNAME\n\n" \ - "OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \ - "VALUE = \"MODIS\"\nEND_OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\n\n" \ + 'OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\nCLASS = "1"\n\n' \ + 'OBJECT = ASSOCIATEDSENSORSHORTNAME\nCLASS = "1"\nNUM_VAL = 1\n' \ + 'VALUE = "MODIS"\nEND_OBJECT = ASSOCIATEDSENSORSHORTNAME\n\n' \ + 'OBJECT = ASSOCIATEDPLATFORMSHORTNAME\nCLASS = "1"\nNUM_VAL = 1\n' \ + 'VALUE = "Terra"\nEND_OBJECT = ASSOCIATEDPLATFORMSHORTNAME\n\n' \ + 'OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\nCLASS = "1"\nNUM_VAL = 1\n' \ + 'VALUE = "MODIS"\nEND_OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\n\n' \ "END_OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\n\n" \ "END_GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR\n\n" collection_metadata = "GROUP = COLLECTIONDESCRIPTIONCLASS\n\nOBJECT = SHORTNAME\nNUM_VAL = 1\n" \ @@ -318,7 +318,7 @@ def _create_struct_metadata(geo_resolution: int) -> str: "GROUP=SWATH_1\n" \ "GROUP=DimensionMap\n" \ "OBJECT=DimensionMap_2\n" \ - f"GeoDimension=\"{geo_dim_factor}*nscans\"\n" \ + f'GeoDimension="{geo_dim_factor}*nscans"\n' \ "END_OBJECT=DimensionMap_2\n" \ "END_GROUP=DimensionMap\n" \ "END_GROUP=SWATH_1\n" \ @@ -413,20 +413,20 @@ def modis_l1b_nasa_1km_mod03_files(modis_l1b_nasa_mod021km_file, modis_l1b_nasa_ def _get_basic_variable_info(var_name: str, resolution: int) -> dict: shape = _shape_for_resolution(resolution) data = np.ones((shape[0], shape[1]), dtype=np.uint16) - row_dim_name = f'Cell_Along_Swath_{resolution}m:modl2' 
- col_dim_name = f'Cell_Across_Swath_{resolution}m:modl2' + row_dim_name = f"Cell_Along_Swath_{resolution}m:modl2" + col_dim_name = f"Cell_Across_Swath_{resolution}m:modl2" return { var_name: { - 'data': data, - 'type': SDC.UINT16, - 'fill_value': 0, - 'attrs': { + "data": data, + "type": SDC.UINT16, + "fill_value": 0, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [row_dim_name, + "dim_labels": [row_dim_name, col_dim_name], - 'valid_range': (0, 32767), - 'scale_factor': 2.0, - 'add_offset': -1.0, + "valid_range": (0, 32767), + "scale_factor": 2.0, + "add_offset": -1.0, }, }, } @@ -437,35 +437,35 @@ def _get_cloud_mask_variable_info(var_name: str, resolution: int) -> dict: shape = _shape_for_resolution(resolution) data = np.zeros((num_bytes, shape[0], shape[1]), dtype=np.int8) byte_dim_name = "Byte_Segment:mod35" - row_dim_name = 'Cell_Along_Swath_1km:mod35' - col_dim_name = 'Cell_Across_Swath_1km:mod35' + row_dim_name = "Cell_Along_Swath_1km:mod35" + col_dim_name = "Cell_Across_Swath_1km:mod35" return { var_name: { - 'data': data, - 'type': SDC.INT8, - 'fill_value': 0, - 'attrs': { + "data": data, + "type": SDC.INT8, + "fill_value": 0, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [byte_dim_name, + "dim_labels": [byte_dim_name, row_dim_name, col_dim_name], - 'valid_range': (0, -1), - 'scale_factor': 1., - 'add_offset': 0., + "valid_range": (0, -1), + "scale_factor": 1., + "add_offset": 0., }, }, - 'Quality_Assurance': { - 'data': np.ones((shape[0], shape[1], 10), dtype=np.int8), - 'type': SDC.INT8, - 'fill_value': 0, - 'attrs': { + "Quality_Assurance": { + "data": np.ones((shape[0], shape[1], 10), dtype=np.int8), + "type": SDC.INT8, + "fill_value": 0, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [row_dim_name, + "dim_labels": [row_dim_name, col_dim_name, - 'Quality_Dimension:mod35'], - 'valid_range': (0, -1), - 'scale_factor': 2., - 'add_offset': -0.5, + "Quality_Dimension:mod35"], + "valid_range": (0, -1), + "scale_factor": 2., + "add_offset": -0.5, }, }, } @@ -474,47 +474,47 @@ def _get_cloud_mask_variable_info(var_name: str, resolution: int) -> dict: def _get_mask_byte1_variable_info() -> dict: shape = _shape_for_resolution(1000) data = np.zeros((shape[0], shape[1]), dtype=np.uint16) - row_dim_name = 'Cell_Along_Swath_1km:mod35' - col_dim_name = 'Cell_Across_Swath_1km:mod35' + row_dim_name = "Cell_Along_Swath_1km:mod35" + col_dim_name = "Cell_Across_Swath_1km:mod35" return { "MODIS_Cloud_Mask": { - 'data': data, - 'type': SDC.UINT16, - 'fill_value': 9999, - 'attrs': { + "data": data, + "type": SDC.UINT16, + "fill_value": 9999, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [row_dim_name, + "dim_labels": [row_dim_name, col_dim_name], - 'valid_range': (0, 4), - 'scale_factor': 2, - 'add_offset': -1, + "valid_range": (0, 4), + "scale_factor": 2, + "add_offset": -1, }, }, "MODIS_Simple_LandSea_Mask": { - 'data': data, - 'type': SDC.UINT16, - 'fill_value': 9999, - 'attrs': { + "data": data, + "type": SDC.UINT16, + "fill_value": 9999, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [row_dim_name, + "dim_labels": [row_dim_name, col_dim_name], - 'valid_range': (0, 4), - 'scale_factor': 2, - 'add_offset': -1, + "valid_range": (0, 4), + 
"scale_factor": 2, + "add_offset": -1, }, }, "MODIS_Snow_Ice_Flag": { - 'data': data, - 'type': SDC.UINT16, - 'fill_value': 9999, - 'attrs': { + "data": data, + "type": SDC.UINT16, + "fill_value": 9999, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [row_dim_name, + "dim_labels": [row_dim_name, col_dim_name], - 'valid_range': (0, 2), - 'scale_factor': 2, - 'add_offset': -1, + "valid_range": (0, 2), + "scale_factor": 2, + "add_offset": -1, }, }, } @@ -523,7 +523,7 @@ def _get_mask_byte1_variable_info() -> dict: def generate_nasa_l2_filename(prefix: str) -> str: """Generate a file name that follows MODIS 35 L2 convention in a temporary directory.""" now = datetime.now() - return f'{prefix}_L2.A{now:%Y%j.%H%M}.061.{now:%Y%j%H%M%S}.hdf' + return f"{prefix}_L2.A{now:%Y%j.%H%M}.061.{now:%Y%j%H%M%S}.hdf" @pytest.fixture(scope="session") diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py index 56e8687844..85048de0af 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py @@ -50,7 +50,7 @@ def _check_shared_metadata(data_arr): assert data_arr.attrs["platform_name"] == "EOS-Terra" assert "rows_per_scan" in data_arr.attrs assert isinstance(data_arr.attrs["rows_per_scan"], int) - assert data_arr.attrs['reader'] == 'modis_l1b' + assert data_arr.attrs["reader"] == "modis_l1b" def _load_and_check_geolocation(scene, resolution, exp_res, exp_shape, has_res, @@ -79,30 +79,30 @@ class TestModisL1b: def test_available_reader(self): """Test that MODIS L1b reader is available.""" - assert 'modis_l1b' in available_readers() + assert "modis_l1b" in available_readers() @pytest.mark.parametrize( - ('input_files', 'expected_names', 'expected_data_res', 'expected_geo_res'), + ("input_files", "expected_names", "expected_data_res", "expected_geo_res"), [ - [lazy_fixture('modis_l1b_nasa_mod021km_file'), + [lazy_fixture("modis_l1b_nasa_mod021km_file"), AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [1000], [5000, 1000]], - [lazy_fixture('modis_l1b_imapp_1000m_file'), + [lazy_fixture("modis_l1b_imapp_1000m_file"), AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [1000], [5000, 1000]], - [lazy_fixture('modis_l1b_nasa_mod02hkm_file'), + [lazy_fixture("modis_l1b_nasa_mod02hkm_file"), AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [500], [1000, 500, 250]], - [lazy_fixture('modis_l1b_nasa_mod02qkm_file'), + [lazy_fixture("modis_l1b_nasa_mod02qkm_file"), AVAILABLE_QKM_PRODUCT_NAMES, [250], [1000, 500, 250]], ] ) def test_scene_available_datasets(self, input_files, expected_names, expected_data_res, expected_geo_res): """Test that datasets are available.""" - scene = Scene(reader='modis_l1b', filenames=input_files) + scene = Scene(reader="modis_l1b", filenames=input_files) available_datasets = scene.available_dataset_names() assert len(available_datasets) > 0 - assert 'longitude' in available_datasets - assert 'latitude' in available_datasets + assert "longitude" in available_datasets + assert "latitude" in available_datasets for chan_name in expected_names: assert chan_name in available_datasets @@ -111,8 +111,8 @@ def test_scene_available_datasets(self, input_files, expected_names, expected_da available_geos = {x: [] for x in expected_geo_res} # Make sure that every resolution from the reader is what we expect for 
data_id in available_data_ids: - res = data_id['resolution'] - if data_id['name'] in ['longitude', 'latitude']: + res = data_id["resolution"] + if data_id["name"] in ["longitude", "latitude"]: assert res in expected_geo_res available_geos[res].append(data_id) else: @@ -126,23 +126,23 @@ def test_scene_available_datasets(self, input_files, expected_names, expected_da assert avail_id, f"Missing geo datasets for geo resolution {exp_res}" @pytest.mark.parametrize( - ('input_files', 'has_5km', 'has_500', 'has_250', 'default_res'), + ("input_files", "has_5km", "has_500", "has_250", "default_res"), [ - [lazy_fixture('modis_l1b_nasa_mod021km_file'), + [lazy_fixture("modis_l1b_nasa_mod021km_file"), True, False, False, 1000], - [lazy_fixture('modis_l1b_imapp_1000m_file'), + [lazy_fixture("modis_l1b_imapp_1000m_file"), True, False, False, 1000], - [lazy_fixture('modis_l1b_nasa_mod02hkm_file'), + [lazy_fixture("modis_l1b_nasa_mod02hkm_file"), False, True, True, 250], - [lazy_fixture('modis_l1b_nasa_mod02qkm_file'), + [lazy_fixture("modis_l1b_nasa_mod02qkm_file"), False, True, True, 250], - [lazy_fixture('modis_l1b_nasa_1km_mod03_files'), + [lazy_fixture("modis_l1b_nasa_1km_mod03_files"), True, True, True, 250], ] ) def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res): """Test that longitude and latitude datasets are loaded correctly.""" - scene = Scene(reader='modis_l1b', filenames=input_files) + scene = Scene(reader="modis_l1b", filenames=input_files) shape_5km = _shape_for_resolution(5000) shape_500m = _shape_for_resolution(500) shape_250m = _shape_for_resolution(250) @@ -155,35 +155,35 @@ def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, d def test_load_sat_zenith_angle(self, modis_l1b_nasa_mod021km_file): """Test loading satellite zenith angle band.""" - scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file) - dataset_name = 'satellite_zenith_angle' + scene = Scene(reader="modis_l1b", filenames=modis_l1b_nasa_mod021km_file) + dataset_name = "satellite_zenith_angle" scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset.shape == _shape_for_resolution(1000) - assert dataset.attrs['resolution'] == 1000 + assert dataset.attrs["resolution"] == 1000 _check_shared_metadata(dataset) def test_load_vis(self, modis_l1b_nasa_mod021km_file): """Test loading visible band.""" - scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file) - dataset_name = '1' + scene = Scene(reader="modis_l1b", filenames=modis_l1b_nasa_mod021km_file) + dataset_name = "1" scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset[0, 0] == 300.0 assert dataset.shape == _shape_for_resolution(1000) - assert dataset.attrs['resolution'] == 1000 + assert dataset.attrs["resolution"] == 1000 _check_shared_metadata(dataset) @pytest.mark.parametrize("mask_saturated", [False, True]) def test_load_vis_saturation(self, mask_saturated, modis_l1b_nasa_mod021km_file): """Test loading visible band.""" - scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file, + scene = Scene(reader="modis_l1b", filenames=modis_l1b_nasa_mod021km_file, reader_kwargs={"mask_saturated": mask_saturated}) - dataset_name = '2' + dataset_name = "2" scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset.shape == _shape_for_resolution(1000) - assert dataset.attrs['resolution'] == 1000 + assert dataset.attrs["resolution"] == 1000 _check_shared_metadata(dataset) # check saturation fill values diff --git 
a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py index 222f365d87..99c0890d30 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py @@ -46,11 +46,11 @@ def _check_shared_metadata(data_arr, expect_area=False): assert data_arr.attrs["platform_name"] == "EOS-Terra" assert "rows_per_scan" in data_arr.attrs assert isinstance(data_arr.attrs["rows_per_scan"], int) - assert data_arr.attrs['reader'] == 'modis_l2' + assert data_arr.attrs["reader"] == "modis_l2" if expect_area: - assert data_arr.attrs.get('area') is not None + assert data_arr.attrs.get("area") is not None else: - assert 'area' not in data_arr.attrs + assert "area" not in data_arr.attrs class TestModisL2: @@ -58,28 +58,28 @@ class TestModisL2: def test_available_reader(self): """Test that MODIS L2 reader is available.""" - assert 'modis_l2' in available_readers() + assert "modis_l2" in available_readers() def test_scene_available_datasets(self, modis_l2_nasa_mod35_file): """Test that datasets are available.""" - scene = Scene(reader='modis_l2', filenames=modis_l2_nasa_mod35_file) + scene = Scene(reader="modis_l2", filenames=modis_l2_nasa_mod35_file) available_datasets = scene.all_dataset_names() assert len(available_datasets) > 0 - assert 'cloud_mask' in available_datasets - assert 'latitude' in available_datasets - assert 'longitude' in available_datasets + assert "cloud_mask" in available_datasets + assert "latitude" in available_datasets + assert "longitude" in available_datasets @pytest.mark.parametrize( - ('input_files', 'has_5km', 'has_500', 'has_250', 'default_res'), + ("input_files", "has_5km", "has_500", "has_250", "default_res"), [ - [lazy_fixture('modis_l2_nasa_mod35_file'), + [lazy_fixture("modis_l2_nasa_mod35_file"), True, False, False, 1000], ] ) def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res): """Test that longitude and latitude datasets are loaded correctly.""" from .test_modis_l1b import _load_and_check_geolocation - scene = Scene(reader='modis_l2', filenames=input_files) + scene = Scene(reader="modis_l2", filenames=input_files) shape_5km = _shape_for_resolution(5000) shape_500m = _shape_for_resolution(500) shape_250m = _shape_for_resolution(250) @@ -96,8 +96,8 @@ def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, d def test_load_quality_assurance(self, modis_l2_nasa_mod35_file): """Test loading quality assurance.""" - scene = Scene(reader='modis_l2', filenames=modis_l2_nasa_mod35_file) - dataset_name = 'quality_assurance' + scene = Scene(reader="modis_l2", filenames=modis_l2_nasa_mod35_file) + dataset_name = "quality_assurance" scene.load([dataset_name]) quality_assurance_id = make_dataid(name=dataset_name, resolution=1000) assert quality_assurance_id in scene @@ -106,19 +106,19 @@ def test_load_quality_assurance(self, modis_l2_nasa_mod35_file): _check_shared_metadata(quality_assurance, expect_area=True) @pytest.mark.parametrize( - ('input_files', 'loadables', 'request_resolution', 'exp_resolution', 'exp_area'), + ("input_files", "loadables", "request_resolution", "exp_resolution", "exp_area"), [ - [lazy_fixture('modis_l2_nasa_mod35_mod03_files'), + [lazy_fixture("modis_l2_nasa_mod35_mod03_files"), ["cloud_mask"], 1000, 1000, True], - [lazy_fixture('modis_l2_imapp_mask_byte1_geo_files'), + [lazy_fixture("modis_l2_imapp_mask_byte1_geo_files"), ["cloud_mask", "land_sea_mask", "snow_ice_mask"], None, 1000, True], ] 
) def test_load_category_dataset(self, input_files, loadables, request_resolution, exp_resolution, exp_area): """Test loading category products.""" - scene = Scene(reader='modis_l2', filenames=input_files) + scene = Scene(reader="modis_l2", filenames=input_files) kwargs = {"resolution": request_resolution} if request_resolution is not None else {} scene.load(loadables, **kwargs) for ds_name in loadables: @@ -129,23 +129,23 @@ def test_load_category_dataset(self, input_files, loadables, request_resolution, cat_data_arr = cat_data_arr.compute() assert cat_data_arr.shape == _shape_for_resolution(exp_resolution) assert cat_data_arr.values[0, 0] == 0.0 - assert cat_data_arr.attrs.get('resolution') == exp_resolution + assert cat_data_arr.attrs.get("resolution") == exp_resolution # mask variables should be integers assert np.issubdtype(cat_data_arr.dtype, np.integer) - assert cat_data_arr.attrs.get('_FillValue') is not None + assert cat_data_arr.attrs.get("_FillValue") is not None _check_shared_metadata(cat_data_arr, expect_area=exp_area) @pytest.mark.parametrize( - ('input_files', 'exp_area'), + ("input_files", "exp_area"), [ - [lazy_fixture('modis_l2_nasa_mod35_file'), False], - [lazy_fixture('modis_l2_nasa_mod35_mod03_files'), True], + [lazy_fixture("modis_l2_nasa_mod35_file"), False], + [lazy_fixture("modis_l2_nasa_mod35_mod03_files"), True], ] ) def test_load_250m_cloud_mask_dataset(self, input_files, exp_area): """Test loading 250m cloud mask.""" - scene = Scene(reader='modis_l2', filenames=input_files) - dataset_name = 'cloud_mask' + scene = Scene(reader="modis_l2", filenames=input_files) + dataset_name = "cloud_mask" scene.load([dataset_name], resolution=250) cloud_mask_id = make_dataid(name=dataset_name, resolution=250) assert cloud_mask_id in scene @@ -156,21 +156,21 @@ def test_load_250m_cloud_mask_dataset(self, input_files, exp_area): assert cloud_mask.values[0, 0] == 0.0 # mask variables should be integers assert np.issubdtype(cloud_mask.dtype, np.integer) - assert cloud_mask.attrs.get('_FillValue') is not None + assert cloud_mask.attrs.get("_FillValue") is not None _check_shared_metadata(cloud_mask, expect_area=exp_area) @pytest.mark.parametrize( - ('input_files', 'loadables', 'exp_resolution', 'exp_area', 'exp_value'), + ("input_files", "loadables", "exp_resolution", "exp_area", "exp_value"), [ - [lazy_fixture('modis_l2_nasa_mod06_file'), ["surface_pressure"], 5000, True, 4.0], + [lazy_fixture("modis_l2_nasa_mod06_file"), ["surface_pressure"], 5000, True, 4.0], # snow mask is considered a category product, factor/offset ignored - [lazy_fixture('modis_l2_imapp_snowmask_file'), ["snow_mask"], 1000, False, 1.0], - [lazy_fixture('modis_l2_imapp_snowmask_geo_files'), ["snow_mask"], 1000, True, 1.0], + [lazy_fixture("modis_l2_imapp_snowmask_file"), ["snow_mask"], 1000, False, 1.0], + [lazy_fixture("modis_l2_imapp_snowmask_geo_files"), ["snow_mask"], 1000, True, 1.0], ] ) def test_load_l2_dataset(self, input_files, loadables, exp_resolution, exp_area, exp_value): """Load and check an L2 variable.""" - scene = Scene(reader='modis_l2', filenames=input_files) + scene = Scene(reader="modis_l2", filenames=input_files) scene.load(loadables) for ds_name in loadables: assert ds_name in scene @@ -179,5 +179,5 @@ def test_load_l2_dataset(self, input_files, loadables, exp_resolution, exp_area, data_arr = data_arr.compute() assert data_arr.values[0, 0] == exp_value assert data_arr.shape == _shape_for_resolution(exp_resolution) - assert data_arr.attrs.get('resolution') == exp_resolution + assert 
data_arr.attrs.get("resolution") == exp_resolution _check_shared_metadata(data_arr, expect_area=exp_area) diff --git a/satpy/tests/reader_tests/test_aapp_l1b.py b/satpy/tests/reader_tests/test_aapp_l1b.py index c0f84c5a63..e9414ee521 100644 --- a/satpy/tests/reader_tests/test_aapp_l1b.py +++ b/satpy/tests/reader_tests/test_aapp_l1b.py @@ -36,26 +36,26 @@ class TestAAPPL1BAllChannelsPresent(unittest.TestCase): def setUp(self): """Set up the test case.""" self._header = np.zeros(1, dtype=_HEADERTYPE) - self._header['satid'][0] = 13 - self._header['radtempcnv'][0] = [[267194, -171669, 1002811], + self._header["satid"][0] = 13 + self._header["radtempcnv"][0] = [[267194, -171669, 1002811], [930310, -59084, 1001600], [828600, -37854, 1001147]] # first 3b is off, 3a is on - self._header['inststat1'][0] = 0b1111011100000000 + self._header["inststat1"][0] = 0b1111011100000000 # switch 3a off at position 1 - self._header['statchrecnb'][0] = 1 + self._header["statchrecnb"][0] = 1 # 3b is on, 3a is off - self._header['inststat2'][0] = 0b1111101100000000 + self._header["inststat2"][0] = 0b1111101100000000 self._data = np.zeros(3, dtype=_SCANTYPE) - self._data['scnlinyr'][:] = 2020 - self._data['scnlindy'][:] = 8 - self._data['scnlintime'][0] = 30195225 - self._data['scnlintime'][1] = 30195389 - self._data['scnlintime'][2] = 30195556 - self._data['scnlinbit'][0] = -16383 - self._data['scnlinbit'][1] = -16383 - self._data['scnlinbit'][2] = -16384 + self._data["scnlinyr"][:] = 2020 + self._data["scnlindy"][:] = 8 + self._data["scnlintime"][0] = 30195225 + self._data["scnlintime"][1] = 30195389 + self._data["scnlintime"][2] = 30195556 + self._data["scnlinbit"][0] = -16383 + self._data["scnlinbit"][1] = -16383 + self._data["scnlinbit"][2] = -16384 calvis = np.array([[[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [543489984, -21941870, 1592440064, -545027008, 499]], @@ -65,8 +65,8 @@ def setUp(self): [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [257550000, -10449420, 1812019968, -785690304, 499]]]) - self._data['calvis'][:] = calvis - self._data['calir'] = [[[[0, -2675, 2655265], + self._data["calvis"][:] = calvis + self._data["calir"] = [[[[0, -2675, 2655265], [0, 0, 0]], [[33605, -260786, 226818992], [0, 0, 0]], @@ -84,13 +84,13 @@ def setUp(self): [0, 0, 0]], [[13871, -249531, 234652640], [0, 0, 0]]]] - self._data['hrpt'] = np.ones_like(self._data['hrpt']) * (np.arange(2048) // 2)[np.newaxis, :, np.newaxis] + self._data["hrpt"] = np.ones_like(self._data["hrpt"]) * (np.arange(2048) // 2)[np.newaxis, :, np.newaxis] - self.filename_info = {'platform_shortname': 'metop03', 'start_time': datetime.datetime(2020, 1, 8, 8, 19), - 'orbit_number': 6071} - self.filetype_info = {'file_reader': AVHRRAAPPL1BFile, + self.filename_info = {"platform_shortname": "metop03", "start_time": datetime.datetime(2020, 1, 8, 8, 19), + "orbit_number": 6071} + self.filetype_info = {"file_reader": AVHRRAAPPL1BFile, 'file_patterns': ['hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1b'], # noqa - 'file_type': 'avhrr_aapp_l1b'} + "file_type": "avhrr_aapp_l1b"} def test_read(self): """Test the reading.""" @@ -103,22 +103,22 @@ def test_read(self): info = {} mins = [] maxs = [] - for name in ['1', '2', '3a']: - key = make_dataid(name=name, calibration='reflectance') + for name in ["1", "2", "3a"]: + key = make_dataid(name=name, calibration="reflectance") res = fh.get_dataset(key, info) assert res.min() == 0 assert res.max() >= 100 mins.append(res.min().values) maxs.append(res.max().values) - if name == '3a': + if name == "3a": assert 
np.all(np.isnan(res[:2, :])) - for name in ['3b', '4', '5']: - key = make_dataid(name=name, calibration='reflectance') + for name in ["3b", "4", "5"]: + key = make_dataid(name=name, calibration="reflectance") res = fh.get_dataset(key, info) mins.append(res.min().values) maxs.append(res.max().values) - if name == '3b': + if name == "3b": assert np.all(np.isnan(res[2:, :])) np.testing.assert_allclose(mins, [0., 0., 0., 204.10106939, 103.23477235, 106.42609758]) @@ -134,7 +134,7 @@ def test_angles(self): fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = {} - key = make_dataid(name='solar_zenith_angle') + key = make_dataid(name="solar_zenith_angle") res = fh.get_dataset(key, info) assert np.all(res == 0) @@ -147,10 +147,10 @@ def test_navigation(self): fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = {} - key = make_dataid(name='longitude') + key = make_dataid(name="longitude") res = fh.get_dataset(key, info) assert np.all(res == 0) - key = make_dataid(name='latitude') + key = make_dataid(name="latitude") res = fh.get_dataset(key, info) assert np.all(res == 0) @@ -286,25 +286,25 @@ class TestAAPPL1BChannel3AMissing(unittest.TestCase): def setUp(self): """Set up the test case.""" self._header = np.zeros(1, dtype=_HEADERTYPE) - self._header['satid'][0] = 13 - self._header['radtempcnv'][0] = [[267194, -171669, 1002811], + self._header["satid"][0] = 13 + self._header["radtempcnv"][0] = [[267194, -171669, 1002811], [930310, -59084, 1001600], [828600, -37854, 1001147]] # first 3a is off, 3b is on - self._header['inststat1'][0] = 0b1111011100000000 + self._header["inststat1"][0] = 0b1111011100000000 # valid for the whole pass - self._header['statchrecnb'][0] = 0 - self._header['inststat2'][0] = 0b0 + self._header["statchrecnb"][0] = 0 + self._header["inststat2"][0] = 0b0 self._data = np.zeros(3, dtype=_SCANTYPE) - self._data['scnlinyr'][:] = 2020 - self._data['scnlindy'][:] = 8 - self._data['scnlintime'][0] = 30195225 - self._data['scnlintime'][1] = 30195389 - self._data['scnlintime'][2] = 30195556 - self._data['scnlinbit'][0] = -16383 - self._data['scnlinbit'][1] = -16383 - self._data['scnlinbit'][2] = -16383 + self._data["scnlinyr"][:] = 2020 + self._data["scnlindy"][:] = 8 + self._data["scnlintime"][0] = 30195225 + self._data["scnlintime"][1] = 30195389 + self._data["scnlintime"][2] = 30195556 + self._data["scnlinbit"][0] = -16383 + self._data["scnlinbit"][1] = -16383 + self._data["scnlinbit"][2] = -16383 calvis = np.array([[[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [543489984, -21941870, 1592440064, -545027008, 499]], @@ -314,8 +314,8 @@ def setUp(self): [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [257550000, -10449420, 1812019968, -785690304, 499]]]) - self._data['calvis'][:] = calvis - self._data['calir'] = [[[[0, -2675, 2655265], + self._data["calvis"][:] = calvis + self._data["calir"] = [[[[0, -2675, 2655265], [0, 0, 0]], [[33605, -260786, 226818992], [0, 0, 0]], @@ -333,15 +333,15 @@ def setUp(self): [0, 0, 0]], [[13871, -249531, 234652640], [0, 0, 0]]]] - self._data['hrpt'] = np.ones_like(self._data['hrpt']) * (np.arange(2048) // 2)[np.newaxis, :, np.newaxis] + self._data["hrpt"] = np.ones_like(self._data["hrpt"]) * (np.arange(2048) // 2)[np.newaxis, :, np.newaxis] - self.filename_info = {'platform_shortname': 'metop03', 'start_time': datetime.datetime(2020, 1, 8, 8, 19), - 'orbit_number': 6071} - self.filetype_info = {'file_reader': AVHRRAAPPL1BFile, - 'file_patterns': [ - 'hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1b'], 
+ self.filename_info = {"platform_shortname": "metop03", "start_time": datetime.datetime(2020, 1, 8, 8, 19), + "orbit_number": 6071} + self.filetype_info = {"file_reader": AVHRRAAPPL1BFile, + "file_patterns": [ + "hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1b"], # noqa - 'file_type': 'avhrr_aapp_l1b'} + "file_type": "avhrr_aapp_l1b"} def test_loading_missing_channels_returns_none(self): """Test that loading a missing channel raises a keyerror.""" @@ -352,7 +352,7 @@ def test_loading_missing_channels_returns_none(self): fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = {} - key = make_dataid(name='3a', calibration='reflectance') + key = make_dataid(name="3a", calibration="reflectance") assert fh.get_dataset(key, info) is None def test_available_datasets_miss_3a(self): @@ -363,16 +363,16 @@ def test_available_datasets_miss_3a(self): self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) - configured_datasets = [[None, {'name': '1'}], - [None, {'name': '2'}], - [None, {'name': '3a'}], - [None, {'name': '3b'}], - [None, {'name': '4'}], - [None, {'name': '5'}], + configured_datasets = [[None, {"name": "1"}], + [None, {"name": "2"}], + [None, {"name": "3a"}], + [None, {"name": "3b"}], + [None, {"name": "4"}], + [None, {"name": "5"}], ] available_datasets = fh.available_datasets(configured_datasets) for status, mda in available_datasets: - if mda['name'] == '3a': + if mda["name"] == "3a": assert status is False else: assert status is True @@ -397,9 +397,9 @@ def setUp(self): [[18214, -200932, 182150896], [0, 0, 0]], [[6761, -200105, 192092496], [0, 0, 0]]], dtype=" 0 def test_is_valid_time(self): """Test that valid times are correctly identified.""" - assert AHIHSDFileHandler._is_valid_timeline(FAKE_BASIC_INFO['observation_timeline']) - assert not AHIHSDFileHandler._is_valid_timeline('65526') + assert AHIHSDFileHandler._is_valid_timeline(FAKE_BASIC_INFO["observation_timeline"]) + assert not AHIHSDFileHandler._is_valid_timeline("65526") def test_time_rounding(self): """Test rounding of the nominal time.""" mocker = mock.MagicMock() in_date = datetime(2020, 1, 1, 12, 0, 0) - with mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._is_valid_timeline', mocker): + with mock.patch("satpy.readers.ahi_hsd.AHIHSDFileHandler._is_valid_timeline", mocker): with _fake_hsd_handler() as fh: mocker.return_value = True assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 3, 0, 0) @@ -468,31 +468,31 @@ def test_time_rounding(self): class TestAHICalibration(unittest.TestCase): """Test case for various AHI calibration types.""" - @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler.__init__', + @mock.patch("satpy.readers.ahi_hsd.AHIHSDFileHandler.__init__", return_value=None) def setUp(self, *mocks): """Create fake data for testing.""" self.def_cali = [-0.0037, 15.20] self.upd_cali = [-0.0074, 30.40] self.bad_cali = [0.0, 0.0] - fh = AHIHSDFileHandler(filetype_info={'file_type': 'hsd_b01'}) - fh.calib_mode = 'NOMINAL' + fh = AHIHSDFileHandler(filetype_info={"file_type": "hsd_b01"}) + fh.calib_mode = "NOMINAL" fh.user_calibration = None fh.is_zipped = False fh._header = { - 'block5': {'band_number': [5], - 'gain_count2rad_conversion': [self.def_cali[0]], - 'offset_count2rad_conversion': [self.def_cali[1]], - 'central_wave_length': [10.4073], }, - 'calibration': {'coeff_rad2albedo_conversion': [0.0019255], - 'speed_of_light': [299792458.0], - 'planck_constant': [6.62606957e-34], - 
'boltzmann_constant': [1.3806488e-23], - 'c0_rad2tb_conversion': [-0.116127314574], - 'c1_rad2tb_conversion': [1.00099153832], - 'c2_rad2tb_conversion': [-1.76961091571e-06], - 'cali_gain_count2rad_conversion': [self.upd_cali[0]], - 'cali_offset_count2rad_conversion': [self.upd_cali[1]]}, + "block5": {"band_number": [5], + "gain_count2rad_conversion": [self.def_cali[0]], + "offset_count2rad_conversion": [self.def_cali[1]], + "central_wave_length": [10.4073], }, + "calibration": {"coeff_rad2albedo_conversion": [0.0019255], + "speed_of_light": [299792458.0], + "planck_constant": [6.62606957e-34], + "boltzmann_constant": [1.3806488e-23], + "c0_rad2tb_conversion": [-0.116127314574], + "c1_rad2tb_conversion": [1.00099153832], + "c2_rad2tb_conversion": [-1.76961091571e-06], + "cali_gain_count2rad_conversion": [self.upd_cali[0]], + "cali_offset_count2rad_conversion": [self.upd_cali[1]]}, } self.counts = da.array(np.array([[0., 1000.], @@ -504,56 +504,56 @@ def test_default_calibrate(self, *mocks): self.setUp() # Counts self.assertEqual(self.fh.calibrate(data=123, - calibration='counts'), + calibration="counts"), 123) # Radiance rad_exp = np.array([[15.2, 11.5], [7.8, -3.3]]) rad = self.fh.calibrate(data=self.counts, - calibration='radiance') + calibration="radiance") self.assertTrue(np.allclose(rad, rad_exp)) # Brightness Temperature bt_exp = np.array([[330.978979, 310.524688], [285.845017, np.nan]]) bt = self.fh.calibrate(data=self.counts, - calibration='brightness_temperature') + calibration="brightness_temperature") np.testing.assert_allclose(bt, bt_exp) # Reflectance refl_exp = np.array([[2.92676, 2.214325], [1.50189, 0.]]) refl = self.fh.calibrate(data=self.counts, - calibration='reflectance') + calibration="reflectance") self.assertTrue(np.allclose(refl, refl_exp)) def test_updated_calibrate(self): """Test updated in-file calibration modes.""" # Standard operation - self.fh.calib_mode = 'UPDATE' + self.fh.calib_mode = "UPDATE" rad_exp = np.array([[30.4, 23.0], [15.6, -6.6]]) - rad = self.fh.calibrate(data=self.counts, calibration='radiance') + rad = self.fh.calibrate(data=self.counts, calibration="radiance") self.assertTrue(np.allclose(rad, rad_exp)) # Case for no updated calibration available (older data) self.fh._header = { - 'block5': {'band_number': [5], - 'gain_count2rad_conversion': [self.def_cali[0]], - 'offset_count2rad_conversion': [self.def_cali[1]], - 'central_wave_length': [10.4073], }, - 'calibration': {'coeff_rad2albedo_conversion': [0.0019255], - 'speed_of_light': [299792458.0], - 'planck_constant': [6.62606957e-34], - 'boltzmann_constant': [1.3806488e-23], - 'c0_rad2tb_conversion': [-0.116127314574], - 'c1_rad2tb_conversion': [1.00099153832], - 'c2_rad2tb_conversion': [-1.76961091571e-06], - 'cali_gain_count2rad_conversion': [self.bad_cali[0]], - 'cali_offset_count2rad_conversion': [self.bad_cali[1]]}, + "block5": {"band_number": [5], + "gain_count2rad_conversion": [self.def_cali[0]], + "offset_count2rad_conversion": [self.def_cali[1]], + "central_wave_length": [10.4073], }, + "calibration": {"coeff_rad2albedo_conversion": [0.0019255], + "speed_of_light": [299792458.0], + "planck_constant": [6.62606957e-34], + "boltzmann_constant": [1.3806488e-23], + "c0_rad2tb_conversion": [-0.116127314574], + "c1_rad2tb_conversion": [1.00099153832], + "c2_rad2tb_conversion": [-1.76961091571e-06], + "cali_gain_count2rad_conversion": [self.bad_cali[0]], + "cali_offset_count2rad_conversion": [self.bad_cali[1]]}, } - rad = self.fh.calibrate(data=self.counts, calibration='radiance') + rad = 
self.fh.calibrate(data=self.counts, calibration="radiance") rad_exp = np.array([[15.2, 11.5], [7.8, -3.3]]) self.assertTrue(np.allclose(rad, rad_exp)) @@ -561,20 +561,20 @@ def test_updated_calibrate(self): def test_user_calibration(self): """Test user-defined calibration modes.""" # This is for radiance correction - self.fh.user_calibration = {'B13': {'slope': 0.95, - 'offset': -0.1}} - self.fh.band_name = 'B13' - rad = self.fh.calibrate(data=self.counts, calibration='radiance').compute() + self.fh.user_calibration = {"B13": {"slope": 0.95, + "offset": -0.1}} + self.fh.band_name = "B13" + rad = self.fh.calibrate(data=self.counts, calibration="radiance").compute() rad_exp = np.array([[16.10526316, 12.21052632], [8.31578947, -3.36842105]]) self.assertTrue(np.allclose(rad, rad_exp)) # This is for DN calibration - self.fh.user_calibration = {'B13': {'slope': -0.0032, - 'offset': 15.20}, - 'type': 'DN'} - self.fh.band_name = 'B13' - rad = self.fh.calibrate(data=self.counts, calibration='radiance').compute() + self.fh.user_calibration = {"B13": {"slope": -0.0032, + "offset": 15.20}, + "type": "DN"} + self.fh.band_name = "B13" + rad = self.fh.calibrate(data=self.counts, calibration="radiance").compute() rad_exp = np.array([[15.2, 12.], [8.8, -0.8]]) self.assertTrue(np.allclose(rad, rad_exp)) @@ -584,10 +584,10 @@ def test_user_calibration(self): def _fake_hsd_handler(fh_kwargs=None): """Create a test file handler.""" m = mock.mock_open() - with mock.patch('satpy.readers.ahi_hsd.np.fromfile', _custom_fromfile), \ - mock.patch('satpy.readers.ahi_hsd.unzip_file', mock.MagicMock(side_effect=_new_unzip)), \ - mock.patch('satpy.readers.ahi_hsd.open', m, create=True): - in_fname = 'test_file.bz2' + with mock.patch("satpy.readers.ahi_hsd.np.fromfile", _custom_fromfile), \ + mock.patch("satpy.readers.ahi_hsd.unzip_file", mock.MagicMock(side_effect=_new_unzip)), \ + mock.patch("satpy.readers.ahi_hsd.open", m, create=True): + in_fname = "test_file.bz2" fh = _create_fake_file_handler(in_fname, fh_kwargs=fh_kwargs) yield fh @@ -639,14 +639,14 @@ def _custom_fromfile(*args, **kwargs): def _create_fake_file_handler(in_fname, filename_info=None, filetype_info=None, fh_kwargs=None): if filename_info is None: - filename_info = {'segment': 8, 'total_segments': 10} + filename_info = {"segment": 8, "total_segments": 10} if filetype_info is None: - filetype_info = {'file_type': 'hsd_b01'} + filetype_info = {"file_type": "hsd_b01"} if fh_kwargs is None: fh_kwargs = {} fh = AHIHSDFileHandler(in_fname, filename_info, filetype_info, **fh_kwargs) # Check that the filename is altered and 2 digit segment prefix added for bz2 format files assert in_fname != fh.filename - assert str(filename_info['segment']).zfill(2) == fh.filename[0:2] + assert str(filename_info["segment"]).zfill(2) == fh.filename[0:2] return fh diff --git a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py index e4ef6ec72f..9d1302ef41 100644 --- a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py +++ b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py @@ -35,65 +35,65 @@ class TestAHIGriddedArea(unittest.TestCase): def setUp(self): """Create fake data for testing.""" - self.FULLDISK_SIZES = {0.005: {'x_size': 24000, - 'y_size': 24000}, - 0.01: {'x_size': 12000, - 'y_size': 12000}, - 0.02: {'x_size': 6000, - 'y_size': 6000}} + self.FULLDISK_SIZES = {0.005: {"x_size": 24000, + "y_size": 24000}, + 0.01: {"x_size": 12000, + "y_size": 12000}, + 0.02: {"x_size": 6000, + "y_size": 6000}} 
self.AHI_FULLDISK_EXTENT = [85., -60., 205., 60.] @staticmethod - def make_fh(filetype, area='fld'): + def make_fh(filetype, area="fld"): """Create a test file handler.""" m = mock.mock_open() - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): - fh = AHIGriddedFileHandler('somefile', - {'area': area}, - filetype_info={'file_type': filetype}) + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): + fh = AHIGriddedFileHandler("somefile", + {"area": area}, + filetype_info={"file_type": filetype}) return fh def test_low_res(self): """Check size of the low resolution (2km) grid.""" - tmp_fh = self.make_fh('tir.01') - self.assertEqual(self.FULLDISK_SIZES[0.02]['x_size'], tmp_fh.ncols) - self.assertEqual(self.FULLDISK_SIZES[0.02]['y_size'], tmp_fh.nlines) + tmp_fh = self.make_fh("tir.01") + self.assertEqual(self.FULLDISK_SIZES[0.02]["x_size"], tmp_fh.ncols) + self.assertEqual(self.FULLDISK_SIZES[0.02]["y_size"], tmp_fh.nlines) def test_med_res(self): """Check size of the low resolution (1km) grid.""" - tmp_fh = self.make_fh('vis.02') - self.assertEqual(self.FULLDISK_SIZES[0.01]['x_size'], tmp_fh.ncols) - self.assertEqual(self.FULLDISK_SIZES[0.01]['y_size'], tmp_fh.nlines) + tmp_fh = self.make_fh("vis.02") + self.assertEqual(self.FULLDISK_SIZES[0.01]["x_size"], tmp_fh.ncols) + self.assertEqual(self.FULLDISK_SIZES[0.01]["y_size"], tmp_fh.nlines) def test_hi_res(self): """Check size of the low resolution (0.5km) grid.""" - tmp_fh = self.make_fh('ext.01') - self.assertEqual(self.FULLDISK_SIZES[0.005]['x_size'], tmp_fh.ncols) - self.assertEqual(self.FULLDISK_SIZES[0.005]['y_size'], tmp_fh.nlines) + tmp_fh = self.make_fh("ext.01") + self.assertEqual(self.FULLDISK_SIZES[0.005]["x_size"], tmp_fh.ncols) + self.assertEqual(self.FULLDISK_SIZES[0.005]["y_size"], tmp_fh.nlines) def test_area_def(self): """Check that a valid full disk area is produced.""" - good_area = AreaDefinition('gridded_himawari', - 'A gridded Himawari area', - 'longlat', - 'EPSG:4326', - self.FULLDISK_SIZES[0.01]['x_size'], - self.FULLDISK_SIZES[0.01]['y_size'], + good_area = AreaDefinition("gridded_himawari", + "A gridded Himawari area", + "longlat", + "EPSG:4326", + self.FULLDISK_SIZES[0.01]["x_size"], + self.FULLDISK_SIZES[0.01]["y_size"], self.AHI_FULLDISK_EXTENT) - tmp_fh = self.make_fh('vis.01') + tmp_fh = self.make_fh("vis.01") tmp_fh.get_area_def(None) self.assertEqual(tmp_fh.area, good_area) def test_bad_area(self): """Ensure an error is raised for an usupported area.""" - tmp_fh = self.make_fh('ext.01') - tmp_fh.areaname = 'scanning' + tmp_fh = self.make_fh("ext.01") + tmp_fh.areaname = "scanning" with self.assertRaises(NotImplementedError): tmp_fh.get_area_def(None) with self.assertRaises(NotImplementedError): - self.make_fh('ext.01', area='scanning') + self.make_fh("ext.01", area="scanning") class TestAHIGriddedFileCalibration(unittest.TestCase): @@ -102,16 +102,16 @@ class TestAHIGriddedFileCalibration(unittest.TestCase): def setUp(self): """Create a test file handler.""" m = mock.mock_open() - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): - in_fname = 'test_file' + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): + in_fname = "test_file" fh = AHIGriddedFileHandler(in_fname, - {'area': 'fld'}, - filetype_info={'file_type': 'tir.01'}) + {"area": "fld"}, + filetype_info={"file_type": "tir.01"}) self.fh = fh - @mock.patch('satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._get_luts') - 
@mock.patch('satpy.readers.ahi_l1b_gridded_bin.os.path.exists') - @mock.patch('satpy.readers.ahi_l1b_gridded_bin.np.loadtxt') + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._get_luts") + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.os.path.exists") + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.np.loadtxt") def test_calibrate(self, np_loadtxt, os_exist, get_luts): """Test the calibration modes of AHI using the LUTs.""" load_return = np.squeeze(np.dstack([np.arange(0, 2048, 1), @@ -128,26 +128,26 @@ def test_calibrate(self, np_loadtxt, os_exist, get_luts): os_exist.return_value = False # Check that the LUT download is called if we don't have the LUTS - self.fh.calibrate(in_data, 'reflectance') + self.fh.calibrate(in_data, "reflectance") get_luts.assert_called() os_exist.return_value = True # Ensure results equal if no calibration applied - out_data = self.fh.calibrate(in_data, 'counts') + out_data = self.fh.calibrate(in_data, "counts") np.testing.assert_equal(in_data, out_data) # Now ensure results equal if LUT calibration applied - out_data = self.fh.calibrate(in_data, 'reflectance') + out_data = self.fh.calibrate(in_data, "reflectance") np.testing.assert_allclose(refl_out, out_data) # Check that exception is raised if bad calibration is passed with self.assertRaises(NotImplementedError): - self.fh.calibrate(in_data, 'lasers') + self.fh.calibrate(in_data, "lasers") # Check that exception is raised if no file is present np_loadtxt.side_effect = FileNotFoundError with self.assertRaises(FileNotFoundError): - self.fh.calibrate(in_data, 'reflectance') + self.fh.calibrate(in_data, "reflectance") class TestAHIGriddedFileHandler(unittest.TestCase): @@ -155,44 +155,44 @@ class TestAHIGriddedFileHandler(unittest.TestCase): def new_unzip(fname): """Fake unzipping.""" - if fname[-3:] == 'bz2': + if fname[-3:] == "bz2": return fname[:-4] - @mock.patch('satpy.readers.ahi_l1b_gridded_bin.unzip_file', + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.unzip_file", mock.MagicMock(side_effect=new_unzip)) def setUp(self): """Create a test file handler.""" m = mock.mock_open() - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): - in_fname = 'test_file.bz2' + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): + in_fname = "test_file.bz2" fh = AHIGriddedFileHandler(in_fname, - {'area': 'fld'}, - filetype_info={'file_type': 'tir.01'}) + {"area": "fld"}, + filetype_info={"file_type": "tir.01"}) # Check that the filename is altered for bz2 format files self.assertNotEqual(in_fname, fh.filename) self.fh = fh - key = {'calibration': 'counts', - 'name': 'vis.01'} - info = {'units': 'unitless', - 'standard_name': 'vis.01', - 'wavelength': 10.8, - 'resolution': 0.05} + key = {"calibration": "counts", + "name": "vis.01"} + info = {"units": "unitless", + "standard_name": "vis.01", + "wavelength": 10.8, + "resolution": 0.05} self.key = key self.info = info - @mock.patch('satpy.readers.ahi_l1b_gridded_bin.np.memmap') + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.np.memmap") def test_dataread(self, memmap): """Check that a dask array is returned from the read function.""" test_arr = np.zeros((10, 10)) memmap.return_value = test_arr m = mock.mock_open() - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): res = self.fh._read_data(mock.MagicMock()) np.testing.assert_allclose(res, da.from_array(test_arr)) - 
@mock.patch('satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._read_data') + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._read_data") def test_get_dataset(self, mocked_read): """Check that a good dataset is returned on request.""" m = mock.mock_open() @@ -200,17 +200,17 @@ def test_get_dataset(self, mocked_read): out_data = np.array([[100., 300., 500.], [800., 1500., 2040.]]) mocked_read.return_value = out_data - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): res = self.fh.get_dataset(self.key, self.info) mocked_read.assert_called() # Check output data is correct np.testing.assert_allclose(res.values, out_data) # Also check a couple of attributes - self.assertEqual(res.attrs['name'], self.key['name']) - self.assertEqual(res.attrs['wavelength'], self.info['wavelength']) + self.assertEqual(res.attrs["name"], self.key["name"]) + self.assertEqual(res.attrs["wavelength"], self.info["wavelength"]) - @mock.patch('os.path.exists', return_value=True) - @mock.patch('os.remove') + @mock.patch("os.path.exists", return_value=True) + @mock.patch("os.remove") def test_destructor(self, exist_patch, remove_patch): """Check that file handler deletes files if needed.""" del self.fh @@ -229,27 +229,27 @@ def mocked_ftp_dl(fname): with tarfile.open(fname, "w:gz") as tar_handle: for namer in AHI_LUT_NAMES: tmpf = os.path.join(tempfile.tempdir, namer) - with open(tmpf, 'w') as tmp_fid: + with open(tmpf, "w") as tmp_fid: tmp_fid.write("TEST\n") - tar_handle.add(tmpf, arcname='count2tbb_v102/'+namer) + tar_handle.add(tmpf, arcname="count2tbb_v102/"+namer) os.remove(tmpf) def setUp(self): """Create a test file handler.""" m = mock.mock_open() - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): - in_fname = 'test_file' + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): + in_fname = "test_file" fh = AHIGriddedFileHandler(in_fname, - {'area': 'fld'}, - filetype_info={'file_type': 'tir.01'}) + {"area": "fld"}, + filetype_info={"file_type": "tir.01"}) self.fh = fh - key = {'calibration': 'counts', - 'name': 'vis.01'} - info = {'units': 'unitless', - 'standard_name': 'vis.01', - 'wavelength': 10.8, - 'resolution': 0.05} + key = {"calibration": "counts", + "name": "vis.01"} + info = {"units": "unitless", + "standard_name": "vis.01", + "wavelength": 10.8, + "resolution": 0.05} self.key = key self.info = info @@ -262,23 +262,23 @@ def tearDown(self): if os.path.isdir(self.fh.lut_dir): shutil.rmtree(self.fh.lut_dir) - @mock.patch('satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._download_luts', + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._download_luts", mock.MagicMock(side_effect=mocked_ftp_dl)) def test_get_luts(self): """Check that the function to download LUTs operates successfully.""" tempdir = tempfile.gettempdir() print(self.fh.lut_dir) self.fh._get_luts() - self.assertFalse(os.path.exists(os.path.join(tempdir, 'count2tbb_v102/'))) + self.assertFalse(os.path.exists(os.path.join(tempdir, "count2tbb_v102/"))) for lut_name in AHI_LUT_NAMES: self.assertTrue(os.path.isfile(os.path.join(self.fh.lut_dir, lut_name))) - @mock.patch('urllib.request.urlopen') - @mock.patch('shutil.copyfileobj') + @mock.patch("urllib.request.urlopen") + @mock.patch("shutil.copyfileobj") def test_download_luts(self, mock_dl, mock_shutil): """Test that the FTP library is called for downloading LUTS.""" m = mock.mock_open() - with 
mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): - self.fh._download_luts('/test_file') + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): + self.fh._download_luts("/test_file") mock_dl.assert_called() mock_shutil.assert_called() diff --git a/satpy/tests/reader_tests/test_ami_l1b.py b/satpy/tests/reader_tests/test_ami_l1b.py index 50f6f2af03..58a3612b49 100644 --- a/satpy/tests/reader_tests/test_ami_l1b.py +++ b/satpy/tests/reader_tests/test_ami_l1b.py @@ -56,7 +56,7 @@ def close(self): class TestAMIL1bNetCDFBase(unittest.TestCase): """Common setup for NC_ABI_L1B tests.""" - @mock.patch('satpy.readers.ami_l1b.xr') + @mock.patch("satpy.readers.ami_l1b.xr") def setUp(self, xr_, counts=None): """Create a fake dataset using the given counts data.""" from satpy.readers.ami_l1b import AMIL1bNetCDF @@ -66,35 +66,35 @@ def setUp(self, xr_, counts=None): rad_data = (rad_data + 1.) / 0.5 rad_data = rad_data.astype(np.int16) counts = xr.DataArray( - da.from_array(rad_data, chunks='auto'), - dims=('y', 'x'), + da.from_array(rad_data, chunks="auto"), + dims=("y", "x"), attrs={ - 'channel_name': "VI006", - 'detector_side': 2, - 'number_of_total_pixels': 484000000, - 'number_of_error_pixels': 113892451, - 'max_pixel_value': 32768, - 'min_pixel_value': 6, - 'average_pixel_value': 8228.98770845248, - 'stddev_pixel_value': 13621.130386551, - 'number_of_total_bits_per_pixel': 16, - 'number_of_data_quality_flag_bits_per_pixel': 2, - 'number_of_valid_bits_per_pixel': 12, - 'data_quality_flag_meaning': + "channel_name": "VI006", + "detector_side": 2, + "number_of_total_pixels": 484000000, + "number_of_error_pixels": 113892451, + "max_pixel_value": 32768, + "min_pixel_value": 6, + "average_pixel_value": 8228.98770845248, + "stddev_pixel_value": 13621.130386551, + "number_of_total_bits_per_pixel": 16, + "number_of_data_quality_flag_bits_per_pixel": 2, + "number_of_valid_bits_per_pixel": 12, + "data_quality_flag_meaning": "0:good_pixel, 1:conditionally_usable_pixel, 2:out_of_scan_area_pixel, 3:error_pixel", - 'ground_sample_distance_ew': 1.4e-05, - 'ground_sample_distance_ns': 1.4e-05, + "ground_sample_distance_ew": 1.4e-05, + "ground_sample_distance_ns": 1.4e-05, } ) sc_position = xr.DataArray(0., attrs={ - 'sc_position_center_pixel': [-26113466.1974016, 33100139.1630508, 3943.75470244799], + "sc_position_center_pixel": [-26113466.1974016, 33100139.1630508, 3943.75470244799], }) xr_.open_dataset.return_value = FakeDataset( { - 'image_pixel_values': counts, - 'sc_position': sc_position, - 'gsics_coeff_intercept': [0.1859369], - 'gsics_coeff_slope': [0.9967594], + "image_pixel_values": counts, + "sc_position": sc_position, + "gsics_coeff_intercept": [0.1859369], + "gsics_coeff_slope": [0.9967594], }, { "satellite_name": "GK-2A", @@ -125,9 +125,9 @@ def setUp(self, xr_, counts=None): } ) - self.reader = AMIL1bNetCDF('filename', - {'platform_shortname': 'gk2a'}, - {'file_type': 'ir087'},) + self.reader = AMIL1bNetCDF("filename", + {"platform_shortname": "gk2a"}, + {"file_type": "ir087"},) class TestAMIL1bNetCDF(TestAMIL1bNetCDFBase): @@ -136,12 +136,12 @@ class TestAMIL1bNetCDF(TestAMIL1bNetCDFBase): def _check_orbital_parameters(self, orb_params): """Check that orbital parameters match expected values.""" exp_params = { - 'projection_altitude': 35785863.0, - 'projection_latitude': 0.0, - 'projection_longitude': 128.2, - 'satellite_actual_altitude': 35782654.56070405, - 'satellite_actual_latitude': 0.005364927, - 'satellite_actual_longitude': 128.2707, + 
"projection_altitude": 35785863.0, + "projection_latitude": 0.0, + "projection_longitude": 128.2, + "satellite_actual_altitude": 35782654.56070405, + "satellite_actual_latitude": 0.005364927, + "satellite_actual_longitude": 128.2707, } for key, val in exp_params.items(): self.assertAlmostEqual(val, orb_params[key], places=3) @@ -150,25 +150,25 @@ def test_filename_grouping(self): """Test that filenames are grouped properly.""" from satpy.readers import group_files filenames = [ - 'gk2a_ami_le1b_ir087_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_ir096_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_ir105_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_ir112_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_ir123_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_ir133_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_nr013_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_nr016_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_sw038_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_vi004_fd010ge_201909300300.nc', - 'gk2a_ami_le1b_vi005_fd010ge_201909300300.nc', - 'gk2a_ami_le1b_vi006_fd005ge_201909300300.nc', - 'gk2a_ami_le1b_vi008_fd010ge_201909300300.nc', - 'gk2a_ami_le1b_wv063_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_wv069_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_wv073_fd020ge_201909300300.nc'] - groups = group_files(filenames, reader='ami_l1b') + "gk2a_ami_le1b_ir087_fd020ge_201909300300.nc", + "gk2a_ami_le1b_ir096_fd020ge_201909300300.nc", + "gk2a_ami_le1b_ir105_fd020ge_201909300300.nc", + "gk2a_ami_le1b_ir112_fd020ge_201909300300.nc", + "gk2a_ami_le1b_ir123_fd020ge_201909300300.nc", + "gk2a_ami_le1b_ir133_fd020ge_201909300300.nc", + "gk2a_ami_le1b_nr013_fd020ge_201909300300.nc", + "gk2a_ami_le1b_nr016_fd020ge_201909300300.nc", + "gk2a_ami_le1b_sw038_fd020ge_201909300300.nc", + "gk2a_ami_le1b_vi004_fd010ge_201909300300.nc", + "gk2a_ami_le1b_vi005_fd010ge_201909300300.nc", + "gk2a_ami_le1b_vi006_fd005ge_201909300300.nc", + "gk2a_ami_le1b_vi008_fd010ge_201909300300.nc", + "gk2a_ami_le1b_wv063_fd020ge_201909300300.nc", + "gk2a_ami_le1b_wv069_fd020ge_201909300300.nc", + "gk2a_ami_le1b_wv073_fd020ge_201909300300.nc"] + groups = group_files(filenames, reader="ami_l1b") self.assertEqual(len(groups), 1) - self.assertEqual(len(groups[0]['ami_l1b']), 16) + self.assertEqual(len(groups[0]["ami_l1b"]), 16) def test_basic_attributes(self): """Test getting basic file attributes.""" @@ -181,84 +181,84 @@ def test_basic_attributes(self): def test_get_dataset(self): """Test gettting radiance data.""" from satpy.tests.utils import make_dataid - key = make_dataid(name='VI006', calibration='radiance') + key = make_dataid(name="VI006", calibration="radiance") res = self.reader.get_dataset(key, { - 'file_key': 'image_pixel_values', - 'standard_name': 'toa_outgoing_radiance_per_unit_wavelength', - 'units': 'W m-2 um-1 sr-1', + "file_key": "image_pixel_values", + "standard_name": "toa_outgoing_radiance_per_unit_wavelength", + "units": "W m-2 um-1 sr-1", }) - exp = {'calibration': 'radiance', - 'modifiers': (), - 'platform_name': 'GEO-KOMPSAT-2A', - 'sensor': 'ami', - 'units': 'W m-2 um-1 sr-1'} + exp = {"calibration": "radiance", + "modifiers": (), + "platform_name": "GEO-KOMPSAT-2A", + "sensor": "ami", + "units": "W m-2 um-1 sr-1"} for key, val in exp.items(): self.assertEqual(val, res.attrs[key]) - self._check_orbital_parameters(res.attrs['orbital_parameters']) + self._check_orbital_parameters(res.attrs["orbital_parameters"]) def test_bad_calibration(self): """Test that asking for a bad calibration fails.""" from satpy.tests.utils import make_dataid with 
self.assertRaises(ValueError): - ds_id = make_dataid(name='VI006', calibration='_bad_') - ds_info = {'file_key': 'image_pixel_values', - 'standard_name': 'toa_outgoing_radiance_per_unit_wavelength', - 'units': 'W m-2 um-1 sr-1', + ds_id = make_dataid(name="VI006", calibration="_bad_") + ds_info = {"file_key": "image_pixel_values", + "standard_name": "toa_outgoing_radiance_per_unit_wavelength", + "units": "W m-2 um-1 sr-1", } self.reader.get_dataset(ds_id, ds_info) - @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') + @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def(self, adef): """Test the area generation.""" self.reader.get_area_def(None) self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] - exp = {'a': 6378137.0, 'b': 6356752.3, 'h': 35785863.0, - 'lon_0': 128.2, 'proj': 'geos', 'units': 'm'} + exp = {"a": 6378137.0, "b": 6356752.3, "h": 35785863.0, + "lon_0": 128.2, "proj": "geos", "units": "m"} for key, val in exp.items(): self.assertIn(key, call_args[3]) self.assertAlmostEqual(val, call_args[3][key]) - self.assertEqual(call_args[4], self.reader.nc.attrs['number_of_columns']) - self.assertEqual(call_args[5], self.reader.nc.attrs['number_of_lines']) + self.assertEqual(call_args[4], self.reader.nc.attrs["number_of_columns"]) + self.assertEqual(call_args[5], self.reader.nc.attrs["number_of_lines"]) np.testing.assert_allclose(call_args[6], [-5511022.902, -5511022.902, 5511022.902, 5511022.902]) def test_get_dataset_vis(self): """Test get visible calibrated data.""" from satpy.tests.utils import make_dataid - key = make_dataid(name='VI006', calibration='reflectance') + key = make_dataid(name="VI006", calibration="reflectance") res = self.reader.get_dataset(key, { - 'file_key': 'image_pixel_values', - 'standard_name': 'toa_bidirectional_reflectance', - 'units': '%', + "file_key": "image_pixel_values", + "standard_name": "toa_bidirectional_reflectance", + "units": "%", }) - exp = {'calibration': 'reflectance', - 'modifiers': (), - 'platform_name': 'GEO-KOMPSAT-2A', - 'sensor': 'ami', - 'units': '%'} + exp = {"calibration": "reflectance", + "modifiers": (), + "platform_name": "GEO-KOMPSAT-2A", + "sensor": "ami", + "units": "%"} for key, val in exp.items(): self.assertEqual(val, res.attrs[key]) - self._check_orbital_parameters(res.attrs['orbital_parameters']) + self._check_orbital_parameters(res.attrs["orbital_parameters"]) def test_get_dataset_counts(self): """Test get counts data.""" from satpy.tests.utils import make_dataid - key = make_dataid(name='VI006', calibration='counts') + key = make_dataid(name="VI006", calibration="counts") res = self.reader.get_dataset(key, { - 'file_key': 'image_pixel_values', - 'standard_name': 'counts', - 'units': '1', + "file_key": "image_pixel_values", + "standard_name": "counts", + "units": "1", }) - exp = {'calibration': 'counts', - 'modifiers': (), - 'platform_name': 'GEO-KOMPSAT-2A', - 'sensor': 'ami', - 'units': '1'} + exp = {"calibration": "counts", + "modifiers": (), + "platform_name": "GEO-KOMPSAT-2A", + "sensor": "ami", + "units": "1"} for key, val in exp.items(): self.assertEqual(val, res.attrs[key]) - self._check_orbital_parameters(res.attrs['orbital_parameters']) + self._check_orbital_parameters(res.attrs["orbital_parameters"]) class TestAMIL1bNetCDFIRCal(TestAMIL1bNetCDFBase): @@ -270,53 +270,53 @@ def setUp(self): count_data = (np.arange(10).reshape((2, 5))) + 7000 count_data = count_data.astype(np.uint16) count = xr.DataArray( - da.from_array(count_data, chunks='auto'), - 
dims=('y', 'x'), + da.from_array(count_data, chunks="auto"), + dims=("y", "x"), attrs={ - 'channel_name': "IR087", - 'detector_side': 2, - 'number_of_total_pixels': 484000000, - 'number_of_error_pixels': 113892451, - 'max_pixel_value': 32768, - 'min_pixel_value': 6, - 'average_pixel_value': 8228.98770845248, - 'stddev_pixel_value': 13621.130386551, - 'number_of_total_bits_per_pixel': 16, - 'number_of_data_quality_flag_bits_per_pixel': 2, - 'number_of_valid_bits_per_pixel': 13, - 'data_quality_flag_meaning': + "channel_name": "IR087", + "detector_side": 2, + "number_of_total_pixels": 484000000, + "number_of_error_pixels": 113892451, + "max_pixel_value": 32768, + "min_pixel_value": 6, + "average_pixel_value": 8228.98770845248, + "stddev_pixel_value": 13621.130386551, + "number_of_total_bits_per_pixel": 16, + "number_of_data_quality_flag_bits_per_pixel": 2, + "number_of_valid_bits_per_pixel": 13, + "data_quality_flag_meaning": "0:good_pixel, 1:conditionally_usable_pixel, 2:out_of_scan_area_pixel, 3:error_pixel", - 'ground_sample_distance_ew': 1.4e-05, - 'ground_sample_distance_ns': 1.4e-05, + "ground_sample_distance_ew": 1.4e-05, + "ground_sample_distance_ns": 1.4e-05, } ) - self.ds_id = make_dataid(name='IR087', wavelength=[8.415, 8.59, 8.765], - calibration='brightness_temperature') + self.ds_id = make_dataid(name="IR087", wavelength=[8.415, 8.59, 8.765], + calibration="brightness_temperature") self.ds_info = { - 'file_key': 'image_pixel_values', - 'wavelength': [8.415, 8.59, 8.765], - 'standard_name': 'toa_brightness_temperature', - 'units': 'K', + "file_key": "image_pixel_values", + "wavelength": [8.415, 8.59, 8.765], + "standard_name": "toa_brightness_temperature", + "units": "K", } super(TestAMIL1bNetCDFIRCal, self).setUp(counts=count) def test_default_calibrate(self): """Test default (pyspectral) IR calibration.""" from satpy.readers.ami_l1b import rad2temp - with mock.patch('satpy.readers.ami_l1b.rad2temp', wraps=rad2temp) as r2t_mock: + with mock.patch("satpy.readers.ami_l1b.rad2temp", wraps=rad2temp) as r2t_mock: res = self.reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_called_once() expected = np.array([[238.34385135, 238.31443527, 238.28500087, 238.25554813, 238.22607701], [238.1965875, 238.16707956, 238.13755317, 238.10800829, 238.07844489]]) np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') + self.assertEqual(res.attrs["standard_name"], "toa_brightness_temperature") def test_infile_calibrate(self): """Test IR calibration using in-file coefficients.""" from satpy.readers.ami_l1b import rad2temp - self.reader.calib_mode = 'FILE' - with mock.patch('satpy.readers.ami_l1b.rad2temp', wraps=rad2temp) as r2t_mock: + self.reader.calib_mode = "FILE" + with mock.patch("satpy.readers.ami_l1b.rad2temp", wraps=rad2temp) as r2t_mock: res = self.reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_not_called() expected = np.array([[238.34385135, 238.31443527, 238.28500087, 238.25554813, 238.22607701], @@ -324,34 +324,34 @@ def test_infile_calibrate(self): # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.04) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') + self.assertEqual(res.attrs["standard_name"], "toa_brightness_temperature") def 
test_gsics_radiance_corr(self): """Test IR radiance adjustment using in-file GSICS coefs.""" from satpy.readers.ami_l1b import rad2temp - self.reader.calib_mode = 'GSICS' + self.reader.calib_mode = "GSICS" expected = np.array([[238.036797, 238.007106, 237.977396, 237.947668, 237.91792], [237.888154, 237.85837, 237.828566, 237.798743, 237.768902]]) - with mock.patch('satpy.readers.ami_l1b.rad2temp', wraps=rad2temp) as r2t_mock: + with mock.patch("satpy.readers.ami_l1b.rad2temp", wraps=rad2temp) as r2t_mock: res = self.reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_not_called() # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.01) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') + self.assertEqual(res.attrs["standard_name"], "toa_brightness_temperature") def test_user_radiance_corr(self): """Test IR radiance adjustment using user-supplied coefs.""" from satpy.readers.ami_l1b import rad2temp - self.reader.calib_mode = 'FILE' - self.reader.user_calibration = {'IR087': {'slope': 0.99669, - 'offset': 0.16907}} + self.reader.calib_mode = "FILE" + self.reader.user_calibration = {"IR087": {"slope": 0.99669, + "offset": 0.16907}} expected = np.array([[238.073713, 238.044043, 238.014354, 237.984647, 237.954921], [237.925176, 237.895413, 237.865631, 237.835829, 237.806009]]) - with mock.patch('satpy.readers.ami_l1b.rad2temp', wraps=rad2temp) as r2t_mock: + with mock.patch("satpy.readers.ami_l1b.rad2temp", wraps=rad2temp) as r2t_mock: res = self.reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_not_called() # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.01) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') + self.assertEqual(res.attrs["standard_name"], "toa_brightness_temperature") diff --git a/satpy/tests/reader_tests/test_amsr2_l1b.py b/satpy/tests/reader_tests/test_amsr2_l1b.py index f3e9de538f..b8e51b845b 100644 --- a/satpy/tests/reader_tests/test_amsr2_l1b.py +++ b/satpy/tests/reader_tests/test_amsr2_l1b.py @@ -43,56 +43,56 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { - '/attr/PlatformShortName': 'GCOM-W1', - '/attr/SensorShortName': 'AMSR2', - '/attr/StartOrbitNumber': '22210', - '/attr/StopOrbitNumber': '22210', + "/attr/PlatformShortName": "GCOM-W1", + "/attr/SensorShortName": "AMSR2", + "/attr/StartOrbitNumber": "22210", + "/attr/StopOrbitNumber": "22210", } for bt_chan in [ - '(10.7GHz,H)', - '(10.7GHz,V)', - '(18.7GHz,H)', - '(18.7GHz,V)', - '(23.8GHz,H)', - '(23.8GHz,V)', - '(36.5GHz,H)', - '(36.5GHz,V)', - '(6.9GHz,H)', - '(6.9GHz,V)', - '(7.3GHz,H)', - '(7.3GHz,V)', - '(89.0GHz-A,H)', - '(89.0GHz-A,V)', - '(89.0GHz-B,H)', - '(89.0GHz-B,V)', + "(10.7GHz,H)", + "(10.7GHz,V)", + "(18.7GHz,H)", + "(18.7GHz,V)", + "(23.8GHz,H)", + "(23.8GHz,V)", + "(36.5GHz,H)", + "(36.5GHz,V)", + "(6.9GHz,H)", + "(6.9GHz,V)", + "(7.3GHz,H)", + "(7.3GHz,V)", + "(89.0GHz-A,H)", + "(89.0GHz-A,V)", + "(89.0GHz-B,H)", + "(89.0GHz-B,V)", ]: - k = 'Brightness Temperature {}'.format(bt_chan) + k = "Brightness Temperature {}".format(bt_chan) file_content[k] = DEFAULT_FILE_DATA[:, ::2] - file_content[k + 
'/shape'] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2) - file_content[k + '/attr/UNIT'] = 'K' - file_content[k + '/attr/SCALE FACTOR'] = 0.01 + file_content[k + "/shape"] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2) + file_content[k + "/attr/UNIT"] = "K" + file_content[k + "/attr/SCALE FACTOR"] = 0.01 for bt_chan in [ - '(89.0GHz-A,H)', - '(89.0GHz-A,V)', - '(89.0GHz-B,H)', - '(89.0GHz-B,V)', + "(89.0GHz-A,H)", + "(89.0GHz-A,V)", + "(89.0GHz-B,H)", + "(89.0GHz-B,V)", ]: - k = 'Brightness Temperature {}'.format(bt_chan) + k = "Brightness Temperature {}".format(bt_chan) file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/UNIT'] = 'K' - file_content[k + '/attr/SCALE FACTOR'] = 0.01 - for nav_chan in ['89A', '89B']: - lon_k = 'Longitude of Observation Point for ' + nav_chan - lat_k = 'Latitude of Observation Point for ' + nav_chan + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/UNIT"] = "K" + file_content[k + "/attr/SCALE FACTOR"] = 0.01 + for nav_chan in ["89A", "89B"]: + lon_k = "Longitude of Observation Point for " + nav_chan + lat_k = "Latitude of Observation Point for " + nav_chan file_content[lon_k] = DEFAULT_LON_DATA - file_content[lon_k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[lon_k + '/attr/SCALE FACTOR'] = 1 - file_content[lon_k + '/attr/UNIT'] = 'deg' + file_content[lon_k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[lon_k + "/attr/SCALE FACTOR"] = 1 + file_content[lon_k + "/attr/UNIT"] = "deg" file_content[lat_k] = DEFAULT_LAT_DATA - file_content[lat_k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[lat_k + '/attr/SCALE FACTOR'] = 1 - file_content[lat_k + '/attr/UNIT'] = 'deg' + file_content[lat_k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[lat_k + "/attr/SCALE FACTOR"] = 1 + file_content[lat_k + "/attr/UNIT"] = "deg" convert_file_content_to_data_array(file_content) return file_content @@ -107,9 +107,9 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.amsr2_l1b import AMSR2L1BFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(AMSR2L1BFileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(AMSR2L1BFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -122,7 +122,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5', + "GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -134,59 +134,59 @@ def test_load_basic(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5', + "GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) ds = r.load([ - 'btemp_10.7v', - 'btemp_10.7h', - 'btemp_6.9v', - 'btemp_6.9h', - 'btemp_7.3v', - 'btemp_7.3h', - 'btemp_18.7v', - 'btemp_18.7h', - 'btemp_23.8v', - 'btemp_23.8h', - 'btemp_36.5v', - 'btemp_36.5h', + "btemp_10.7v", + 
"btemp_10.7h", + "btemp_6.9v", + "btemp_6.9h", + "btemp_7.3v", + "btemp_7.3h", + "btemp_18.7v", + "btemp_18.7h", + "btemp_23.8v", + "btemp_23.8h", + "btemp_36.5v", + "btemp_36.5h", ]) self.assertEqual(len(ds), 12) for d in ds.values(): - self.assertEqual(d.attrs['calibration'], 'brightness_temperature') + self.assertEqual(d.attrs["calibration"], "brightness_temperature") self.assertTupleEqual(d.shape, (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1] // 2))) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertTupleEqual(d.attrs['area'].lons.shape, + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) + self.assertTupleEqual(d.attrs["area"].lons.shape, (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2)) - self.assertTupleEqual(d.attrs['area'].lats.shape, + self.assertTupleEqual(d.attrs["area"].lats.shape, (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2)) - assert d.attrs['sensor'] == 'amsr2' - assert d.attrs['platform_name'] == 'GCOM-W1' + assert d.attrs["sensor"] == "amsr2" + assert d.attrs["platform_name"] == "GCOM-W1" def test_load_89ghz(self): """Test loading of 89GHz channels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5', + "GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) ds = r.load([ - 'btemp_89.0av', - 'btemp_89.0ah', - 'btemp_89.0bv', - 'btemp_89.0bh', + "btemp_89.0av", + "btemp_89.0ah", + "btemp_89.0bv", + "btemp_89.0bh", ]) self.assertEqual(len(ds), 4) for d in ds.values(): - self.assertEqual(d.attrs['calibration'], 'brightness_temperature') + self.assertEqual(d.attrs["calibration"], "brightness_temperature") self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertTupleEqual(d.attrs['area'].lons.shape, + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) + self.assertTupleEqual(d.attrs["area"].lons.shape, DEFAULT_FILE_SHAPE) - self.assertTupleEqual(d.attrs['area'].lats.shape, + self.assertTupleEqual(d.attrs["area"].lats.shape, DEFAULT_FILE_SHAPE) diff --git a/satpy/tests/reader_tests/test_amsr2_l2.py b/satpy/tests/reader_tests/test_amsr2_l2.py index 711754c989..106f558919 100644 --- a/satpy/tests/reader_tests/test_amsr2_l2.py +++ b/satpy/tests/reader_tests/test_amsr2_l2.py @@ -42,29 +42,29 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { - '/attr/PlatformShortName': 'GCOM-W1', - '/attr/SensorShortName': 'AMSR2', - '/attr/StartOrbitNumber': '22210', - '/attr/StopOrbitNumber': '22210', + "/attr/PlatformShortName": "GCOM-W1", + "/attr/SensorShortName": "AMSR2", + "/attr/StartOrbitNumber": "22210", + "/attr/StopOrbitNumber": "22210", } - k = 'Geophysical Data' + k = "Geophysical Data" file_content[k] = DEFAULT_FILE_DATA[:, :] - file_content[k + '/shape'] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) - file_content[k + '/attr/UNIT'] = 'K' - file_content[k + '/attr/SCALE FACTOR'] = 1 + file_content[k + "/shape"] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) + file_content[k + "/attr/UNIT"] = "K" + file_content[k + "/attr/SCALE FACTOR"] = 1 - k = 'Latitude of Observation Point' + k = "Latitude of Observation Point" file_content[k] = DEFAULT_FILE_DATA[:, :] - file_content[k + '/shape'] = 
(DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) - file_content[k + '/attr/UNIT'] = 'deg' - file_content[k + '/attr/SCALE FACTOR'] = 1 - k = 'Longitude of Observation Point' + file_content[k + "/shape"] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) + file_content[k + "/attr/UNIT"] = "deg" + file_content[k + "/attr/SCALE FACTOR"] = 1 + k = "Longitude of Observation Point" file_content[k] = DEFAULT_FILE_DATA[:, :] - file_content[k + '/shape'] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) - file_content[k + '/attr/UNIT'] = 'deg' - file_content[k + '/attr/SCALE FACTOR'] = 1 + file_content[k + "/shape"] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) + file_content[k + "/attr/UNIT"] = "deg" + file_content[k + "/attr/SCALE FACTOR"] = 1 - convert_file_content_to_data_array(file_content, dims=('dim_0', 'dim_1')) + convert_file_content_to_data_array(file_content, dims=("dim_0", "dim_1")) return file_content @@ -78,9 +78,9 @@ def setUp(self): from satpy._config import config_search_paths from satpy.readers.amsr2_l1b import AMSR2L1BFileHandler from satpy.readers.amsr2_l2 import AMSR2L2FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(AMSR2L2FileHandler, '__bases__', (FakeHDF5FileHandler2, + self.p = mock.patch.object(AMSR2L2FileHandler, "__bases__", (FakeHDF5FileHandler2, AMSR2L1BFileHandler)) self.fake_handler = self.p.start() self.p.is_local = True @@ -94,7 +94,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5', + "GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -106,17 +106,17 @@ def test_load_basic(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5', + "GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) - ds = r.load(['ssw']) + ds = r.load(["ssw"]) self.assertEqual(len(ds), 1) for d in ds.values(): self.assertTupleEqual(d.shape, (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1]))) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertTupleEqual(d.attrs['area'].lons.shape, + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) + self.assertTupleEqual(d.attrs["area"].lons.shape, (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1])) - self.assertTupleEqual(d.attrs['area'].lats.shape, + self.assertTupleEqual(d.attrs["area"].lats.shape, (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1])) diff --git a/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py b/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py index ac271e7206..2f1b3ad7b0 100644 --- a/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py +++ b/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py @@ -47,10 +47,10 @@ def _get_shared_global_attrs(filename): attrs = { - 'time_coverage_start': '2020-08-12T05:58:31.0Z', - 'time_coverage_end': '2020-08-12T06:07:01.0Z', - 'platform_name': 'GCOM-W1', - 'instrument_name': 'AMSR2', + "time_coverage_start": "2020-08-12T05:58:31.0Z", + "time_coverage_end": "2020-08-12T06:07:01.0Z", + "platform_name": "GCOM-W1", + 
"instrument_name": "AMSR2", } return attrs @@ -58,43 +58,43 @@ def _get_shared_global_attrs(filename): def _create_two_res_gaasp_dataset(filename): """Represent files with two resolution of variables in them (ex. OCEAN).""" lon_var_hi = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_hi_rez_FOVs'), - attrs={'standard_name': 'longitude'}) + dims=("Number_of_Scans", "Number_of_hi_rez_FOVs"), + attrs={"standard_name": "longitude"}) lat_var_hi = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_hi_rez_FOVs'), - attrs={'standard_name': 'latitude'}) + dims=("Number_of_Scans", "Number_of_hi_rez_FOVs"), + attrs={"standard_name": "latitude"}) lon_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - attrs={'standard_name': 'longitude'}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + attrs={"standard_name": "longitude"}) lat_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - attrs={'standard_name': 'latitude'}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + attrs={"standard_name": "latitude"}) swath_var1 = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_hi_rez_FOVs'), - coords={'some_longitude_hi': lon_var_hi, 'some_latitude_hi': lat_var_hi}, - attrs={'_FillValue': -9999., - 'scale_factor': 0.5, 'add_offset': 2.0}) + dims=("Number_of_Scans", "Number_of_hi_rez_FOVs"), + coords={"some_longitude_hi": lon_var_hi, "some_latitude_hi": lat_var_hi}, + attrs={"_FillValue": -9999., + "scale_factor": 0.5, "add_offset": 2.0}) swath_var2 = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - coords={'some_longitude_lo': lon_var_lo, 'some_latitude_lo': lat_var_lo}, - attrs={'_FillValue': -9999.}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + coords={"some_longitude_lo": lon_var_lo, "some_latitude_lo": lat_var_lo}, + attrs={"_FillValue": -9999.}) swath_int_var = xr.DataArray(da.zeros((10, 10), dtype=np.uint16), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - attrs={'_FillValue': 100, 'comment': 'Some comment'}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + attrs={"_FillValue": 100, "comment": "Some comment"}) not_xy_dim_var = xr.DataArray(da.zeros((10, 5), dtype=np.float32), - dims=('Number_of_Scans', 'Time_Dimension')) + dims=("Number_of_Scans", "Time_Dimension")) time_var = xr.DataArray(da.zeros((5,), dtype=np.float32), - dims=('Time_Dimension',)) + dims=("Time_Dimension",)) ds_vars = { - 'swath_var_hi': swath_var1, - 'swath_var_low': swath_var2, - 'swath_var_low_int': swath_int_var, - 'some_longitude_hi': lon_var_hi, - 'some_latitude_hi': lat_var_hi, - 'some_longitude_lo': lon_var_lo, - 'some_latitude_lo': lat_var_lo, - 'not_xy_dim_var': not_xy_dim_var, - 'time_var': time_var, + "swath_var_hi": swath_var1, + "swath_var_low": swath_var2, + "swath_var_low_int": swath_int_var, + "some_longitude_hi": lon_var_hi, + "some_latitude_hi": lat_var_hi, + "some_longitude_lo": lon_var_lo, + "some_latitude_lo": lat_var_lo, + "not_xy_dim_var": not_xy_dim_var, + "time_var": time_var, } attrs = _get_shared_global_attrs(filename) ds = xr.Dataset(ds_vars, attrs=attrs) @@ -104,22 +104,22 @@ def _create_two_res_gaasp_dataset(filename): def _create_gridded_gaasp_dataset(filename): """Represent files with gridded products.""" grid_var = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - 
dims=('Number_of_Y_Dimension', 'Number_of_X_Dimension'), + dims=("Number_of_Y_Dimension", "Number_of_X_Dimension"), attrs={ - '_FillValue': -9999., - 'scale_factor': 0.5, 'add_offset': 2.0 + "_FillValue": -9999., + "scale_factor": 0.5, "add_offset": 2.0 }) latency_var = xr.DataArray(da.zeros((10, 10), dtype=np.timedelta64), - dims=('Number_of_Y_Dimension', 'Number_of_X_Dimension'), + dims=("Number_of_Y_Dimension", "Number_of_X_Dimension"), attrs={ - '_FillValue': -9999, + "_FillValue": -9999, }) time_var = xr.DataArray(da.zeros((5,), dtype=np.float32), - dims=('Time_Dimension',)) + dims=("Time_Dimension",)) ds_vars = { - 'grid_var': grid_var, - 'latency_var': latency_var, - 'time_var': time_var, + "grid_var": grid_var, + "latency_var": latency_var, + "time_var": time_var, } attrs = _get_shared_global_attrs(filename) return xr.Dataset(ds_vars, attrs=attrs) @@ -128,29 +128,29 @@ def _create_gridded_gaasp_dataset(filename): def _create_one_res_gaasp_dataset(filename): """Represent files with one resolution of variables in them (ex. SOIL).""" lon_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - attrs={'standard_name': 'longitude'}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + attrs={"standard_name": "longitude"}) lat_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - attrs={'standard_name': 'latitude'}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + attrs={"standard_name": "latitude"}) swath_var2 = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - coords={'some_longitude_lo': lon_var_lo, 'some_latitude_lo': lat_var_lo}, + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + coords={"some_longitude_lo": lon_var_lo, "some_latitude_lo": lat_var_lo}, attrs={ - '_FillValue': -9999., - 'scale_factor': 0.5, 'add_offset': 2.0 + "_FillValue": -9999., + "scale_factor": 0.5, "add_offset": 2.0 }) swath_int_var = xr.DataArray(da.zeros((10, 10), dtype=np.uint16), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - attrs={'_FillValue': 100, 'comment': 'Some comment'}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + attrs={"_FillValue": 100, "comment": "Some comment"}) time_var = xr.DataArray(da.zeros((5,), dtype=np.float32), - dims=('Time_Dimension',)) + dims=("Time_Dimension",)) ds_vars = { - 'swath_var': swath_var2, - 'swath_var_int': swath_int_var, - 'some_longitude_lo': lon_var_lo, - 'some_latitude_lo': lat_var_lo, - 'time_var': time_var, + "swath_var": swath_var2, + "swath_var_int": swath_int_var, + "some_longitude_lo": lon_var_lo, + "some_latitude_lo": lat_var_lo, + "time_var": time_var, } attrs = _get_shared_global_attrs(filename) return xr.Dataset(ds_vars, attrs=attrs) @@ -168,12 +168,12 @@ def fake_open_dataset(filename, **kwargs): class TestGAASPReader: """Tests for the GAASP reader.""" - yaml_file = 'amsr2_l2_gaasp.yaml' + yaml_file = "amsr2_l2_gaasp.yaml" def setup_method(self): """Wrap pygrib to read fake data.""" from satpy._config import config_search_paths - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) @pytest.mark.parametrize( ("filenames", "expected_loadables"), @@ -191,7 +191,7 @@ def setup_method(self): def test_reader_creation(self, filenames, expected_loadables): """Test basic initialization.""" from satpy.readers import load_reader - with 
mock.patch('satpy.readers.amsr2_l2_gaasp.xr.open_dataset') as od: + with mock.patch("satpy.readers.amsr2_l2_gaasp.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) @@ -203,26 +203,26 @@ def test_reader_creation(self, filenames, expected_loadables): @pytest.mark.parametrize( ("filenames", "expected_datasets"), [ - (EXAMPLE_FILENAMES, ['swath_var_hi', 'swath_var_low', - 'swath_var_low_int', 'swath_var', - 'swath_var_int', - 'grid_var_NH', 'grid_var_SH', - 'latency_var_NH', 'latency_var_SH']), - ([MBT_FILENAME], ['swath_var_hi', 'swath_var_low', - 'swath_var_low_int']), - ([PRECIP_FILENAME], ['swath_var_hi', 'swath_var_low', - 'swath_var_low_int']), - ([OCEAN_FILENAME], ['swath_var_hi', 'swath_var_low', - 'swath_var_low_int']), - ([SEAICE_NH_FILENAME], ['grid_var_NH', 'latency_var_NH']), - ([SEAICE_SH_FILENAME], ['grid_var_SH', 'latency_var_SH']), - ([SNOW_FILENAME], ['swath_var', 'swath_var_int']), - ([SOIL_FILENAME], ['swath_var', 'swath_var_int']), + (EXAMPLE_FILENAMES, ["swath_var_hi", "swath_var_low", + "swath_var_low_int", "swath_var", + "swath_var_int", + "grid_var_NH", "grid_var_SH", + "latency_var_NH", "latency_var_SH"]), + ([MBT_FILENAME], ["swath_var_hi", "swath_var_low", + "swath_var_low_int"]), + ([PRECIP_FILENAME], ["swath_var_hi", "swath_var_low", + "swath_var_low_int"]), + ([OCEAN_FILENAME], ["swath_var_hi", "swath_var_low", + "swath_var_low_int"]), + ([SEAICE_NH_FILENAME], ["grid_var_NH", "latency_var_NH"]), + ([SEAICE_SH_FILENAME], ["grid_var_SH", "latency_var_SH"]), + ([SNOW_FILENAME], ["swath_var", "swath_var_int"]), + ([SOIL_FILENAME], ["swath_var", "swath_var_int"]), ]) def test_available_datasets(self, filenames, expected_datasets): """Test that variables are dynamically discovered.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.amsr2_l2_gaasp.xr.open_dataset') as od: + with mock.patch("satpy.readers.amsr2_l2_gaasp.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) @@ -230,24 +230,24 @@ def test_available_datasets(self, filenames, expected_datasets): avails = list(r.available_dataset_names) for var_name in expected_datasets: assert var_name in avails - assert 'not_xy_dim_var' not in expected_datasets + assert "not_xy_dim_var" not in expected_datasets @staticmethod def _check_area(data_id, data_arr): from pyresample.geometry import AreaDefinition, SwathDefinition - area = data_arr.attrs['area'] - if 'grid_var' in data_id['name'] or 'latency_var' in data_id['name']: + area = data_arr.attrs["area"] + if "grid_var" in data_id["name"] or "latency_var" in data_id["name"]: assert isinstance(area, AreaDefinition) else: assert isinstance(area, SwathDefinition) @staticmethod def _check_fill(data_id, data_arr): - if 'int' in data_id['name']: - assert data_arr.attrs['_FillValue'] == 100 + if "int" in data_id["name"]: + assert data_arr.attrs["_FillValue"] == 100 assert np.issubdtype(data_arr.dtype, np.integer) else: - assert '_FillValue' not in data_arr.attrs + assert "_FillValue" not in data_arr.attrs if np.issubdtype(data_arr.dtype, np.floating): # we started with float32, it should stay that way assert data_arr.dtype.type == np.float32 @@ -255,33 +255,33 @@ def _check_fill(data_id, data_arr): @staticmethod def _check_attrs(data_arr): attrs = data_arr.attrs - assert 'scale_factor' not in attrs - assert 'add_offset' not in attrs - assert attrs['platform_name'] 
== 'GCOM-W1' - assert attrs['sensor'] == 'amsr2' - assert attrs['start_time'] == datetime(2020, 8, 12, 5, 58, 31) - assert attrs['end_time'] == datetime(2020, 8, 12, 6, 7, 1) + assert "scale_factor" not in attrs + assert "add_offset" not in attrs + assert attrs["platform_name"] == "GCOM-W1" + assert attrs["sensor"] == "amsr2" + assert attrs["start_time"] == datetime(2020, 8, 12, 5, 58, 31) + assert attrs["end_time"] == datetime(2020, 8, 12, 6, 7, 1) @pytest.mark.parametrize( ("filenames", "loadable_ids"), [ - (EXAMPLE_FILENAMES, ['swath_var_hi', 'swath_var_low', - 'swath_var_low_int', 'swath_var', - 'swath_var_int', - 'grid_var_NH', 'grid_var_SH', - 'latency_var_NH', 'latency_var_SH']), - ([MBT_FILENAME], ['swath_var_hi', 'swath_var_low', 'swath_var_low_int']), - ([PRECIP_FILENAME], ['swath_var_hi', 'swath_var_low', 'swath_var_low_int']), - ([OCEAN_FILENAME], ['swath_var_hi', 'swath_var_low', 'swath_var_low_int']), - ([SEAICE_NH_FILENAME], ['grid_var_NH', 'latency_var_NH']), - ([SEAICE_SH_FILENAME], ['grid_var_SH', 'latency_var_SH']), - ([SNOW_FILENAME], ['swath_var', 'swath_var_int']), - ([SOIL_FILENAME], ['swath_var', 'swath_var_int']), + (EXAMPLE_FILENAMES, ["swath_var_hi", "swath_var_low", + "swath_var_low_int", "swath_var", + "swath_var_int", + "grid_var_NH", "grid_var_SH", + "latency_var_NH", "latency_var_SH"]), + ([MBT_FILENAME], ["swath_var_hi", "swath_var_low", "swath_var_low_int"]), + ([PRECIP_FILENAME], ["swath_var_hi", "swath_var_low", "swath_var_low_int"]), + ([OCEAN_FILENAME], ["swath_var_hi", "swath_var_low", "swath_var_low_int"]), + ([SEAICE_NH_FILENAME], ["grid_var_NH", "latency_var_NH"]), + ([SEAICE_SH_FILENAME], ["grid_var_SH", "latency_var_SH"]), + ([SNOW_FILENAME], ["swath_var", "swath_var_int"]), + ([SOIL_FILENAME], ["swath_var", "swath_var_int"]), ]) def test_basic_load(self, filenames, loadable_ids): """Test that variables are loaded properly.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.amsr2_l2_gaasp.xr.open_dataset') as od: + with mock.patch("satpy.readers.amsr2_l2_gaasp.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) diff --git a/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py b/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py index a65d0638f5..17ac9f62de 100644 --- a/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py +++ b/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py @@ -42,42 +42,42 @@ def create_message(): surfaceSoilMoisture = np.round(rstate.rand(samples)*100, 1) surfaceSoilMoisture[0] = -1e+100 retmsg = { - 'inputDelayedDescriptorReplicationFactor': [8], - 'edition': 4, - 'masterTableNumber': 0, - 'bufrHeaderCentre': 254, - 'bufrHeaderSubCentre': 0, - 'updateSequenceNumber': 0, - 'dataCategory': 12, - 'internationalDataSubCategory': 255, - 'dataSubCategory': 190, - 'masterTablesVersionNumber': 13, - 'localTablesVersionNumber': 0, - 'typicalYear': 2020, - 'typicalMonth': 12, - 'typicalDay': 21, - 'typicalHour': 9, - 'typicalMinute': 33, - 'typicalSecond': 0, - 'numberOfSubsets': samples, - 'observedData': 1, - 'compressedData': 1, - 'unexpandedDescriptors': 312061, - 'centre': 254, - 'subCentre': 0, - '#1#softwareIdentification': 1000, - 'satelliteIdentifier': 4, - 'satelliteInstruments': 190, - 'year': 2020, - 'month': 12, - 'day': 21, - 'hour': 9, - 'minute': 33, - 'second': np.linspace(0, 59, samples), - 'latitude': lat, - 'longitude': lon, - 'surfaceSoilMoisture': 
surfaceSoilMoisture, - 'soilMoistureQuality': np.zeros(samples), + "inputDelayedDescriptorReplicationFactor": [8], + "edition": 4, + "masterTableNumber": 0, + "bufrHeaderCentre": 254, + "bufrHeaderSubCentre": 0, + "updateSequenceNumber": 0, + "dataCategory": 12, + "internationalDataSubCategory": 255, + "dataSubCategory": 190, + "masterTablesVersionNumber": 13, + "localTablesVersionNumber": 0, + "typicalYear": 2020, + "typicalMonth": 12, + "typicalDay": 21, + "typicalHour": 9, + "typicalMinute": 33, + "typicalSecond": 0, + "numberOfSubsets": samples, + "observedData": 1, + "compressedData": 1, + "unexpandedDescriptors": 312061, + "centre": 254, + "subCentre": 0, + "#1#softwareIdentification": 1000, + "satelliteIdentifier": 4, + "satelliteInstruments": 190, + "year": 2020, + "month": 12, + "day": 21, + "hour": 9, + "minute": 33, + "second": np.linspace(0, 59, samples), + "latitude": lat, + "longitude": lon, + "surfaceSoilMoisture": surfaceSoilMoisture, + "soilMoistureQuality": np.zeros(samples), } return retmsg @@ -85,22 +85,22 @@ def create_message(): MSG = create_message() # the notional filename that would contain the above test message data -FILENAME = 'W_XX-EUMETSAT-TEST,SOUNDING+SATELLITE,METOPA+ASCAT_C_EUMC_20201221093300_73545_eps_o_125_ssm_l2.bin' +FILENAME = "W_XX-EUMETSAT-TEST,SOUNDING+SATELLITE,METOPA+ASCAT_C_EUMC_20201221093300_73545_eps_o_125_ssm_l2.bin" # the information that would be extracted from the above filename according to the pattern in the .yaml FILENAME_INFO = { - 'reception_location': 'TEST', - 'platform': 'METOPA', - 'instrument': 'ASCAT', - 'start_time': '20201221093300', - 'perigee': '73545', - 'species': '125_ssm', - 'level': 'l2' + "reception_location": "TEST", + "platform": "METOPA", + "instrument": "ASCAT", + "start_time": "20201221093300", + "perigee": "73545", + "species": "125_ssm", + "level": "l2" } # file type info for the above file that is defined in the .yaml FILETYPE_INFO = { - 'file_type': 'ascat_l2_soilmoisture_bufr', - 'file_reader': 'AscatSoilMoistureBufr' + "file_type": "ascat_l2_soilmoisture_bufr", + "file_reader": "AscatSoilMoistureBufr" } @@ -110,14 +110,14 @@ def save_test_data(path): filepath = os.path.join(path, FILENAME) with open(filepath, "wb") as f: for m in [MSG]: - buf = ec.codes_bufr_new_from_samples('BUFR4_local_satellite') + buf = ec.codes_bufr_new_from_samples("BUFR4_local_satellite") for key in m: val = m[key] if np.isscalar(val): ec.codes_set(buf, key, val) else: ec.codes_set_array(buf, key, val) - ec.codes_set(buf, 'pack', 1) + ec.codes_set(buf, "pack", 1) ec.codes_write(buf, f) ec.codes_release(buf) return filepath @@ -145,41 +145,41 @@ def tearDown(self): except OSError: pass - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene(self): """Test scene creation.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) - scn = Scene(reader='ascat_l2_soilmoisture_bufr', filenames=[fname]) - self.assertTrue('scatterometer' in scn.sensor_names) + scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname]) + self.assertTrue("scatterometer" in scn.sensor_names) self.assertTrue(datetime(2020, 12, 21, 9, 33, 0) == scn.start_time) self.assertTrue(datetime(2020, 12, 21, 9, 33, 59) == scn.end_time) - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def 
test_scene_load_available_datasets(self): """Test that all datasets are available.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) - scn = Scene(reader='ascat_l2_soilmoisture_bufr', filenames=[fname]) - self.assertTrue('surface_soil_moisture' in scn.available_dataset_names()) + scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname]) + self.assertTrue("surface_soil_moisture" in scn.available_dataset_names()) scn.load(scn.available_dataset_names()) loaded = [dataset.name for dataset in scn] self.assertTrue(sorted(loaded) == sorted(scn.available_dataset_names())) - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_dataset_values(self): """Test loading data.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) - scn = Scene(reader='ascat_l2_soilmoisture_bufr', filenames=[fname]) + scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname]) for name in scn.available_dataset_names(): scn.load([name]) loaded_values = scn[name].values - fill_value = scn[name].attrs['fill_value'] + fill_value = scn[name].attrs["fill_value"] # replace nans in data loaded from file with the fill value defined in the .yaml # to make them comparable loaded_values_nan_filled = np.nan_to_num(loaded_values, nan=fill_value) - key = scn[name].attrs['key'] + key = scn[name].attrs["key"] original_values = MSG[key] # this makes each assertion below a separate test from unittest's point of view # (note: if all subtests pass, they will count as one test) diff --git a/satpy/tests/reader_tests/test_atms_sdr_hdf5.py b/satpy/tests/reader_tests/test_atms_sdr_hdf5.py index 96fb4d7305..aa11e66d09 100644 --- a/satpy/tests/reader_tests/test_atms_sdr_hdf5.py +++ b/satpy/tests/reader_tests/test_atms_sdr_hdf5.py @@ -53,14 +53,14 @@ def __init__(self, filename, filename_info, filetype_info, include_factors=True) @staticmethod def _add_basic_metadata_to_file_content(file_content, filename_info, num_grans): - start_time = filename_info['start_time'] - end_time = filename_info['end_time'].replace(year=start_time.year, + start_time = filename_info["start_time"] + end_time = filename_info["end_time"].replace(year=start_time.year, month=start_time.month, day=start_time.day) - begin_date = start_time.strftime('%Y%m%d') - begin_time = start_time.strftime('%H%M%S.%fZ') - ending_date = end_time.strftime('%Y%m%d') - ending_time = end_time.strftime('%H%M%S.%fZ') + begin_date = start_time.strftime("%Y%m%d") + begin_time = start_time.strftime("%H%M%S.%fZ") + ending_date = end_time.strftime("%Y%m%d") + ending_time = end_time.strftime("%H%M%S.%fZ") new_file_content = { "{prefix2}/attr/AggregateNumberGranules": num_grans, "{prefix2}/attr/AggregateBeginningDate": begin_date, @@ -69,8 +69,8 @@ def _add_basic_metadata_to_file_content(file_content, filename_info, num_grans): "{prefix2}/attr/AggregateEndingTime": ending_time, "{prefix2}/attr/G-Ring_Longitude": np.array([0.0, 0.1, 0.2, 0.3]), "{prefix2}/attr/G-Ring_Latitude": np.array([0.0, 0.1, 0.2, 0.3]), - "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info['orbit']), - "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info['orbit']), + "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info["orbit"]), + "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info["orbit"]), "{prefix1}/attr/Instrument_Short_Name": "ATMS", 
"/attr/Platform_Short_Name": "J01", } @@ -82,13 +82,13 @@ def _add_granule_specific_info_to_file_content(self, file_content, dataset_group lats_lists = self._get_per_granule_lats() file_content["{prefix3}/NumberOfScans"] = np.array([1] * num_granules) for granule_idx in range(num_granules): - prefix_gran = '{prefix}/{dataset_group}_Gran_{idx}'.format(prefix=gran_group_prefix, + prefix_gran = "{prefix}/{dataset_group}_Gran_{idx}".format(prefix=gran_group_prefix, dataset_group=dataset_group, idx=granule_idx) num_scans = num_scans_per_granule[granule_idx] - file_content[prefix_gran + '/attr/N_Number_Of_Scans'] = num_scans - file_content[prefix_gran + '/attr/G-Ring_Longitude'] = lons_lists[granule_idx] - file_content[prefix_gran + '/attr/G-Ring_Latitude'] = lats_lists[granule_idx] + file_content[prefix_gran + "/attr/N_Number_Of_Scans"] = num_scans + file_content[prefix_gran + "/attr/G-Ring_Longitude"] = lons_lists[granule_idx] + file_content[prefix_gran + "/attr/G-Ring_Latitude"] = lats_lists[granule_idx] @staticmethod def _get_per_granule_lons(): @@ -152,7 +152,7 @@ def _add_data_info_to_file_content(self, file_content, filename, data_var_prefix # ATMS SDR files always produce data with 12 scans per granule even if there are less? FIXME! total_rows = DEFAULT_FILE_SHAPE[0] * 12 * num_grans new_shape = (total_rows, DEFAULT_FILE_SHAPE[1], self._num_of_bands) - key = 'BrightnessTemperature' + key = "BrightnessTemperature" key = data_var_prefix + "/" + key file_content[key] = np.repeat(DEFAULT_FILE_DATA.copy(), 12 * num_grans, axis=0) file_content[key] = np.repeat(file_content[key][:, :, np.newaxis], self._num_of_bands, axis=2) @@ -181,10 +181,10 @@ def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefi file_content[k] = np.repeat([file_content[k]], total_rows, axis=0) file_content[k + "/shape"] = new_shape - angles = ['SolarZenithAngle', - 'SolarAzimuthAngle', - 'SatelliteZenithAngle', - 'SatelliteAzimuthAngle'] + angles = ["SolarZenithAngle", + "SolarAzimuthAngle", + "SatelliteZenithAngle", + "SatelliteAzimuthAngle"] for k in angles: k = data_var_prefix + "/" + k file_content[k] = lon_data # close enough to SZA @@ -193,8 +193,8 @@ def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefi @staticmethod def _add_geo_ref(file_content, filename): - geo_prefix = 'GATMO' - file_content['/attr/N_GEO_Ref'] = geo_prefix + filename[5:] + geo_prefix = "GATMO" + file_content["/attr/N_GEO_Ref"] = geo_prefix + filename[5:] @staticmethod def _convert_numpy_content_to_dataarray(final_content): @@ -204,9 +204,9 @@ def _convert_numpy_content_to_dataarray(final_content): if isinstance(val, np.ndarray): val = da.from_array(val, chunks=val.shape) if val.ndim > 2: - final_content[key] = DataArray(val, dims=('y', 'x', 'z')) + final_content[key] = DataArray(val, dims=("y", "x", "z")) elif val.ndim > 1: - final_content[key] = DataArray(val, dims=('y', 'x')) + final_content[key] = DataArray(val, dims=("y", "x")) else: final_content[key] = DataArray(val) @@ -215,9 +215,9 @@ def get_test_content(self, filename, filename_info, filetype_info): final_content = {} for dataset in self.datasets: dataset_group = DATASET_KEYS[dataset] - prefix1 = 'Data_Products/{dataset_group}'.format(dataset_group=dataset_group) - prefix2 = '{prefix}/{dataset_group}_Aggr'.format(prefix=prefix1, dataset_group=dataset_group) - prefix3 = 'All_Data/{dataset_group}_All'.format(dataset_group=dataset_group) + prefix1 = "Data_Products/{dataset_group}".format(dataset_group=dataset_group) + prefix2 = 
"{prefix}/{dataset_group}_Aggr".format(prefix=prefix1, dataset_group=dataset_group) + prefix3 = "All_Data/{dataset_group}_All".format(dataset_group=dataset_group) file_content = {} self._add_basic_metadata_to_file_content(file_content, filename_info, self._num_test_granules) @@ -229,10 +229,10 @@ def get_test_content(self, filename, filename_info, filetype_info): for k, v in list(file_content.items()): file_content[k.format(prefix1=prefix1, prefix2=prefix2, prefix3=prefix3)] = v - if filename[:5] in ['SATMS', 'TATMS']: + if filename[:5] in ["SATMS", "TATMS"]: self._add_data_info_to_file_content(file_content, filename, prefix3, self._num_test_granules) - elif filename[0] == 'G': + elif filename[0] == "G": self._add_geolocation_info_to_file_content(file_content, filename, prefix3, self._num_test_granules) final_content.update(file_content) @@ -248,23 +248,23 @@ class TestATMS_SDR_Reader: def _assert_bt_properties(self, data_arr, num_scans=1, with_area=True): assert np.issubdtype(data_arr.dtype, np.float32) - assert data_arr.attrs['calibration'] == 'brightness_temperature' - assert data_arr.attrs['units'] == 'K' - assert data_arr.attrs['rows_per_scan'] == num_scans + assert data_arr.attrs["calibration"] == "brightness_temperature" + assert data_arr.attrs["units"] == "K" + assert data_arr.attrs["rows_per_scan"] == num_scans if with_area: - assert 'area' in data_arr.attrs - assert data_arr.attrs['area'] is not None - assert data_arr.attrs['area'].shape == data_arr.shape + assert "area" in data_arr.attrs + assert data_arr.attrs["area"] is not None + assert data_arr.attrs["area"].shape == data_arr.shape else: - assert 'area' not in data_arr.attrs + assert "area" not in data_arr.attrs def setup_method(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy.readers.viirs_atms_sdr_base import JPSS_SDR_FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(JPSS_SDR_FileHandler, '__bases__', (FakeHDF5_ATMS_SDR_FileHandler,)) + self.p = mock.patch.object(JPSS_SDR_FileHandler, "__bases__", (FakeHDF5_ATMS_SDR_FileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -277,7 +277,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - '/path/to/atms/sdr/data/SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5', + "/path/to/atms/sdr/data/SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) @@ -288,11 +288,11 @@ def test_init_start_end_time(self): """Test basic init with start and end times around the start/end times of the provided file.""" r = load_reader(self.reader_configs, filter_parameters={ - 'start_time': datetime(2022, 12, 19), - 'end_time': datetime(2022, 12, 21) + "start_time": datetime(2022, 12, 19), + "end_time": datetime(2022, 12, 21) }) loadables = r.select_files_from_pathnames([ - 'SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5', + "SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) @@ -300,10 +300,10 @@ def test_init_start_end_time(self): assert r.file_handlers 
@pytest.mark.parametrize("files, expected", - [(['SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5', - 'GATMO_j01_d20221220_t0910240_e0921356_b26361_c20221220100456680030_cspp_dev.h5'], + [(["SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", + "GATMO_j01_d20221220_t0910240_e0921356_b26361_c20221220100456680030_cspp_dev.h5"], True), - (['SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5', ], + (["SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", ], False)] ) def test_load_all_bands(self, files, expected): diff --git a/satpy/tests/reader_tests/test_avhrr_l0_hrpt.py b/satpy/tests/reader_tests/test_avhrr_l0_hrpt.py index e5241ba025..b1504e9014 100644 --- a/satpy/tests/reader_tests/test_avhrr_l0_hrpt.py +++ b/satpy/tests/reader_tests/test_avhrr_l0_hrpt.py @@ -44,7 +44,7 @@ def setUp(self) -> None: test_data["id"]["id"][:5] = 891 # Channel 3b test_data["id"]["id"][5:] = 890 - with NamedTemporaryFile(mode='w+', suffix='.hmf', delete=False) as hrpt_file: + with NamedTemporaryFile(mode="w+", suffix=".hmf", delete=False) as hrpt_file: self.filename = hrpt_file.name test_data.tofile(hrpt_file) @@ -71,7 +71,7 @@ class TestHRPTGetUncalibratedData(TestHRPTWithFile): """Test case for reading uncalibrated hrpt data.""" def _get_channel_1_counts(self): - return self._get_dataset(make_dataid(name='1', calibration='counts')) + return self._get_dataset(make_dataid(name="1", calibration="counts")) def test_get_dataset_returns_a_dataarray(self): """Test that get_dataset returns a dataarray.""" @@ -81,7 +81,7 @@ def test_get_dataset_returns_a_dataarray(self): def test_platform_name(self): """Test that the platform name is correct.""" result = self._get_channel_1_counts() - assert result.attrs['platform_name'] == 'NOAA 19' + assert result.attrs["platform_name"] == "NOAA 19" def test_no_calibration_values_are_1(self): """Test that the values of non-calibrated data is 1.""" @@ -137,7 +137,7 @@ class TestHRPTGetCalibratedReflectances(TestHRPTWithPatchedCalibratorAndFile): def _get_channel_1_reflectance(self): """Get the channel 1 reflectance.""" - dataset_id = make_dataid(name='1', calibration='reflectance') + dataset_id = make_dataid(name="1", calibration="reflectance") return self._get_dataset(dataset_id) def test_calibrated_reflectances_values(self): @@ -151,7 +151,7 @@ class TestHRPTGetCalibratedBT(TestHRPTWithPatchedCalibratorAndFile): def _get_channel_4_bt(self): """Get the channel 4 bt.""" - dataset_id = make_dataid(name='4', calibration='brightness_temperature') + dataset_id = make_dataid(name="4", calibration="brightness_temperature") return self._get_dataset(dataset_id) def test_calibrated_bt_values(self): @@ -165,17 +165,17 @@ class TestHRPTChannel3(TestHRPTWithPatchedCalibratorAndFile): def _get_channel_3b_bt(self): """Get the channel 4 bt.""" - dataset_id = make_dataid(name='3b', calibration='brightness_temperature') + dataset_id = make_dataid(name="3b", calibration="brightness_temperature") return self._get_dataset(dataset_id) def _get_channel_3a_reflectance(self): """Get the channel 4 bt.""" - dataset_id = make_dataid(name='3a', calibration='reflectance') + dataset_id = make_dataid(name="3a", calibration="reflectance") return self._get_dataset(dataset_id) def _get_channel_3a_counts(self): """Get the channel 4 bt.""" - dataset_id = make_dataid(name='3a', calibration='counts') + dataset_id = make_dataid(name="3a", calibration="counts") return self._get_dataset(dataset_id) def 
test_channel_3b_masking(self): @@ -212,7 +212,7 @@ def _prepare_mocks(self, Orbital, SatelliteInterpolator, get_lonlatalt): get_lonlatalt.return_value = (mock.MagicMock(), mock.MagicMock(), mock.MagicMock()) SatelliteInterpolator.return_value.interpolate.return_value = self.fake_lons, self.fake_lats - @mock.patch.multiple('satpy.readers.hrpt', + @mock.patch.multiple("satpy.readers.hrpt", Orbital=mock.DEFAULT, compute_pixels=mock.DEFAULT, get_lonlatalt=mock.DEFAULT, @@ -220,11 +220,11 @@ def _prepare_mocks(self, Orbital, SatelliteInterpolator, get_lonlatalt): def test_longitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, SatelliteInterpolator): """Check that latitudes are returned properly.""" self._prepare_mocks(Orbital, SatelliteInterpolator, get_lonlatalt) - dataset_id = make_dataid(name='longitude') + dataset_id = make_dataid(name="longitude") result = self._get_dataset(dataset_id) assert (result == self.fake_lons).all() - @mock.patch.multiple('satpy.readers.hrpt', + @mock.patch.multiple("satpy.readers.hrpt", Orbital=mock.DEFAULT, compute_pixels=mock.DEFAULT, get_lonlatalt=mock.DEFAULT, @@ -232,6 +232,6 @@ def test_longitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, S def test_latitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, SatelliteInterpolator): """Check that latitudes are returned properly.""" self._prepare_mocks(Orbital, SatelliteInterpolator, get_lonlatalt) - dataset_id = make_dataid(name='latitude') + dataset_id = make_dataid(name="latitude") result = self._get_dataset(dataset_id) assert (result == self.fake_lats).all() diff --git a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py index 4a543b449c..076f89b0f2 100644 --- a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py +++ b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py @@ -27,49 +27,49 @@ GAC_PATTERN = '{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}' # noqa -GAC_POD_FILENAMES = ['NSS.GHRR.NA.D79184.S1150.E1337.B0008384.WI', - 'NSS.GHRR.NA.D79184.S2350.E0137.B0008384.WI', - 'NSS.GHRR.NA.D80021.S0927.E1121.B0295354.WI', - 'NSS.GHRR.NA.D80021.S1120.E1301.B0295455.WI', - 'NSS.GHRR.NA.D80021.S1256.E1450.B0295556.GC', - 'NSS.GHRR.NE.D83208.S1219.E1404.B0171819.WI', - 'NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI', - 'NSS.GHRR.TN.D79183.S1258.E1444.B0369697.GC', - 'NSS.GHRR.TN.D80003.S1147.E1332.B0630506.GC', - 'NSS.GHRR.TN.D80003.S1328.E1513.B0630507.GC', - 'NSS.GHRR.TN.D80003.S1509.E1654.B0630608.GC'] - -GAC_KLM_FILENAMES = ['NSS.GHRR.NK.D01235.S0252.E0446.B1703233.GC', - 'NSS.GHRR.NL.D01288.S2315.E0104.B0549495.GC', - 'NSS.GHRR.NM.D04111.S2305.E0050.B0947778.GC', - 'NSS.GHRR.NN.D13011.S0559.E0741.B3939192.WI', - 'NSS.GHRR.NP.D15361.S0121.E0315.B3547172.SV', - 'NSS.GHRR.M1.D15362.S0031.E0129.B1699697.SV', - 'NSS.GHRR.M2.D10178.S2359.E0142.B1914142.SV'] - -LAC_POD_FILENAMES = ['BRN.HRPT.ND.D95152.S1730.E1715.B2102323.UB', - 'BRN.HRPT.ND.D95152.S1910.E1857.B2102424.UB', - 'BRN.HRPT.NF.D85152.S1345.E1330.B0241414.UB', - 'BRN.HRPT.NJ.D95152.S1233.E1217.B0216060.UB'] - -LAC_KLM_FILENAMES = ['BRN.HRPT.M1.D14152.S0958.E1012.B0883232.UB', - 'BRN.HRPT.M1.D14152.S1943.E1958.B0883838.UB', - 'BRN.HRPT.M2.D12153.S0912.E0922.B2914747.UB', - 'BRN.HRPT.NN.D12153.S0138.E0152.B3622828.UB', - 'BRN.HRPT.NN.D12153.S0139.E0153.B3622828.UB', - 'BRN.HRPT.NN.D12153.S1309.E1324.B3623535.UB', - 'BRN.HRPT.NP.D12153.S0003.E0016.B1707272.UB', - 
'BRN.HRPT.NP.D12153.S1134.E1148.B1707979.UB', - 'BRN.HRPT.NP.D16184.S1256.E1311.B3813131.UB', - 'BRN.HRPT.NP.D16184.S1438.E1451.B3813232.UB', - 'BRN.HRPT.NP.D16184.S1439.E1451.B3813232.UB', - 'BRN.HRPT.NP.D16185.S1245.E1259.B3814545.UB', - 'BRN.HRPT.NP.D16185.S1427.E1440.B3814646.UB', - 'NSS.FRAC.M2.D12153.S1729.E1910.B2915354.SV', - 'NSS.LHRR.NP.D16306.S1803.E1814.B3985555.WI'] - - -@mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__', return_value=None) +GAC_POD_FILENAMES = ["NSS.GHRR.NA.D79184.S1150.E1337.B0008384.WI", + "NSS.GHRR.NA.D79184.S2350.E0137.B0008384.WI", + "NSS.GHRR.NA.D80021.S0927.E1121.B0295354.WI", + "NSS.GHRR.NA.D80021.S1120.E1301.B0295455.WI", + "NSS.GHRR.NA.D80021.S1256.E1450.B0295556.GC", + "NSS.GHRR.NE.D83208.S1219.E1404.B0171819.WI", + "NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI", + "NSS.GHRR.TN.D79183.S1258.E1444.B0369697.GC", + "NSS.GHRR.TN.D80003.S1147.E1332.B0630506.GC", + "NSS.GHRR.TN.D80003.S1328.E1513.B0630507.GC", + "NSS.GHRR.TN.D80003.S1509.E1654.B0630608.GC"] + +GAC_KLM_FILENAMES = ["NSS.GHRR.NK.D01235.S0252.E0446.B1703233.GC", + "NSS.GHRR.NL.D01288.S2315.E0104.B0549495.GC", + "NSS.GHRR.NM.D04111.S2305.E0050.B0947778.GC", + "NSS.GHRR.NN.D13011.S0559.E0741.B3939192.WI", + "NSS.GHRR.NP.D15361.S0121.E0315.B3547172.SV", + "NSS.GHRR.M1.D15362.S0031.E0129.B1699697.SV", + "NSS.GHRR.M2.D10178.S2359.E0142.B1914142.SV"] + +LAC_POD_FILENAMES = ["BRN.HRPT.ND.D95152.S1730.E1715.B2102323.UB", + "BRN.HRPT.ND.D95152.S1910.E1857.B2102424.UB", + "BRN.HRPT.NF.D85152.S1345.E1330.B0241414.UB", + "BRN.HRPT.NJ.D95152.S1233.E1217.B0216060.UB"] + +LAC_KLM_FILENAMES = ["BRN.HRPT.M1.D14152.S0958.E1012.B0883232.UB", + "BRN.HRPT.M1.D14152.S1943.E1958.B0883838.UB", + "BRN.HRPT.M2.D12153.S0912.E0922.B2914747.UB", + "BRN.HRPT.NN.D12153.S0138.E0152.B3622828.UB", + "BRN.HRPT.NN.D12153.S0139.E0153.B3622828.UB", + "BRN.HRPT.NN.D12153.S1309.E1324.B3623535.UB", + "BRN.HRPT.NP.D12153.S0003.E0016.B1707272.UB", + "BRN.HRPT.NP.D12153.S1134.E1148.B1707979.UB", + "BRN.HRPT.NP.D16184.S1256.E1311.B3813131.UB", + "BRN.HRPT.NP.D16184.S1438.E1451.B3813232.UB", + "BRN.HRPT.NP.D16184.S1439.E1451.B3813232.UB", + "BRN.HRPT.NP.D16185.S1245.E1259.B3814545.UB", + "BRN.HRPT.NP.D16185.S1427.E1440.B3814646.UB", + "NSS.FRAC.M2.D12153.S1729.E1910.B2915354.SV", + "NSS.LHRR.NP.D16306.S1803.E1814.B3985555.WI"] + + +@mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__", return_value=None) def _get_fh_mocked(init_mock, **attrs): """Create a mocked file handler with the given attributes.""" from satpy.readers.avhrr_l1b_gaclac import GACLACFile @@ -82,11 +82,11 @@ def _get_fh_mocked(init_mock, **attrs): def _get_reader_mocked(along_track=3): """Create a mocked reader.""" - reader = mock.MagicMock(spacecraft_name='spacecraft_name', - meta_data={'foo': 'bar'}) + reader = mock.MagicMock(spacecraft_name="spacecraft_name", + meta_data={"foo": "bar"}) reader.mask = [0, 0] reader.get_times.return_value = np.arange(along_track) - reader.get_tle_lines.return_value = 'tle' + reader.get_tle_lines.return_value = "tle" return reader @@ -98,16 +98,16 @@ def setUp(self): self.pygac = mock.MagicMock() self.fhs = mock.MagicMock() modules = { - 'pygac': self.pygac, - 'pygac.gac_klm': self.pygac.gac_klm, - 'pygac.gac_pod': self.pygac.gac_pod, - 'pygac.lac_klm': self.pygac.lac_klm, - 'pygac.lac_pod': self.pygac.lac_pod, - 'pygac.utils': self.pygac.utils, - 'pygac.calibration': self.pygac.calibration, + "pygac": self.pygac, + "pygac.gac_klm": self.pygac.gac_klm, + "pygac.gac_pod": self.pygac.gac_pod, + "pygac.lac_klm": 
self.pygac.lac_klm, + "pygac.lac_pod": self.pygac.lac_pod, + "pygac.utils": self.pygac.utils, + "pygac.calibration": self.pygac.calibration, } - self.module_patcher = mock.patch.dict('sys.modules', modules) + self.module_patcher = mock.patch.dict("sys.modules", modules) self.module_patcher.start() def tearDown(self): @@ -131,7 +131,7 @@ def setUp(self): class TestGACLACFile(GACLACFilePatcher): """Test the GACLAC file handler.""" - def _get_fh(self, filename='NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI', + def _get_fh(self, filename="NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI", **kwargs): """Create a file handler.""" from trollsift import parse @@ -145,15 +145,15 @@ def test_init(self): from pygac.lac_klm import LACKLMReader from pygac.lac_pod import LACPODReader - kwargs = {'start_line': 1, - 'end_line': 2, - 'strip_invalid_coords': True, - 'interpolate_coords': True, - 'adjust_clock_drift': True, - 'tle_dir': 'tle_dir', - 'tle_name': 'tle_name', - 'tle_thresh': 123, - 'calibration': 'calibration'} + kwargs = {"start_line": 1, + "end_line": 2, + "strip_invalid_coords": True, + "interpolate_coords": True, + "adjust_clock_drift": True, + "tle_dir": "tle_dir", + "tle_name": "tle_name", + "tle_thresh": 123, + "calibration": "calibration"} for filenames, reader_cls in zip([GAC_POD_FILENAMES, GAC_KLM_FILENAMES, LAC_POD_FILENAMES, LAC_KLM_FILENAMES], [GACPODReader, GACKLMReader, LACPODReader, LACKLMReader]): for filename in filenames: @@ -161,23 +161,23 @@ def test_init(self): self.assertLess(fh.start_time, fh.end_time, "Start time must precede end time.") self.assertIs(fh.reader_class, reader_cls, - 'Wrong reader class assigned to {}'.format(filename)) + "Wrong reader class assigned to {}".format(filename)) def test_read_raw_data(self): """Test raw data reading.""" fh = _get_fh_mocked(reader=None, - interpolate_coords='interpolate_coords', - creation_site='creation_site', - reader_kwargs={'foo': 'bar'}, - filename='myfile') + interpolate_coords="interpolate_coords", + creation_site="creation_site", + reader_kwargs={"foo": "bar"}, + filename="myfile") reader = mock.MagicMock(mask=[0]) reader_cls = mock.MagicMock(return_value=reader) fh.reader_class = reader_cls fh.read_raw_data() - reader_cls.assert_called_with(interpolate_coords='interpolate_coords', - creation_site='creation_site', - foo='bar') - reader.read.assert_called_with('myfile') + reader_cls.assert_called_with(interpolate_coords="interpolate_coords", + creation_site="creation_site", + foo="bar") + reader.read.assert_called_with("myfile") # Test exception if all data is masked reader.mask = [1] @@ -185,9 +185,9 @@ def test_read_raw_data(self): with self.assertRaises(ValueError): fh.read_raw_data() - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs') - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.slice') - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel') + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.slice") + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel") def test_get_dataset_slice(self, get_channel, slc, *mocks): """Get a slice of a dataset.""" from satpy.tests.utils import make_dataid @@ -206,28 +206,28 @@ def slice_patched(data, times): acq = np.array([0, 1, 2, 3, 4]) slc.side_effect = slice_patched get_channel.return_value = ch - kwargs_list = [{'strip_invalid_coords': False, - 'start_line': 123, 'end_line': 456}, - {'strip_invalid_coords': True, - 'start_line': None, 'end_line': 
None}, - {'strip_invalid_coords': True, - 'start_line': 123, 'end_line': 456}] + kwargs_list = [{"strip_invalid_coords": False, + "start_line": 123, "end_line": 456}, + {"strip_invalid_coords": True, + "start_line": None, "end_line": None}, + {"strip_invalid_coords": True, + "start_line": 123, "end_line": 456}] for kwargs in kwargs_list: fh = _get_fh_mocked( reader=_get_reader_mocked(along_track=len(acq)), - chn_dict={'1': 0}, + chn_dict={"1": 0}, **kwargs ) - key = make_dataid(name='1', calibration='reflectance') - info = {'name': '1', 'standard_name': 'reflectance'} + key = make_dataid(name="1", calibration="reflectance") + info = {"name": "1", "standard_name": "reflectance"} res = fh.get_dataset(key, info) np.testing.assert_array_equal(res.data, ch[1:3, :]) - np.testing.assert_array_equal(res.coords['acq_time'].data, acq[1:3]) - np.testing.assert_array_equal(slc.call_args_list[-1][1]['times'], acq) - np.testing.assert_array_equal(slc.call_args_list[-1][1]['data'], ch) + np.testing.assert_array_equal(res.coords["acq_time"].data, acq[1:3]) + np.testing.assert_array_equal(slc.call_args_list[-1][1]["times"], acq) + np.testing.assert_array_equal(slc.call_args_list[-1][1]["data"], ch) - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs') + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") def test_get_dataset_latlon(self, *mocks): """Test getting the latitudes and longitudes.""" from satpy.tests.utils import make_dataid @@ -245,26 +245,26 @@ def test_get_dataset_latlon(self, *mocks): ) # With interpolation of coordinates - for name, exp_data in zip(['longitude', 'latitude'], [lons, lats]): + for name, exp_data in zip(["longitude", "latitude"], [lons, lats]): key = make_dataid(name=name) - info = {'name': name, 'standard_name': 'my_standard_name'} + info = {"name": name, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) exp = xr.DataArray(exp_data, name=res.name, - dims=('y', 'x'), - coords={'acq_time': ('y', [0, 1, 2])}) + dims=("y", "x"), + coords={"acq_time": ("y", [0, 1, 2])}) xr.testing.assert_equal(res, exp) # Without interpolation of coordinates fh.interpolate_coords = False - for name, _exp_data in zip(['longitude', 'latitude'], [lons, lats]): + for name, _exp_data in zip(["longitude", "latitude"], [lons, lats]): key = make_dataid(name=name) - info = {'name': name, 'standard_name': 'my_standard_name'} + info = {"name": name, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) - self.assertTupleEqual(res.dims, ('y', 'x_every_eighth')) + self.assertTupleEqual(res.dims, ("y", "x_every_eighth")) - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs') - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_angle') + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_angle") def test_get_dataset_angles(self, get_angle, *mocks): """Test getting the angles.""" from satpy.readers.avhrr_l1b_gaclac import ANGLES @@ -284,23 +284,23 @@ def test_get_dataset_angles(self, get_angle, *mocks): # With interpolation of coordinates for angle in ANGLES: key = make_dataid(name=angle) - info = {'name': angle, 'standard_name': 'my_standard_name'} + info = {"name": angle, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) exp = xr.DataArray(ones, name=res.name, - dims=('y', 'x'), - coords={'acq_time': ('y', [0, 1, 2])}) + dims=("y", "x"), + coords={"acq_time": ("y", [0, 1, 2])}) 
xr.testing.assert_equal(res, exp) # Without interpolation of coordinates fh.interpolate_coords = False for angle in ANGLES: key = make_dataid(name=angle) - info = {'name': angle, 'standard_name': 'my_standard_name'} + info = {"name": angle, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) - self.assertTupleEqual(res.dims, ('y', 'x_every_eighth')) + self.assertTupleEqual(res.dims, ("y", "x_every_eighth")) - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs') + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") def test_get_dataset_qual_flags(self, *mocks): """Test getting the qualitiy flags.""" from satpy.tests.utils import make_dataid @@ -316,20 +316,20 @@ def test_get_dataset_qual_flags(self, *mocks): interpolate_coords=True ) - key = make_dataid(name='qual_flags') - info = {'name': 'qual_flags'} + key = make_dataid(name="qual_flags") + info = {"name": "qual_flags"} res = fh.get_dataset(key=key, info=info) exp = xr.DataArray(qual_flags, name=res.name, - dims=('y', 'num_flags'), - coords={'acq_time': ('y', [0, 1, 2]), - 'num_flags': ['Scan line number', - 'Fatal error flag', - 'Insufficient data for calibration', - 'Insufficient data for calibration', - 'Solar contamination of blackbody in channels 3', - 'Solar contamination of blackbody in channels 4', - 'Solar contamination of blackbody in channels 5']}) + dims=("y", "num_flags"), + coords={"acq_time": ("y", [0, 1, 2]), + "num_flags": ["Scan line number", + "Fatal error flag", + "Insufficient data for calibration", + "Insufficient data for calibration", + "Solar contamination of blackbody in channels 3", + "Solar contamination of blackbody in channels 4", + "Solar contamination of blackbody in channels 5"]}) xr.testing.assert_equal(res, exp) def test_get_channel(self): @@ -343,9 +343,9 @@ def test_get_channel(self): reader.get_counts.return_value = counts reader.get_calibrated_channels.return_value = calib_channels fh = _get_fh_mocked(reader=reader, counts=None, calib_channels=None, - chn_dict={'1': 0}) + chn_dict={"1": 0}) - key = make_dataid(name='1', calibration='counts') + key = make_dataid(name="1", calibration="counts") # Counts res = fh._get_channel(key=key) np.testing.assert_array_equal(res, [[1, 2, 3], @@ -353,8 +353,8 @@ def test_get_channel(self): np.testing.assert_array_equal(fh.counts, counts) # Reflectance and Brightness Temperature - for calib in ['reflectance', 'brightness_temperature']: - key = make_dataid(name='1', calibration=calib) + for calib in ["reflectance", "brightness_temperature"]: + key = make_dataid(name="1", calibration=calib) res = fh._get_channel(key=key) np.testing.assert_array_equal(res, [[2, 4, 6], [8, 10, 12]]) @@ -362,17 +362,17 @@ def test_get_channel(self): # Invalid with pytest.raises(ValueError): - key = make_dataid(name='7', calibration='coffee') + key = make_dataid(name="7", calibration="coffee") # Buffering reader.get_counts.reset_mock() - key = make_dataid(name='1', calibration='counts') + key = make_dataid(name="1", calibration="counts") fh._get_channel(key=key) reader.get_counts.assert_not_called() reader.get_calibrated_channels.reset_mock() - for calib in ['reflectance', 'brightness_temperature']: - key = make_dataid(name='1', calibration=calib) + for calib in ["reflectance", "brightness_temperature"]: + key = make_dataid(name="1", calibration=calib) fh._get_channel(key) reader.get_calibrated_channels.assert_not_called() @@ -385,17 +385,17 @@ def test_get_angle(self): fh = _get_fh_mocked(reader=reader, angles=None) # Test 
angle readout - key = make_dataid(name='sensor_zenith_angle') + key = make_dataid(name="sensor_zenith_angle") res = fh._get_angle(key) self.assertEqual(res, 2) - self.assertDictEqual(fh.angles, {'sensor_zenith_angle': 2, - 'sensor_azimuth_angle': 1, - 'solar_zenith_angle': 4, - 'solar_azimuth_angle': 3, - 'sun_sensor_azimuth_difference_angle': 5}) + self.assertDictEqual(fh.angles, {"sensor_zenith_angle": 2, + "sensor_azimuth_angle": 1, + "solar_zenith_angle": 4, + "solar_azimuth_angle": 3, + "sun_sensor_azimuth_difference_angle": 5}) # Test buffering - key = make_dataid(name='sensor_azimuth_angle') + key = make_dataid(name="sensor_azimuth_angle") fh._get_angle(key) reader.get_angles.assert_called_once() @@ -416,7 +416,7 @@ def test_strip_invalid_lat(self): fh._strip_invalid_lat() pygac.utils.strip_invalid_lat.assert_called_once() - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._slice') + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._slice") def test_slice(self, _slice): """Test slicing.""" @@ -425,7 +425,7 @@ def _slice_patched(data): _slice.side_effect = _slice_patched data = np.zeros((4, 2)) - times = np.array([1, 2, 3, 4], dtype='datetime64[us]') + times = np.array([1, 2, 3, 4], dtype="datetime64[us]") fh = _get_fh_mocked(start_line=1, end_line=3, strip_invalid_coords=False) data_slc, times_slc = fh.slice(data, times) @@ -434,22 +434,22 @@ def _slice_patched(data): self.assertEqual(fh.start_time, datetime(1970, 1, 1, 0, 0, 0, 2)) self.assertEqual(fh.end_time, datetime(1970, 1, 1, 0, 0, 0, 3)) - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_qual_flags') - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._strip_invalid_lat') + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_qual_flags") + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._strip_invalid_lat") def test__slice(self, strip_invalid_lat, get_qual_flags): """Test slicing.""" import pygac.utils pygac.utils.check_user_scanlines.return_value = 1, 2 - pygac.utils.slice_channel.return_value = 'sliced' + pygac.utils.slice_channel.return_value = "sliced" strip_invalid_lat.return_value = 3, 4 - get_qual_flags.return_value = 'qual_flags' + get_qual_flags.return_value = "qual_flags" data = np.zeros((2, 2)) # a) Only start/end line given fh = _get_fh_mocked(start_line=5, end_line=6, strip_invalid_coords=False) data_slc = fh._slice(data) - self.assertEqual(data_slc, 'sliced') + self.assertEqual(data_slc, "sliced") pygac.utils.check_user_scanlines.assert_called_with( start_line=5, end_line=6, first_valid_lat=None, last_valid_lat=None, along_track=2) @@ -472,9 +472,9 @@ def test__slice(self, strip_invalid_lat, get_qual_flags): first_valid_lat=3, last_valid_lat=4, along_track=2) # Test slicing with older pygac versions - pygac.utils.slice_channel.return_value = ('sliced', 'foo', 'bar') + pygac.utils.slice_channel.return_value = ("sliced", "foo", "bar") data_slc = fh._slice(data) - self.assertEqual(data_slc, 'sliced') + self.assertEqual(data_slc, "sliced") class TestGetDataset(GACLACFilePatcher): @@ -483,21 +483,21 @@ class TestGetDataset(GACLACFilePatcher): def setUp(self): """Set up the instance.""" self.exp = xr.DataArray(da.ones((3, 3)), - name='1', - dims=('y', 'x'), - coords={'acq_time': ('y', [0, 1, 2])}, - attrs={'name': '1', - 'platform_name': 'spacecraft_name', - 'orbit_number': 123, - 'sensor': 'sensor', - 'foo': 'bar', - 'standard_name': 'my_standard_name'}) - self.exp.coords['acq_time'].attrs['long_name'] = 'Mean scanline acquisition time' + name="1", + dims=("y", "x"), + 
coords={"acq_time": ("y", [0, 1, 2])}, + attrs={"name": "1", + "platform_name": "spacecraft_name", + "orbit_number": 123, + "sensor": "sensor", + "foo": "bar", + "standard_name": "my_standard_name"}) + self.exp.coords["acq_time"].attrs["long_name"] = "Mean scanline acquisition time" super().setUp() - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__', return_value=None) - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.read_raw_data') - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel', return_value=np.ones((3, 3))) + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__", return_value=None) + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.read_raw_data") + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel", return_value=np.ones((3, 3))) def test_get_dataset_channels(self, get_channel, *mocks): """Test getting the channel datasets.""" pygac_reader = _get_reader_mocked() @@ -506,7 +506,7 @@ def test_get_dataset_channels(self, get_channel, *mocks): # Test calibration to reflectance as well as attributes. key, res = self._get_dataset(fh) exp = self._create_expected(res.name) - exp.attrs['orbital_parameters'] = {'tle': 'tle'} + exp.attrs["orbital_parameters"] = {"tle": "tle"} xr.testing.assert_identical(res, exp) get_channel.assert_called_with(key) @@ -517,8 +517,8 @@ def test_get_dataset_channels(self, get_channel, *mocks): def _get_dataset(fh): from satpy.tests.utils import make_dataid - key = make_dataid(name='1', calibration='reflectance') - info = {'name': '1', 'standard_name': 'my_standard_name'} + key = make_dataid(name="1", calibration="reflectance") + info = {"name": "1", "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) return key, res @@ -527,12 +527,12 @@ def _create_file_handler(reader): """Mock reader and file handler.""" fh = _get_fh_mocked( reader=reader, - chn_dict={'1': 0, '5': 0}, + chn_dict={"1": 0, "5": 0}, start_line=None, end_line=None, strip_invalid_coords=False, - filename_info={'orbit_number': 123}, - sensor='sensor', + filename_info={"orbit_number": 123}, + sensor="sensor", ) return fh @@ -540,20 +540,20 @@ def _create_file_handler(reader): def _create_expected(name): exp = xr.DataArray(da.ones((3, 3)), name=name, - dims=('y', 'x'), - coords={'acq_time': ('y', [0, 1, 2])}, - attrs={'name': '1', - 'platform_name': 'spacecraft_name', - 'orbit_number': 123, - 'sensor': 'sensor', - 'foo': 'bar', - 'standard_name': 'my_standard_name'}) - exp.coords['acq_time'].attrs['long_name'] = 'Mean scanline acquisition time' + dims=("y", "x"), + coords={"acq_time": ("y", [0, 1, 2])}, + attrs={"name": "1", + "platform_name": "spacecraft_name", + "orbit_number": 123, + "sensor": "sensor", + "foo": "bar", + "standard_name": "my_standard_name"}) + exp.coords["acq_time"].attrs["long_name"] = "Mean scanline acquisition time" return exp - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__', return_value=None) - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.read_raw_data') - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel', return_value=np.ones((3, 3))) + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__", return_value=None) + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.read_raw_data") + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel", return_value=np.ones((3, 3))) def test_get_dataset_no_tle(self, get_channel, *mocks): """Test getting the channel datasets when no TLEs are present.""" pygac_reader = 
_get_reader_mocked() @@ -575,7 +575,7 @@ def _check_get_channel_calls(fh, get_channel): """Check _get_channel() calls.""" from satpy.tests.utils import make_dataid - for key in [make_dataid(name='1', calibration='counts'), - make_dataid(name='5', calibration='brightness_temperature')]: - fh.get_dataset(key=key, info={'name': 1}) + for key in [make_dataid(name="1", calibration="counts"), + make_dataid(name="5", calibration="brightness_temperature")]: + fh.get_dataset(key=key, info={"name": 1}) get_channel.assert_called_with(key) diff --git a/satpy/tests/reader_tests/test_clavrx.py b/satpy/tests/reader_tests/test_clavrx.py index 86e0cf1fa7..66758d44dc 100644 --- a/satpy/tests/reader_tests/test_clavrx.py +++ b/satpy/tests/reader_tests/test_clavrx.py @@ -45,64 +45,64 @@ class FakeHDF4FileHandlerPolar(FakeHDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { - '/attr/platform': 'SNPP', - '/attr/sensor': 'VIIRS', + "/attr/platform": "SNPP", + "/attr/sensor": "VIIRS", } - file_content['longitude'] = xr.DataArray( + file_content["longitude"] = xr.DataArray( da.from_array(DEFAULT_LON_DATA, chunks=4096), attrs={ - '_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'standard_name': 'longitude', + "_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "longitude", }) - file_content['longitude/shape'] = DEFAULT_FILE_SHAPE + file_content["longitude/shape"] = DEFAULT_FILE_SHAPE - file_content['latitude'] = xr.DataArray( + file_content["latitude"] = xr.DataArray( da.from_array(DEFAULT_LAT_DATA, chunks=4096), attrs={ - '_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'standard_name': 'latitude', + "_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "latitude", }) - file_content['latitude/shape'] = DEFAULT_FILE_SHAPE + file_content["latitude/shape"] = DEFAULT_FILE_SHAPE - file_content['variable1'] = xr.DataArray( + file_content["variable1"] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.float32), attrs={ - '_FillValue': -1, - 'scale_factor': 1., - 'add_offset': 0., - 'units': '1', + "_FillValue": -1, + "scale_factor": 1., + "add_offset": 0., + "units": "1", }) - file_content['variable1/shape'] = DEFAULT_FILE_SHAPE + file_content["variable1/shape"] = DEFAULT_FILE_SHAPE # data with fill values - file_content['variable2'] = xr.DataArray( + file_content["variable2"] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.float32), attrs={ - '_FillValue': -1, - 'scale_factor': 1., - 'add_offset': 0., - 'units': '1', + "_FillValue": -1, + "scale_factor": 1., + "add_offset": 0., + "units": "1", }) - file_content['variable2/shape'] = DEFAULT_FILE_SHAPE - file_content['variable2'] = file_content['variable2'].where( - file_content['variable2'] % 2 != 0) + file_content["variable2/shape"] = DEFAULT_FILE_SHAPE + file_content["variable2"] = file_content["variable2"].where( + file_content["variable2"] % 2 != 0) # category - file_content['variable3'] = xr.DataArray( + file_content["variable3"] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.byte), attrs={ - 'SCALED': 0, - '_FillValue': -128, - 'flag_meanings': 'clear water supercooled mixed ice unknown', - 'flag_values': [0, 1, 2, 3, 4, 5], - 'units': 'none', + "SCALED": 0, + "_FillValue": -128, + "flag_meanings": "clear water supercooled mixed ice unknown", + "flag_values": [0, 1, 2, 3, 4, 5], + "units": "none", }) - 
file_content['variable3/shape'] = DEFAULT_FILE_SHAPE + file_content["variable3/shape"] = DEFAULT_FILE_SHAPE return file_content @@ -116,9 +116,9 @@ def setUp(self): """Wrap HDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.clavrx import CLAVRXHDF4FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(CLAVRXHDF4FileHandler, '__bases__', (FakeHDF4FileHandlerPolar,)) + self.p = mock.patch.object(CLAVRXHDF4FileHandler, "__bases__", (FakeHDF4FileHandlerPolar,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -131,7 +131,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf', + "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -143,7 +143,7 @@ def test_available_datasets(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf', + "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -152,57 +152,57 @@ def test_available_datasets(self): # mimic the YAML file being configured for more datasets fake_dataset_info = [ - (None, {'name': 'variable1', 'resolution': None, 'file_type': ['clavrx_hdf4']}), - (True, {'name': 'variable2', 'resolution': 742, 'file_type': ['clavrx_hdf4']}), - (True, {'name': 'variable2', 'resolution': 1, 'file_type': ['clavrx_hdf4']}), - (None, {'name': 'variable2', 'resolution': 1, 'file_type': ['clavrx_hdf4']}), - (None, {'name': '_fake1', 'file_type': ['clavrx_hdf4']}), - (None, {'name': 'variable1', 'file_type': ['level_fake']}), - (True, {'name': 'variable3', 'file_type': ['clavrx_hdf4']}), + (None, {"name": "variable1", "resolution": None, "file_type": ["clavrx_hdf4"]}), + (True, {"name": "variable2", "resolution": 742, "file_type": ["clavrx_hdf4"]}), + (True, {"name": "variable2", "resolution": 1, "file_type": ["clavrx_hdf4"]}), + (None, {"name": "variable2", "resolution": 1, "file_type": ["clavrx_hdf4"]}), + (None, {"name": "_fake1", "file_type": ["clavrx_hdf4"]}), + (None, {"name": "variable1", "file_type": ["level_fake"]}), + (True, {"name": "variable3", "file_type": ["clavrx_hdf4"]}), ] - new_ds_infos = list(r.file_handlers['clavrx_hdf4'][0].available_datasets( + new_ds_infos = list(r.file_handlers["clavrx_hdf4"][0].available_datasets( fake_dataset_info)) self.assertEqual(len(new_ds_infos), 9) # we have this and can provide the resolution self.assertTrue(new_ds_infos[0][0]) - self.assertEqual(new_ds_infos[0][1]['resolution'], 742) # hardcoded + self.assertEqual(new_ds_infos[0][1]["resolution"], 742) # hardcoded # we have this, but previous file handler said it knew about it # and it is producing the same resolution as what we have self.assertTrue(new_ds_infos[1][0]) - self.assertEqual(new_ds_infos[1][1]['resolution'], 742) + self.assertEqual(new_ds_infos[1][1]["resolution"], 742) # we have this, but don't want to change the resolution # because a previous handler said it has it 
self.assertTrue(new_ds_infos[2][0]) - self.assertEqual(new_ds_infos[2][1]['resolution'], 1) + self.assertEqual(new_ds_infos[2][1]["resolution"], 1) # even though the previous one was known we can still # produce it at our new resolution self.assertTrue(new_ds_infos[3][0]) - self.assertEqual(new_ds_infos[3][1]['resolution'], 742) + self.assertEqual(new_ds_infos[3][1]["resolution"], 742) # we have this and can update the resolution since # no one else has claimed it self.assertTrue(new_ds_infos[4][0]) - self.assertEqual(new_ds_infos[4][1]['resolution'], 742) + self.assertEqual(new_ds_infos[4][1]["resolution"], 742) # we don't have this variable, don't change it self.assertFalse(new_ds_infos[5][0]) - self.assertIsNone(new_ds_infos[5][1].get('resolution')) + self.assertIsNone(new_ds_infos[5][1].get("resolution")) # we have this, but it isn't supposed to come from our file type self.assertIsNone(new_ds_infos[6][0]) - self.assertIsNone(new_ds_infos[6][1].get('resolution')) + self.assertIsNone(new_ds_infos[6][1].get("resolution")) # we could have loaded this but some other file handler said it has this self.assertTrue(new_ds_infos[7][0]) - self.assertIsNone(new_ds_infos[7][1].get('resolution')) + self.assertIsNone(new_ds_infos[7][1].get("resolution")) # we can add resolution to the previous dataset, so we do self.assertTrue(new_ds_infos[8][0]) - self.assertEqual(new_ds_infos[8][1]['resolution'], 742) + self.assertEqual(new_ds_infos[8][1]["resolution"], 742) def test_load_all(self): """Test loading all test datasets.""" @@ -210,22 +210,22 @@ def test_load_all(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray): + with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf', + "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf", ]) r.create_filehandlers(loadables) - var_list = ['variable1', 'variable2', 'variable3'] + var_list = ["variable1", "variable2", "variable3"] datasets = r.load(var_list) self.assertEqual(len(datasets), len(var_list)) for v in datasets.values(): - self.assertEqual(v.attrs['units'], '1') - self.assertEqual(v.attrs['platform_name'], 'npp') - self.assertEqual(v.attrs['sensor'], 'viirs') - self.assertIsInstance(v.attrs['area'], SwathDefinition) - self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 16) - self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 16) + self.assertEqual(v.attrs["units"], "1") + self.assertEqual(v.attrs["platform_name"], "npp") + self.assertEqual(v.attrs["sensor"], "viirs") + self.assertIsInstance(v.attrs["area"], SwathDefinition) + self.assertEqual(v.attrs["area"].lons.attrs["rows_per_scan"], 16) + self.assertEqual(v.attrs["area"].lats.attrs["rows_per_scan"], 16) self.assertIsInstance(datasets["variable3"].attrs.get("flag_meanings"), list) @@ -235,72 +235,72 @@ class FakeHDF4FileHandlerGeo(FakeHDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { - '/attr/platform': 'HIM8', - '/attr/sensor': 'AHI', + "/attr/platform": "HIM8", + "/attr/sensor": "AHI", # this is a Level 2 file that came from a L1B file - '/attr/L1B': 'clavrx_H08_20180806_1800', + "/attr/L1B": "clavrx_H08_20180806_1800", } - file_content['longitude'] = xr.DataArray( + file_content["longitude"] = xr.DataArray( DEFAULT_LON_DATA, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - '_FillValue': 
np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'standard_name': 'longitude', + "_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "longitude", }) - file_content['longitude/shape'] = DEFAULT_FILE_SHAPE + file_content["longitude/shape"] = DEFAULT_FILE_SHAPE - file_content['latitude'] = xr.DataArray( + file_content["latitude"] = xr.DataArray( DEFAULT_LAT_DATA, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - '_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'standard_name': 'latitude', + "_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "latitude", }) - file_content['latitude/shape'] = DEFAULT_FILE_SHAPE + file_content["latitude/shape"] = DEFAULT_FILE_SHAPE - file_content['variable1'] = xr.DataArray( + file_content["variable1"] = xr.DataArray( DEFAULT_FILE_DATA.astype(np.float32), - dims=('y', 'x'), + dims=("y", "x"), attrs={ - '_FillValue': -1, - 'scale_factor': 1., - 'add_offset': 0., - 'units': '1', - 'valid_range': (-32767, 32767), + "_FillValue": -1, + "scale_factor": 1., + "add_offset": 0., + "units": "1", + "valid_range": (-32767, 32767), }) - file_content['variable1/shape'] = DEFAULT_FILE_SHAPE + file_content["variable1/shape"] = DEFAULT_FILE_SHAPE # data with fill values - file_content['variable2'] = xr.DataArray( + file_content["variable2"] = xr.DataArray( DEFAULT_FILE_DATA.astype(np.float32), - dims=('y', 'x'), + dims=("y", "x"), attrs={ - '_FillValue': -1, - 'scale_factor': 1., - 'add_offset': 0., - 'units': '1', + "_FillValue": -1, + "scale_factor": 1., + "add_offset": 0., + "units": "1", }) - file_content['variable2/shape'] = DEFAULT_FILE_SHAPE - file_content['variable2'] = file_content['variable2'].where( - file_content['variable2'] % 2 != 0) + file_content["variable2/shape"] = DEFAULT_FILE_SHAPE + file_content["variable2"] = file_content["variable2"].where( + file_content["variable2"] % 2 != 0) # category - file_content['variable3'] = xr.DataArray( + file_content["variable3"] = xr.DataArray( DEFAULT_FILE_DATA.astype(np.byte), - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'SCALED': 0, - '_FillValue': -128, - 'flag_meanings': 'clear water supercooled mixed ice unknown', - 'flag_values': [0, 1, 2, 3, 4, 5], - 'units': '1', + "SCALED": 0, + "_FillValue": -128, + "flag_meanings": "clear water supercooled mixed ice unknown", + "flag_values": [0, 1, 2, 3, 4, 5], + "units": "1", }) - file_content['variable3/shape'] = DEFAULT_FILE_SHAPE + file_content["variable3/shape"] = DEFAULT_FILE_SHAPE return file_content @@ -314,9 +314,9 @@ def setUp(self): """Wrap HDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.clavrx import CLAVRXHDF4FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(CLAVRXHDF4FileHandler, '__bases__', (FakeHDF4FileHandlerGeo,)) + self.p = mock.patch.object(CLAVRXHDF4FileHandler, "__bases__", (FakeHDF4FileHandlerGeo,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -329,7 +329,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'clavrx_H08_20180806_1800.level2.hdf', + "clavrx_H08_20180806_1800.level2.hdf", ]) self.assertEqual(len(loadables), 1) 
r.create_filehandlers(loadables) @@ -342,12 +342,12 @@ def test_no_nav_donor(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray): + with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'clavrx_H08_20180806_1800.level2.hdf', + "clavrx_H08_20180806_1800.level2.hdf", ]) r.create_filehandlers(loadables) - self.assertRaises(IOError, r.load, ['variable1', 'variable2', 'variable3']) + self.assertRaises(IOError, r.load, ["variable1", "variable2", "variable3"]) def test_load_all_old_donor(self): """Test loading all test datasets with old donor.""" @@ -355,13 +355,13 @@ def test_load_all_old_donor(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray): + with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'clavrx_H08_20180806_1800.level2.hdf', + "clavrx_H08_20180806_1800.level2.hdf", ]) r.create_filehandlers(loadables) - with mock.patch('satpy.readers.clavrx.glob') as g, mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: - g.return_value = ['fake_donor.nc'] + with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: + g.return_value = ["fake_donor.nc"] x = np.linspace(-0.1518, 0.1518, 300) y = np.linspace(0.1518, -0.1518, 10) proj = mock.Mock( @@ -369,29 +369,29 @@ def test_load_all_old_donor(self): semi_minor_axis=6356.7523142, perspective_point_height=35791, longitude_of_projection_origin=140.7, - sweep_angle_axis='y', + sweep_angle_axis="y", ) d.return_value = fake_donor = mock.MagicMock( - variables={'Projection': proj, 'x': x, 'y': y}, + variables={"Projection": proj, "x": x, "y": y}, ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] - datasets = r.load(['variable1', 'variable2', 'variable3']) + datasets = r.load(["variable1", "variable2", "variable3"]) self.assertEqual(len(datasets), 3) for v in datasets.values(): - self.assertNotIn('calibration', v.attrs) - self.assertEqual(v.attrs['units'], '1') - self.assertIsInstance(v.attrs['area'], AreaDefinition) + self.assertNotIn("calibration", v.attrs) + self.assertEqual(v.attrs["units"], "1") + self.assertIsInstance(v.attrs["area"], AreaDefinition) if v.attrs.get("flag_values"): - self.assertIn('_FillValue', v.attrs) + self.assertIn("_FillValue", v.attrs) else: - self.assertNotIn('_FillValue', v.attrs) - if v.attrs["name"] == 'variable1': + self.assertNotIn("_FillValue", v.attrs) + if v.attrs["name"] == "variable1": self.assertIsInstance(v.attrs["valid_range"], list) else: - self.assertNotIn('valid_range', v.attrs) - if 'flag_values' in v.attrs: + self.assertNotIn("valid_range", v.attrs) + if "flag_values" in v.attrs: self.assertTrue(np.issubdtype(v.dtype, np.integer)) - self.assertIsNotNone(v.attrs.get('flag_meanings')) + self.assertIsNotNone(v.attrs.get("flag_meanings")) def test_load_all_new_donor(self): """Test loading all test datasets with new donor.""" @@ -399,13 +399,13 @@ def test_load_all_new_donor(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray): + with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'clavrx_H08_20180806_1800.level2.hdf', + "clavrx_H08_20180806_1800.level2.hdf", ]) r.create_filehandlers(loadables) - with 
mock.patch('satpy.readers.clavrx.glob') as g, mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: - g.return_value = ['fake_donor.nc'] + with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: + g.return_value = ["fake_donor.nc"] x = np.linspace(-0.1518, 0.1518, 300) y = np.linspace(0.1518, -0.1518, 10) proj = mock.Mock( @@ -413,19 +413,19 @@ def test_load_all_new_donor(self): semi_minor_axis=6356752.3142, perspective_point_height=35791000, longitude_of_projection_origin=140.7, - sweep_angle_axis='y', + sweep_angle_axis="y", ) d.return_value = fake_donor = mock.MagicMock( - variables={'goes_imager_projection': proj, 'x': x, 'y': y}, + variables={"goes_imager_projection": proj, "x": x, "y": y}, ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] - datasets = r.load(['variable1', 'variable2', 'variable3']) + datasets = r.load(["variable1", "variable2", "variable3"]) self.assertEqual(len(datasets), 3) for v in datasets.values(): - self.assertNotIn('calibration', v.attrs) - self.assertEqual(v.attrs['units'], '1') - self.assertIsInstance(v.attrs['area'], AreaDefinition) - self.assertTrue(v.attrs['area'].is_geostationary) - self.assertEqual(v.attrs['platform_name'], 'himawari8') - self.assertEqual(v.attrs['sensor'], 'ahi') - self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) + self.assertNotIn("calibration", v.attrs) + self.assertEqual(v.attrs["units"], "1") + self.assertIsInstance(v.attrs["area"], AreaDefinition) + self.assertTrue(v.attrs["area"].is_geostationary) + self.assertEqual(v.attrs["platform_name"], "himawari8") + self.assertEqual(v.attrs["sensor"], "ahi") + self.assertIsNotNone(datasets["variable3"].attrs.get("flag_meanings")) diff --git a/satpy/tests/reader_tests/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx_nc.py index ea0dcaed9b..33be29078a 100644 --- a/satpy/tests/reader_tests/test_clavrx_nc.py +++ b/satpy/tests/reader_tests/test_clavrx_nc.py @@ -36,75 +36,75 @@ DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) -AHI_FILE = 'clavrx_H08_20210603_1500_B01_FLDK_R.level2.nc' +AHI_FILE = "clavrx_H08_20210603_1500_B01_FLDK_R.level2.nc" def fake_test_content(filename, **kwargs): """Mimic reader input file content.""" attrs = { - 'platform': 'HIM8', - 'sensor': 'AHI', + "platform": "HIM8", + "sensor": "AHI", # this is a Level 2 file that came from a L1B file - 'L1B': 'clavrx_H08_20210603_1500_B01_FLDK_R', + "L1B": "clavrx_H08_20210603_1500_B01_FLDK_R", } longitude = xr.DataArray(DEFAULT_LON_DATA, - dims=('scan_lines_along_track_direction', - 'pixel_elements_along_scan_direction'), - attrs={'_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'standard_name': 'longitude', - 'units': 'degrees_east' + dims=("scan_lines_along_track_direction", + "pixel_elements_along_scan_direction"), + attrs={"_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "longitude", + "units": "degrees_east" }) latitude = xr.DataArray(DEFAULT_LAT_DATA, - dims=('scan_lines_along_track_direction', - 'pixel_elements_along_scan_direction'), - attrs={'_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'standard_name': 'latitude', - 'units': 'degrees_south' + dims=("scan_lines_along_track_direction", + "pixel_elements_along_scan_direction"), + 
attrs={"_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "latitude", + "units": "degrees_south" }) variable1 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.float32), - dims=('scan_lines_along_track_direction', - 'pixel_elements_along_scan_direction'), - attrs={'_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'units': '1', - 'valid_range': [-32767, 32767], + dims=("scan_lines_along_track_direction", + "pixel_elements_along_scan_direction"), + attrs={"_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "units": "1", + "valid_range": [-32767, 32767], }) # data with fill values variable2 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.float32), - dims=('scan_lines_along_track_direction', - 'pixel_elements_along_scan_direction'), - attrs={'_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'units': '1', - 'valid_range': [-32767, 32767], + dims=("scan_lines_along_track_direction", + "pixel_elements_along_scan_direction"), + attrs={"_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "units": "1", + "valid_range": [-32767, 32767], }) variable2 = variable2.where(variable2 % 2 != 0) # category variable3 = xr.DataArray(DEFAULT_FILE_FLAGS, - dims=('scan_lines_along_track_direction', - 'pixel_elements_along_scan_direction'), - attrs={'SCALED': 0, - '_FillValue': -127, - 'units': '1', - 'flag_values': [0, 1, 2, 3]}) + dims=("scan_lines_along_track_direction", + "pixel_elements_along_scan_direction"), + attrs={"SCALED": 0, + "_FillValue": -127, + "units": "1", + "flag_values": [0, 1, 2, 3]}) ds_vars = { - 'longitude': longitude, - 'latitude': latitude, - 'variable1': variable1, - 'variable2': variable2, - 'variable3': variable3 + "longitude": longitude, + "latitude": latitude, + "variable1": variable1, + "variable2": variable2, + "variable3": variable3 } ds = xr.Dataset(ds_vars, attrs=attrs) @@ -121,7 +121,7 @@ class TestCLAVRXReaderGeo: def setup_method(self): """Read fake data.""" from satpy._config import config_search_paths - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) @pytest.mark.parametrize( ("filenames", "expected_loadables"), @@ -130,7 +130,7 @@ def setup_method(self): def test_reader_creation(self, filenames, expected_loadables): """Test basic initialization.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od: + with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: od.side_effect = fake_test_content r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) @@ -141,12 +141,12 @@ def test_reader_creation(self, filenames, expected_loadables): @pytest.mark.parametrize( ("filenames", "expected_datasets"), - [([AHI_FILE], ['variable1', 'variable2', 'variable3']), ] + [([AHI_FILE], ["variable1", "variable2", "variable3"]), ] ) def test_available_datasets(self, filenames, expected_datasets): """Test that variables are dynamically discovered.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od: + with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: od.side_effect = fake_test_content r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) @@ -157,19 +157,19 @@ def test_available_datasets(self, filenames, expected_datasets): @pytest.mark.parametrize( ("filenames", "loadable_ids"), - [([AHI_FILE], ['variable1', 
'variable2', 'variable3']), ] + [([AHI_FILE], ["variable1", "variable2", "variable3"]), ] ) def test_load_all_new_donor(self, filenames, loadable_ids): """Test loading all test datasets with new donor.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od: + with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: od.side_effect = fake_test_content r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables) - with mock.patch('satpy.readers.clavrx.glob') as g, \ - mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: - g.return_value = ['fake_donor.nc'] + with mock.patch("satpy.readers.clavrx.glob") as g, \ + mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: + g.return_value = ["fake_donor.nc"] x = np.linspace(-0.1518, 0.1518, 300) y = np.linspace(0.1518, -0.1518, 10) proj = mock.Mock( @@ -177,26 +177,26 @@ def test_load_all_new_donor(self, filenames, loadable_ids): semi_minor_axis=6356752.3142, perspective_point_height=35791000, longitude_of_projection_origin=140.7, - sweep_angle_axis='y', + sweep_angle_axis="y", ) d.return_value = fake_donor = mock.MagicMock( - variables={'goes_imager_projection': proj, 'x': x, 'y': y}, + variables={"goes_imager_projection": proj, "x": x, "y": y}, ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] datasets = r.load(loadable_ids) assert len(datasets) == 3 for v in datasets.values(): - assert 'calibration' not in v.attrs - assert v.attrs['units'] == '1' - assert isinstance(v.attrs['area'], AreaDefinition) - assert v.attrs['platform_name'] == 'himawari8' - assert v.attrs['sensor'] == 'ahi' - assert 'rows_per_scan' not in v.coords.get('longitude').attrs + assert "calibration" not in v.attrs + assert v.attrs["units"] == "1" + assert isinstance(v.attrs["area"], AreaDefinition) + assert v.attrs["platform_name"] == "himawari8" + assert v.attrs["sensor"] == "ahi" + assert "rows_per_scan" not in v.coords.get("longitude").attrs if v.attrs["name"] in ["variable1", "variable2"]: assert isinstance(v.attrs["valid_range"], list) assert v.dtype == np.float32 assert "_FillValue" not in v.attrs.keys() else: - assert (datasets['variable3'].attrs.get('flag_meanings')) is not None - assert (datasets['variable3'].attrs.get('flag_meanings') == '') + assert (datasets["variable3"].attrs.get("flag_meanings")) is not None + assert (datasets["variable3"].attrs.get("flag_meanings") == "") assert np.issubdtype(v.dtype, np.integer) diff --git a/satpy/tests/reader_tests/test_cmsaf_claas.py b/satpy/tests/reader_tests/test_cmsaf_claas.py index 9c7c7e0089..7f5b728ba8 100644 --- a/satpy/tests/reader_tests/test_cmsaf_claas.py +++ b/satpy/tests/reader_tests/test_cmsaf_claas.py @@ -219,8 +219,8 @@ def test_get_area_def(self, file_handler, area_exp): @pytest.mark.parametrize( "ds_name,expected", [ - ("ctt", xr.DataArray([[280, 290], [300, 310]], dims=('y', 'x'))), - ("cph", xr.DataArray([[0, 1], [2, 0]], dims=('y', 'x'))), + ("ctt", xr.DataArray([[280, 290], [300, 310]], dims=("y", "x"))), + ("cph", xr.DataArray([[0, 1], [2, 0]], dims=("y", "x"))), ] ) def test_get_dataset(self, file_handler, ds_name, expected): diff --git a/satpy/tests/reader_tests/test_electrol_hrit.py b/satpy/tests/reader_tests/test_electrol_hrit.py index eca413d033..c555f377b1 100644 --- a/satpy/tests/reader_tests/test_electrol_hrit.py +++ b/satpy/tests/reader_tests/test_electrol_hrit.py @@ -48,15 +48,15 @@ class Testrecarray2dict(unittest.TestCase): def test_fun(self): 
"""Test record array.""" - inner_st = np.dtype([('test_str', '02d}": {'channels': [4, 5, 6, 8, 9], - 'grid_type': '1km'}, - "nir_{:>02d}": {'channels': [13, 16, 22], - 'grid_type': '1km'}, - "ir_{:>02d}": {'channels': [38, 87, 97, 105, 123, 133], - 'grid_type': '2km'}, - "wv_{:>02d}": {'channels': [63, 73], - 'grid_type': '2km'}, + "vis_{:>02d}": {"channels": [4, 5, 6, 8, 9], + "grid_type": "1km"}, + "nir_{:>02d}": {"channels": [13, 16, 22], + "grid_type": "1km"}, + "ir_{:>02d}": {"channels": [38, 87, 97, 105, 123, 133], + "grid_type": "2km"}, + "wv_{:>02d}": {"channels": [63, 73], + "grid_type": "2km"}, } @@ -336,15 +336,15 @@ class FakeFCIFileHandlerWithBadIDPFData(FakeFCIFileHandlerFDHSI): def _get_test_content_all_channels(self): data = super()._get_test_content_all_channels() - data['data/vis_06/measured/x'].attrs['scale_factor'] *= -1 - data['data/vis_06/measured/x'].attrs['scale_factor'] = \ - np.float32(data['data/vis_06/measured/x'].attrs['scale_factor']) - data['data/vis_06/measured/x'].attrs['add_offset'] = \ - np.float32(data['data/vis_06/measured/x'].attrs['add_offset']) - data['data/vis_06/measured/y'].attrs['scale_factor'] = \ - np.float32(data['data/vis_06/measured/y'].attrs['scale_factor']) - data['data/vis_06/measured/y'].attrs['add_offset'] = \ - np.float32(data['data/vis_06/measured/y'].attrs['add_offset']) + data["data/vis_06/measured/x"].attrs["scale_factor"] *= -1 + data["data/vis_06/measured/x"].attrs["scale_factor"] = \ + np.float32(data["data/vis_06/measured/x"].attrs["scale_factor"]) + data["data/vis_06/measured/x"].attrs["add_offset"] = \ + np.float32(data["data/vis_06/measured/x"].attrs["add_offset"]) + data["data/vis_06/measured/y"].attrs["scale_factor"] = \ + np.float32(data["data/vis_06/measured/y"].attrs["scale_factor"]) + data["data/vis_06/measured/y"].attrs["add_offset"] = \ + np.float32(data["data/vis_06/measured/y"].attrs["add_offset"]) data["state/celestial/earth_sun_distance"] = xr.DataArray(da.repeat(da.array([30000000]), 6000)) return data @@ -354,12 +354,12 @@ class FakeFCIFileHandlerHRFI(FakeFCIFileHandlerBase): """Mock HRFI data.""" chan_patterns = { - "vis_{:>02d}_hr": {'channels': [6], - 'grid_type': '500m'}, - "nir_{:>02d}_hr": {'channels': [22], - 'grid_type': '500m'}, - "ir_{:>02d}_hr": {'channels': [38, 105], - 'grid_type': '1km'}, + "vis_{:>02d}_hr": {"channels": [6], + "grid_type": "500m"}, + "nir_{:>02d}_hr": {"channels": [22], + "grid_type": "500m"}, + "ir_{:>02d}_hr": {"channels": [38, 105], + "grid_type": "1km"}, } @@ -404,12 +404,12 @@ def clear_cache(reader): "terran": ["ir_38", "ir_105"], "terran_grid_type": ["1km"] * 2} -_test_filenames = {'fdhsi': [ +_test_filenames = {"fdhsi": [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc" ], - 'hrfi': [ + "hrfi": [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-HRFI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc" @@ -431,9 +431,9 @@ def FakeFCIFileHandlerFDHSI_fixture(): """Get a fixture for the fake FDHSI filehandler, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerFDHSI): param_dict = { - 'filetype': 'fci_l1c_fdhsi', - 'channels': _chans_fdhsi, - 'filenames': _test_filenames['fdhsi'] + "filetype": "fci_l1c_fdhsi", + "channels": _chans_fdhsi, + "filenames": _test_filenames["fdhsi"] } yield param_dict @@ -443,9 +443,9 @@ def FakeFCIFileHandlerHRFI_fixture(): """Get a fixture for 
the fake HRFI filehandler, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerHRFI): param_dict = { - 'filetype': 'fci_l1c_hrfi', - 'channels': _chans_hrfi, - 'filenames': _test_filenames['hrfi'] + "filetype": "fci_l1c_hrfi", + "channels": _chans_hrfi, + "filenames": _test_filenames["hrfi"] } yield param_dict @@ -458,12 +458,12 @@ def FakeFCIFileHandlerHRFI_fixture(): class TestFCIL1cNCReader: """Test FCI L1c NetCDF reader with nominal data.""" - fh_param_for_filetype = {'hrfi': {'channels': _chans_hrfi, - 'filenames': _test_filenames['hrfi']}, - 'fdhsi': {'channels': _chans_fdhsi, - 'filenames': _test_filenames['fdhsi']}} + fh_param_for_filetype = {"hrfi": {"channels": _chans_hrfi, + "filenames": _test_filenames["hrfi"]}, + "fdhsi": {"channels": _chans_fdhsi, + "filenames": _test_filenames["fdhsi"]}} - @pytest.mark.parametrize('filenames', [_test_filenames['fdhsi'], _test_filenames['hrfi']]) + @pytest.mark.parametrize("filenames", [_test_filenames["fdhsi"], _test_filenames["hrfi"]]) def test_file_pattern(self, reader_configs, filenames): """Test file pattern matching.""" from satpy.readers import load_reader @@ -472,8 +472,8 @@ def test_file_pattern(self, reader_configs, filenames): files = reader.select_files_from_pathnames(filenames) assert len(files) == 1 - @pytest.mark.parametrize('filenames', [_test_filenames['fdhsi'][0].replace('BODY', 'TRAIL'), - _test_filenames['hrfi'][0].replace('BODY', 'TRAIL')]) + @pytest.mark.parametrize("filenames", [_test_filenames["fdhsi"][0].replace("BODY", "TRAIL"), + _test_filenames["hrfi"][0].replace("BODY", "TRAIL")]) def test_file_pattern_for_TRAIL_file(self, reader_configs, filenames): """Test file pattern matching for TRAIL files, which should not be picked up.""" from satpy.readers import load_reader @@ -482,226 +482,226 @@ def test_file_pattern_for_TRAIL_file(self, reader_configs, filenames): files = reader.select_files_from_pathnames(filenames) assert len(files) == 0 - @pytest.mark.parametrize('fh_param,expected_res_n', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), 16), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), 4)]) + @pytest.mark.parametrize("fh_param,expected_res_n", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) def test_load_counts(self, reader_configs, fh_param, expected_res_n): """Test loading with counts.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( [make_dataid(name=name, calibration="counts") for name in - fh_param['channels']["solar"] + fh_param['channels']["terran"]], pad_data=False) + fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param['channels']["solar"] + fh_param['channels']["terran"], - fh_param['channels']["solar_grid_type"] + - fh_param['channels']["terran_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) + for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], + fh_param["channels"]["solar_grid_type"] + + fh_param["channels"]["terran_grid_type"]): + assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) assert res[ch].dtype == np.uint16 assert res[ch].attrs["calibration"] == 
"counts" assert res[ch].attrs["units"] == "count" - if ch == 'ir_38': + if ch == "ir_38": numpy.testing.assert_array_equal(res[ch][-1], 1) numpy.testing.assert_array_equal(res[ch][0], 5000) else: numpy.testing.assert_array_equal(res[ch], 1) - @pytest.mark.parametrize('fh_param,expected_res_n', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), 16), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), 4)]) + @pytest.mark.parametrize("fh_param,expected_res_n", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) def test_load_radiance(self, reader_configs, fh_param, expected_res_n): """Test loading with radiance.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( [make_dataid(name=name, calibration="radiance") for name in - fh_param['channels']["solar"] + fh_param['channels']["terran"]], pad_data=False) + fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param['channels']["solar"] + fh_param['channels']["terran"], - fh_param['channels']["solar_grid_type"] + - fh_param['channels']["terran_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) + for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], + fh_param["channels"]["solar_grid_type"] + + fh_param["channels"]["terran_grid_type"]): + assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) assert res[ch].dtype == np.float64 assert res[ch].attrs["calibration"] == "radiance" - assert res[ch].attrs["units"] == 'mW m-2 sr-1 (cm-1)-1' + assert res[ch].attrs["units"] == "mW m-2 sr-1 (cm-1)-1" assert res[ch].attrs["radiance_unit_conversion_coefficient"] == 1234.56 - if ch == 'ir_38': + if ch == "ir_38": numpy.testing.assert_array_equal(res[ch][-1], 15) numpy.testing.assert_array_equal(res[ch][0], 9700) else: numpy.testing.assert_array_equal(res[ch], 15) - @pytest.mark.parametrize('fh_param,expected_res_n', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), 8), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), 2)]) + @pytest.mark.parametrize("fh_param,expected_res_n", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 8), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 2)]) def test_load_reflectance(self, reader_configs, fh_param, expected_res_n): """Test loading with reflectance.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( [make_dataid(name=name, calibration="reflectance") for name in - fh_param['channels']["solar"]], pad_data=False) + fh_param["channels"]["solar"]], pad_data=False) assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param['channels']["solar"], fh_param['channels']["solar_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) + for ch, grid_type in zip(fh_param["channels"]["solar"], fh_param["channels"]["solar_grid_type"]): + assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) assert res[ch].dtype == np.float64 assert 
res[ch].attrs["calibration"] == "reflectance" assert res[ch].attrs["units"] == "%" numpy.testing.assert_array_almost_equal(res[ch], 100 * 15 * 1 * np.pi / 50) - @pytest.mark.parametrize('fh_param,expected_res_n', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), 8), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), 2)]) + @pytest.mark.parametrize("fh_param,expected_res_n", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 8), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 2)]) def test_load_bt(self, reader_configs, caplog, fh_param, expected_res_n): """Test loading with bt.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) with caplog.at_level(logging.WARNING): res = reader.load( [make_dataid(name=name, calibration="brightness_temperature") for - name in fh_param['channels']["terran"]], pad_data=False) + name in fh_param["channels"]["terran"]], pad_data=False) assert caplog.text == "" assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param['channels']["terran"], fh_param['channels']["terran_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) + for ch, grid_type in zip(fh_param["channels"]["terran"], fh_param["channels"]["terran_grid_type"]): + assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) assert res[ch].dtype == np.float64 assert res[ch].attrs["calibration"] == "brightness_temperature" assert res[ch].attrs["units"] == "K" - if ch == 'ir_38': + if ch == "ir_38": numpy.testing.assert_array_almost_equal(res[ch][-1], 209.68274099) numpy.testing.assert_array_almost_equal(res[ch][0], 1888.851296) else: numpy.testing.assert_array_almost_equal(res[ch], 209.68274099) - @pytest.mark.parametrize('fh_param', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture')), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'))]) + @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) def test_orbital_parameters_attr(self, reader_configs, fh_param): """Test the orbital parameter attribute.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( [make_dataid(name=name) for name in - fh_param['channels']["solar"] + fh_param['channels']["terran"]], pad_data=False) + fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) - for ch in fh_param['channels']["solar"] + fh_param['channels']["terran"]: + for ch in fh_param["channels"]["solar"] + fh_param["channels"]["terran"]: assert res[ch].attrs["orbital_parameters"] == { - 'satellite_actual_longitude': np.mean(np.arange(6000)), - 'satellite_actual_latitude': np.mean(np.arange(6000)), - 'satellite_actual_altitude': np.mean(np.arange(6000)), - 'satellite_nominal_longitude': 0.0, - 'satellite_nominal_latitude': 0, - 'satellite_nominal_altitude': 35786400.0, - 'projection_longitude': 0.0, - 'projection_latitude': 0, - 'projection_altitude': 35786400.0, + "satellite_actual_longitude": np.mean(np.arange(6000)), + "satellite_actual_latitude": np.mean(np.arange(6000)), + "satellite_actual_altitude": np.mean(np.arange(6000)), + "satellite_nominal_longitude": 0.0, + "satellite_nominal_latitude": 0, + "satellite_nominal_altitude": 
35786400.0, + "projection_longitude": 0.0, + "projection_latitude": 0, + "projection_altitude": 35786400.0, } expected_pos_info_for_filetype = { - 'fdhsi': {'1km': {'start_position_row': 1, - 'end_position_row': 200, - 'segment_height': 200, - 'grid_width': 11136}, - '2km': {'start_position_row': 1, - 'end_position_row': 100, - 'segment_height': 100, - 'grid_width': 5568}}, - 'hrfi': {'500m': {'start_position_row': 1, - 'end_position_row': 400, - 'segment_height': 400, - 'grid_width': 22272}, - '1km': {'start_position_row': 1, - 'end_position_row': 200, - 'grid_width': 11136, - 'segment_height': 200}} + "fdhsi": {"1km": {"start_position_row": 1, + "end_position_row": 200, + "segment_height": 200, + "grid_width": 11136}, + "2km": {"start_position_row": 1, + "end_position_row": 100, + "segment_height": 100, + "grid_width": 5568}}, + "hrfi": {"500m": {"start_position_row": 1, + "end_position_row": 400, + "segment_height": 400, + "grid_width": 22272}, + "1km": {"start_position_row": 1, + "end_position_row": 200, + "grid_width": 11136, + "segment_height": 200}} } - @pytest.mark.parametrize('fh_param, expected_pos_info', [ - (lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), expected_pos_info_for_filetype['fdhsi']), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), expected_pos_info_for_filetype['hrfi']) + @pytest.mark.parametrize("fh_param, expected_pos_info", [ + (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), expected_pos_info_for_filetype["fdhsi"]), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), expected_pos_info_for_filetype["hrfi"]) ]) def test_get_segment_position_info(self, reader_configs, fh_param, expected_pos_info): """Test the segment position info method.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) for filetype_handler in list(reader.file_handlers.values())[0]: segpos_info = filetype_handler.get_segment_position_info() assert segpos_info == expected_pos_info - @pytest.mark.parametrize('fh_param,expected_res_n', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), 16), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), 4)]) + @pytest.mark.parametrize("fh_param,expected_res_n", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) def test_load_index_map(self, reader_configs, fh_param, expected_res_n): """Test loading of index_map.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( - [name + '_index_map' for name in - fh_param['channels']["solar"] + fh_param['channels']["terran"]], pad_data=False) + [name + "_index_map" for name in + fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param['channels']["solar"] + fh_param['channels']["terran"], - fh_param['channels']["solar_grid_type"] + - fh_param['channels']["terran_grid_type"]): - assert res[ch + '_index_map'].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) - numpy.testing.assert_array_equal(res[ch + '_index_map'][1, 1], 110) - - @pytest.mark.parametrize('fh_param', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture')), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'))]) + for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], + 
fh_param["channels"]["solar_grid_type"] + + fh_param["channels"]["terran_grid_type"]): + assert res[ch + "_index_map"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + numpy.testing.assert_array_equal(res[ch + "_index_map"][1, 1], 110) + + @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) def test_load_aux_data(self, reader_configs, fh_param): """Test loading of auxiliary data.""" from satpy.readers.fci_l1c_nc import AUX_DATA - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) - res = reader.load([fh_param['channels']['solar'][0] + '_' + key for key in AUX_DATA.keys()], + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + res = reader.load([fh_param["channels"]["solar"][0] + "_" + key for key in AUX_DATA.keys()], pad_data=False) - grid_type = fh_param['channels']['solar_grid_type'][0] - for aux in [fh_param['channels']['solar'][0] + '_' + key for key in AUX_DATA.keys()]: - assert res[aux].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) - if aux == fh_param['channels']['solar'][0] + '_earth_sun_distance': + grid_type = fh_param["channels"]["solar_grid_type"][0] + for aux in [fh_param["channels"]["solar"][0] + "_" + key for key in AUX_DATA.keys()]: + assert res[aux].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + if aux == fh_param["channels"]["solar"][0] + "_earth_sun_distance": numpy.testing.assert_array_equal(res[aux][1, 1], 149597870.7) else: numpy.testing.assert_array_equal(res[aux][1, 1], 10) - @pytest.mark.parametrize('fh_param,expected_res_n', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), 16), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), 4)]) + @pytest.mark.parametrize("fh_param,expected_res_n", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) def test_load_quality_only(self, reader_configs, fh_param, expected_res_n): """Test that loading quality only works.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( - [name + '_pixel_quality' for name in - fh_param['channels']["solar"] + fh_param['channels']["terran"]], pad_data=False) + [name + "_pixel_quality" for name in + fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param['channels']["solar"] + fh_param['channels']["terran"], - fh_param['channels']["solar_grid_type"] + - fh_param['channels']["terran_grid_type"]): - assert res[ch + '_pixel_quality'].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) - numpy.testing.assert_array_equal(res[ch + '_pixel_quality'][1, 1], 3) - assert res[ch + '_pixel_quality'].attrs["name"] == ch + '_pixel_quality' - - @pytest.mark.parametrize('fh_param', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture')), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'))]) + for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], + fh_param["channels"]["solar_grid_type"] + + fh_param["channels"]["terran_grid_type"]): + assert res[ch + "_pixel_quality"].shape == 
(GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + numpy.testing.assert_array_equal(res[ch + "_pixel_quality"][1, 1], 3) + assert res[ch + "_pixel_quality"].attrs["name"] == ch + "_pixel_quality" + + @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) def test_platform_name(self, reader_configs, fh_param): """Test that platform name is exposed. Test that the FCI reader exposes the platform name. Corresponds to GH issue 1014. """ - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load(["vis_06"], pad_data=False) assert res["vis_06"].attrs["platform_name"] == "MTG-I1" - @pytest.mark.parametrize('fh_param, expected_area', [ - (lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), ['mtg_fci_fdss_1km', 'mtg_fci_fdss_2km']), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), ['mtg_fci_fdss_500m', 'mtg_fci_fdss_1km']), + @pytest.mark.parametrize("fh_param, expected_area", [ + (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), ["mtg_fci_fdss_1km", "mtg_fci_fdss_2km"]), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), ["mtg_fci_fdss_500m", "mtg_fci_fdss_1km"]), ]) def test_area_definition_computation(self, reader_configs, fh_param, expected_area): """Test that the geolocation computation is correct.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) - res = reader.load(['ir_105', 'vis_06'], pad_data=False) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + res = reader.load(["ir_105", "vis_06"], pad_data=False) # test that area_ids are harmonisation-conform ___ - assert res['vis_06'].attrs['area'].area_id == expected_area[0] - assert res['ir_105'].attrs['area'].area_id == expected_area[1] + assert res["vis_06"].attrs["area"].area_id == expected_area[0] + assert res["ir_105"].attrs["area"].area_id == expected_area[1] - area_def = res['ir_105'].attrs['area'] + area_def = res["ir_105"].attrs["area"] # test area extents computation np.testing.assert_array_almost_equal(np.array(area_def.area_extent), np.array([-5567999.994203, -5367999.994411, @@ -709,23 +709,23 @@ def test_area_definition_computation(self, reader_configs, fh_param, expected_ar decimal=2) # check that the projection is read in properly - assert area_def.crs.coordinate_operation.method_name == 'Geostationary Satellite (Sweep Y)' + assert area_def.crs.coordinate_operation.method_name == "Geostationary Satellite (Sweep Y)" assert area_def.crs.coordinate_operation.params[0].value == 0.0 # projection origin longitude assert area_def.crs.coordinate_operation.params[1].value == 35786400.0 # projection height assert area_def.crs.ellipsoid.semi_major_metre == 6378137.0 assert area_def.crs.ellipsoid.inverse_flattening == 298.257223563 assert area_def.crs.ellipsoid.is_semi_minor_computed - @pytest.mark.parametrize('fh_param', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture')), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'))]) + @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) def test_excs(self, reader_configs, fh_param): """Test that exceptions are raised where expected.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], 
reader_configs) with pytest.raises(ValueError): - reader.file_handlers[fh_param['filetype']][0].get_dataset(make_dataid(name="invalid"), {}) + reader.file_handlers[fh_param["filetype"]][0].get_dataset(make_dataid(name="invalid"), {}) with pytest.raises(ValueError): - reader.file_handlers[fh_param['filetype']][0].get_dataset( + reader.file_handlers[fh_param["filetype"]][0].get_dataset( make_dataid(name="ir_123", calibration="unknown"), {"units": "unknown"}) @@ -736,7 +736,7 @@ def test_load_composite(self): # in the tests.compositor_tests package from satpy.composites.config_loader import load_compositor_configs_for_sensors - comps, mods = load_compositor_configs_for_sensors(['fci']) + comps, mods = load_compositor_configs_for_sensors(["fci"]) assert len(comps["fci"]) > 0 assert len(mods["fci"]) > 0 @@ -747,7 +747,7 @@ class TestFCIL1cNCReaderBadData: def test_handling_bad_data_ir(self, reader_configs, caplog): """Test handling of bad IR data.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadData): - reader = _get_reader_with_filehandlers(_test_filenames['fdhsi'], reader_configs) + reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) with caplog.at_level(logging.ERROR): reader.load([make_dataid( name="ir_105", @@ -757,7 +757,7 @@ def test_handling_bad_data_ir(self, reader_configs, caplog): def test_handling_bad_data_vis(self, reader_configs, caplog): """Test handling of bad VIS data.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadData): - reader = _get_reader_with_filehandlers(_test_filenames['fdhsi'], reader_configs) + reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) with caplog.at_level(logging.ERROR): reader.load([make_dataid( name="vis_06", @@ -771,7 +771,7 @@ class TestFCIL1cNCReaderBadDataFromIDPF: def test_handling_bad_earthsun_distance(self, reader_configs): """Test handling of bad earth-sun distance data.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadIDPFData): - reader = _get_reader_with_filehandlers(_test_filenames['fdhsi'], reader_configs) + reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) res = reader.load([make_dataid(name=["vis_06"], calibration="reflectance")], pad_data=False) numpy.testing.assert_array_almost_equal(res["vis_06"], 100 * 15 * 1 * np.pi / 50) @@ -779,10 +779,10 @@ def test_handling_bad_earthsun_distance(self, reader_configs): def test_bad_xy_coords(self, reader_configs): """Test that the geolocation computation is correct.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadIDPFData): - reader = _get_reader_with_filehandlers(_test_filenames['fdhsi'], reader_configs) - res = reader.load(['vis_06'], pad_data=False) + reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) + res = reader.load(["vis_06"], pad_data=False) - area_def = res['vis_06'].attrs['area'] + area_def = res["vis_06"].attrs["area"] # test area extents computation np.testing.assert_array_almost_equal(np.array(area_def.area_extent), np.array([-5568000.227139, -5368000.221262, diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index 9ebbdb32e7..114fa1d6d2 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -33,20 +33,20 @@ from satpy.tests.utils import make_dataid AREA_DEF = geometry.AreaDefinition( - 'mtg_fci_fdss_2km', - 'MTG FCI Full Disk Scanning Service area definition with 2 km resolution', + "mtg_fci_fdss_2km", + "MTG FCI Full Disk Scanning 
Service area definition with 2 km resolution", "", - {'h': 35786400., 'lon_0': 0.0, 'ellps': 'WGS84', 'proj': 'geos', 'units': 'm'}, + {"h": 35786400., "lon_0": 0.0, "ellps": "WGS84", "proj": "geos", "units": "m"}, 5568, 5568, (-5567999.9942, 5567999.9942, 5567999.9942, -5567999.9942) ) SEG_AREA_DEF = geometry.AreaDefinition( - 'mtg_fci_fdss_32km', - 'MTG FCI Full Disk Scanning Service area definition with 32 km resolution', + "mtg_fci_fdss_32km", + "MTG FCI Full Disk Scanning Service area definition with 32 km resolution", "", - {'h': 35786400., 'lon_0': 0.0, 'ellps': 'WGS84', 'proj': 'geos', 'units': 'm'}, + {"h": 35786400., "lon_0": 0.0, "ellps": "WGS84", "proj": "geos", "units": "m"}, 348, 348, (-5567999.9942, 5567999.9942, 5567999.9942, -5567999.9942) @@ -61,42 +61,42 @@ def setUp(self): # Easiest way to test the reader is to create a test netCDF file on the fly # Create unique filenames to prevent race conditions when tests are run in parallel self.test_file = str(uuid.uuid4()) + ".nc" - with Dataset(self.test_file, 'w') as nc: + with Dataset(self.test_file, "w") as nc: # Create dimensions - nc.createDimension('number_of_columns', 10) - nc.createDimension('number_of_rows', 100) - nc.createDimension('maximum_number_of_layers', 2) + nc.createDimension("number_of_columns", 10) + nc.createDimension("number_of_rows", 100) + nc.createDimension("maximum_number_of_layers", 2) # add global attributes - nc.data_source = 'test_data_source' - nc.platform = 'test_platform' + nc.data_source = "test_data_source" + nc.platform = "test_platform" # Add datasets - x = nc.createVariable('x', np.float32, dimensions=('number_of_columns',)) - x.standard_name = 'projection_x_coordinate' + x = nc.createVariable("x", np.float32, dimensions=("number_of_columns",)) + x.standard_name = "projection_x_coordinate" x[:] = np.arange(10) - y = nc.createVariable('y', np.float32, dimensions=('number_of_rows',)) - y.standard_name = 'projection_y_coordinate' + y = nc.createVariable("y", np.float32, dimensions=("number_of_rows",)) + y.standard_name = "projection_y_coordinate" y[:] = np.arange(100) - s = nc.createVariable('product_quality', np.int8) + s = nc.createVariable("product_quality", np.int8) s[:] = 99. - one_layer_dataset = nc.createVariable('test_one_layer', np.float32, - dimensions=('number_of_rows', 'number_of_columns')) + one_layer_dataset = nc.createVariable("test_one_layer", np.float32, + dimensions=("number_of_rows", "number_of_columns")) one_layer_dataset[:] = np.ones((100, 10)) - one_layer_dataset.test_attr = 'attr' - one_layer_dataset.units = 'test_units' + one_layer_dataset.test_attr = "attr" + one_layer_dataset.units = "test_units" - two_layers_dataset = nc.createVariable('test_two_layers', np.float32, - dimensions=('maximum_number_of_layers', - 'number_of_rows', - 'number_of_columns')) + two_layers_dataset = nc.createVariable("test_two_layers", np.float32, + dimensions=("maximum_number_of_layers", + "number_of_rows", + "number_of_columns")) two_layers_dataset[0, :, :] = np.ones((100, 10)) two_layers_dataset[1, :, :] = 2 * np.ones((100, 10)) - mtg_geos_projection = nc.createVariable('mtg_geos_projection', int, dimensions=()) + mtg_geos_projection = nc.createVariable("mtg_geos_projection", int, dimensions=()) mtg_geos_projection.longitude_of_projection_origin = 0.0 mtg_geos_projection.semi_major_axis = 6378137. 
mtg_geos_projection.inverse_flattening = 298.257223563 @@ -114,89 +114,89 @@ def tearDown(self): def test_all_basic(self): """Test all basic functionalities.""" - self.assertEqual(self.fh.spacecraft_name, 'test_platform') - self.assertEqual(self.fh.sensor_name, 'test_data_source') + self.assertEqual(self.fh.spacecraft_name, "test_platform") + self.assertEqual(self.fh.sensor_name, "test_data_source") self.assertEqual(self.fh.ssp_lon, 0.0) global_attributes = self.fh._get_global_attributes() expected_global_attributes = { - 'filename': self.test_file, - 'spacecraft_name': 'test_platform', - 'ssp_lon': 0.0, - 'sensor': 'test_data_source', - 'platform_name': 'test_platform' + "filename": self.test_file, + "spacecraft_name": "test_platform", + "ssp_lon": 0.0, + "sensor": "test_data_source", + "platform_name": "test_platform" } self.assertEqual(global_attributes, expected_global_attributes) - @mock.patch('satpy.readers.fci_l2_nc.geometry.AreaDefinition') - @mock.patch('satpy.readers.fci_l2_nc.make_ext') + @mock.patch("satpy.readers.fci_l2_nc.geometry.AreaDefinition") + @mock.patch("satpy.readers.fci_l2_nc.make_ext") def test_area_definition(self, me_, gad_): """Test the area definition computation.""" - self.fh._compute_area_def(make_dataid(name='test_area_def', resolution=2000)) + self.fh._compute_area_def(make_dataid(name="test_area_def", resolution=2000)) # Asserts that the make_ext function was called with the correct arguments me_.assert_called_once() args, kwargs = me_.call_args np.testing.assert_allclose(args, [-0.0, -515.6620, 5672.28217, 0.0, 35786400.]) - proj_dict = {'a': 6378137., - 'lon_0': 0.0, - 'h': 35786400, + proj_dict = {"a": 6378137., + "lon_0": 0.0, + "h": 35786400, "rf": 298.257223563, - 'proj': 'geos', - 'units': 'm', - 'sweep': 'y'} + "proj": "geos", + "units": "m", + "sweep": "y"} # Asserts that the get_area_definition function was called with the correct arguments gad_.assert_called_once() args, kwargs = gad_.call_args - self.assertEqual(args[0], 'mtg_fci_fdss_2km') - self.assertEqual(args[1], 'MTG FCI Full Disk Scanning Service area definition with 2 km resolution') - self.assertEqual(args[2], '') + self.assertEqual(args[0], "mtg_fci_fdss_2km") + self.assertEqual(args[1], "MTG FCI Full Disk Scanning Service area definition with 2 km resolution") + self.assertEqual(args[2], "") self.assertEqual(args[3], proj_dict) self.assertEqual(args[4], 10) self.assertEqual(args[5], 100) def test_dataset(self): """Test the correct execution of the get_dataset function with a valid file_key.""" - dataset = self.fh.get_dataset(make_dataid(name='test_one_layer', resolution=2000), - {'name': 'test_one_layer', - 'file_key': 'test_one_layer', - 'fill_value': -999, - 'file_type': 'test_file_type'}) + dataset = self.fh.get_dataset(make_dataid(name="test_one_layer", resolution=2000), + {"name": "test_one_layer", + "file_key": "test_one_layer", + "fill_value": -999, + "file_type": "test_file_type"}) np.testing.assert_allclose(dataset.values, np.ones((100, 10))) - self.assertEqual(dataset.attrs['test_attr'], 'attr') - self.assertEqual(dataset.attrs['units'], 'test_units') - self.assertEqual(dataset.attrs['fill_value'], -999) + self.assertEqual(dataset.attrs["test_attr"], "attr") + self.assertEqual(dataset.attrs["units"], "test_units") + self.assertEqual(dataset.attrs["fill_value"], -999) def test_dataset_with_layer(self): """Check the correct execution of the get_dataset function with a valid file_key & layer.""" - dataset = self.fh.get_dataset(make_dataid(name='test_two_layers', 
resolution=2000), - {'name': 'test_two_layers', - 'file_key': 'test_two_layers', 'layer': 1, - 'fill_value': -999, - 'file_type': 'test_file_type'}) + dataset = self.fh.get_dataset(make_dataid(name="test_two_layers", resolution=2000), + {"name": "test_two_layers", + "file_key": "test_two_layers", "layer": 1, + "fill_value": -999, + "file_type": "test_file_type"}) np.testing.assert_allclose(dataset.values, 2 * np.ones((100, 10))) - self.assertEqual(dataset.attrs['units'], None) - self.assertEqual(dataset.attrs['spacecraft_name'], 'test_platform') + self.assertEqual(dataset.attrs["units"], None) + self.assertEqual(dataset.attrs["spacecraft_name"], "test_platform") def test_dataset_with_invalid_filekey(self): """Test the correct execution of the get_dataset function with an invalid file_key.""" - invalid_dataset = self.fh.get_dataset(make_dataid(name='test_invalid', resolution=2000), - {'name': 'test_invalid', - 'file_key': 'test_invalid', - 'fill_value': -999, - 'file_type': 'test_file_type'}) + invalid_dataset = self.fh.get_dataset(make_dataid(name="test_invalid", resolution=2000), + {"name": "test_invalid", + "file_key": "test_invalid", + "fill_value": -999, + "file_type": "test_file_type"}) self.assertEqual(invalid_dataset, None) def test_dataset_with_total_cot(self): """Test the correct execution of the get_dataset function for total COT (add contributions from two layers).""" - dataset = self.fh.get_dataset(make_dataid(name='retrieved_cloud_optical_thickness', resolution=2000), - {'name': 'retrieved_cloud_optical_thickness', - 'file_key': 'test_two_layers', - 'fill_value': -999, - 'file_type': 'test_file_type'}) + dataset = self.fh.get_dataset(make_dataid(name="retrieved_cloud_optical_thickness", resolution=2000), + {"name": "retrieved_cloud_optical_thickness", + "file_key": "test_two_layers", + "fill_value": -999, + "file_type": "test_file_type"}) # Checks that the function returns None expected_sum = np.empty((100, 10)) expected_sum[:] = np.log10(10**2 + 10**1) @@ -205,10 +205,10 @@ def test_dataset_with_total_cot(self): def test_dataset_with_scalar(self): """Test the execution of the get_dataset function for scalar values.""" # Checks returned scalar value - dataset = self.fh.get_dataset(make_dataid(name='test_scalar'), - {'name': 'product_quality', - 'file_key': 'product_quality', - 'file_type': 'test_file_type'}) + dataset = self.fh.get_dataset(make_dataid(name="test_scalar"), + {"name": "product_quality", + "file_key": "product_quality", + "file_type": "test_file_type"}) self.assertEqual(dataset.values, 99.) 
# Checks that no AreaDefintion is implemented for scalar values @@ -223,44 +223,44 @@ def setUp(self): """Set up the test by creating a test file and opening it with the reader.""" # Easiest way to test the reader is to create a test netCDF file on the fly self.seg_test_file = str(uuid.uuid4()) + ".nc" - with Dataset(self.seg_test_file, 'w') as nc: + with Dataset(self.seg_test_file, "w") as nc: # Create dimensions - nc.createDimension('number_of_FoR_cols', 348) - nc.createDimension('number_of_FoR_rows', 348) - nc.createDimension('number_of_channels', 8) - nc.createDimension('number_of_categories', 6) + nc.createDimension("number_of_FoR_cols", 348) + nc.createDimension("number_of_FoR_rows", 348) + nc.createDimension("number_of_channels", 8) + nc.createDimension("number_of_categories", 6) # add global attributes - nc.data_source = 'test_fci_data_source' - nc.platform = 'test_fci_platform' + nc.data_source = "test_fci_data_source" + nc.platform = "test_fci_platform" # Add datasets - x = nc.createVariable('x', np.float32, dimensions=('number_of_FoR_cols',)) - x.standard_name = 'projection_x_coordinate' + x = nc.createVariable("x", np.float32, dimensions=("number_of_FoR_cols",)) + x.standard_name = "projection_x_coordinate" x[:] = np.arange(348) - y = nc.createVariable('y', np.float32, dimensions=('number_of_FoR_rows',)) - y.standard_name = 'projection_y_coordinate' + y = nc.createVariable("y", np.float32, dimensions=("number_of_FoR_rows",)) + y.standard_name = "projection_y_coordinate" y[:] = np.arange(348) - s = nc.createVariable('product_quality', np.int8) + s = nc.createVariable("product_quality", np.int8) s[:] = 99. - chans = nc.createVariable('channels', np.float32, dimensions=('number_of_channels',)) - chans.standard_name = 'fci_channels' + chans = nc.createVariable("channels", np.float32, dimensions=("number_of_channels",)) + chans.standard_name = "fci_channels" chans[:] = np.arange(8) - cats = nc.createVariable('categories', np.float32, dimensions=('number_of_categories',)) - cats.standard_name = 'product_categories' + cats = nc.createVariable("categories", np.float32, dimensions=("number_of_categories",)) + cats.standard_name = "product_categories" cats[:] = np.arange(6) - test_dataset = nc.createVariable('test_values', np.float32, - dimensions=('number_of_FoR_rows', 'number_of_FoR_cols', - 'number_of_channels', 'number_of_categories')) + test_dataset = nc.createVariable("test_values", np.float32, + dimensions=("number_of_FoR_rows", "number_of_FoR_cols", + "number_of_channels", "number_of_categories")) test_dataset[:] = self._get_unique_array(range(8), range(6)) - test_dataset.test_attr = 'attr' - test_dataset.units = 'test_units' + test_dataset.test_attr = "attr" + test_dataset.units = "test_units" def tearDown(self): """Remove the previously created test file.""" @@ -274,18 +274,18 @@ def test_all_basic(self): """Test all basic functionalities.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) - assert self.fh.spacecraft_name == 'test_fci_platform' - assert self.fh.sensor_name == 'test_fci_data_source' + assert self.fh.spacecraft_name == "test_fci_platform" + assert self.fh.sensor_name == "test_fci_data_source" assert self.fh.ssp_lon == 0.0 global_attributes = self.fh._get_global_attributes() expected_global_attributes = { - 'filename': self.seg_test_file, - 'spacecraft_name': 'test_fci_platform', - 'ssp_lon': 0.0, - 'sensor': 'test_fci_data_source', - 'platform_name': 'test_fci_platform' + "filename": self.seg_test_file, + 
"spacecraft_name": "test_fci_platform", + "ssp_lon": 0.0, + "sensor": "test_fci_data_source", + "platform_name": "test_fci_platform" } self.assertEqual(global_attributes, expected_global_attributes) @@ -294,15 +294,15 @@ def test_dataset(self): self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) # Checks the correct execution of the get_dataset function with a valid file_key - dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), - {'name': 'test_values', - 'file_key': 'test_values', - 'fill_value': -999, }) + dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), + {"name": "test_values", + "file_key": "test_values", + "fill_value": -999, }) expected_dataset = self._get_unique_array(range(8), range(6)) np.testing.assert_allclose(dataset.values, expected_dataset) - self.assertEqual(dataset.attrs['test_attr'], 'attr') - self.assertEqual(dataset.attrs['units'], 'test_units') - self.assertEqual(dataset.attrs['fill_value'], -999) + self.assertEqual(dataset.attrs["test_attr"], "attr") + self.assertEqual(dataset.attrs["units"], "test_units") + self.assertEqual(dataset.attrs["fill_value"], -999) # Checks that no AreaDefintion is implemented with pytest.raises(NotImplementedError): @@ -313,10 +313,10 @@ def test_dataset_with_invalid_filekey(self): self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) # Checks the correct execution of the get_dataset function with an invalid file_key - invalid_dataset = self.fh.get_dataset(make_dataid(name='test_invalid', resolution=32000), - {'name': 'test_invalid', - 'file_key': 'test_invalid', - 'fill_value': -999, }) + invalid_dataset = self.fh.get_dataset(make_dataid(name="test_invalid", resolution=32000), + {"name": "test_invalid", + "file_key": "test_invalid", + "fill_value": -999, }) # Checks that the function returns None self.assertEqual(invalid_dataset, None) @@ -326,16 +326,16 @@ def test_dataset_with_adef(self): with_area_definition=True) # Checks the correct execution of the get_dataset function with a valid file_key - dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), - {'name': 'test_values', - 'file_key': 'test_values', - 'fill_value': -999, - 'coordinates': ('test_lon', 'test_lat'), }) + dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), + {"name": "test_values", + "file_key": "test_values", + "fill_value": -999, + "coordinates": ("test_lon", "test_lat"), }) expected_dataset = self._get_unique_array(range(8), range(6)) np.testing.assert_allclose(dataset.values, expected_dataset) - self.assertEqual(dataset.attrs['test_attr'], 'attr') - self.assertEqual(dataset.attrs['units'], 'test_units') - self.assertEqual(dataset.attrs['fill_value'], -999) + self.assertEqual(dataset.attrs["test_attr"], "attr") + self.assertEqual(dataset.attrs["units"], "test_units") + self.assertEqual(dataset.attrs["fill_value"], -999) # Checks returned AreaDefinition against reference adef = self.fh.get_area_def(None) @@ -346,18 +346,18 @@ def test_dataset_with_adef_and_wrongs_dims(self): self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}, with_area_definition=True) with pytest.raises(NotImplementedError): - self.fh.get_dataset(make_dataid(name='test_wrong_dims', resolution=6000), - {'name': 'test_wrong_dims', 'file_key': 'test_values', 'fill_value': -999} + self.fh.get_dataset(make_dataid(name="test_wrong_dims", 
resolution=6000), + {"name": "test_wrong_dims", "file_key": "test_values", "fill_value": -999} ) def test_dataset_with_scalar(self): """Test the execution of the get_dataset function for scalar values.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) # Checks returned scalar value - dataset = self.fh.get_dataset(make_dataid(name='test_scalar'), - {'name': 'product_quality', - 'file_key': 'product_quality', - 'file_type': 'test_file_type'}) + dataset = self.fh.get_dataset(make_dataid(name="test_scalar"), + {"name": "product_quality", + "file_key": "product_quality", + "file_type": "test_file_type"}) self.assertEqual(dataset.values, 99.) # Checks that no AreaDefintion is implemented for scalar values @@ -368,11 +368,11 @@ def test_dataset_slicing_catid(self): """Test the correct execution of the _slice_dataset function with 'category_id' set.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) - dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), - {'name': 'test_values', - 'file_key': 'test_values', - 'fill_value': -999, - 'category_id': 5}) + dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), + {"name": "test_values", + "file_key": "test_values", + "fill_value": -999, + "category_id": 5}) expected_dataset = self._get_unique_array(range(8), 5) np.testing.assert_allclose(dataset.values, expected_dataset) @@ -380,11 +380,11 @@ def test_dataset_slicing_chid_catid(self): """Test the correct execution of the _slice_dataset function with 'channel_id' and 'category_id' set.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) - dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), - {'name': 'test_values', - 'file_key': 'test_values', - 'fill_value': -999, - 'channel_id': 0, 'category_id': 1}) + dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), + {"name": "test_values", + "file_key": "test_values", + "fill_value": -999, + "channel_id": 0, "category_id": 1}) expected_dataset = self._get_unique_array(0, 1) np.testing.assert_allclose(dataset.values, expected_dataset) @@ -392,12 +392,12 @@ def test_dataset_slicing_visid_catid(self): """Test the correct execution of the _slice_dataset function with 'vis_channel_id' and 'category_id' set.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) - self.fh.nc = self.fh.nc.rename_dims({'number_of_channels': 'number_of_vis_channels'}) - dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), - {'name': 'test_values', - 'file_key': 'test_values', - 'fill_value': -999, - 'vis_channel_id': 3, 'category_id': 3}) + self.fh.nc = self.fh.nc.rename_dims({"number_of_channels": "number_of_vis_channels"}) + dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), + {"name": "test_values", + "file_key": "test_values", + "fill_value": -999, + "vis_channel_id": 3, "category_id": 3}) expected_dataset = self._get_unique_array(3, 3) np.testing.assert_allclose(dataset.values, expected_dataset) @@ -405,21 +405,21 @@ def test_dataset_slicing_irid(self): """Test the correct execution of the _slice_dataset function with 'ir_channel_id' set.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) - self.fh.nc = self.fh.nc.rename_dims({'number_of_channels': 
'number_of_ir_channels'}) - dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), - {'name': 'test_values', - 'file_key': 'test_values', - 'fill_value': -999, - 'ir_channel_id': 4}) + self.fh.nc = self.fh.nc.rename_dims({"number_of_channels": "number_of_ir_channels"}) + dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), + {"name": "test_values", + "file_key": "test_values", + "fill_value": -999, + "ir_channel_id": 4}) expected_dataset = self._get_unique_array(4, range(6)) np.testing.assert_allclose(dataset.values, expected_dataset) @staticmethod def _get_unique_array(iarr, jarr): - if not hasattr(iarr, '__iter__'): + if not hasattr(iarr, "__iter__"): iarr = [iarr] - if not hasattr(jarr, '__iter__'): + if not hasattr(jarr, "__iter__"): jarr = [jarr] array = np.zeros((348, 348, 8, 6)) @@ -440,32 +440,32 @@ def setUp(self): """Set up the test by creating a test file and opening it with the reader.""" # Easiest way to test the reader is to create a test netCDF file on the fly self.test_byte_file = str(uuid.uuid4()) + ".nc" - with Dataset(self.test_byte_file, 'w') as nc_byte: + with Dataset(self.test_byte_file, "w") as nc_byte: # Create dimensions - nc_byte.createDimension('number_of_columns', 1) - nc_byte.createDimension('number_of_rows', 1) + nc_byte.createDimension("number_of_columns", 1) + nc_byte.createDimension("number_of_rows", 1) # add global attributes - nc_byte.data_source = 'test_data_source' - nc_byte.platform = 'test_platform' + nc_byte.data_source = "test_data_source" + nc_byte.platform = "test_platform" # Add datasets - x = nc_byte.createVariable('x', np.float32, dimensions=('number_of_columns',)) - x.standard_name = 'projection_x_coordinate' + x = nc_byte.createVariable("x", np.float32, dimensions=("number_of_columns",)) + x.standard_name = "projection_x_coordinate" x[:] = np.arange(1) - y = nc_byte.createVariable('y', np.float32, dimensions=('number_of_rows',)) - x.standard_name = 'projection_y_coordinate' + y = nc_byte.createVariable("y", np.float32, dimensions=("number_of_rows",)) + x.standard_name = "projection_y_coordinate" y[:] = np.arange(1) - mtg_geos_projection = nc_byte.createVariable('mtg_geos_projection', int, dimensions=()) + mtg_geos_projection = nc_byte.createVariable("mtg_geos_projection", int, dimensions=()) mtg_geos_projection.longitude_of_projection_origin = 0.0 mtg_geos_projection.semi_major_axis = 6378137. mtg_geos_projection.inverse_flattening = 298.257223563 mtg_geos_projection.perspective_point_height = 35786400. 
- test_dataset = nc_byte.createVariable('cloud_mask_test_flag', np.float32, - dimensions=('number_of_rows', 'number_of_columns',)) + test_dataset = nc_byte.createVariable("cloud_mask_test_flag", np.float32, + dimensions=("number_of_rows", "number_of_columns",)) # This number was chosen as we know the expected byte values test_dataset[:] = 4544767 @@ -487,23 +487,23 @@ def tearDown(self): def test_byte_extraction(self): """Test the execution of the get_dataset function.""" # Value of 1 is expected to be returned for this test - dataset = self.byte_reader.get_dataset(make_dataid(name='cloud_mask_test_flag', resolution=2000), - {'name': 'cloud_mask_test_flag', - 'file_key': 'cloud_mask_test_flag', - 'fill_value': -999, - 'file_type': 'nc_fci_test_clm', - 'extract_byte': 1, + dataset = self.byte_reader.get_dataset(make_dataid(name="cloud_mask_test_flag", resolution=2000), + {"name": "cloud_mask_test_flag", + "file_key": "cloud_mask_test_flag", + "fill_value": -999, + "file_type": "nc_fci_test_clm", + "extract_byte": 1, }) self.assertEqual(dataset.values, 1) # Value of 0 is expected fto be returned or this test - dataset = self.byte_reader.get_dataset(make_dataid(name='cloud_mask_test_flag', resolution=2000), - {'name': 'cloud_mask_test_flag', - 'file_key': 'cloud_mask_test_flag', - 'fill_value': -999, 'mask_value': 0., - 'file_type': 'nc_fci_test_clm', - 'extract_byte': 23, + dataset = self.byte_reader.get_dataset(make_dataid(name="cloud_mask_test_flag", resolution=2000), + {"name": "cloud_mask_test_flag", + "file_key": "cloud_mask_test_flag", + "fill_value": -999, "mask_value": 0., + "file_type": "nc_fci_test_clm", + "extract_byte": 23, }) self.assertEqual(dataset.values, 0) diff --git a/satpy/tests/reader_tests/test_fy4_base.py b/satpy/tests/reader_tests/test_fy4_base.py index 432117e1ad..ae6df61195 100644 --- a/satpy/tests/reader_tests/test_fy4_base.py +++ b/satpy/tests/reader_tests/test_fy4_base.py @@ -30,11 +30,11 @@ class Test_FY4Base: def setup_method(self): """Initialise the tests.""" - self.p = mock.patch.object(FY4Base, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(FY4Base, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True - self.file_type = {'file_type': 'agri_l1_0500m'} + self.file_type = {"file_type": "agri_l1_0500m"} def teardown_method(self): """Stop wrapping the HDF5 file handler.""" @@ -42,7 +42,7 @@ def teardown_method(self): def test_badsensor(self): """Test case where we pass a bad sensor name, must be GHI or AGRI.""" - fy4 = FY4Base(None, {'platform_id': 'FY4A', 'instrument': 'FCI'}, self.file_type) + fy4 = FY4Base(None, {"platform_id": "FY4A", "instrument": "FCI"}, self.file_type) with pytest.raises(ValueError): fy4.calibrate_to_reflectance(None, None, None) with pytest.raises(ValueError): @@ -50,11 +50,11 @@ def test_badsensor(self): def test_badcalibration(self): """Test case where we pass a bad calibration type, radiance is not supported.""" - fy4 = FY4Base(None, {'platform_id': 'FY4A', 'instrument': 'AGRI'}, self.file_type) + fy4 = FY4Base(None, {"platform_id": "FY4A", "instrument": "AGRI"}, self.file_type) with pytest.raises(NotImplementedError): - fy4.calibrate(None, {'calibration': 'radiance'}, None, None) + fy4.calibrate(None, {"calibration": "radiance"}, None, None) def test_badplatform(self): """Test case where we pass a bad calibration type, radiance is not supported.""" with pytest.raises(KeyError): - FY4Base(None, {'platform_id': 'FY3D', 'instrument': 'AGRI'}, self.file_type) + FY4Base(None, 
{"platform_id": "FY3D", "instrument": "AGRI"}, self.file_type) diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py index 1477586205..dc658ab79a 100644 --- a/satpy/tests/reader_tests/test_generic_image.py +++ b/satpy/tests/reader_tests/test_generic_image.py @@ -41,12 +41,12 @@ def setUp(self): self.date = datetime(2018, 1, 1) # Create area definition - pcs_id = 'ETRS89 / LAEA Europe' + pcs_id = "ETRS89 / LAEA Europe" proj4_dict = "EPSG:3035" self.x_size = 100 self.y_size = 100 area_extent = (2426378.0132, 1528101.2618, 6293974.6215, 5446513.5222) - self.area_def = AreaDefinition('geotiff_area', pcs_id, pcs_id, + self.area_def = AreaDefinition("geotiff_area", pcs_id, pcs_id, proj4_dict, self.x_size, self.y_size, area_extent) @@ -65,56 +65,56 @@ def setUp(self): r_nan__[:10, :10] = np.nan r_nan__ = da.from_array(r_nan__, chunks=(50, 50)) - ds_l = xr.DataArray(da.stack([r__]), dims=('bands', 'y', 'x'), - attrs={'name': 'test_l', - 'start_time': self.date}) - ds_l['bands'] = ['L'] - ds_la = xr.DataArray(da.stack([r__, a__]), dims=('bands', 'y', 'x'), - attrs={'name': 'test_la', - 'start_time': self.date}) - ds_la['bands'] = ['L', 'A'] + ds_l = xr.DataArray(da.stack([r__]), dims=("bands", "y", "x"), + attrs={"name": "test_l", + "start_time": self.date}) + ds_l["bands"] = ["L"] + ds_la = xr.DataArray(da.stack([r__, a__]), dims=("bands", "y", "x"), + attrs={"name": "test_la", + "start_time": self.date}) + ds_la["bands"] = ["L", "A"] ds_rgb = xr.DataArray(da.stack([r__, g__, b__]), - dims=('bands', 'y', 'x'), - attrs={'name': 'test_rgb', - 'start_time': self.date}) - ds_rgb['bands'] = ['R', 'G', 'B'] + dims=("bands", "y", "x"), + attrs={"name": "test_rgb", + "start_time": self.date}) + ds_rgb["bands"] = ["R", "G", "B"] ds_rgba = xr.DataArray(da.stack([r__, g__, b__, a__]), - dims=('bands', 'y', 'x'), - attrs={'name': 'test_rgba', - 'start_time': self.date}) - ds_rgba['bands'] = ['R', 'G', 'B', 'A'] + dims=("bands", "y", "x"), + attrs={"name": "test_rgba", + "start_time": self.date}) + ds_rgba["bands"] = ["R", "G", "B", "A"] ds_l_nan = xr.DataArray(da.stack([r_nan__]), - dims=('bands', 'y', 'x'), - attrs={'name': 'test_l_nan', - 'start_time': self.date}) - ds_l_nan['bands'] = ['L'] + dims=("bands", "y", "x"), + attrs={"name": "test_l_nan", + "start_time": self.date}) + ds_l_nan["bands"] = ["L"] # Temp dir for the saved images self.base_dir = tempfile.mkdtemp() # Put the datasets to Scene for easy saving scn = Scene() - scn['l'] = ds_l - scn['l'].attrs['area'] = self.area_def - scn['la'] = ds_la - scn['la'].attrs['area'] = self.area_def - scn['rgb'] = ds_rgb - scn['rgb'].attrs['area'] = self.area_def - scn['rgba'] = ds_rgba - scn['rgba'].attrs['area'] = self.area_def - scn['l_nan'] = ds_l_nan - scn['l_nan'].attrs['area'] = self.area_def + scn["l"] = ds_l + scn["l"].attrs["area"] = self.area_def + scn["la"] = ds_la + scn["la"].attrs["area"] = self.area_def + scn["rgb"] = ds_rgb + scn["rgb"].attrs["area"] = self.area_def + scn["rgba"] = ds_rgba + scn["rgba"].attrs["area"] = self.area_def + scn["l_nan"] = ds_l_nan + scn["l_nan"].attrs["area"] = self.area_def # Save the images. 
Two images in PNG and two in GeoTIFF - scn.save_dataset('l', os.path.join(self.base_dir, 'test_l.png'), writer='simple_image') - scn.save_dataset('la', os.path.join(self.base_dir, '20180101_0000_test_la.png'), writer='simple_image') - scn.save_dataset('rgb', os.path.join(self.base_dir, '20180101_0000_test_rgb.tif'), writer='geotiff') - scn.save_dataset('rgba', os.path.join(self.base_dir, 'test_rgba.tif'), writer='geotiff') - scn.save_dataset('l_nan', os.path.join(self.base_dir, 'test_l_nan_fillvalue.tif'), - writer='geotiff', fill_value=0) - scn.save_dataset('l_nan', os.path.join(self.base_dir, 'test_l_nan_nofillvalue.tif'), - writer='geotiff') + scn.save_dataset("l", os.path.join(self.base_dir, "test_l.png"), writer="simple_image") + scn.save_dataset("la", os.path.join(self.base_dir, "20180101_0000_test_la.png"), writer="simple_image") + scn.save_dataset("rgb", os.path.join(self.base_dir, "20180101_0000_test_rgb.tif"), writer="geotiff") + scn.save_dataset("rgba", os.path.join(self.base_dir, "test_rgba.tif"), writer="geotiff") + scn.save_dataset("l_nan", os.path.join(self.base_dir, "test_l_nan_fillvalue.tif"), + writer="geotiff", fill_value=0) + scn.save_dataset("l_nan", os.path.join(self.base_dir, "test_l_nan_nofillvalue.tif"), + writer="geotiff") self.scn = scn @@ -130,78 +130,78 @@ def test_png_scene(self): """Test reading PNG images via satpy.Scene().""" from satpy import Scene - fname = os.path.join(self.base_dir, 'test_l.png') - scn = Scene(reader='generic_image', filenames=[fname]) - scn.load(['image']) - self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size)) - self.assertEqual(scn.sensor_names, {'images'}) + fname = os.path.join(self.base_dir, "test_l.png") + scn = Scene(reader="generic_image", filenames=[fname]) + scn.load(["image"]) + self.assertEqual(scn["image"].shape, (1, self.y_size, self.x_size)) + self.assertEqual(scn.sensor_names, {"images"}) self.assertEqual(scn.start_time, None) self.assertEqual(scn.end_time, None) - self.assertNotIn('area', scn['image'].attrs) - - fname = os.path.join(self.base_dir, '20180101_0000_test_la.png') - scn = Scene(reader='generic_image', filenames=[fname]) - scn.load(['image']) - data = da.compute(scn['image'].data) - self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size)) - self.assertEqual(scn.sensor_names, {'images'}) + self.assertNotIn("area", scn["image"].attrs) + + fname = os.path.join(self.base_dir, "20180101_0000_test_la.png") + scn = Scene(reader="generic_image", filenames=[fname]) + scn.load(["image"]) + data = da.compute(scn["image"].data) + self.assertEqual(scn["image"].shape, (1, self.y_size, self.x_size)) + self.assertEqual(scn.sensor_names, {"images"}) self.assertEqual(scn.start_time, self.date) self.assertEqual(scn.end_time, self.date) - self.assertNotIn('area', scn['image'].attrs) + self.assertNotIn("area", scn["image"].attrs) self.assertEqual(np.sum(np.isnan(data)), 100) def test_geotiff_scene(self): """Test reading TIFF images via satpy.Scene().""" from satpy import Scene - fname = os.path.join(self.base_dir, '20180101_0000_test_rgb.tif') - scn = Scene(reader='generic_image', filenames=[fname]) - scn.load(['image']) - self.assertEqual(scn['image'].shape, (3, self.y_size, self.x_size)) - self.assertEqual(scn.sensor_names, {'images'}) + fname = os.path.join(self.base_dir, "20180101_0000_test_rgb.tif") + scn = Scene(reader="generic_image", filenames=[fname]) + scn.load(["image"]) + self.assertEqual(scn["image"].shape, (3, self.y_size, self.x_size)) + self.assertEqual(scn.sensor_names, {"images"}) 
self.assertEqual(scn.start_time, self.date) self.assertEqual(scn.end_time, self.date) - self.assertEqual(scn['image'].area, self.area_def) + self.assertEqual(scn["image"].area, self.area_def) - fname = os.path.join(self.base_dir, 'test_rgba.tif') - scn = Scene(reader='generic_image', filenames=[fname]) - scn.load(['image']) - self.assertEqual(scn['image'].shape, (3, self.y_size, self.x_size)) - self.assertEqual(scn.sensor_names, {'images'}) + fname = os.path.join(self.base_dir, "test_rgba.tif") + scn = Scene(reader="generic_image", filenames=[fname]) + scn.load(["image"]) + self.assertEqual(scn["image"].shape, (3, self.y_size, self.x_size)) + self.assertEqual(scn.sensor_names, {"images"}) self.assertEqual(scn.start_time, None) self.assertEqual(scn.end_time, None) - self.assertEqual(scn['image'].area, self.area_def) + self.assertEqual(scn["image"].area, self.area_def) def test_geotiff_scene_nan(self): """Test reading TIFF images originally containing NaN values via satpy.Scene().""" from satpy import Scene - fname = os.path.join(self.base_dir, 'test_l_nan_fillvalue.tif') - scn = Scene(reader='generic_image', filenames=[fname]) - scn.load(['image']) - self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size)) - self.assertEqual(np.sum(scn['image'].data[0][:10, :10].compute()), 0) + fname = os.path.join(self.base_dir, "test_l_nan_fillvalue.tif") + scn = Scene(reader="generic_image", filenames=[fname]) + scn.load(["image"]) + self.assertEqual(scn["image"].shape, (1, self.y_size, self.x_size)) + self.assertEqual(np.sum(scn["image"].data[0][:10, :10].compute()), 0) - fname = os.path.join(self.base_dir, 'test_l_nan_nofillvalue.tif') - scn = Scene(reader='generic_image', filenames=[fname]) - scn.load(['image']) - self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size)) - self.assertTrue(np.all(np.isnan(scn['image'].data[0][:10, :10].compute()))) + fname = os.path.join(self.base_dir, "test_l_nan_nofillvalue.tif") + scn = Scene(reader="generic_image", filenames=[fname]) + scn.load(["image"]) + self.assertEqual(scn["image"].shape, (1, self.y_size, self.x_size)) + self.assertTrue(np.all(np.isnan(scn["image"].data[0][:10, :10].compute()))) def test_GenericImageFileHandler(self): """Test direct use of the reader.""" from satpy.readers.generic_image import GenericImageFileHandler - fname = os.path.join(self.base_dir, 'test_rgba.tif') - fname_info = {'start_time': self.date} + fname = os.path.join(self.base_dir, "test_rgba.tif") + fname_info = {"start_time": self.date} ftype_info = {} reader = GenericImageFileHandler(fname, fname_info, ftype_info) - foo = make_dataid(name='image') + foo = make_dataid(name="image") self.assertTrue(reader.file_content) - self.assertEqual(reader.finfo['filename'], fname) - self.assertEqual(reader.finfo['start_time'], self.date) - self.assertEqual(reader.finfo['end_time'], self.date) + self.assertEqual(reader.finfo["filename"], fname) + self.assertEqual(reader.finfo["start_time"], self.date) + self.assertEqual(reader.finfo["end_time"], self.date) self.assertEqual(reader.area, self.area_def) self.assertEqual(reader.get_area_def(None), self.area_def) self.assertEqual(reader.start_time, self.date) @@ -209,7 +209,7 @@ def test_GenericImageFileHandler(self): dataset = reader.get_dataset(foo, {}) self.assertTrue(isinstance(dataset, xr.DataArray)) - self.assertIn('spatial_ref', dataset.coords) + self.assertIn("spatial_ref", dataset.coords) self.assertTrue(np.all(np.isnan(dataset.data[:, :10, :10].compute()))) def 
test_GenericImageFileHandler_masking_only_integer(self): @@ -225,59 +225,59 @@ def __init__(self, filename, filename_info, filetype_info, file_content, **kwarg self.dataset_name = None self.file_content.update(kwargs) - data = self.scn['rgba'] + data = self.scn["rgba"] # do nothing if not integer float_data = data / 255. reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": float_data}) - self.assertIs(reader.get_dataset(make_dataid(name='image'), {}), float_data) + self.assertIs(reader.get_dataset(make_dataid(name="image"), {}), float_data) # masking if integer data = data.astype(np.uint32) self.assertEqual(data.bands.size, 4) reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": data}) - ret_data = reader.get_dataset(make_dataid(name='image'), {}) + ret_data = reader.get_dataset(make_dataid(name="image"), {}) self.assertEqual(ret_data.bands.size, 3) def test_GenericImageFileHandler_datasetid(self): """Test direct use of the reader.""" from satpy.readers.generic_image import GenericImageFileHandler - fname = os.path.join(self.base_dir, 'test_rgba.tif') - fname_info = {'start_time': self.date} + fname = os.path.join(self.base_dir, "test_rgba.tif") + fname_info = {"start_time": self.date} ftype_info = {} reader = GenericImageFileHandler(fname, fname_info, ftype_info) - foo = make_dataid(name='image-custom') - self.assertTrue(reader.file_content, 'file_content should be set') + foo = make_dataid(name="image-custom") + self.assertTrue(reader.file_content, "file_content should be set") dataset = reader.get_dataset(foo, {}) - self.assertTrue(isinstance(dataset, xr.DataArray), 'dataset should be a xr.DataArray') + self.assertTrue(isinstance(dataset, xr.DataArray), "dataset should be a xr.DataArray") def test_GenericImageFileHandler_nodata(self): """Test nodata handling with direct use of the reader.""" from satpy.readers.generic_image import GenericImageFileHandler - fname = os.path.join(self.base_dir, 'test_l_nan_fillvalue.tif') - fname_info = {'start_time': self.date} + fname = os.path.join(self.base_dir, "test_l_nan_fillvalue.tif") + fname_info = {"start_time": self.date} ftype_info = {} reader = GenericImageFileHandler(fname, fname_info, ftype_info) - foo = make_dataid(name='image-custom') - self.assertTrue(reader.file_content, 'file_content should be set') - info = {'nodata_handling': 'nan_mask'} + foo = make_dataid(name="image-custom") + self.assertTrue(reader.file_content, "file_content should be set") + info = {"nodata_handling": "nan_mask"} dataset = reader.get_dataset(foo, info) - self.assertTrue(isinstance(dataset, xr.DataArray), 'dataset should be a xr.DataArray') - self.assertTrue(np.all(np.isnan(dataset.data[0][:10, :10].compute())), 'values should be np.nan') - self.assertTrue(np.isnan(dataset.attrs['_FillValue']), '_FillValue should be np.nan') + self.assertTrue(isinstance(dataset, xr.DataArray), "dataset should be a xr.DataArray") + self.assertTrue(np.all(np.isnan(dataset.data[0][:10, :10].compute())), "values should be np.nan") + self.assertTrue(np.isnan(dataset.attrs["_FillValue"]), "_FillValue should be np.nan") - info = {'nodata_handling': 'fill_value'} + info = {"nodata_handling": "fill_value"} dataset = reader.get_dataset(foo, info) - self.assertTrue(isinstance(dataset, xr.DataArray), 'dataset should be a xr.DataArray') + self.assertTrue(isinstance(dataset, xr.DataArray), "dataset should be a xr.DataArray") self.assertEqual(np.sum(dataset.data[0][:10, :10].compute()), 0) - self.assertEqual(dataset.attrs['_FillValue'], 0) + 
self.assertEqual(dataset.attrs["_FillValue"], 0) # default same as 'nodata_handling': 'fill_value' dataset = reader.get_dataset(foo, {}) - self.assertTrue(isinstance(dataset, xr.DataArray), 'dataset should be a xr.DataArray') + self.assertTrue(isinstance(dataset, xr.DataArray), "dataset should be a xr.DataArray") self.assertEqual(np.sum(dataset.data[0][:10, :10].compute()), 0) - self.assertEqual(dataset.attrs['_FillValue'], 0) + self.assertEqual(dataset.attrs["_FillValue"], 0) diff --git a/satpy/tests/reader_tests/test_geocat.py b/satpy/tests/reader_tests/test_geocat.py index 91de6a4265..91a6b7de37 100644 --- a/satpy/tests/reader_tests/test_geocat.py +++ b/satpy/tests/reader_tests/test_geocat.py @@ -43,64 +43,64 @@ class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { - '/attr/Platform_Name': filename_info['platform_shortname'], - '/attr/Element_Resolution': 2., - '/attr/Line_Resolution': 2., - '/attr/Subsatellite_Longitude': -70.2 if 'GOES' in filename_info['platform_shortname'] else 140.65, - 'pixel_longitude': DEFAULT_LON_DATA, - 'pixel_longitude/attr/scale_factor': 1., - 'pixel_longitude/attr/add_offset': 0., - 'pixel_longitude/shape': DEFAULT_FILE_SHAPE, - 'pixel_longitude/attr/_FillValue': np.nan, - 'pixel_latitude': DEFAULT_LAT_DATA, - 'pixel_latitude/attr/scale_factor': 1., - 'pixel_latitude/attr/add_offset': 0., - 'pixel_latitude/shape': DEFAULT_FILE_SHAPE, - 'pixel_latitude/attr/_FillValue': np.nan, + "/attr/Platform_Name": filename_info["platform_shortname"], + "/attr/Element_Resolution": 2., + "/attr/Line_Resolution": 2., + "/attr/Subsatellite_Longitude": -70.2 if "GOES" in filename_info["platform_shortname"] else 140.65, + "pixel_longitude": DEFAULT_LON_DATA, + "pixel_longitude/attr/scale_factor": 1., + "pixel_longitude/attr/add_offset": 0., + "pixel_longitude/shape": DEFAULT_FILE_SHAPE, + "pixel_longitude/attr/_FillValue": np.nan, + "pixel_latitude": DEFAULT_LAT_DATA, + "pixel_latitude/attr/scale_factor": 1., + "pixel_latitude/attr/add_offset": 0., + "pixel_latitude/shape": DEFAULT_FILE_SHAPE, + "pixel_latitude/attr/_FillValue": np.nan, } sensor = { - 'HIMAWARI-8': 'himawari8', - 'GOES-17': 'goesr', - 'GOES-16': 'goesr', - 'GOES-13': 'goes', - 'GOES-14': 'goes', - 'GOES-15': 'goes', - }[filename_info['platform_shortname']] - file_content['/attr/Sensor_Name'] = sensor - - if filename_info['platform_shortname'] == 'HIMAWARI-8': - file_content['pixel_longitude'] = DEFAULT_LON_DATA + 130. - - file_content['variable1'] = DEFAULT_FILE_DATA.astype(np.float32) - file_content['variable1/attr/_FillValue'] = -1 - file_content['variable1/attr/scale_factor'] = 1. - file_content['variable1/attr/add_offset'] = 0. - file_content['variable1/attr/units'] = '1' - file_content['variable1/shape'] = DEFAULT_FILE_SHAPE + "HIMAWARI-8": "himawari8", + "GOES-17": "goesr", + "GOES-16": "goesr", + "GOES-13": "goes", + "GOES-14": "goes", + "GOES-15": "goes", + }[filename_info["platform_shortname"]] + file_content["/attr/Sensor_Name"] = sensor + + if filename_info["platform_shortname"] == "HIMAWARI-8": + file_content["pixel_longitude"] = DEFAULT_LON_DATA + 130. + + file_content["variable1"] = DEFAULT_FILE_DATA.astype(np.float32) + file_content["variable1/attr/_FillValue"] = -1 + file_content["variable1/attr/scale_factor"] = 1. + file_content["variable1/attr/add_offset"] = 0. 
+ file_content["variable1/attr/units"] = "1" + file_content["variable1/shape"] = DEFAULT_FILE_SHAPE # data with fill values - file_content['variable2'] = np.ma.masked_array( + file_content["variable2"] = np.ma.masked_array( DEFAULT_FILE_DATA.astype(np.float32), mask=np.zeros_like(DEFAULT_FILE_DATA)) - file_content['variable2'].mask[::5, ::5] = True - file_content['variable2/attr/_FillValue'] = -1 - file_content['variable2/attr/scale_factor'] = 1. - file_content['variable2/attr/add_offset'] = 0. - file_content['variable2/attr/units'] = '1' - file_content['variable2/shape'] = DEFAULT_FILE_SHAPE + file_content["variable2"].mask[::5, ::5] = True + file_content["variable2/attr/_FillValue"] = -1 + file_content["variable2/attr/scale_factor"] = 1. + file_content["variable2/attr/add_offset"] = 0. + file_content["variable2/attr/units"] = "1" + file_content["variable2/shape"] = DEFAULT_FILE_SHAPE # category - file_content['variable3'] = DEFAULT_FILE_DATA.astype(np.byte) - file_content['variable3/attr/_FillValue'] = -128 - file_content['variable3/attr/flag_meanings'] = "clear water supercooled mixed ice unknown" - file_content['variable3/attr/flag_values'] = [0, 1, 2, 3, 4, 5] - file_content['variable3/attr/units'] = '1' - file_content['variable3/shape'] = DEFAULT_FILE_SHAPE - - attrs = ('_FillValue', 'flag_meanings', 'flag_values', 'units') + file_content["variable3"] = DEFAULT_FILE_DATA.astype(np.byte) + file_content["variable3/attr/_FillValue"] = -128 + file_content["variable3/attr/flag_meanings"] = "clear water supercooled mixed ice unknown" + file_content["variable3/attr/flag_values"] = [0, 1, 2, 3, 4, 5] + file_content["variable3/attr/units"] = "1" + file_content["variable3/shape"] = DEFAULT_FILE_SHAPE + + attrs = ("_FillValue", "flag_meanings", "flag_values", "units") convert_file_content_to_data_array( file_content, attrs=attrs, - dims=('z', 'lines', 'elements')) + dims=("z", "lines", "elements")) return file_content @@ -113,9 +113,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.geocat import GEOCATFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(GEOCATFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) + self.p = mock.patch.object(GEOCATFileHandler, "__bases__", (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -128,7 +128,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'geocatL2.GOES-13.2015143.234500.nc', + "geocatL2.GOES-13.2015143.234500.nc", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) @@ -140,10 +140,10 @@ def test_init_with_kwargs(self): from satpy.readers import load_reader r = load_reader(self.reader_configs, xarray_kwargs={"decode_times": True}) loadables = r.select_files_from_pathnames([ - 'geocatL2.GOES-13.2015143.234500.nc', + "geocatL2.GOES-13.2015143.234500.nc", ]) assert len(loadables) == 1 - r.create_filehandlers(loadables, fh_kwargs={"xarray_kwargs": {'decode_times': True}}) + r.create_filehandlers(loadables, fh_kwargs={"xarray_kwargs": {"decode_times": True}}) # make sure we have some files assert r.file_handlers @@ -153,19 +153,19 @@ def test_load_all_old_goes(self): 
from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.geocat.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.geocat.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'geocatL2.GOES-13.2015143.234500.nc', + "geocatL2.GOES-13.2015143.234500.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['variable1', - 'variable2', - 'variable3']) + datasets = r.load(["variable1", + "variable2", + "variable3"]) self.assertEqual(len(datasets), 3) for v in datasets.values(): - assert 'calibration' not in v.attrs - self.assertEqual(v.attrs['units'], '1') - self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) + assert "calibration" not in v.attrs + self.assertEqual(v.attrs["units"], "1") + self.assertIsNotNone(datasets["variable3"].attrs.get("flag_meanings")) def test_load_all_himawari8(self): """Test loading all test datasets from H8 NetCDF file.""" @@ -174,20 +174,20 @@ def test_load_all_himawari8(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.geocat.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.geocat.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'geocatL2.HIMAWARI-8.2017092.210730.R304.R20.nc', + "geocatL2.HIMAWARI-8.2017092.210730.R304.R20.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['variable1', - 'variable2', - 'variable3']) + datasets = r.load(["variable1", + "variable2", + "variable3"]) self.assertEqual(len(datasets), 3) for v in datasets.values(): - assert 'calibration' not in v.attrs - self.assertEqual(v.attrs['units'], '1') - self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) - self.assertIsInstance(datasets['variable1'].attrs['area'], AreaDefinition) + assert "calibration" not in v.attrs + self.assertEqual(v.attrs["units"], "1") + self.assertIsNotNone(datasets["variable3"].attrs.get("flag_meanings")) + self.assertIsInstance(datasets["variable1"].attrs["area"], AreaDefinition) def test_load_all_goes17_hdf4(self): """Test loading all test datasets from GOES-17 HDF4 file.""" @@ -196,17 +196,17 @@ def test_load_all_goes17_hdf4(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.geocat.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.geocat.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'geocatL2.GOES-17.CONUS.2020041.163130.hdf', + "geocatL2.GOES-17.CONUS.2020041.163130.hdf", ]) r.create_filehandlers(loadables) - datasets = r.load(['variable1', - 'variable2', - 'variable3']) + datasets = r.load(["variable1", + "variable2", + "variable3"]) self.assertEqual(len(datasets), 3) for v in datasets.values(): - assert 'calibration' not in v.attrs - self.assertEqual(v.attrs['units'], '1') - self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) - self.assertIsInstance(datasets['variable1'].attrs['area'], AreaDefinition) + assert "calibration" not in v.attrs + self.assertEqual(v.attrs["units"], "1") + self.assertIsNotNone(datasets["variable3"].attrs.get("flag_meanings")) + self.assertIsInstance(datasets["variable1"].attrs["area"], AreaDefinition) diff --git a/satpy/tests/reader_tests/test_geos_area.py b/satpy/tests/reader_tests/test_geos_area.py index d7d4c2a510..76dc2d7b92 100644 --- a/satpy/tests/reader_tests/test_geos_area.py +++ b/satpy/tests/reader_tests/test_geos_area.py @@ -37,58 +37,58 @@ 
class TestGEOSProjectionUtil(unittest.TestCase): def make_pdict_ext(self, typ, scan): """Create a dictionary and extents to use in testing.""" if typ == 1: # Fulldisk - pdict = {'a': 6378169.00, - 'b': 6356583.80, - 'h': 35785831.00, - 'ssp_lon': 0.0, - 'nlines': 3712, - 'ncols': 3712, - 'a_name': 'geostest', - 'a_desc': 'test area', - 'p_id': 'test_area', - 'cfac': -13642337, - 'lfac': -13642337, - 'coff': 1856} - if scan == 'N2S': - pdict['scandir'] = 'N2S' - pdict['loff'] = 1856 + pdict = {"a": 6378169.00, + "b": 6356583.80, + "h": 35785831.00, + "ssp_lon": 0.0, + "nlines": 3712, + "ncols": 3712, + "a_name": "geostest", + "a_desc": "test area", + "p_id": "test_area", + "cfac": -13642337, + "lfac": -13642337, + "coff": 1856} + if scan == "N2S": + pdict["scandir"] = "N2S" + pdict["loff"] = 1856 extent = (5567248.28340708, 5567248.28340708, -5570248.686685662, -5570248.686685662) - if scan == 'S2N': - pdict['scandir'] = 'S2N' - pdict['loff'] = -1856 + if scan == "S2N": + pdict["scandir"] = "S2N" + pdict["loff"] = -1856 extent = (5567248.28340708, 5570248.686685662, -5570248.686685662, -5567248.28340708) if typ == 2: # One sector - pdict = {'a': 6378169.00, - 'b': 6356583.80, - 'h': 35785831.00, - 'ssp_lon': 0.0, - 'nlines': 464, - 'ncols': 3712, - 'a_name': 'geostest', - 'a_desc': 'test area', - 'p_id': 'test_area', - 'cfac': -13642337, - 'lfac': -13642337, - 'coff': 1856} - if scan == 'N2S': - pdict['scandir'] = 'N2S' - pdict['loff'] = 464 + pdict = {"a": 6378169.00, + "b": 6356583.80, + "h": 35785831.00, + "ssp_lon": 0.0, + "nlines": 464, + "ncols": 3712, + "a_name": "geostest", + "a_desc": "test area", + "p_id": "test_area", + "cfac": -13642337, + "lfac": -13642337, + "coff": 1856} + if scan == "N2S": + pdict["scandir"] = "N2S" + pdict["loff"] = 464 extent = (5567248.28340708, 1390686.9196223018, -5570248.686685662, -1500.2016392905093) - if scan == 'S2N': - pdict['scandir'] = 'S2N' - pdict['loff'] = 464 + if scan == "S2N": + pdict["scandir"] = "S2N" + pdict["loff"] = 464 extent = (5567248.28340708, -1390686.9196223018, -5570248.686685662, @@ -99,39 +99,39 @@ def make_pdict_ext(self, typ, scan): def test_geos_area(self): """Test area extent calculation with N->S scan then S->N scan.""" # North -> South full disk - pdict, extent = self.make_pdict_ext(1, 'N2S') + pdict, extent = self.make_pdict_ext(1, "N2S") aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) # South -> North full disk - pdict, extent = self.make_pdict_ext(1, 'S2N') + pdict, extent = self.make_pdict_ext(1, "S2N") aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) # North -> South one sector - pdict, extent = self.make_pdict_ext(2, 'N2S') + pdict, extent = self.make_pdict_ext(2, "N2S") aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) # South -> North one sector - pdict, extent = self.make_pdict_ext(2, 'S2N') + pdict, extent = self.make_pdict_ext(2, "S2N") aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) def test_get_xy_from_linecol(self): """Test the scan angle calculation.""" - pdict, extent = self.make_pdict_ext(1, 'S2N') + pdict, extent = self.make_pdict_ext(1, "S2N") good_xy = [0.2690166648133674, -10.837528496767087] - factors = (pdict['lfac'], pdict['cfac']) - offsets = (pdict['loff'], pdict['coff']) + factors = (pdict["lfac"], pdict["cfac"]) + offsets = (pdict["loff"], pdict["coff"]) x, y = get_xy_from_linecol(400, 1800, offsets, factors) np.testing.assert_approx_equal(x, good_xy[0]) np.testing.assert_approx_equal(y, good_xy[1]) - pdict, 
extent = self.make_pdict_ext(2, 'N2S') + pdict, extent = self.make_pdict_ext(2, "N2S") good_xy = [0.2690166648133674, 0.30744761692956274] - factors = (pdict['lfac'], pdict['cfac']) - offsets = (pdict['loff'], pdict['coff']) + factors = (pdict["lfac"], pdict["cfac"]) + offsets = (pdict["loff"], pdict["coff"]) x, y = get_xy_from_linecol(400, 1800, offsets, factors) np.testing.assert_approx_equal(x, good_xy[0]) np.testing.assert_approx_equal(y, good_xy[1]) @@ -139,18 +139,18 @@ def test_get_xy_from_linecol(self): def test_get_area_definition(self): """Test the retrieval of the area definition.""" from pyresample.utils import proj4_radius_parameters - pdict, extent = self.make_pdict_ext(1, 'N2S') + pdict, extent = self.make_pdict_ext(1, "N2S") good_res = (-3000.4032785810186, -3000.4032785810186) a_def = get_area_definition(pdict, extent) - self.assertEqual(a_def.area_id, pdict['a_name']) + self.assertEqual(a_def.area_id, pdict["a_name"]) self.assertEqual(a_def.resolution, good_res) - self.assertEqual(a_def.proj_dict['proj'], 'geos') - self.assertEqual(a_def.proj_dict['units'], 'm') + self.assertEqual(a_def.proj_dict["proj"], "geos") + self.assertEqual(a_def.proj_dict["units"], "m") a, b = proj4_radius_parameters(a_def.proj_dict) self.assertEqual(a, 6378169) self.assertEqual(b, 6356583.8) - self.assertEqual(a_def.proj_dict['h'], 35785831) + self.assertEqual(a_def.proj_dict["h"], 35785831) def test_sampling_to_lfac_cfac(self): """Test conversion from angular sampling to line/column offset.""" @@ -160,26 +160,26 @@ def test_sampling_to_lfac_cfac(self): def test_get_geos_area_naming(self): """Test the geos area naming function.""" - input_dict = {'platform_name': 'testplatform', - 'instrument_name': 'testinstrument', - 'resolution': 1000, - 'service_name': 'testservicename', - 'service_desc': 'testdesc'} + input_dict = {"platform_name": "testplatform", + "instrument_name": "testinstrument", + "resolution": 1000, + "service_name": "testservicename", + "service_desc": "testdesc"} output_dict = get_geos_area_naming(input_dict) - self.assertEqual(output_dict['area_id'], 'testplatform_testinstrument_testservicename_1km') - self.assertEqual(output_dict['description'], 'TESTPLATFORM TESTINSTRUMENT testdesc area definition' - ' with 1 km resolution') + self.assertEqual(output_dict["area_id"], "testplatform_testinstrument_testservicename_1km") + self.assertEqual(output_dict["description"], "TESTPLATFORM TESTINSTRUMENT testdesc area definition" + " with 1 km resolution") def test_get_resolution_and_unit_strings_in_km(self): """Test the resolution and unit strings function for a km resolution.""" out = get_resolution_and_unit_strings(1000) - self.assertEqual(out['value'], '1') - self.assertEqual(out['unit'], 'km') + self.assertEqual(out["value"], "1") + self.assertEqual(out["unit"], "km") def test_get_resolution_and_unit_strings_in_m(self): """Test the resolution and unit strings function for a m resolution.""" out = get_resolution_and_unit_strings(500) - self.assertEqual(out['value'], '500') - self.assertEqual(out['unit'], 'm') + self.assertEqual(out["value"], "500") + self.assertEqual(out["unit"], "m") diff --git a/satpy/tests/reader_tests/test_ghi_l1.py b/satpy/tests/reader_tests/test_ghi_l1.py index 79667ef37d..2b6ff4af54 100644 --- a/satpy/tests/reader_tests/test_ghi_l1.py +++ b/satpy/tests/reader_tests/test_ghi_l1.py @@ -33,7 +33,7 @@ CHANNELS_BY_RESOLUTION = {250: ["C01"], 500: ["C01", "C02", "C03", "C04", "C05", "C06"], 2000: ALL_BAND_NAMES, - 'GEO': 'solar_azimuth_angle' + "GEO": 
"solar_azimuth_angle" } AREA_EXTENTS_BY_RESOLUTION = { @@ -48,58 +48,58 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): def make_test_data(self, cwl, ch, prefix, dims, file_type): """Make test data.""" - if prefix == 'CAL': + if prefix == "CAL": data = xr.DataArray( da.from_array((np.arange(10.) + 1.) / 10., [dims[0] * dims[1]]), attrs={ - 'Slope': np.array(1.), 'Intercept': np.array(0.), - 'FillValue': np.array(-65535.0), - 'units': 'NUL', - 'center_wavelength': '{}um'.format(cwl).encode('utf-8'), - 'band_names': 'band{}(band number is range from 1 to 14)' - .format(ch).encode('utf-8'), - 'long_name': 'Calibration table of {}um Channel'.format(cwl).encode('utf-8'), - 'valid_range': np.array([0, 1.5]), + "Slope": np.array(1.), "Intercept": np.array(0.), + "FillValue": np.array(-65535.0), + "units": "NUL", + "center_wavelength": "{}um".format(cwl).encode("utf-8"), + "band_names": "band{}(band number is range from 1 to 14)" + .format(ch).encode("utf-8"), + "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), + "valid_range": np.array([0, 1.5]), }, - dims='_const') + dims="_const") - elif prefix == 'NOM': + elif prefix == "NOM": data = xr.DataArray( da.from_array(np.arange(10, dtype=np.uint16).reshape((2, 5)) + 1, [dim for dim in dims]), attrs={ - 'Slope': np.array(1.), 'Intercept': np.array(0.), - 'FillValue': np.array(65535), - 'units': 'DN', - 'center_wavelength': '{}um'.format(cwl).encode('utf-8'), - 'band_names': 'band{}(band number is range from 1 to 7)' - .format(ch).encode('utf-8'), - 'long_name': 'Calibration table of {}um Channel'.format(cwl).encode('utf-8'), - 'valid_range': np.array([0, 4095]), + "Slope": np.array(1.), "Intercept": np.array(0.), + "FillValue": np.array(65535), + "units": "DN", + "center_wavelength": "{}um".format(cwl).encode("utf-8"), + "band_names": "band{}(band number is range from 1 to 7)" + .format(ch).encode("utf-8"), + "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), + "valid_range": np.array([0, 4095]), }, - dims=('_RegLength', '_RegWidth')) + dims=("_RegLength", "_RegWidth")) - elif prefix == 'GEO': + elif prefix == "GEO": data = xr.DataArray( da.from_array(np.arange(10, dtype=np.float32).reshape((2, 5)) + 1, [dim for dim in dims]), attrs={ - 'Slope': np.array(1.), 'Intercept': np.array(0.), - 'FillValue': np.array(65535.), - 'units': 'NUL', - 'band_names': 'NUL', - 'valid_range': np.array([0., 360.]), + "Slope": np.array(1.), "Intercept": np.array(0.), + "FillValue": np.array(65535.), + "units": "NUL", + "band_names": "NUL", + "valid_range": np.array([0., 360.]), }, - dims=('_RegLength', '_RegWidth')) + dims=("_RegLength", "_RegWidth")) - elif prefix == 'COEF': - if file_type == '250': + elif prefix == "COEF": + if file_type == "250": data = self._create_coeff_array(1) - elif file_type == '500': + elif file_type == "500": data = self._create_coeff_array(6) - elif file_type == '2000': + elif file_type == "2000": data = self._create_coeff_array(7) return data @@ -109,14 +109,14 @@ def _create_coeff_array(self, nb_channels): da.from_array((np.arange(nb_channels * 2).reshape((nb_channels, 2)) + 1.) 
/ np.array([1E4, 1E2]), [nb_channels, 2]), attrs={ - 'Slope': 1., 'Intercept': 0., - 'FillValue': 0, - 'units': 'NUL', - 'band_names': 'NUL', - 'long_name': b'Calibration coefficient (SCALE and OFFSET)', - 'valid_range': [-500, 500], + "Slope": 1., "Intercept": 0., + "FillValue": 0, + "units": "NUL", + "band_names": "NUL", + "long_name": b"Calibration coefficient (SCALE and OFFSET)", + "valid_range": [-500, 500], }, - dims=('_num_channel', '_coefs')) + dims=("_num_channel", "_coefs")) return data def _create_channel_data(self, chs, cwls, file_type): @@ -124,11 +124,11 @@ def _create_channel_data(self, chs, cwls, file_type): dim_1 = 5 data = {} for index, _cwl in enumerate(cwls): - data['Calibration/CALChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'CAL', + data["Calibration/CALChannel" + "%02d" % chs[index]] = self.make_test_data(cwls[index], chs[index], "CAL", [dim_0, dim_1], file_type) - data['Data/NOMChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'NOM', + data["Data/NOMChannel" + "%02d" % chs[index]] = self.make_test_data(cwls[index], chs[index], "NOM", [dim_0, dim_1], file_type) - data['Calibration/CALIBRATION_COEF(SCALE+OFFSET)'] = self.make_test_data(cwls[index], chs[index], 'COEF', + data["Calibration/CALIBRATION_COEF(SCALE+OFFSET)"] = self.make_test_data(cwls[index], chs[index], "COEF", [dim_0, dim_1], file_type) return data @@ -156,39 +156,39 @@ def _get_2km_data(self, file_type): def _get_geo_data(self, file_type): dim_0 = 2 dim_1 = 5 - data = {'Navigation/NOMSunAzimuth': self.make_test_data('NUL', 'NUL', 'GEO', + data = {"Navigation/NOMSunAzimuth": self.make_test_data("NUL", "NUL", "GEO", [dim_0, dim_1], file_type)} return data def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" global_attrs = { - '/attr/NOMSubSatLat': np.array(0.0), - '/attr/NOMSubSatLon': np.array(133.0), - '/attr/NOMSatHeight': np.array(3.5786E7), - '/attr/Semi_major_axis': np.array(6378.14), - '/attr/Semi_minor_axis': np.array(6353.28), - '/attr/OBIType': 'REGX', - '/attr/RegLength': np.array(2.0), - '/attr/RegWidth': np.array(5.0), - '/attr/Corner-Point Latitudes': np.array((4.1, 5.1, 4.1, 5.1)), - '/attr/Corner-Point Longitudes': np.array((141.1, 141.1, 141.1, 151.1)), - '/attr/Begin Line Number': np.array(0), - '/attr/End Line Number': np.array(1), - '/attr/Observing Beginning Date': '2019-06-03', '/attr/Observing Beginning Time': '00:30:01.807', - '/attr/Observing Ending Date': '2019-06-03', '/attr/Observing Ending Time': '00:34:07.572', - '/attr/Satellite Name': 'FY4B', '/attr/Sensor Identification Code': 'GHI', '/attr/Sensor Name': 'GHI', + "/attr/NOMSubSatLat": np.array(0.0), + "/attr/NOMSubSatLon": np.array(133.0), + "/attr/NOMSatHeight": np.array(3.5786E7), + "/attr/Semi_major_axis": np.array(6378.14), + "/attr/Semi_minor_axis": np.array(6353.28), + "/attr/OBIType": "REGX", + "/attr/RegLength": np.array(2.0), + "/attr/RegWidth": np.array(5.0), + "/attr/Corner-Point Latitudes": np.array((4.1, 5.1, 4.1, 5.1)), + "/attr/Corner-Point Longitudes": np.array((141.1, 141.1, 141.1, 151.1)), + "/attr/Begin Line Number": np.array(0), + "/attr/End Line Number": np.array(1), + "/attr/Observing Beginning Date": "2019-06-03", "/attr/Observing Beginning Time": "00:30:01.807", + "/attr/Observing Ending Date": "2019-06-03", "/attr/Observing Ending Time": "00:34:07.572", + "/attr/Satellite Name": "FY4B", "/attr/Sensor Identification Code": "GHI", "/attr/Sensor Name": "GHI", } data = {} - if 
self.filetype_info['file_type'] == 'ghi_l1_0250m': - data = self._get_250m_data('250') - elif self.filetype_info['file_type'] == 'ghi_l1_0500m': - data = self._get_500m_data('500') - elif self.filetype_info['file_type'] == 'ghi_l1_2000m': - data = self._get_2km_data('2000') - elif self.filetype_info['file_type'] == 'ghi_l1_2000m_geo': - data = self._get_geo_data('2000') + if self.filetype_info["file_type"] == "ghi_l1_0250m": + data = self._get_250m_data("250") + elif self.filetype_info["file_type"] == "ghi_l1_0500m": + data = self._get_500m_data("500") + elif self.filetype_info["file_type"] == "ghi_l1_2000m": + data = self._get_2km_data("2000") + elif self.filetype_info["file_type"] == "ghi_l1_2000m_geo": + data = self._get_geo_data("2000") test_content = {} test_content.update(global_attrs) @@ -199,7 +199,7 @@ def get_test_content(self, filename, filename_info, filetype_info): def _create_filenames_from_resolutions(*resolutions): """Create filenames from the given resolutions.""" - if 'GEO' in resolutions: + if "GEO" in resolutions: return ["FY4B-_GHI---_N_REGX_1330E_L1-_GEO-_MULT_NOM_20220613145300_20220613145359_2000M_V0001.HDF"] pattern = ("FY4B-_GHI---_N_REGX_1330E_L1-_FDI-_MULT_NOM_20220613145300_20220613145359_" "{resolution:04d}M_V0001.HDF") @@ -216,21 +216,21 @@ def setup_method(self): from satpy._config import config_search_paths from satpy.readers.fy4_base import FY4Base from satpy.readers.ghi_l1 import HDF_GHI_L1 - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.fy4 = mock.patch.object(FY4Base, '__bases__', (FakeHDF5FileHandler2,)) + self.fy4 = mock.patch.object(FY4Base, "__bases__", (FakeHDF5FileHandler2,)) self.p = mock.patch.object(HDF_GHI_L1.__class__, (self.fy4,)) self.fake_handler = self.fy4.start() self.p.is_local = True self.expected = { - 'C01': np.array([[2.01, 2.02, 2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]]), - 'C02': np.array([[4.03, 4.06, 4.09, 4.12, 4.15], [4.18, 4.21, 4.24, 4.27, 4.3]]), - 'C03': np.array([[6.05, 6.1, 6.15, 6.2, 6.25], [6.3, 6.35, 6.4, 6.45, 6.5]]), - 'C04': np.array([[8.07, 8.14, 8.21, 8.28, 8.35], [8.42, 8.49, 8.56, 8.63, 8.7]]), - 'C05': np.array([[10.09, 10.18, 10.27, 10.36, 10.45], [10.54, 10.63, 10.72, 10.81, 10.9]]), - 'C06': np.array([[12.11, 12.22, 12.33, 12.44, 12.55], [12.66, 12.77, 12.88, 12.99, 13.1]]), - 'C07': np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), + "C01": np.array([[2.01, 2.02, 2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]]), + "C02": np.array([[4.03, 4.06, 4.09, 4.12, 4.15], [4.18, 4.21, 4.24, 4.27, 4.3]]), + "C03": np.array([[6.05, 6.1, 6.15, 6.2, 6.25], [6.3, 6.35, 6.4, 6.45, 6.5]]), + "C04": np.array([[8.07, 8.14, 8.21, 8.28, 8.35], [8.42, 8.49, 8.56, 8.63, 8.7]]), + "C05": np.array([[10.09, 10.18, 10.27, 10.36, 10.45], [10.54, 10.63, 10.72, 10.81, 10.9]]), + "C06": np.array([[12.11, 12.22, 12.33, 12.44, 12.55], [12.66, 12.77, 12.88, 12.99, 13.1]]), + "C07": np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), } def teardown_method(self): @@ -266,12 +266,12 @@ def test_ghi_orbital_parameters_are_correct(self): res = reader.load(band_names) # check whether the data type of orbital_parameters is float - orbital_parameters = res[band_names[0]].attrs['orbital_parameters'] + orbital_parameters = res[band_names[0]].attrs["orbital_parameters"] for attr in 
orbital_parameters: assert isinstance(orbital_parameters[attr], float) - assert orbital_parameters['satellite_nominal_latitude'] == 0. - assert orbital_parameters['satellite_nominal_longitude'] == 133.0 - assert orbital_parameters['satellite_nominal_altitude'] == 3.5786E7 + assert orbital_parameters["satellite_nominal_latitude"] == 0. + assert orbital_parameters["satellite_nominal_longitude"] == 133.0 + assert orbital_parameters["satellite_nominal_altitude"] == 3.5786E7 @staticmethod def _check_keys_for_dsq(available_datasets, resolution_to_test): @@ -282,7 +282,7 @@ def _check_keys_for_dsq(available_datasets, resolution_to_test): for band_name in band_names: ds_q = make_dsq(name=band_name, resolution=resolution_to_test) res = get_key(ds_q, available_datasets, num_results=0, best=False) - if band_name < 'C07': + if band_name < "C07": assert len(res) == 2 else: assert len(res) == 3 @@ -295,21 +295,21 @@ def test_ghi_counts_calibration(self): ds_ids = [] band_names = CHANNELS_BY_RESOLUTION[2000] for band_name in band_names: - ds_ids.append(make_dsq(name=band_name, calibration='counts')) + ds_ids.append(make_dsq(name=band_name, calibration="counts")) res = reader.load(ds_ids) assert len(res) == 7 for band_name in band_names: assert res[band_name].shape == (2, 5) - assert res[band_name].attrs['calibration'] == "counts" + assert res[band_name].attrs["calibration"] == "counts" assert res[band_name].dtype == np.uint16 - assert res[band_name].attrs['units'] == "1" + assert res[band_name].attrs["units"] == "1" def test_ghi_geo(self): """Test loading data for angles.""" from satpy.tests.utils import make_dsq - reader = self._create_reader_for_resolutions('GEO') - band_name = 'solar_azimuth_angle' + reader = self._create_reader_for_resolutions("GEO") + band_name = "solar_azimuth_angle" ds_ids = [make_dsq(name=band_name)] res = reader.load(ds_ids) assert len(res) == 1 @@ -339,26 +339,26 @@ def test_ghi_for_one_resolution(self, resolution_to_test): assert len(res) == len(band_names) self._check_calibration_and_units(band_names, res) for band_name in band_names: - np.testing.assert_allclose(np.array(res[band_name].attrs['area'].area_extent), + np.testing.assert_allclose(np.array(res[band_name].attrs["area"].area_extent), np.array(AREA_EXTENTS_BY_RESOLUTION[resolution_to_test])) def _check_calibration_and_units(self, band_names, result): for band_name in band_names: - assert result[band_name].attrs['sensor'].islower() + assert result[band_name].attrs["sensor"].islower() assert result[band_name].shape == (2, 5) np.testing.assert_allclose(result[band_name].values, self.expected[band_name], equal_nan=True) self._check_units(band_name, result) @staticmethod def _check_units(band_name, result): - if band_name <= 'C06': - assert result[band_name].attrs['calibration'] == "reflectance" + if band_name <= "C06": + assert result[band_name].attrs["calibration"] == "reflectance" else: - assert result[band_name].attrs['calibration'] == 'brightness_temperature' - if band_name <= 'C06': - assert result[band_name].attrs['units'] == "%" + assert result[band_name].attrs["calibration"] == "brightness_temperature" + if band_name <= "C06": + assert result[band_name].attrs["units"] == "%" else: - assert result[band_name].attrs['units'] == "K" + assert result[band_name].attrs["units"] == "K" @staticmethod def _assert_which_channels_are_loaded(available_datasets, band_names, resolution_to_test): @@ -375,7 +375,7 @@ def _assert_which_channels_are_loaded(available_datasets, band_names, resolution ds_q = make_dsq(name=band_name, 
resolution=resolution_to_test) res = get_key(ds_q, available_datasets, num_results=0, best=False) - if band_name < 'C07': + if band_name < "C07": assert len(res) == 2 else: assert len(res) == 3 diff --git a/satpy/tests/reader_tests/test_ghrsst_l2.py b/satpy/tests/reader_tests/test_ghrsst_l2.py index e33cec467a..66c030e91d 100644 --- a/satpy/tests/reader_tests/test_ghrsst_l2.py +++ b/satpy/tests/reader_tests/test_ghrsst_l2.py @@ -39,35 +39,35 @@ def setup_method(self, tmp_path): self.lat_data = np.array(([43.43, 55.56, 61.25], [41.38, 50.28, 60.80])) self.lon = xr.DataArray( self.lon_data, - dims=('nj', 'ni'), - attrs={'standard_name': 'longitude', - 'units': 'degrees_east', + dims=("nj", "ni"), + attrs={"standard_name": "longitude", + "units": "degrees_east", } ) self.lat = xr.DataArray( self.lat_data, - dims=('nj', 'ni'), - attrs={'standard_name': 'latitude', - 'units': 'degrees_north', + dims=("nj", "ni"), + attrs={"standard_name": "latitude", + "units": "degrees_north", } ) self.sst = xr.DataArray( self.base_data, - dims=('nj', 'ni'), - attrs={'scale_factor': 0.01, 'add_offset': 273.15, - '_FillValue': -32768, 'units': 'kelvin', + dims=("nj", "ni"), + attrs={"scale_factor": 0.01, "add_offset": 273.15, + "_FillValue": -32768, "units": "kelvin", } ) self.fake_dataset = xr.Dataset( data_vars={ - 'sea_surface_temperature': self.sst, - 'longitude': self.lon, - 'latitude': self.lat, + "sea_surface_temperature": self.sst, + "longitude": self.lon, + "latitude": self.lat, }, attrs={ "start_time": "20220321T112640Z", "stop_time": "20220321T145711Z", - "platform": 'NOAA20', + "platform": "NOAA20", "sensor": "VIIRS", }, ) @@ -81,12 +81,12 @@ def _create_tarfile_with_testdata(self, mypath): slstrdir.mkdir(parents=True, exist_ok=True) tarfile_path = mypath / tarfile_fakename - ncfilename = slstrdir / 'L2P_GHRSST-SSTskin-202204131200.nc' + ncfilename = slstrdir / "L2P_GHRSST-SSTskin-202204131200.nc" self.fake_dataset.to_netcdf(os.fspath(ncfilename)) - xmlfile_path = slstrdir / 'xfdumanifest.xml' + xmlfile_path = slstrdir / "xfdumanifest.xml" xmlfile_path.touch() - with tarfile.open(name=tarfile_path, mode='w') as tar: + with tarfile.open(name=tarfile_path, mode="w") as tar: tar.add(os.fspath(ncfilename), arcname=Path(slstr_fakename) / ncfilename.name) tar.add(os.fspath(xmlfile_path), arcname=Path(slstr_fakename) / xmlfile_path.name) @@ -95,7 +95,7 @@ def _create_tarfile_with_testdata(self, mypath): def test_instantiate_single_netcdf_file(self, tmp_path): """Test initialization of file handlers - given a single netCDF file.""" filename_info = {} - tmp_filepath = tmp_path / 'fake_dataset.nc' + tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) @@ -110,29 +110,29 @@ def test_instantiate_tarfile(self, tmp_path): def test_get_dataset(self, tmp_path): """Test retrieval of datasets.""" filename_info = {} - tmp_filepath = tmp_path / 'fake_dataset.nc' + tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) - test.get_dataset('longitude', {'standard_name': 'longitude'}) - test.get_dataset('latitude', {'standard_name': 'latitude'}) - test.get_dataset('sea_surface_temperature', {'standard_name': 'sea_surface_temperature'}) + test.get_dataset("longitude", {"standard_name": "longitude"}) + test.get_dataset("latitude", {"standard_name": "latitude"}) + 
test.get_dataset("sea_surface_temperature", {"standard_name": "sea_surface_temperature"}) with pytest.raises(KeyError): - test.get_dataset('erroneous dataset', {'standard_name': 'erroneous dataset'}) + test.get_dataset("erroneous dataset", {"standard_name": "erroneous dataset"}) def test_get_sensor(self, tmp_path): """Test retrieval of the sensor name from the netCDF file.""" dt_valid = datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z - filename_info = {'field_type': 'NARSST', 'generating_centre': 'FRA_', - 'satid': 'NOAA20_', 'valid_time': dt_valid} + filename_info = {"field_type": "NARSST", "generating_centre": "FRA_", + "satid": "NOAA20_", "valid_time": dt_valid} - tmp_filepath = tmp_path / 'fake_dataset.nc' + tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) - assert test.sensor == 'viirs' + assert test.sensor == "viirs" def test_get_start_and_end_times(self, tmp_path): """Test retrieval of the sensor name from the netCDF file.""" @@ -140,10 +140,10 @@ def test_get_start_and_end_times(self, tmp_path): good_start_time = datetime(2022, 3, 21, 11, 26, 40) # 20220321T112640Z good_stop_time = datetime(2022, 3, 21, 14, 57, 11) # 20220321T145711Z - filename_info = {'field_type': 'NARSST', 'generating_centre': 'FRA_', - 'satid': 'NOAA20_', 'valid_time': dt_valid} + filename_info = {"field_type": "NARSST", "generating_centre": "FRA_", + "satid": "NOAA20_", "valid_time": dt_valid} - tmp_filepath = tmp_path / 'fake_dataset.nc' + tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) diff --git a/satpy/tests/reader_tests/test_glm_l2.py b/satpy/tests/reader_tests/test_glm_l2.py index 57d324f0b1..3744f2c964 100644 --- a/satpy/tests/reader_tests/test_glm_l2.py +++ b/satpy/tests/reader_tests/test_glm_l2.py @@ -33,72 +33,72 @@ def setup_fake_dataset(): fed = fed.astype(np.int16) fed = xr.DataArray( fed, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'scale_factor': 0.5, - 'add_offset': -1., - '_FillValue': 0, - 'units': 'Count per nominal 3136 microradian^2 pixel per 1.0 min', - 'grid_mapping': 'goes_imager_projection', - 'standard_name': 'flash_extent_density', - 'long_name': 'Flash extent density', + "scale_factor": 0.5, + "add_offset": -1., + "_FillValue": 0, + "units": "Count per nominal 3136 microradian^2 pixel per 1.0 min", + "grid_mapping": "goes_imager_projection", + "standard_name": "flash_extent_density", + "long_name": "Flash extent density", } ) dqf = xr.DataArray( fed.data.copy().astype(np.uint8), - dims=('y', 'x'), + dims=("y", "x"), attrs={ - '_FillValue': -1, - 'units': '1', - 'grid_mapping': 'goes_imager_projection', - 'standard_name': 'status_flag', - 'long_name': 'GLM data quality flags', - 'flag_meanings': "valid invalid", + "_FillValue": -1, + "units": "1", + "grid_mapping": "goes_imager_projection", + "standard_name": "status_flag", + "long_name": "GLM data quality flags", + "flag_meanings": "valid invalid", } ) # create a variable that won't be configured to test available_datasets not_configured = xr.DataArray( fed.data.copy(), - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'scale_factor': 0.5, - 'add_offset': -1., - '_FillValue': 0, - 'units': '1', - 'grid_mapping': 'goes_imager_projection', - 'standard_name': 'test', - 'long_name': 'Test', + "scale_factor": 0.5, + "add_offset": -1., + "_FillValue": 0, + "units": "1", + "grid_mapping": 
"goes_imager_projection", + "standard_name": "test", + "long_name": "Test", } ) x__ = xr.DataArray( range(5), - attrs={'scale_factor': 2., 'add_offset': -1.}, - dims=('x',), + attrs={"scale_factor": 2., "add_offset": -1.}, + dims=("x",), ) y__ = xr.DataArray( range(2), - attrs={'scale_factor': -2., 'add_offset': 1.}, - dims=('y',), + attrs={"scale_factor": -2., "add_offset": 1.}, + dims=("y",), ) proj = xr.DataArray( [], attrs={ - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'perspective_point_height': 1., - 'longitude_of_projection_origin': -90., - 'latitude_of_projection_origin': 0., - 'sweep_angle_axis': u'x' + "semi_major_axis": 1., + "semi_minor_axis": 1., + "perspective_point_height": 1., + "longitude_of_projection_origin": -90., + "latitude_of_projection_origin": 0., + "sweep_angle_axis": u"x" } ) fake_dataset = xr.Dataset( data_vars={ - 'flash_extent_density': fed, - 'not_configured': not_configured, - 'DQF': dqf, - 'x': x__, - 'y': y__, - 'goes_imager_projection': proj, + "flash_extent_density": fed, + "not_configured": not_configured, + "DQF": dqf, + "x": x__, + "y": y__, + "goes_imager_projection": proj, "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02) @@ -115,16 +115,16 @@ def setup_fake_dataset(): class TestGLML2FileHandler(unittest.TestCase): """Tests for the GLM L2 reader.""" - @mock.patch('satpy.readers.abi_base.xr') + @mock.patch("satpy.readers.abi_base.xr") def setUp(self, xr_): """Create a fake file handler to test.""" from satpy.readers.glm_l2 import NCGriddedGLML2 fake_dataset = setup_fake_dataset() xr_.open_dataset.return_value = fake_dataset - self.reader = NCGriddedGLML2('filename', - {'platform_shortname': 'G16', - 'scene_abbr': 'C', 'scan_mode': 'M3'}, - {'filetype': 'glm_l2_imagery'}) + self.reader = NCGriddedGLML2("filename", + {"platform_shortname": "G16", + "scene_abbr": "C", "scan_mode": "M3"}, + {"filetype": "glm_l2_imagery"}) def test_basic_attributes(self): """Test getting basic file attributes.""" @@ -137,64 +137,64 @@ def test_basic_attributes(self): def test_get_dataset(self): """Test the get_dataset method.""" from satpy.tests.utils import make_dataid - key = make_dataid(name='flash_extent_density') - res = self.reader.get_dataset(key, {'info': 'info'}) - exp = {'instrument_ID': None, - 'modifiers': (), - 'name': 'flash_extent_density', - 'orbital_parameters': {'projection_altitude': 1.0, - 'projection_latitude': 0.0, - 'projection_longitude': -90.0, + key = make_dataid(name="flash_extent_density") + res = self.reader.get_dataset(key, {"info": "info"}) + exp = {"instrument_ID": None, + "modifiers": (), + "name": "flash_extent_density", + "orbital_parameters": {"projection_altitude": 1.0, + "projection_latitude": 0.0, + "projection_longitude": -90.0, # 'satellite_nominal_altitude': 35786.02, - 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_longitude': -89.5}, - 'orbital_slot': None, - 'platform_name': 'GOES-16', - 'platform_shortname': 'G16', - 'production_site': None, - 'scan_mode': 'M3', - 'scene_abbr': 'C', - 'scene_id': None, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_longitude": -89.5}, + "orbital_slot": None, + "platform_name": "GOES-16", + "platform_shortname": "G16", + "production_site": None, + "scan_mode": "M3", + "scene_abbr": "C", + "scene_id": None, "spatial_resolution": "2km at nadir", - 'sensor': 'glm', - 'timeline_ID': None, - 'grid_mapping': 'goes_imager_projection', - 'standard_name': 'flash_extent_density', - 
'long_name': 'Flash extent density', - 'units': 'Count per nominal 3136 microradian^2 pixel per 1.0 min'} + "sensor": "glm", + "timeline_ID": None, + "grid_mapping": "goes_imager_projection", + "standard_name": "flash_extent_density", + "long_name": "Flash extent density", + "units": "Count per nominal 3136 microradian^2 pixel per 1.0 min"} self.assertDictEqual(res.attrs, exp) def test_get_dataset_dqf(self): """Test the get_dataset method with special DQF var.""" from satpy.tests.utils import make_dataid - key = make_dataid(name='DQF') - res = self.reader.get_dataset(key, {'info': 'info'}) - exp = {'instrument_ID': None, - 'modifiers': (), - 'name': 'DQF', - 'orbital_parameters': {'projection_altitude': 1.0, - 'projection_latitude': 0.0, - 'projection_longitude': -90.0, + key = make_dataid(name="DQF") + res = self.reader.get_dataset(key, {"info": "info"}) + exp = {"instrument_ID": None, + "modifiers": (), + "name": "DQF", + "orbital_parameters": {"projection_altitude": 1.0, + "projection_latitude": 0.0, + "projection_longitude": -90.0, # 'satellite_nominal_altitude': 35786.02, - 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_longitude': -89.5}, - 'orbital_slot': None, - 'platform_name': 'GOES-16', - 'platform_shortname': 'G16', - 'production_site': None, - 'scan_mode': 'M3', - 'scene_abbr': 'C', - 'scene_id': None, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_longitude": -89.5}, + "orbital_slot": None, + "platform_name": "GOES-16", + "platform_shortname": "G16", + "production_site": None, + "scan_mode": "M3", + "scene_abbr": "C", + "scene_id": None, "spatial_resolution": "2km at nadir", - 'sensor': 'glm', - 'timeline_ID': None, - 'grid_mapping': 'goes_imager_projection', - 'units': '1', - '_FillValue': -1, - 'standard_name': 'status_flag', - 'long_name': 'GLM data quality flags', - 'flag_meanings': "valid invalid"} + "sensor": "glm", + "timeline_ID": None, + "grid_mapping": "goes_imager_projection", + "units": "1", + "_FillValue": -1, + "standard_name": "status_flag", + "long_name": "GLM data quality flags", + "flag_meanings": "valid invalid"} self.assertDictEqual(res.attrs, exp) self.assertTrue(np.issubdtype(res.dtype, np.integer)) @@ -205,18 +205,18 @@ class TestGLML2Reader(unittest.TestCase): yaml_file = "glm_l2.yaml" - @mock.patch('satpy.readers.abi_base.xr') + @mock.patch("satpy.readers.abi_base.xr") def setUp(self, xr_): """Create a fake reader to test.""" from satpy._config import config_search_paths from satpy.readers import load_reader - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) fake_dataset = setup_fake_dataset() xr_.open_dataset.return_value = fake_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'OR_GLM-L2-GLMC-M3_G16_s20192862159000_e20192862200000_c20192862200350.nc', - 'CSPP_CG_GLM-L2-GLMC-M3_G16_s20192862159000_e20192862200000_c20192862200350.nc', + "OR_GLM-L2-GLMC-M3_G16_s20192862159000_e20192862200000_c20192862200350.nc", + "CSPP_CG_GLM-L2-GLMC-M3_G16_s20192862159000_e20192862200000_c20192862200350.nc", ]) self.assertEqual(len(loadables), 2) r.create_filehandlers(loadables) @@ -230,7 +230,7 @@ def test_available_datasets(self): # flash_extent_density, DQF, and not_configured are available in our tests self.assertEqual(len(available_datasets), 3) for ds_id in available_datasets: - self.assertEqual(ds_id['resolution'], 2000) + self.assertEqual(ds_id["resolution"], 2000) # make sure 
not_configured was discovered - names = [dataid['name'] for dataid in available_datasets] - assert 'not_configured' in names + names = [dataid["name"] for dataid in available_datasets] + assert "not_configured" in names diff --git a/satpy/tests/reader_tests/test_goes_imager_hrit.py b/satpy/tests/reader_tests/test_goes_imager_hrit.py index 02b9632335..4339bef47d 100644 --- a/satpy/tests/reader_tests/test_goes_imager_hrit.py +++ b/satpy/tests/reader_tests/test_goes_imager_hrit.py @@ -51,7 +51,7 @@ def test_fun(self): (100.1640625, b"\x42\x64\x2a\x00")] for expected, str_val in test_data: - val = np.frombuffer(str_val, dtype='>i4') + val = np.frombuffer(str_val, dtype=">i4") self.assertEqual(expected, make_gvar_float(val)) @@ -66,63 +66,63 @@ def test_fun(self): self.assertEqual(make_sgs_time(tcds[0]), expected) -test_pro = {'TISTR': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TCurr': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TCLMT': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'SubSatLongitude': 100.1640625, - 'TCHED': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TLTRL': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TIPFS': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TISPC': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'ReferenceLatitude': 0.0, - 'TIIRT': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TLHED': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TIVIT': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'SubSatLatitude': 0.0, - 'TIECL': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'ReferenceLongitude': 100.1640625, - 'TCTRL': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TLRAN': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TINFS': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TIBBC': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TIONA': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'ReferenceDistance': 100.1640625, - 'SatelliteID': 15} +test_pro = {"TISTR": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TCurr": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TCLMT": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "SubSatLongitude": 100.1640625, + "TCHED": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TLTRL": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TIPFS": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TISPC": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "ReferenceLatitude": 0.0, + "TIIRT": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TLHED": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TIVIT": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "SubSatLatitude": 0.0, + "TIECL": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "ReferenceLongitude": 100.1640625, + "TCTRL": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TLRAN": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TINFS": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TIBBC": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TIONA": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "ReferenceDistance": 100.1640625, + "SatelliteID": 15} class TestHRITGOESPrologueFileHandler(unittest.TestCase): """Test the HRITFileHandler.""" - @mock.patch('satpy.readers.goes_imager_hrit.recarray2dict') - @mock.patch('satpy.readers.goes_imager_hrit.np.fromfile') - @mock.patch('satpy.readers.goes_imager_hrit.HRITFileHandler.__init__') + @mock.patch("satpy.readers.goes_imager_hrit.recarray2dict") + 
@mock.patch("satpy.readers.goes_imager_hrit.np.fromfile") + @mock.patch("satpy.readers.goes_imager_hrit.HRITFileHandler.__init__") def test_init(self, new_fh_init, fromfile, recarray2dict): """Setup the hrit file handler for testing.""" recarray2dict.side_effect = lambda x: x[0] - new_fh_init.return_value.filename = 'filename' - HRITGOESPrologueFileHandler.filename = 'filename' - HRITGOESPrologueFileHandler.mda = {'total_header_length': 1} + new_fh_init.return_value.filename = "filename" + HRITGOESPrologueFileHandler.filename = "filename" + HRITGOESPrologueFileHandler.mda = {"total_header_length": 1} ret = {} the_time = np.array([(32, 24, 18, 146, 19, 50, 121, 153)], dtype=sgs_time)[0] - for key in ['TCurr', 'TCHED', 'TCTRL', 'TLHED', 'TLTRL', 'TIPFS', - 'TINFS', 'TISPC', 'TIECL', 'TIBBC', 'TISTR', 'TLRAN', - 'TIIRT', 'TIVIT', 'TCLMT', 'TIONA']: + for key in ["TCurr", "TCHED", "TCTRL", "TLHED", "TLTRL", "TIPFS", + "TINFS", "TISPC", "TIECL", "TIBBC", "TISTR", "TLRAN", + "TIIRT", "TIVIT", "TCLMT", "TIONA"]: ret[key] = the_time - ret['SubSatLatitude'] = np.frombuffer(b"\x00\x00\x00\x00", dtype='>i4')[0] - ret['ReferenceLatitude'] = np.frombuffer(b"\x00\x00\x00\x00", dtype='>i4')[0] - ret['SubSatLongitude'] = np.frombuffer(b"\x42\x64\x2a\x00", dtype='>i4')[0] - ret['ReferenceLongitude'] = np.frombuffer(b"\x42\x64\x2a\x00", dtype='>i4')[0] - ret['ReferenceDistance'] = np.frombuffer(b"\x42\x64\x2a\x00", dtype='>i4')[0] - ret['SatelliteID'] = 15 + ret["SubSatLatitude"] = np.frombuffer(b"\x00\x00\x00\x00", dtype=">i4")[0] + ret["ReferenceLatitude"] = np.frombuffer(b"\x00\x00\x00\x00", dtype=">i4")[0] + ret["SubSatLongitude"] = np.frombuffer(b"\x42\x64\x2a\x00", dtype=">i4")[0] + ret["ReferenceLongitude"] = np.frombuffer(b"\x42\x64\x2a\x00", dtype=">i4")[0] + ret["ReferenceDistance"] = np.frombuffer(b"\x42\x64\x2a\x00", dtype=">i4")[0] + ret["SatelliteID"] = 15 fromfile.return_value = [ret] m = mock.mock_open() - with mock.patch('satpy.readers.goes_imager_hrit.open', m, create=True) as newopen: + with mock.patch("satpy.readers.goes_imager_hrit.open", m, create=True) as newopen: newopen.return_value.__enter__.return_value.seek.return_value = 1 self.reader = HRITGOESPrologueFileHandler( - 'filename', {'platform_shortname': 'GOES15', - 'start_time': datetime.datetime(2016, 3, 3, 0, 0), - 'service': 'test_service'}, - {'filetype': 'info'}) + "filename", {"platform_shortname": "GOES15", + "start_time": datetime.datetime(2016, 3, 3, 0, 0), + "service": "test_service"}, + {"filetype": "info"}) self.assertEqual(test_pro, self.reader.prologue) @@ -130,31 +130,31 @@ def test_init(self, new_fh_init, fromfile, recarray2dict): class TestHRITGOESFileHandler(unittest.TestCase): """Test the HRITFileHandler.""" - @mock.patch('satpy.readers.goes_imager_hrit.HRITFileHandler.__init__') + @mock.patch("satpy.readers.goes_imager_hrit.HRITFileHandler.__init__") def setUp(self, new_fh_init): """Set up the hrit file handler for testing.""" - blob = '$HALFTONE:=10\r\n_NAME:=albedo\r\n_UNIT:=percent\r\n0:=0.0\r\n1023:=100.0\r\n'.encode() - mda = {'projection_parameters': {'SSP_longitude': -123.0}, - 'spectral_channel_id': 1, - 'image_data_function': blob} - HRITGOESFileHandler.filename = 'filename' + blob = "$HALFTONE:=10\r\n_NAME:=albedo\r\n_UNIT:=percent\r\n0:=0.0\r\n1023:=100.0\r\n".encode() + mda = {"projection_parameters": {"SSP_longitude": -123.0}, + "spectral_channel_id": 1, + "image_data_function": blob} + HRITGOESFileHandler.filename = "filename" HRITGOESFileHandler.mda = mda self.prologue = mock.MagicMock() 
self.prologue.prologue = test_pro - self.reader = HRITGOESFileHandler('filename', {}, {}, self.prologue) + self.reader = HRITGOESFileHandler("filename", {}, {}, self.prologue) def test_init(self): """Test the init.""" - blob = '$HALFTONE:=10\r\n_NAME:=albedo\r\n_UNIT:=percent\r\n0:=0.0\r\n1023:=100.0\r\n'.encode() - mda = {'spectral_channel_id': 1, - 'projection_parameters': {'SSP_longitude': 100.1640625}, - 'image_data_function': blob} + blob = "$HALFTONE:=10\r\n_NAME:=albedo\r\n_UNIT:=percent\r\n0:=0.0\r\n1023:=100.0\r\n".encode() + mda = {"spectral_channel_id": 1, + "projection_parameters": {"SSP_longitude": 100.1640625}, + "image_data_function": blob} self.assertEqual(self.reader.mda, mda) - @mock.patch('satpy.readers.goes_imager_hrit.HRITFileHandler.get_dataset') + @mock.patch("satpy.readers.goes_imager_hrit.HRITFileHandler.get_dataset") def test_get_dataset(self, base_get_dataset): """Test get_dataset.""" - key = make_dataid(name="CH1", calibration='reflectance') + key = make_dataid(name="CH1", calibration="reflectance") base_get_dataset.return_value = DataArray(np.arange(25).reshape(5, 5)) res = self.reader.get_dataset(key, {}) expected = np.array([[np.nan, 0.097752, 0.195503, 0.293255, 0.391007], @@ -164,36 +164,36 @@ def test_get_dataset(self, base_get_dataset): [1.955034, 2.052786, 2.150538, 2.248289, 2.346041]]) self.assertTrue(np.allclose(res.values, expected, equal_nan=True)) - self.assertEqual(res.attrs['units'], '%') - self.assertDictEqual(res.attrs['orbital_parameters'], - {'projection_longitude': self.reader.mda['projection_parameters']['SSP_longitude'], - 'projection_latitude': 0.0, - 'projection_altitude': ALTITUDE}) + self.assertEqual(res.attrs["units"], "%") + self.assertDictEqual(res.attrs["orbital_parameters"], + {"projection_longitude": self.reader.mda["projection_parameters"]["SSP_longitude"], + "projection_latitude": 0.0, + "projection_altitude": ALTITUDE}) def test_get_area_def(self): """Test getting the area definition.""" self.reader.mda.update({ - 'cfac': 10216334, - 'lfac': 10216334, - 'coff': 1408.0, - 'loff': 944.0, - 'number_of_lines': 464, - 'number_of_columns': 2816 + "cfac": 10216334, + "lfac": 10216334, + "coff": 1408.0, + "loff": 944.0, + "number_of_lines": 464, + "number_of_columns": 2816 }) - dsid = make_dataid(name="CH1", calibration='reflectance', + dsid = make_dataid(name="CH1", calibration="reflectance", resolution=3000) area = self.reader.get_area_def(dsid) a, b = proj4_radius_parameters(area.proj_dict) assert a == EQUATOR_RADIUS assert b == POLE_RADIUS - assert area.proj_dict['h'] == ALTITUDE - assert area.proj_dict['lon_0'] == 100.1640625 - assert area.proj_dict['proj'] == 'geos' - assert area.proj_dict['units'] == 'm' + assert area.proj_dict["h"] == ALTITUDE + assert area.proj_dict["lon_0"] == 100.1640625 + assert area.proj_dict["proj"] == "geos" + assert area.proj_dict["units"] == "m" assert area.width == 2816 assert area.height == 464 - assert area.area_id == 'goes-15_goes_imager_fd_3km' + assert area.area_id == "goes-15_goes_imager_fd_3km" area_extent_exp = (-5639254.900260435, 1925159.4881528523, 5643261.475678028, 3784210.48191544) np.testing.assert_allclose(area.area_extent, area_extent_exp) diff --git a/satpy/tests/reader_tests/test_goes_imager_nc_eum.py b/satpy/tests/reader_tests/test_goes_imager_nc_eum.py index d72271f623..bac840a55b 100644 --- a/satpy/tests/reader_tests/test_goes_imager_nc_eum.py +++ b/satpy/tests/reader_tests/test_goes_imager_nc_eum.py @@ -30,12 +30,12 @@ class GOESNCEUMFileHandlerRadianceTest(unittest.TestCase): 
longMessage = True - @mock.patch('satpy.readers.goes_imager_nc.xr') + @mock.patch("satpy.readers.goes_imager_nc.xr") def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESEUMNCFileHandler - self.coefs = CALIB_COEFS['GOES-15'] + self.coefs = CALIB_COEFS["GOES-15"] self.all_coefs = CALIB_COEFS self.channels = sorted(self.coefs.keys()) self.ir_channels = sorted([ch for ch in self.channels @@ -51,19 +51,19 @@ def setUp(self, xr_): nrows, ncols) # Includes invalid values to be masked xr_.open_dataset.return_value = xr.Dataset( - {'data': xr.DataArray(data=self.radiance, dims=('time', 'yc', 'xc')), - 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'), - dims=('time',)), - 'bands': xr.DataArray(data=np.array([1]))}, - attrs={'Satellite Sensor': 'G-15'}) + {"data": xr.DataArray(data=self.radiance, dims=("time", "yc", "xc")), + "time": xr.DataArray(data=np.array([0], dtype="datetime64[ms]"), + dims=("time",)), + "bands": xr.DataArray(data=np.array([1]))}, + attrs={"Satellite Sensor": "G-15"}) geo_data = xr.Dataset( - {'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), - 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc'))}, - attrs={'Satellite Sensor': 'G-15'}) + {"lon": xr.DataArray(data=self.lon, dims=("yc", "xc")), + "lat": xr.DataArray(data=self.lat, dims=("yc", "xc"))}, + attrs={"Satellite Sensor": "G-15"}) # Instantiate reader using the mocked open_dataset() method - self.reader = GOESEUMNCFileHandler(filename='dummy', filename_info={}, + self.reader = GOESEUMNCFileHandler(filename="dummy", filename_info={}, filetype_info={}, geo_data=geo_data) def test_get_dataset_radiance(self): @@ -71,20 +71,20 @@ def test_get_dataset_radiance(self): for ch in self.channels: if not is_vis_channel(ch): radiance = self.reader.get_dataset( - key=make_dataid(name=ch, calibration='radiance'), info={}) + key=make_dataid(name=ch, calibration="radiance"), info={}) # ... 
this only compares the valid (unmasked) elements self.assertTrue(np.all(self.radiance == radiance.to_masked_array()), - msg='get_dataset() returns invalid radiance for ' - 'channel {}'.format(ch)) + msg="get_dataset() returns invalid radiance for " + "channel {}".format(ch)) def test_calibrate(self): """Test whether the correct calibration methods are called.""" for ch in self.channels: if not is_vis_channel(ch): - calibs = {'brightness_temperature': '_calibrate_ir'} + calibs = {"brightness_temperature": "_calibrate_ir"} for calib, method in calibs.items(): with mock.patch.object(self.reader, method) as target_func: - self.reader.calibrate(data=self.reader.nc['data'], + self.reader.calibrate(data=self.reader.nc["data"], calibration=calib, channel=ch) target_func.assert_called() @@ -107,11 +107,11 @@ def test_get_sector(self): (123, 456): UNKNOWN_SECTOR } for (nlines, ncols), sector_ref in shapes.items(): - for channel in ('00_7', '10_7'): + for channel in ("00_7", "10_7"): sector = self.reader._get_sector(channel=channel, nlines=nlines, ncols=ncols) self.assertEqual(sector, sector_ref, - msg='Incorrect sector identification') + msg="Incorrect sector identification") class GOESNCEUMFileHandlerReflectanceTest(unittest.TestCase): @@ -119,12 +119,12 @@ class GOESNCEUMFileHandlerReflectanceTest(unittest.TestCase): longMessage = True - @mock.patch('satpy.readers.goes_imager_nc.xr') + @mock.patch("satpy.readers.goes_imager_nc.xr") def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESEUMNCFileHandler - self.coefs = CALIB_COEFS['GOES-15'] + self.coefs = CALIB_COEFS["GOES-15"] self.all_coefs = CALIB_COEFS self.channels = sorted(self.coefs.keys()) self.ir_channels = sorted([ch for ch in self.channels @@ -140,19 +140,19 @@ def setUp(self, xr_): nrows, ncols) # Includes invalid values to be masked xr_.open_dataset.return_value = xr.Dataset( - {'data': xr.DataArray(data=self.reflectance, dims=('time', 'yc', 'xc')), - 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'), - dims=('time',)), - 'bands': xr.DataArray(data=np.array([1]))}, - attrs={'Satellite Sensor': 'G-15'}) + {"data": xr.DataArray(data=self.reflectance, dims=("time", "yc", "xc")), + "time": xr.DataArray(data=np.array([0], dtype="datetime64[ms]"), + dims=("time",)), + "bands": xr.DataArray(data=np.array([1]))}, + attrs={"Satellite Sensor": "G-15"}) geo_data = xr.Dataset( - {'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), - 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc'))}, - attrs={'Satellite Sensor': 'G-15'}) + {"lon": xr.DataArray(data=self.lon, dims=("yc", "xc")), + "lat": xr.DataArray(data=self.lat, dims=("yc", "xc"))}, + attrs={"Satellite Sensor": "G-15"}) # Instantiate reader using the mocked open_dataset() method - self.reader = GOESEUMNCFileHandler(filename='dummy', filename_info={}, + self.reader = GOESEUMNCFileHandler(filename="dummy", filename_info={}, filetype_info={}, geo_data=geo_data) def test_get_dataset_reflectance(self): @@ -160,8 +160,8 @@ def test_get_dataset_reflectance(self): for ch in self.channels: if is_vis_channel(ch): refl = self.reader.get_dataset( - key=make_dataid(name=ch, calibration='reflectance'), info={}) + key=make_dataid(name=ch, calibration="reflectance"), info={}) # ... 
this only compares the valid (unmasked) elements self.assertTrue(np.all(self.reflectance == refl.to_masked_array()), - msg='get_dataset() returns invalid reflectance for ' - 'channel {}'.format(ch)) + msg="get_dataset() returns invalid reflectance for " + "channel {}".format(ch)) diff --git a/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py b/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py index 6369568d1f..1238594d19 100644 --- a/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py +++ b/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py @@ -39,14 +39,14 @@ class GOESNCBaseFileHandlerTest(unittest.TestCase): longMessage = True - @mock.patch('satpy.readers.goes_imager_nc.xr') - @mock.patch.multiple('satpy.readers.goes_imager_nc.GOESNCBaseFileHandler', + @mock.patch("satpy.readers.goes_imager_nc.xr") + @mock.patch.multiple("satpy.readers.goes_imager_nc.GOESNCBaseFileHandler", _get_sector=mock.MagicMock()) def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESNCBaseFileHandler - self.coefs = CALIB_COEFS['GOES-15'] + self.coefs = CALIB_COEFS["GOES-15"] # Mock file access to return a fake dataset. self.time = datetime.datetime(2018, 8, 16, 16, 7) @@ -54,27 +54,27 @@ def setUp(self, xr_): self.dummy2d = np.zeros((2, 2)) self.band = 1 self.nc = xr.Dataset( - {'data': xr.DataArray(self.dummy3d, dims=('time', 'yc', 'xc')), - 'lon': xr.DataArray(data=self.dummy2d, dims=('yc', 'xc')), - 'lat': xr.DataArray(data=self.dummy2d, dims=('yc', 'xc')), - 'time': xr.DataArray(data=np.array([self.time], - dtype='datetime64[ms]'), - dims=('time',)), - 'bands': xr.DataArray(data=np.array([self.band]))}, - attrs={'Satellite Sensor': 'G-15'}) + {"data": xr.DataArray(self.dummy3d, dims=("time", "yc", "xc")), + "lon": xr.DataArray(data=self.dummy2d, dims=("yc", "xc")), + "lat": xr.DataArray(data=self.dummy2d, dims=("yc", "xc")), + "time": xr.DataArray(data=np.array([self.time], + dtype="datetime64[ms]"), + dims=("time",)), + "bands": xr.DataArray(data=np.array([self.band]))}, + attrs={"Satellite Sensor": "G-15"}) xr_.open_dataset.return_value = self.nc # Instantiate reader using the mocked open_dataset() method. Also, make # the reader believe all abstract methods have been implemented. 
- self.reader = GOESNCBaseFileHandler(filename='dummy', filename_info={}, + self.reader = GOESNCBaseFileHandler(filename="dummy", filename_info={}, filetype_info={}) def test_init(self): """Tests reader initialization.""" self.assertEqual(self.reader.nlines, self.dummy2d.shape[0]) self.assertEqual(self.reader.ncols, self.dummy2d.shape[1]) - self.assertEqual(self.reader.platform_name, 'GOES-15') - self.assertEqual(self.reader.platform_shortname, 'goes15') + self.assertEqual(self.reader.platform_name, "GOES-15") + self.assertEqual(self.reader.platform_shortname, "goes15") self.assertEqual(self.reader.gvar_channel, self.band) self.assertIsInstance(self.reader.geo_data, xr.Dataset) @@ -90,13 +90,13 @@ def test_get_nadir_pixel(self): nadir_row, nadir_col = self.reader._get_nadir_pixel( earth_mask=earth_mask, sector=FULL_DISC) self.assertEqual((nadir_row, nadir_col), (2, 1), - msg='Incorrect nadir pixel') + msg="Incorrect nadir pixel") def test_viscounts2radiance(self): """Test conversion from VIS counts to radiance.""" # Reference data is for detector #1 - slope = self.coefs['00_7']['slope'][0] - offset = self.coefs['00_7']['offset'][0] + slope = self.coefs["00_7"]["slope"][0] + offset = self.coefs["00_7"]["offset"][0] counts = xr.DataArray([0, 100, 200, 500, 1000, 1023]) rad_expected = xr.DataArray( [0., 41.54896, 100.06862, @@ -104,8 +104,8 @@ def test_viscounts2radiance(self): rad = self.reader._viscounts2radiance(counts=counts, slope=slope, offset=offset) self.assertTrue(np.allclose(rad.data, rad_expected.data, atol=1E-6), - msg='Incorrect conversion from VIS counts to ' - 'radiance') + msg="Incorrect conversion from VIS counts to " + "radiance") def test_ircounts2radiance(self): """Test conversion from IR counts to radiance.""" @@ -115,10 +115,10 @@ def test_ircounts2radiance(self): # Reference Radiance from NOAA lookup tables (same for detectors 1 and # 2, see [IR]) rad_expected = { - '03_9': np.array([0, 0.140, 1.899, 4.098, 4.199]), - '06_5': np.array([0, 1.825, 12.124, 24.998, 25.590]), - '10_7': np.array([0, 16.126, 92.630, 188.259, 192.658]), - '13_3': np.array([0, 15.084, 87.421, 177.842, 182.001]) + "03_9": np.array([0, 0.140, 1.899, 4.098, 4.199]), + "06_5": np.array([0, 1.825, 12.124, 24.998, 25.590]), + "10_7": np.array([0, 16.126, 92.630, 188.259, 192.658]), + "13_3": np.array([0, 15.084, 87.421, 177.842, 182.001]) } # The input counts are exact, but the accuracy of the output radiance is @@ -128,60 +128,60 @@ def test_ircounts2radiance(self): for ch in sorted(rad_expected.keys()): coefs = self.coefs[ch] rad = self.reader._ircounts2radiance( - counts=counts, scale=coefs['scale'], offset=coefs['offset']) + counts=counts, scale=coefs["scale"], offset=coefs["offset"]) self.assertTrue(np.allclose(rad.data, rad_expected[ch], atol=atol), - msg='Incorrect conversion from IR counts to ' - 'radiance in channel {}'.format(ch)) + msg="Incorrect conversion from IR counts to " + "radiance in channel {}".format(ch)) def test_calibrate_vis(self): """Test VIS calibration.""" rad = xr.DataArray([0, 1, 10, 100, 500]) refl_expected = xr.DataArray([0., 0.188852, 1.88852, 18.8852, 94.426]) refl = self.reader._calibrate_vis(radiance=rad, - k=self.coefs['00_7']['k']) + k=self.coefs["00_7"]["k"]) self.assertTrue(np.allclose(refl.data, refl_expected.data, atol=1E-6), - msg='Incorrect conversion from radiance to ' - 'reflectance') + msg="Incorrect conversion from radiance to " + "reflectance") def test_calibrate_ir(self): """Test IR calibration.""" # Test radiance values and corresponding BT from NOAA 
lookup tables # rev. H (see [IR]). rad = { - '03_9': xr.DataArray([0, 0.1, 2, 3.997, 4.199]), - '06_5': xr.DataArray([0, 0.821, 12.201, 25.590, 100]), - '10_7': xr.DataArray([0, 11.727, 101.810, 189.407, 192.658]), - '13_3': xr.DataArray([0, 22.679, 90.133, 182.001, 500]) + "03_9": xr.DataArray([0, 0.1, 2, 3.997, 4.199]), + "06_5": xr.DataArray([0, 0.821, 12.201, 25.590, 100]), + "10_7": xr.DataArray([0, 11.727, 101.810, 189.407, 192.658]), + "13_3": xr.DataArray([0, 22.679, 90.133, 182.001, 500]) } bt_expected = { - '03_9': np.array([[np.nan, 253.213, 319.451, 339.983, np.nan], + "03_9": np.array([[np.nan, 253.213, 319.451, 339.983, np.nan], [np.nan, 253.213, 319.451, 339.983, np.nan]]), - '06_5': np.array([[np.nan, 200.291, 267.860, 294.988, np.nan], + "06_5": np.array([[np.nan, 200.291, 267.860, 294.988, np.nan], [np.nan, 200.308, 267.879, 295.008, np.nan]]), - '10_7': np.array([[np.nan, 200.105, 294.437, 339.960, np.nan], + "10_7": np.array([[np.nan, 200.105, 294.437, 339.960, np.nan], [np.nan, 200.097, 294.429, 339.953, np.nan]]), - '13_3': np.array([[np.nan, 200.006, 267.517, 321.986, np.nan], + "13_3": np.array([[np.nan, 200.006, 267.517, 321.986, np.nan], [np.nan, 200.014, 267.524, 321.990, np.nan]]) } # first row is for detector 1, second for detector 2. # The accuracy of the input radiance is limited to 3 digits so that # the results differ slightly. - atol = {'03_9': 0.04, '06_5': 0.03, '10_7': 0.01, '13_3': 0.01} + atol = {"03_9": 0.04, "06_5": 0.03, "10_7": 0.01, "13_3": 0.01} for ch in sorted(rad.keys()): coefs = self.coefs[ch] for det in [0, 1]: bt = self.reader._calibrate_ir(radiance=rad[ch], - coefs={'a': coefs['a'][det], - 'b': coefs['b'][det], - 'n': coefs['n'][det], - 'btmin': coefs['btmin'], - 'btmax': coefs['btmax']}) + coefs={"a": coefs["a"][det], + "b": coefs["b"][det], + "n": coefs["n"][det], + "btmin": coefs["btmin"], + "btmax": coefs["btmax"]}) self.assertTrue( np.allclose(bt.data, bt_expected[ch][det], equal_nan=True, atol=atol[ch]), - msg='Incorrect conversion from radiance to brightness ' - 'temperature in channel {} detector {}'.format(ch, det)) + msg="Incorrect conversion from radiance to brightness " + "temperature in channel {} detector {}".format(ch, det)) def test_start_time(self): """Test dataset start time stamp.""" @@ -252,13 +252,13 @@ def dataset(self, lons_lats, channel_id): bands = xr.DataArray([channel_id], dims="bands") return xr.Dataset( { - 'data': data, - 'lon': lon, - 'lat': lat, - 'time': time, - 'bands': bands, + "data": data, + "lon": lon, + "lat": lat, + "time": time, + "bands": bands, }, - attrs={'Satellite Sensor': 'G-15'} + attrs={"Satellite Sensor": "G-15"} ) @pytest.fixture @@ -290,16 +290,16 @@ def geometry(self, channel_id, yaw_flip): def expected(self, geometry, earth_mask, yaw_flip): """Define expected metadata.""" proj_dict = { - 'a': '6378169', - 'h': '35785831', - 'lon_0': '0', - 'no_defs': 'None', - 'proj': 'geos', - 'rf': '295.488065897001', - 'type': 'crs', - 'units': 'm', - 'x_0': '0', - 'y_0': '0' + "a": "6378169", + "h": "35785831", + "lon_0": "0", + "no_defs": "None", + "proj": "geos", + "rf": "295.488065897001", + "type": "crs", + "units": "m", + "x_0": "0", + "y_0": "0" } area = AreaDefinition( area_id="goes_geos_uniform", @@ -329,7 +329,7 @@ def mocked_file_handler(self, dataset): GOESNCFileHandler.ir_sectors[(3, 4)] = FULL_DISC GOESNCFileHandler.yaw_flip_sampling_distance = 1 return GOESNCFileHandler( - filename='dummy', + filename="dummy", filename_info={}, filetype_info={}, ) @@ -351,12 +351,12 @@ class 
GOESNCFileHandlerTest(unittest.TestCase): longMessage = True - @mock.patch('satpy.readers.goes_imager_nc.xr') + @mock.patch("satpy.readers.goes_imager_nc.xr") def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESNCFileHandler - self.coefs = CALIB_COEFS['GOES-15'] + self.coefs = CALIB_COEFS["GOES-15"] self.all_coefs = CALIB_COEFS self.channels = sorted(self.coefs.keys()) self.ir_channels = sorted([ch for ch in self.channels @@ -374,58 +374,58 @@ def setUp(self, xr_): nrows, ncols) # Includes invalid values to be masked xr_.open_dataset.return_value = xr.Dataset( - {'data': xr.DataArray(data=self.counts, dims=('time', 'yc', 'xc')), - 'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), - 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc')), - 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'), - dims=('time',)), - 'bands': xr.DataArray(data=np.array([1]))}, - attrs={'Satellite Sensor': 'G-15'}) + {"data": xr.DataArray(data=self.counts, dims=("time", "yc", "xc")), + "lon": xr.DataArray(data=self.lon, dims=("yc", "xc")), + "lat": xr.DataArray(data=self.lat, dims=("yc", "xc")), + "time": xr.DataArray(data=np.array([0], dtype="datetime64[ms]"), + dims=("time",)), + "bands": xr.DataArray(data=np.array([1]))}, + attrs={"Satellite Sensor": "G-15"}) # Instantiate reader using the mocked open_dataset() method - self.reader = GOESNCFileHandler(filename='dummy', filename_info={}, + self.reader = GOESNCFileHandler(filename="dummy", filename_info={}, filetype_info={}) def test_get_dataset_coords(self): """Test whether coordinates returned by get_dataset() are correct.""" - lon = self.reader.get_dataset(key=make_dataid(name='longitude'), + lon = self.reader.get_dataset(key=make_dataid(name="longitude"), info={}) - lat = self.reader.get_dataset(key=make_dataid(name='latitude'), + lat = self.reader.get_dataset(key=make_dataid(name="latitude"), info={}) # ... 
this only compares the valid (unmasked) elements self.assertTrue(np.all(lat.to_masked_array() == self.lat), - msg='get_dataset() returns invalid latitude') + msg="get_dataset() returns invalid latitude") self.assertTrue(np.all(lon.to_masked_array() == self.lon), - msg='get_dataset() returns invalid longitude') + msg="get_dataset() returns invalid longitude") def test_get_dataset_counts(self): """Test whether counts returned by get_dataset() are correct.""" from satpy.readers.goes_imager_nc import ALTITUDE, UNKNOWN_SECTOR - self.reader.meta.update({'lon0': -75.0, - 'lat0': 0.0, - 'sector': UNKNOWN_SECTOR, - 'nadir_row': 1, - 'nadir_col': 2, - 'area_def_uni': 'some_area'}) - attrs_exp = {'orbital_parameters': {'projection_longitude': -75.0, - 'projection_latitude': 0.0, - 'projection_altitude': ALTITUDE, - 'yaw_flip': True}, - 'platform_name': 'GOES-15', - 'sensor': 'goes_imager', - 'sector': UNKNOWN_SECTOR, - 'nadir_row': 1, - 'nadir_col': 2, - 'area_def_uniform_sampling': 'some_area'} + self.reader.meta.update({"lon0": -75.0, + "lat0": 0.0, + "sector": UNKNOWN_SECTOR, + "nadir_row": 1, + "nadir_col": 2, + "area_def_uni": "some_area"}) + attrs_exp = {"orbital_parameters": {"projection_longitude": -75.0, + "projection_latitude": 0.0, + "projection_altitude": ALTITUDE, + "yaw_flip": True}, + "platform_name": "GOES-15", + "sensor": "goes_imager", + "sector": UNKNOWN_SECTOR, + "nadir_row": 1, + "nadir_col": 2, + "area_def_uniform_sampling": "some_area"} for ch in self.channels: counts = self.reader.get_dataset( - key=make_dataid(name=ch, calibration='counts'), info={}) + key=make_dataid(name=ch, calibration="counts"), info={}) # ... this only compares the valid (unmasked) elements self.assertTrue(np.all(self.counts/32. == counts.to_masked_array()), - msg='get_dataset() returns invalid counts for ' - 'channel {}'.format(ch)) + msg="get_dataset() returns invalid counts for " + "channel {}".format(ch)) # Check attributes self.assertDictEqual(counts.attrs, attrs_exp) @@ -434,12 +434,12 @@ def test_get_dataset_masks(self): """Test whether data and coordinates are masked consistently.""" # Requires that no element has been masked due to invalid # radiance/reflectance/BT (see setUp()). 
- lon = self.reader.get_dataset(key=make_dataid(name='longitude'), + lon = self.reader.get_dataset(key=make_dataid(name="longitude"), info={}) lon_mask = lon.to_masked_array().mask for ch in self.channels: - for calib in ('counts', 'radiance', 'reflectance', - 'brightness_temperature'): + for calib in ("counts", "radiance", "reflectance", + "brightness_temperature"): try: data = self.reader.get_dataset( key=make_dataid(name=ch, calibration=calib), info={}) @@ -447,41 +447,41 @@ def test_get_dataset_masks(self): continue data_mask = data.to_masked_array().mask self.assertTrue(np.all(data_mask == lon_mask), - msg='get_dataset() returns inconsistently ' - 'masked {} in channel {}'.format(calib, ch)) + msg="get_dataset() returns inconsistently " + "masked {} in channel {}".format(calib, ch)) def test_get_dataset_invalid(self): """Test handling of invalid calibrations.""" # VIS -> BT - args = dict(key=make_dataid(name='00_7', - calibration='brightness_temperature'), + args = dict(key=make_dataid(name="00_7", + calibration="brightness_temperature"), info={}) self.assertRaises(ValueError, self.reader.get_dataset, **args) # IR -> Reflectance - args = dict(key=make_dataid(name='10_7', - calibration='reflectance'), + args = dict(key=make_dataid(name="10_7", + calibration="reflectance"), info={}) self.assertRaises(ValueError, self.reader.get_dataset, **args) # Unsupported calibration with pytest.raises(ValueError): - args = dict(key=make_dataid(name='10_7', - calibration='invalid'), + args = dict(key=make_dataid(name="10_7", + calibration="invalid"), info={}) def test_calibrate(self): """Test whether the correct calibration methods are called.""" for ch in self.channels: if is_vis_channel(ch): - calibs = {'radiance': '_viscounts2radiance', - 'reflectance': '_calibrate_vis'} + calibs = {"radiance": "_viscounts2radiance", + "reflectance": "_calibrate_vis"} else: - calibs = {'radiance': '_ircounts2radiance', - 'brightness_temperature': '_calibrate_ir'} + calibs = {"radiance": "_ircounts2radiance", + "brightness_temperature": "_calibrate_ir"} for calib, method in calibs.items(): with mock.patch.object(self.reader, method) as target_func: - self.reader.calibrate(counts=self.reader.nc['data'], + self.reader.calibrate(counts=self.reader.nc["data"], calibration=calib, channel=ch) target_func.assert_called() @@ -515,13 +515,13 @@ def test_get_sector(self): shapes.update(shapes_vis) for (nlines, ncols), sector_ref in shapes.items(): if (nlines, ncols) in shapes_vis: - channel = '00_7' + channel = "00_7" else: - channel = '10_7' + channel = "10_7" sector = self.reader._get_sector(channel=channel, nlines=nlines, ncols=ncols) self.assertEqual(sector, sector_ref, - msg='Incorrect sector identification') + msg="Incorrect sector identification") class TestChannelIdentification: diff --git a/satpy/tests/reader_tests/test_gpm_imerg.py b/satpy/tests/reader_tests/test_gpm_imerg.py index a91f6d300f..8fc8bb855b 100644 --- a/satpy/tests/reader_tests/test_gpm_imerg.py +++ b/satpy/tests/reader_tests/test_gpm_imerg.py @@ -41,31 +41,31 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): def _get_geo_data(self, num_rows, num_cols): geo = { - 'Grid/lon': + "Grid/lon": xr.DataArray(DEFAULT_LON_DATA, - attrs={'units': 'degrees_east', }, - dims=('lon')), - 'Grid/lat': + attrs={"units": "degrees_east", }, + dims=("lon")), + "Grid/lat": xr.DataArray(DEFAULT_LAT_DATA, - attrs={'units': 'degrees_north', }, - dims=('lat')), + attrs={"units": "degrees_north", }, + dims=("lat")), } return geo def _get_precip_data(self, num_rows, 
num_cols): selection = { - 'Grid/IRprecipitation': + "Grid/IRprecipitation": xr.DataArray( da.ones((1, num_cols, num_rows), chunks=1024, dtype=np.float32), attrs={ - '_FillValue': -9999.9, - 'units': 'mm/hr', - 'Units': 'mm/hr', - 'badval': h5py.h5r.Reference(), - 'badvals': np.array([[h5py.h5r.Reference()]]) + "_FillValue": -9999.9, + "units": "mm/hr", + "Units": "mm/hr", + "badval": h5py.h5r.Reference(), + "badvals": np.array([[h5py.h5r.Reference()]]) }, - dims=('time', 'lon', 'lat')), + dims=("time", "lon", "lat")), } return selection @@ -93,9 +93,9 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.gpm_imerg import Hdf5IMERG - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(Hdf5IMERG, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(Hdf5IMERG, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -109,13 +109,13 @@ def test_load_data(self): # Filename to test, needed for start and end times filenames = [ - '3B-HHR.MS.MRG.3IMERG.20200131-S233000-E235959.1410.V06B.HDF5', ] + "3B-HHR.MS.MRG.3IMERG.20200131-S233000-E235959.1410.V06B.HDF5", ] # Expected projection in area def - pdict = {'proj': 'longlat', - 'datum': 'WGS84', - 'no_defs': None, - 'type': 'crs'} + pdict = {"proj": "longlat", + "datum": "WGS84", + "no_defs": None, + "type": "crs"} reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) @@ -123,19 +123,19 @@ def test_load_data(self): reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) - res = reader.load(['IRprecipitation']) + res = reader.load(["IRprecipitation"]) self.assertEqual(1, len(res)) - self.assertEqual(res['IRprecipitation'].start_time, + self.assertEqual(res["IRprecipitation"].start_time, datetime(2020, 1, 31, 23, 30, 0)) - self.assertEqual(res['IRprecipitation'].end_time, + self.assertEqual(res["IRprecipitation"].end_time, datetime(2020, 1, 31, 23, 59, 59)) - self.assertEqual(res['IRprecipitation'].resolution, + self.assertEqual(res["IRprecipitation"].resolution, 0.1) - self.assertEqual(res['IRprecipitation'].area.width, + self.assertEqual(res["IRprecipitation"].area.width, 3600) - self.assertEqual(res['IRprecipitation'].area.height, + self.assertEqual(res["IRprecipitation"].area.height, 1800) - self.assertEqual(res['IRprecipitation'].area.proj_dict, + self.assertEqual(res["IRprecipitation"].area.proj_dict, pdict) - np.testing.assert_almost_equal(res['IRprecipitation'].area.area_extent, + np.testing.assert_almost_equal(res["IRprecipitation"].area.area_extent, (-179.95, -89.95, 179.95, 89.95), 5) diff --git a/satpy/tests/reader_tests/test_grib.py b/satpy/tests/reader_tests/test_grib.py index b349e91169..dce6b3f557 100644 --- a/satpy/tests/reader_tests/test_grib.py +++ b/satpy/tests/reader_tests/test_grib.py @@ -28,14 +28,14 @@ from satpy.dataset import DataQuery # Parameterized cases -TEST_ARGS = ('proj_params', 'lon_corners', 'lat_corners') +TEST_ARGS = ("proj_params", "lon_corners", "lat_corners") TEST_PARAMS = ( (None, None, None), # cyl default case ( { - 'a': 6371229, 'b': 6371229, 'proj': 'lcc', - 'lon_0': 265.0, 'lat_0': 25.0, - 'lat_1': 25.0, 'lat_2': 25.0 + "a": 6371229, "b": 
6371229, "proj": "lcc", + "lon_0": 265.0, "lat_0": 25.0, + "lat_1": 25.0, "lat_2": 25.0 }, [-133.459, -65.12555139, -152.8786225, -49.41598659], [12.19, 14.34208538, 54.56534318, 57.32843565] @@ -78,7 +78,7 @@ def __init__(self, values, proj_params=None, latlons=None, **attrs): self.attrs = attrs self.values = values if proj_params is None: - proj_params = {'a': 6371229, 'b': 6371229, 'proj': 'cyl'} + proj_params = {"a": 6371229, "b": 6371229, "proj": "cyl"} self.projparams = proj_params self._latlons = latlons @@ -111,12 +111,12 @@ def __init__(self, messages=None, proj_params=None, latlons=None): self._messages = [ FakeMessage( values=fake_gribdata(), - name='TEST', - shortName='t', + name="TEST", + shortName="t", level=100, - pressureUnits='hPa', - cfName='air_temperature', - units='K', + pressureUnits="hPa", + cfName="air_temperature", + units="K", dataDate=20180504, dataTime=1200, validityDate=20180504, @@ -124,22 +124,22 @@ def __init__(self, messages=None, proj_params=None, latlons=None): distinctLongitudes=np.arange(5.), distinctLatitudes=np.arange(5.), missingValue=9999, - modelName='notknown', + modelName="notknown", minimum=100., maximum=200., - typeOfLevel='isobaricInhPa', + typeOfLevel="isobaricInhPa", jScansPositively=0, proj_params=proj_params, latlons=latlons, ), FakeMessage( values=fake_gribdata(), - name='TEST', - shortName='t', + name="TEST", + shortName="t", level=200, - pressureUnits='hPa', - cfName='air_temperature', - units='K', + pressureUnits="hPa", + cfName="air_temperature", + units="K", dataDate=20180504, dataTime=1200, validityDate=20180504, @@ -147,22 +147,22 @@ def __init__(self, messages=None, proj_params=None, latlons=None): distinctLongitudes=np.arange(5.), distinctLatitudes=np.arange(5.), missingValue=9999, - modelName='notknown', + modelName="notknown", minimum=100., maximum=200., - typeOfLevel='isobaricInhPa', + typeOfLevel="isobaricInhPa", jScansPositively=1, proj_params=proj_params, latlons=latlons, ), FakeMessage( values=fake_gribdata(), - name='TEST', - shortName='t', + name="TEST", + shortName="t", level=300, - pressureUnits='hPa', - cfName='air_temperature', - units='K', + pressureUnits="hPa", + cfName="air_temperature", + units="K", dataDate=20180504, dataTime=1200, validityDate=20180504, @@ -172,7 +172,7 @@ def __init__(self, messages=None, proj_params=None, latlons=None): missingValue=9999, minimum=100., maximum=200., - typeOfLevel='isobaricInhPa', + typeOfLevel="isobaricInhPa", jScansPositively=0, proj_params=proj_params, latlons=latlons, @@ -208,29 +208,29 @@ class TestGRIBReader: def setup_method(self): """Wrap pygrib to read fake data.""" from satpy._config import config_search_paths - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) try: import pygrib except ImportError: pygrib = None self.orig_pygrib = pygrib - sys.modules['pygrib'] = mock.MagicMock() + sys.modules["pygrib"] = mock.MagicMock() def teardown_method(self): """Re-enable pygrib import.""" - sys.modules['pygrib'] = self.orig_pygrib + sys.modules["pygrib"] = self.orig_pygrib def _get_test_datasets(self, dataids, fake_pygrib=None): from satpy.readers import load_reader if fake_pygrib is None: fake_pygrib = FakeGRIB() - with mock.patch('satpy.readers.grib.pygrib') as pg: + with mock.patch("satpy.readers.grib.pygrib") as pg: pg.open.return_value = fake_pygrib r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 
'gfs.t18z.sfluxgrbf106.grib2', + "gfs.t18z.sfluxgrbf106.grib2", ]) r.create_filehandlers(loadables) datasets = r.load(dataids) @@ -262,11 +262,11 @@ def _get_fake_pygrib(proj_params, lon_corners, lat_corners): def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.grib.pygrib') as pg: + with mock.patch("satpy.readers.grib.pygrib") as pg: pg.open.return_value = FakeGRIB() r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'gfs.t18z.sfluxgrbf106.grib2', + "gfs.t18z.sfluxgrbf106.grib2", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) @@ -293,25 +293,25 @@ def test_load_all(self, proj_params, lon_corners, lat_corners): """Test loading all test datasets.""" fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) dataids = [ - DataQuery(name='t', level=100, modifiers=tuple()), - DataQuery(name='t', level=200, modifiers=tuple()), - DataQuery(name='t', level=300, modifiers=tuple()) + DataQuery(name="t", level=100, modifiers=tuple()), + DataQuery(name="t", level=200, modifiers=tuple()), + DataQuery(name="t", level=300, modifiers=tuple()) ] datasets = self._get_test_datasets(dataids, fake_pygrib) assert len(datasets) == 3 for v in datasets.values(): - assert v.attrs['units'] == 'K' + assert v.attrs["units"] == "K" assert isinstance(v, xr.DataArray) @pytest.mark.parametrize(TEST_ARGS, TEST_PARAMS) def test_area_def_crs(self, proj_params, lon_corners, lat_corners): """Check that the projection is accurate.""" fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) - dataids = [DataQuery(name='t', level=100, modifiers=tuple())] + dataids = [DataQuery(name="t", level=100, modifiers=tuple())] datasets = self._get_test_datasets(dataids, fake_pygrib) - area = datasets['t'].attrs['area'] - if not hasattr(area, 'crs'): + area = datasets["t"].attrs["area"] + if not hasattr(area, "crs"): pytest.skip("Can't test with pyproj < 2.0") _round_trip_projection_lonlat_check(area) @@ -321,12 +321,12 @@ def test_missing_attributes(self, proj_params, lon_corners, lat_corners): fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) # This has modelName - query_contains = DataQuery(name='t', level=100, modifiers=tuple()) + query_contains = DataQuery(name="t", level=100, modifiers=tuple()) # This does not have modelName - query_not_contains = DataQuery(name='t', level=300, modifiers=tuple()) + query_not_contains = DataQuery(name="t", level=300, modifiers=tuple()) dataset = self._get_test_datasets([query_contains, query_not_contains], fake_pygrib) - assert dataset[query_contains].attrs['modelName'] == 'notknown' - assert dataset[query_not_contains].attrs['modelName'] == 'unknown' + assert dataset[query_contains].attrs["modelName"] == "notknown" + assert dataset[query_not_contains].attrs["modelName"] == "unknown" @pytest.mark.parametrize(TEST_ARGS, TEST_PARAMS) def test_jscanspositively(self, proj_params, lon_corners, lat_corners): @@ -334,9 +334,9 @@ def test_jscanspositively(self, proj_params, lon_corners, lat_corners): fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) # This has no jScansPositively - query_not_contains = DataQuery(name='t', level=100, modifiers=tuple()) + query_not_contains = DataQuery(name="t", level=100, modifiers=tuple()) # This contains jScansPositively - query_contains = DataQuery(name='t', level=200, modifiers=tuple()) + query_contains = DataQuery(name="t", level=200, modifiers=tuple()) 
dataset = self._get_test_datasets([query_contains, query_not_contains], fake_pygrib) np.testing.assert_allclose(fake_gribdata(), dataset[query_not_contains].values) diff --git a/satpy/tests/reader_tests/test_hdf4_utils.py b/satpy/tests/reader_tests/test_hdf4_utils.py index 9a0773c2c1..1328fd3a29 100644 --- a/satpy/tests/reader_tests/test_hdf4_utils.py +++ b/satpy/tests/reader_tests/test_hdf4_utils.py @@ -67,20 +67,20 @@ class TestHDF4FileHandler(unittest.TestCase): def setUp(self): """Create a test HDF4 file.""" from pyhdf.SD import SD, SDC - h = SD('test.hdf', SDC.WRITE | SDC.CREATE | SDC.TRUNC) + h = SD("test.hdf", SDC.WRITE | SDC.CREATE | SDC.TRUNC) data = np.arange(10. * 100, dtype=np.float32).reshape((10, 100)) - v1 = h.create('ds1_f', SDC.FLOAT32, (10, 100)) + v1 = h.create("ds1_f", SDC.FLOAT32, (10, 100)) v1[:] = data - v2 = h.create('ds1_i', SDC.INT16, (10, 100)) + v2 = h.create("ds1_i", SDC.INT16, (10, 100)) v2[:] = data.astype(np.int16) # Add attributes - h.test_attr_str = 'test_string' + h.test_attr_str = "test_string" h.test_attr_int = 0 h.test_attr_float = 1.2 # h.test_attr_str_arr = np.array(b"test_string2") for d in [v1, v2]: - d.test_attr_str = 'test_string' + d.test_attr_str = "test_string" d.test_attr_int = 0 d.test_attr_float = 1.2 @@ -88,34 +88,34 @@ def setUp(self): def tearDown(self): """Remove the previously created test file.""" - os.remove('test.hdf') + os.remove("test.hdf") def test_all_basic(self): """Test everything about the HDF4 class.""" from satpy.readers.hdf4_utils import HDF4FileHandler - file_handler = HDF4FileHandler('test.hdf', {}, {}) + file_handler = HDF4FileHandler("test.hdf", {}, {}) - for ds in ('ds1_f', 'ds1_i'): - self.assertEqual(file_handler[ds + '/dtype'], np.float32 if ds.endswith('f') else np.int16) - self.assertTupleEqual(file_handler[ds + '/shape'], (10, 100)) + for ds in ("ds1_f", "ds1_i"): + self.assertEqual(file_handler[ds + "/dtype"], np.float32 if ds.endswith("f") else np.int16) + self.assertTupleEqual(file_handler[ds + "/shape"], (10, 100)) # make sure that the dtype is an instance, not the class - self.assertEqual(file_handler[ds].dtype.itemsize, 4 if ds.endswith('f') else 2) + self.assertEqual(file_handler[ds].dtype.itemsize, 4 if ds.endswith("f") else 2) attrs = file_handler[ds].attrs - self.assertEqual(attrs.get('test_attr_str'), 'test_string') - self.assertEqual(attrs.get('test_attr_int'), 0) - self.assertEqual(attrs.get('test_attr_float'), 1.2) + self.assertEqual(attrs.get("test_attr_str"), "test_string") + self.assertEqual(attrs.get("test_attr_int"), 0) + self.assertEqual(attrs.get("test_attr_float"), 1.2) - self.assertIsInstance(file_handler['/attr/test_attr_str'], str) - self.assertEqual(file_handler['/attr/test_attr_str'], 'test_string') + self.assertIsInstance(file_handler["/attr/test_attr_str"], str) + self.assertEqual(file_handler["/attr/test_attr_str"], "test_string") # self.assertEqual(file_handler['/attr/test_attr_str_arr'], 'test_string2') - self.assertIsInstance(file_handler['/attr/test_attr_int'], int) - self.assertEqual(file_handler['/attr/test_attr_int'], 0) - self.assertIsInstance(file_handler['/attr/test_attr_float'], float) - self.assertEqual(file_handler['/attr/test_attr_float'], 1.2) + self.assertIsInstance(file_handler["/attr/test_attr_int"], int) + self.assertEqual(file_handler["/attr/test_attr_int"], 0) + self.assertIsInstance(file_handler["/attr/test_attr_float"], float) + self.assertEqual(file_handler["/attr/test_attr_float"], 1.2) - self.assertIsInstance(file_handler.get('ds1_f'), xr.DataArray) - 
self.assertIsNone(file_handler.get('fake_ds')) - self.assertEqual(file_handler.get('fake_ds', 'test'), 'test') + self.assertIsInstance(file_handler.get("ds1_f"), xr.DataArray) + self.assertIsNone(file_handler.get("fake_ds")) + self.assertEqual(file_handler.get("fake_ds", "test"), "test") - self.assertTrue('ds1_f' in file_handler) - self.assertFalse('fake_ds' in file_handler) + self.assertTrue("ds1_f" in file_handler) + self.assertFalse("fake_ds" in file_handler) diff --git a/satpy/tests/reader_tests/test_hdf5_utils.py b/satpy/tests/reader_tests/test_hdf5_utils.py index 2c5fd2d19a..4e700a57f6 100644 --- a/satpy/tests/reader_tests/test_hdf5_utils.py +++ b/satpy/tests/reader_tests/test_hdf5_utils.py @@ -67,86 +67,86 @@ class TestHDF5FileHandler(unittest.TestCase): def setUp(self): """Create a test HDF5 file.""" import h5py - h = h5py.File('test.h5', 'w') + h = h5py.File("test.h5", "w") # Create Group - g1 = h.create_group('test_group') + g1 = h.create_group("test_group") # Add datasets - ds1_f = g1.create_dataset('ds1_f', + ds1_f = g1.create_dataset("ds1_f", shape=(10, 100), dtype=np.float32, data=np.arange(10. * 100).reshape((10, 100))) - ds1_i = g1.create_dataset('ds1_i', + ds1_i = g1.create_dataset("ds1_i", shape=(10, 100), dtype=np.int32, data=np.arange(10 * 100).reshape((10, 100))) - ds2_f = h.create_dataset('ds2_f', + ds2_f = h.create_dataset("ds2_f", shape=(10, 100), dtype=np.float32, data=np.arange(10. * 100).reshape((10, 100))) - ds2_i = h.create_dataset('ds2_i', + ds2_i = h.create_dataset("ds2_i", shape=(10, 100), dtype=np.int32, data=np.arange(10 * 100).reshape((10, 100))) # Add attributes # shows up as a scalar array of bytes (shape=(), size=1) - h.attrs['test_attr_str'] = 'test_string' - h.attrs['test_attr_byte'] = b'test_byte' - h.attrs['test_attr_int'] = 0 - h.attrs['test_attr_float'] = 1.2 + h.attrs["test_attr_str"] = "test_string" + h.attrs["test_attr_byte"] = b"test_byte" + h.attrs["test_attr_int"] = 0 + h.attrs["test_attr_float"] = 1.2 # shows up as a numpy bytes object - h.attrs['test_attr_str_arr'] = np.array(b"test_string2") - g1.attrs['test_attr_str'] = 'test_string' - g1.attrs['test_attr_byte'] = b'test_byte' - g1.attrs['test_attr_int'] = 0 - g1.attrs['test_attr_float'] = 1.2 + h.attrs["test_attr_str_arr"] = np.array(b"test_string2") + g1.attrs["test_attr_str"] = "test_string" + g1.attrs["test_attr_byte"] = b"test_byte" + g1.attrs["test_attr_int"] = 0 + g1.attrs["test_attr_float"] = 1.2 for d in [ds1_f, ds1_i, ds2_f, ds2_i]: - d.attrs['test_attr_str'] = 'test_string' - d.attrs['test_attr_byte'] = b'test_byte' - d.attrs['test_attr_int'] = 0 - d.attrs['test_attr_float'] = 1.2 - d.attrs['test_ref'] = d.ref + d.attrs["test_attr_str"] = "test_string" + d.attrs["test_attr_byte"] = b"test_byte" + d.attrs["test_attr_int"] = 0 + d.attrs["test_attr_float"] = 1.2 + d.attrs["test_ref"] = d.ref self.var_attrs = list(d.attrs.keys()) h.close() def tearDown(self): """Remove the previously created test file.""" - os.remove('test.h5') + os.remove("test.h5") def test_all_basic(self): """Test everything about the HDF5 class.""" import xarray as xr from satpy.readers.hdf5_utils import HDF5FileHandler - file_handler = HDF5FileHandler('test.h5', {}, {}) + file_handler = HDF5FileHandler("test.h5", {}, {}) - for ds_name in ('test_group/ds1_f', 'test_group/ds1_i', 'ds2_f', 'ds2_i'): + for ds_name in ("test_group/ds1_f", "test_group/ds1_i", "ds2_f", "ds2_i"): ds = file_handler[ds_name] attrs = ds.attrs - self.assertEqual(ds.dtype, np.float32 if ds_name.endswith('f') else np.int32) - 
self.assertTupleEqual(file_handler[ds_name + '/shape'], (10, 100)) - self.assertEqual(attrs['test_attr_str'], 'test_string') - self.assertEqual(attrs['test_attr_byte'], 'test_byte') - self.assertEqual(attrs['test_attr_int'], 0) - self.assertEqual(attrs['test_attr_float'], 1.2) - self.assertEqual(file_handler[ds_name + '/attr/test_attr_str'], 'test_string') - self.assertEqual(file_handler[ds_name + '/attr/test_attr_byte'], 'test_byte') - self.assertEqual(file_handler[ds_name + '/attr/test_attr_int'], 0) - self.assertEqual(file_handler[ds_name + '/attr/test_attr_float'], 1.2) - - self.assertEqual(file_handler['/attr/test_attr_str'], 'test_string') - self.assertEqual(file_handler['/attr/test_attr_byte'], 'test_byte') - self.assertEqual(file_handler['/attr/test_attr_str_arr'], 'test_string2') - self.assertEqual(file_handler['/attr/test_attr_int'], 0) - self.assertEqual(file_handler['/attr/test_attr_float'], 1.2) - - self.assertIsInstance(file_handler.get('ds2_f'), xr.DataArray) - self.assertIsNone(file_handler.get('fake_ds')) - self.assertEqual(file_handler.get('fake_ds', 'test'), 'test') - - self.assertTrue('ds2_f' in file_handler) - self.assertFalse('fake_ds' in file_handler) - - self.assertIsInstance(file_handler['ds2_f/attr/test_ref'], np.ndarray) + self.assertEqual(ds.dtype, np.float32 if ds_name.endswith("f") else np.int32) + self.assertTupleEqual(file_handler[ds_name + "/shape"], (10, 100)) + self.assertEqual(attrs["test_attr_str"], "test_string") + self.assertEqual(attrs["test_attr_byte"], "test_byte") + self.assertEqual(attrs["test_attr_int"], 0) + self.assertEqual(attrs["test_attr_float"], 1.2) + self.assertEqual(file_handler[ds_name + "/attr/test_attr_str"], "test_string") + self.assertEqual(file_handler[ds_name + "/attr/test_attr_byte"], "test_byte") + self.assertEqual(file_handler[ds_name + "/attr/test_attr_int"], 0) + self.assertEqual(file_handler[ds_name + "/attr/test_attr_float"], 1.2) + + self.assertEqual(file_handler["/attr/test_attr_str"], "test_string") + self.assertEqual(file_handler["/attr/test_attr_byte"], "test_byte") + self.assertEqual(file_handler["/attr/test_attr_str_arr"], "test_string2") + self.assertEqual(file_handler["/attr/test_attr_int"], 0) + self.assertEqual(file_handler["/attr/test_attr_float"], 1.2) + + self.assertIsInstance(file_handler.get("ds2_f"), xr.DataArray) + self.assertIsNone(file_handler.get("fake_ds")) + self.assertEqual(file_handler.get("fake_ds", "test"), "test") + + self.assertTrue("ds2_f" in file_handler) + self.assertFalse("fake_ds" in file_handler) + + self.assertIsInstance(file_handler["ds2_f/attr/test_ref"], np.ndarray) diff --git a/satpy/tests/reader_tests/test_hdfeos_base.py b/satpy/tests/reader_tests/test_hdfeos_base.py index 68b8928f2e..9ca6ee5fdd 100644 --- a/satpy/tests/reader_tests/test_hdfeos_base.py +++ b/satpy/tests/reader_tests/test_hdfeos_base.py @@ -19,7 +19,7 @@ import unittest -nrt_mda = '''GROUP = INVENTORYMETADATA +nrt_mda = """GROUP = INVENTORYMETADATA GROUPTYPE = MASTERGROUP GROUP = ECSDATAGRANULE @@ -439,175 +439,175 @@ END_GROUP = INVENTORYMETADATA -END''' # noqa: E501 +END""" # noqa: E501 nrt_mda_dict = { - 'INVENTORYMETADATA': { - 'ADDITIONALATTRIBUTES': { - 'ADDITIONALATTRIBUTESCONTAINER': { - 'ADDITIONALATTRIBUTENAME': { - 'VALUE': 'identifier_product_doi_authority' + "INVENTORYMETADATA": { + "ADDITIONALATTRIBUTES": { + "ADDITIONALATTRIBUTESCONTAINER": { + "ADDITIONALATTRIBUTENAME": { + "VALUE": "identifier_product_doi_authority" }, - 'INFORMATIONCONTENT': { - 'PARAMETERVALUE': { - 'VALUE': 'http://dx.doi.org' + 
"INFORMATIONCONTENT": { + "PARAMETERVALUE": { + "VALUE": "http://dx.doi.org" } } } }, - 'ASSOCIATEDPLATFORMINSTRUMENTSENSOR': { - 'ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER': { - 'ASSOCIATEDINSTRUMENTSHORTNAME': { - 'VALUE': 'MODIS' + "ASSOCIATEDPLATFORMINSTRUMENTSENSOR": { + "ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER": { + "ASSOCIATEDINSTRUMENTSHORTNAME": { + "VALUE": "MODIS" }, - 'ASSOCIATEDPLATFORMSHORTNAME': { - 'VALUE': 'Aqua' + "ASSOCIATEDPLATFORMSHORTNAME": { + "VALUE": "Aqua" }, - 'ASSOCIATEDSENSORSHORTNAME': { - 'VALUE': 'MODIS' + "ASSOCIATEDSENSORSHORTNAME": { + "VALUE": "MODIS" } } }, - 'COLLECTIONDESCRIPTIONCLASS': { - 'SHORTNAME': { - 'VALUE': 'MYD03' + "COLLECTIONDESCRIPTIONCLASS": { + "SHORTNAME": { + "VALUE": "MYD03" }, - 'VERSIONID': { - 'VALUE': 61 + "VERSIONID": { + "VALUE": 61 } }, - 'ECSDATAGRANULE': { - 'DAYNIGHTFLAG': { - 'VALUE': 'Day' + "ECSDATAGRANULE": { + "DAYNIGHTFLAG": { + "VALUE": "Day" }, - 'LOCALGRANULEID': { - 'VALUE': 'MYD03.A2019051.1225.061.2019051131153.NRT.hdf' + "LOCALGRANULEID": { + "VALUE": "MYD03.A2019051.1225.061.2019051131153.NRT.hdf" }, - 'LOCALVERSIONID': { - 'VALUE': '6.0.4' + "LOCALVERSIONID": { + "VALUE": "6.0.4" }, - 'PRODUCTIONDATETIME': { - 'VALUE': '2019-02-20T13:11:53.000Z' + "PRODUCTIONDATETIME": { + "VALUE": "2019-02-20T13:11:53.000Z" }, - 'REPROCESSINGACTUAL': { - 'VALUE': 'Near ' - 'Real ' - 'Time' + "REPROCESSINGACTUAL": { + "VALUE": "Near " + "Real " + "Time" }, - 'REPROCESSINGPLANNED': { - 'VALUE': 'further ' - 'update ' - 'is ' - 'anticipated' + "REPROCESSINGPLANNED": { + "VALUE": "further " + "update " + "is " + "anticipated" } }, - 'GROUPTYPE': 'MASTERGROUP', - 'INPUTGRANULE': { - 'INPUTPOINTER': { - 'VALUE': - ('MYD01.61.2019-051T12:25:00.000000Z.NA.29878844.500100_1.hdf', - 'MYD03LUT.coeff_V6.1.4', - 'PM1EPHND_NRT.A2019051.1220.061.2019051125628', - 'PM1EPHND_NRT.A2019051.1225.061.2019051125628', - 'PM1EPHND_NRT.A2019051.1230.061.2019051125628', ' ' - 'PM1ATTNR_NRT.A2019051.1220.061.2019051125628', - 'PM1ATTNR_NRT.A2019051.1225.061.2019051125628', - 'PM1ATTNR_NRT.A2019051.1230.061.2019051125628') + "GROUPTYPE": "MASTERGROUP", + "INPUTGRANULE": { + "INPUTPOINTER": { + "VALUE": + ("MYD01.61.2019-051T12:25:00.000000Z.NA.29878844.500100_1.hdf", + "MYD03LUT.coeff_V6.1.4", + "PM1EPHND_NRT.A2019051.1220.061.2019051125628", + "PM1EPHND_NRT.A2019051.1225.061.2019051125628", + "PM1EPHND_NRT.A2019051.1230.061.2019051125628", " " + "PM1ATTNR_NRT.A2019051.1220.061.2019051125628", + "PM1ATTNR_NRT.A2019051.1225.061.2019051125628", + "PM1ATTNR_NRT.A2019051.1230.061.2019051125628") } }, - 'MEASUREDPARAMETER': { - 'MEASUREDPARAMETERCONTAINER': { - 'PARAMETERNAME': { - 'VALUE': 'Geolocation' + "MEASUREDPARAMETER": { + "MEASUREDPARAMETERCONTAINER": { + "PARAMETERNAME": { + "VALUE": "Geolocation" }, - 'QAFLAGS': { - 'AUTOMATICQUALITYFLAG': { - 'VALUE': 'Passed' + "QAFLAGS": { + "AUTOMATICQUALITYFLAG": { + "VALUE": "Passed" }, - 'AUTOMATICQUALITYFLAGEXPLANATION': { - 'VALUE': - 'Set ' - 'to ' + "AUTOMATICQUALITYFLAGEXPLANATION": { + "VALUE": + "Set " + "to " "'Failed' " - 'if ' - 'processing ' - 'error ' - 'occurred, ' - 'set ' - 'to ' + "if " + "processing " + "error " + "occurred, " + "set " + "to " "'Passed' " - 'otherwise' + "otherwise" }, - 'SCIENCEQUALITYFLAG': { - 'VALUE': 'Not ' - 'Investigated' + "SCIENCEQUALITYFLAG": { + "VALUE": "Not " + "Investigated" } }, - 'QASTATS': { - 'QAPERCENTMISSINGDATA': { - 'VALUE': 0 + "QASTATS": { + "QAPERCENTMISSINGDATA": { + "VALUE": 0 }, - 'QAPERCENTOUTOFBOUNDSDATA': { - 'VALUE': 0 + 
"QAPERCENTOUTOFBOUNDSDATA": { + "VALUE": 0 } } } }, - 'ORBITCALCULATEDSPATIALDOMAIN': { - 'ORBITCALCULATEDSPATIALDOMAINCONTAINER': { - 'EQUATORCROSSINGDATE': { - 'VALUE': '2019-02-20' + "ORBITCALCULATEDSPATIALDOMAIN": { + "ORBITCALCULATEDSPATIALDOMAINCONTAINER": { + "EQUATORCROSSINGDATE": { + "VALUE": "2019-02-20" }, - 'EQUATORCROSSINGLONGITUDE': { - 'VALUE': -151.260740805733 + "EQUATORCROSSINGLONGITUDE": { + "VALUE": -151.260740805733 }, - 'EQUATORCROSSINGTIME': { - 'VALUE': '12:49:52.965727' + "EQUATORCROSSINGTIME": { + "VALUE": "12:49:52.965727" }, - 'ORBITNUMBER': { - 'VALUE': 89393 + "ORBITNUMBER": { + "VALUE": 89393 } } }, - 'PGEVERSIONCLASS': { - 'PGEVERSION': { - 'VALUE': '6.1.4' + "PGEVERSIONCLASS": { + "PGEVERSION": { + "VALUE": "6.1.4" } }, - 'RANGEDATETIME': { - 'RANGEBEGINNINGDATE': { - 'VALUE': '2019-02-20' + "RANGEDATETIME": { + "RANGEBEGINNINGDATE": { + "VALUE": "2019-02-20" }, - 'RANGEBEGINNINGTIME': { - 'VALUE': '12:25:00.000000' + "RANGEBEGINNINGTIME": { + "VALUE": "12:25:00.000000" }, - 'RANGEENDINGDATE': { - 'VALUE': '2019-02-20' + "RANGEENDINGDATE": { + "VALUE": "2019-02-20" }, - 'RANGEENDINGTIME': { - 'VALUE': '12:30:00.000000' + "RANGEENDINGTIME": { + "VALUE": "12:30:00.000000" } }, - 'SPATIALDOMAINCONTAINER': { - 'HORIZONTALSPATIALDOMAINCONTAINER': { - 'GPOLYGON': { - 'GPOLYGONCONTAINER': { - 'GRING': { - 'EXCLUSIONGRINGFLAG': { - 'VALUE': 'N' + "SPATIALDOMAINCONTAINER": { + "HORIZONTALSPATIALDOMAINCONTAINER": { + "GPOLYGON": { + "GPOLYGONCONTAINER": { + "GRING": { + "EXCLUSIONGRINGFLAG": { + "VALUE": "N" } }, - 'GRINGPOINT': { - 'GRINGPOINTLATITUDE': { - 'VALUE': (29.5170117594673, 26.1480434828114, + "GRINGPOINT": { + "GRINGPOINTLATITUDE": { + "VALUE": (29.5170117594673, 26.1480434828114, 43.2445462598877, 47.7959787025408) }, - 'GRINGPOINTLONGITUDE': { - 'VALUE': (25.3839329817764, 1.80418778807854, + "GRINGPOINTLONGITUDE": { + "VALUE": (25.3839329817764, 1.80418778807854, -6.50842421663422, 23.0260060198343) }, - 'GRINGPOINTSEQUENCENO': { - 'VALUE': (1, 2, 3, 4) + "GRINGPOINTSEQUENCENO": { + "VALUE": (1, 2, 3, 4) } } } diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index f8ad241532..4449c1a1a9 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -41,23 +41,23 @@ class TestHRITDecompress(unittest.TestCase): def test_xrit_cmd(self): """Test running the xrit decompress command.""" - old_env = os.environ.get('XRIT_DECOMPRESS_PATH', None) + old_env = os.environ.get("XRIT_DECOMPRESS_PATH", None) - os.environ['XRIT_DECOMPRESS_PATH'] = '/path/to/my/bin' + os.environ["XRIT_DECOMPRESS_PATH"] = "/path/to/my/bin" self.assertRaises(IOError, get_xritdecompress_cmd) - os.environ['XRIT_DECOMPRESS_PATH'] = gettempdir() + os.environ["XRIT_DECOMPRESS_PATH"] = gettempdir() self.assertRaises(IOError, get_xritdecompress_cmd) with NamedTemporaryFile() as fd: - os.environ['XRIT_DECOMPRESS_PATH'] = fd.name + os.environ["XRIT_DECOMPRESS_PATH"] = fd.name fname = fd.name res = get_xritdecompress_cmd() if old_env is not None: - os.environ['XRIT_DECOMPRESS_PATH'] = old_env + os.environ["XRIT_DECOMPRESS_PATH"] = old_env else: - os.environ.pop('XRIT_DECOMPRESS_PATH') + os.environ.pop("XRIT_DECOMPRESS_PATH") self.assertEqual(fname, res) @@ -65,53 +65,53 @@ def test_xrit_outfile(self): """Test the right decompression filename is used.""" stdout = [b"Decompressed file: bla.__\n"] outfile = get_xritdecompress_outfile(stdout) - self.assertEqual(outfile, b'bla.__') + 
self.assertEqual(outfile, b"bla.__") - @mock.patch('satpy.readers.hrit_base.Popen') + @mock.patch("satpy.readers.hrit_base.Popen") def test_decompress(self, popen): """Test decompression works.""" popen.return_value.returncode = 0 popen.return_value.communicate.return_value = [b"Decompressed file: bla.__\n"] - old_env = os.environ.get('XRIT_DECOMPRESS_PATH', None) + old_env = os.environ.get("XRIT_DECOMPRESS_PATH", None) with NamedTemporaryFile() as fd: - os.environ['XRIT_DECOMPRESS_PATH'] = fd.name - res = decompress('bla.C_') + os.environ["XRIT_DECOMPRESS_PATH"] = fd.name + res = decompress("bla.C_") if old_env is not None: - os.environ['XRIT_DECOMPRESS_PATH'] = old_env + os.environ["XRIT_DECOMPRESS_PATH"] = old_env else: - os.environ.pop('XRIT_DECOMPRESS_PATH') + os.environ.pop("XRIT_DECOMPRESS_PATH") - self.assertEqual(res, os.path.join('.', 'bla.__')) + self.assertEqual(res, os.path.join(".", "bla.__")) # From a compressed msg hrit file. # uncompressed data field length 17223680 # compressed data field length 1578312 -mda = {'file_type': 0, 'total_header_length': 6198, 'data_field_length': 17223680, 'number_of_bits_per_pixel': 10, - 'number_of_columns': 3712, 'number_of_lines': 464, 'compression_flag_for_data': 0, - 'projection_name': b'GEOS(+000.0) ', - 'cfac': -13642337, 'lfac': -13642337, 'coff': 1856, 'loff': 1856, - 'annotation_header': b'H-000-MSG4__-MSG4________-VIS006___-000001___-202208180730-C_', - 'cds_p_field': 64, 'timestamp': (23605, 27911151), 'GP_SC_ID': 324, - 'spectral_channel_id': 1, - 'segment_sequence_number': 1, 'planned_start_segment_number': 1, 'planned_end_segment_number': 8, - 'data_field_representation': 3, - 'image_segment_line_quality': np.array([(1, (0, 0), 1, 1, 0)] * 464, - dtype=[('line_number_in_grid', '>i4'), - ('line_mean_acquisition', [('days', '>u2'), - ('milliseconds', '>u4')]), - ('line_validity', 'u1'), - ('line_radiometric_quality', 'u1'), - ('line_geometric_quality', 'u1')]), - 'projection_parameters': {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, 'SSP_longitude': 0.0}, - 'orbital_parameters': {}} +mda = {"file_type": 0, "total_header_length": 6198, "data_field_length": 17223680, "number_of_bits_per_pixel": 10, + "number_of_columns": 3712, "number_of_lines": 464, "compression_flag_for_data": 0, + "projection_name": b"GEOS(+000.0) ", + "cfac": -13642337, "lfac": -13642337, "coff": 1856, "loff": 1856, + "annotation_header": b"H-000-MSG4__-MSG4________-VIS006___-000001___-202208180730-C_", + "cds_p_field": 64, "timestamp": (23605, 27911151), "GP_SC_ID": 324, + "spectral_channel_id": 1, + "segment_sequence_number": 1, "planned_start_segment_number": 1, "planned_end_segment_number": 8, + "data_field_representation": 3, + "image_segment_line_quality": np.array([(1, (0, 0), 1, 1, 0)] * 464, + dtype=[("line_number_in_grid", ">i4"), + ("line_mean_acquisition", [("days", ">u2"), + ("milliseconds", ">u4")]), + ("line_validity", "u1"), + ("line_radiometric_quality", "u1"), + ("line_geometric_quality", "u1")]), + "projection_parameters": {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, "SSP_longitude": 0.0}, + "orbital_parameters": {}} mda_compressed = mda.copy() mda_compressed["data_field_length"] = 1578312 -mda_compressed['compression_flag_for_data'] = 1 +mda_compressed["compression_flag_for_data"] = 1 def new_get_hd(instance, hdr_info): @@ -125,8 +125,8 @@ def new_get_hd(instance, hdr_info): def new_get_hd_compressed(instance, hdr_info): """Generate some metadata.""" instance.mda = mda.copy() - instance.mda['compression_flag_for_data'] = 1 - 
instance.mda['data_field_length'] = 1578312 + instance.mda["compression_flag_for_data"] = 1 + instance.mda["data_field_length"] = 1578312 @pytest.fixture @@ -139,15 +139,15 @@ def stub_hrit_file(tmp_path): def create_stub_hrit(filename, open_fun=open, meta=mda): """Create a stub hrit file.""" - nbits = meta['number_of_bits_per_pixel'] - lines = meta['number_of_lines'] - cols = meta['number_of_columns'] + nbits = meta["number_of_bits_per_pixel"] + lines = meta["number_of_lines"] + cols = meta["number_of_columns"] total_bits = lines * cols * nbits arr = np.random.randint(0, 256, size=int(total_bits / 8), dtype=np.uint8) with open_fun(filename, mode="wb") as fd: - fd.write(b" " * meta['total_header_length']) + fd.write(b" " * meta["total_header_length"]) bytes_data = arr.tobytes() fd.write(bytes_data) return filename @@ -184,19 +184,19 @@ def setup_method(self, method): """Set up the hrit file handler for testing.""" del method - with mock.patch.object(HRITFileHandler, '_get_hd', new=new_get_hd): - self.reader = HRITFileHandler('filename', - {'platform_shortname': 'MSG3', - 'start_time': datetime(2016, 3, 3, 0, 0)}, - {'filetype': 'info'}, + with mock.patch.object(HRITFileHandler, "_get_hd", new=new_get_hd): + self.reader = HRITFileHandler("filename", + {"platform_shortname": "MSG3", + "start_time": datetime(2016, 3, 3, 0, 0)}, + {"filetype": "info"}, [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()]) - self.reader.mda['cfac'] = 5 - self.reader.mda['lfac'] = 5 - self.reader.mda['coff'] = 10 - self.reader.mda['loff'] = 10 - self.reader.mda['projection_parameters']['SSP_longitude'] = 44 + self.reader.mda["cfac"] = 5 + self.reader.mda["lfac"] = 5 + self.reader.mda["coff"] = 10 + self.reader.mda["loff"] = 10 + self.reader.mda["projection_parameters"]["SSP_longitude"] = 44 def test_get_xy_from_linecol(self): """Test get_xy_from_linecol.""" @@ -220,15 +220,15 @@ def test_get_area_extent(self): def test_get_area_def(self): """Test getting an area definition.""" from pyresample.utils import proj4_radius_parameters - area = self.reader.get_area_def('VIS06') + area = self.reader.get_area_def("VIS06") proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) assert a == 6378169.0 assert b == 6356583.8 - assert proj_dict['h'] == 35785831.0 - assert proj_dict['lon_0'] == 44.0 - assert proj_dict['proj'] == 'geos' - assert proj_dict['units'] == 'm' + assert proj_dict["h"] == 35785831.0 + assert proj_dict["lon_0"] == 44.0 + assert proj_dict["proj"] == "geos" + assert proj_dict["units"] == "m" assert area.area_extent == (-77771774058.38356, -77771774058.38356, 30310525626438.438, 3720765401003.719) @@ -236,7 +236,7 @@ def test_read_band_filepath(self, stub_hrit_file): """Test reading a single band from a filepath.""" self.reader.filename = stub_hrit_file - res = self.reader.read_band('VIS006', None) + res = self.reader.read_band("VIS006", None) assert res.compute().shape == (464, 3712) def test_read_band_FSFile(self, stub_hrit_file): @@ -247,14 +247,14 @@ def test_read_band_FSFile(self, stub_hrit_file): fs_file = fsspec.open(filename) self.reader.filename = FSFile(fs_file) - res = self.reader.read_band('VIS006', None) + res = self.reader.read_band("VIS006", None) assert res.compute().shape == (464, 3712) def test_read_band_bzipped2_filepath(self, stub_bzipped_hrit_file): """Test reading a single band from a bzipped file.""" self.reader.filename = stub_bzipped_hrit_file - res = self.reader.read_band('VIS006', None) + res = self.reader.read_band("VIS006", None) assert res.compute().shape == 
(464, 3712) def test_read_band_gzip_stream(self, stub_gzipped_hrit_file): @@ -265,7 +265,7 @@ def test_read_band_gzip_stream(self, stub_gzipped_hrit_file): fs_file = fsspec.open(filename, compression="gzip") self.reader.filename = FSFile(fs_file) - res = self.reader.read_band('VIS006', None) + res = self.reader.read_band("VIS006", None) assert res.compute().shape == (464, 3712) def test_start_end_time(self): @@ -276,7 +276,7 @@ def test_start_end_time(self): assert self.reader.end_time == self.reader.observation_end_time -def fake_decompress(infile, outdir='.'): +def fake_decompress(infile, outdir="."): """Fake decompression.""" filename = os.fspath(infile)[:-3] return create_stub_hrit(filename) @@ -290,15 +290,15 @@ def test_read_band_filepath(self, stub_compressed_hrit_file): filename = stub_compressed_hrit_file with mock.patch("satpy.readers.hrit_base.decompress", side_effect=fake_decompress) as mock_decompress: - with mock.patch.object(HRITFileHandler, '_get_hd', side_effect=new_get_hd, autospec=True) as get_hd: + with mock.patch.object(HRITFileHandler, "_get_hd", side_effect=new_get_hd, autospec=True) as get_hd: self.reader = HRITFileHandler(filename, - {'platform_shortname': 'MSG3', - 'start_time': datetime(2016, 3, 3, 0, 0)}, - {'filetype': 'info'}, + {"platform_shortname": "MSG3", + "start_time": datetime(2016, 3, 3, 0, 0)}, + {"filetype": "info"}, [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()]) - res = self.reader.read_band('VIS006', None) + res = self.reader.read_band("VIS006", None) assert get_hd.call_count == 1 assert mock_decompress.call_count == 0 assert res.compute().shape == (464, 3712) diff --git a/satpy/tests/reader_tests/test_hsaf_grib.py b/satpy/tests/reader_tests/test_hsaf_grib.py index bc8a2c2c73..b08aa81d06 100644 --- a/satpy/tests/reader_tests/test_hsaf_grib.py +++ b/satpy/tests/reader_tests/test_hsaf_grib.py @@ -36,8 +36,8 @@ def __init__(self, values, proj_params=None, latlons=None, **attrs): self.attrs = attrs self.values = values if proj_params is None: - proj_params = {'a': 6378140.0, 'b': 6356755.0, 'lat_0': 0.0, - 'lon_0': 0.0, 'proj': 'geos', 'h': 35785830.098} + proj_params = {"a": 6378140.0, "b": 6356755.0, "lat_0": 0.0, + "lon_0": 0.0, "proj": "geos", "h": 35785830.098} self.projparams = proj_params self._latlons = latlons @@ -66,15 +66,15 @@ def __init__(self, messages=None, proj_params=None, latlons=None): self._messages = [ FakeMessage( values=np.arange(25.).reshape((5, 5)), - name='Instantaneous rain rate', - shortName='irrate', - cfName='unknown', - units='kg m**-2 s**-1', + name="Instantaneous rain rate", + shortName="irrate", + cfName="unknown", + units="kg m**-2 s**-1", dataDate=20190603, dataTime=1645, missingValue=9999, - modelName='unknown', - centreDescription='Rome', + modelName="unknown", + centreDescription="Rome", minimum=0.0, maximum=0.01475, Nx=3712, @@ -121,51 +121,51 @@ def setUp(self): except ImportError: pygrib = None self.orig_pygrib = pygrib - sys.modules['pygrib'] = mock.MagicMock() + sys.modules["pygrib"] = mock.MagicMock() def tearDown(self): """Re-enable pygrib import.""" - sys.modules['pygrib'] = self.orig_pygrib + sys.modules["pygrib"] = self.orig_pygrib - @mock.patch('satpy.readers.hsaf_grib.pygrib.open', return_value=FakeGRIB()) + @mock.patch("satpy.readers.hsaf_grib.pygrib.open", return_value=FakeGRIB()) def test_init(self, pg): """Test the init function, ensure that the correct dates and metadata are returned.""" pg.open.return_value = FakeGRIB() correct_dt = datetime(2019, 6, 3, 16, 45, 0) from 
satpy.readers.hsaf_grib import HSAFFileHandler - fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) + fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock()) self.assertEqual(fh._analysis_time, correct_dt) - self.assertEqual(fh.metadata['projparams']['lat_0'], 0.0) - self.assertEqual(fh.metadata['shortName'], 'irrate') - self.assertEqual(fh.metadata['nx'], 3712) + self.assertEqual(fh.metadata["projparams"]["lat_0"], 0.0) + self.assertEqual(fh.metadata["shortName"], "irrate") + self.assertEqual(fh.metadata["nx"], 3712) - @mock.patch('satpy.readers.hsaf_grib.pygrib.open', return_value=FakeGRIB()) + @mock.patch("satpy.readers.hsaf_grib.pygrib.open", return_value=FakeGRIB()) def test_get_area_def(self, pg): """Test the area definition setup, checks the size and extent.""" pg.open.return_value = FakeGRIB() from satpy.readers.hsaf_grib import HSAFFileHandler - fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) - area_def = HSAFFileHandler.get_area_def(fh, 'H03B') + fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock()) + area_def = HSAFFileHandler.get_area_def(fh, "H03B") self.assertEqual(area_def.width, 3712) self.assertAlmostEqual(area_def.area_extent[0], -5569209.3026, places=3) self.assertAlmostEqual(area_def.area_extent[3], 5587721.9097, places=3) - @mock.patch('satpy.readers.hsaf_grib.pygrib.open', return_value=FakeGRIB()) + @mock.patch("satpy.readers.hsaf_grib.pygrib.open", return_value=FakeGRIB()) def test_get_dataset(self, pg): """Test reading the actual datasets from a grib file.""" pg.open.return_value = FakeGRIB() from satpy.readers.hsaf_grib import HSAFFileHandler # Instantaneous precipitation - fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) + fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock()) fh.filename = "H03B" - ds_id = make_dataid(name='H03B') + ds_id = make_dataid(name="H03B") data = fh.get_dataset(ds_id, mock.Mock()) np.testing.assert_array_equal(data.values, np.arange(25.).reshape((5, 5))) # Accumulated precipitation - fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) + fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock()) fh.filename = "H05B" - ds_id = make_dataid(name='H05B') + ds_id = make_dataid(name="H05B") data = fh.get_dataset(ds_id, mock.Mock()) np.testing.assert_array_equal(data.values, np.arange(25.).reshape((5, 5))) diff --git a/satpy/tests/reader_tests/test_hsaf_h5.py b/satpy/tests/reader_tests/test_hsaf_h5.py index 4d574b6eb4..49658e6727 100644 --- a/satpy/tests/reader_tests/test_hsaf_h5.py +++ b/satpy/tests/reader_tests/test_hsaf_h5.py @@ -21,42 +21,42 @@ def sc_h5_file(tmp_path_factory): """Create a fake HSAF SC HDF5 file.""" filename = tmp_path_factory.mktemp("data") / "h10_20221115_day_merged.H5" h5f = h5py.File(filename, mode="w") - h5f.create_dataset('SC', SHAPE_SC, dtype=np.uint8) - h5f.create_dataset('colormap', SHAPE_SC_COLORMAP, dtype=np.uint8) + h5f.create_dataset("SC", SHAPE_SC, dtype=np.uint8) + h5f.create_dataset("colormap", SHAPE_SC_COLORMAP, dtype=np.uint8) return str(filename) def _get_scene_with_loaded_sc_datasets(filename): """Return a scene with SC and SC_pal loaded.""" loaded_scene = Scene(filenames=[filename], reader="hsaf_h5") - loaded_scene.load(['SC', 'SC_pal']) + loaded_scene.load(["SC", "SC_pal"]) return loaded_scene def test_hsaf_sc_dataset(sc_h5_file): """Test the H-SAF SC dataset.""" loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file) - assert loaded_scene['SC'].shape == SHAPE_SC + assert 
loaded_scene["SC"].shape == SHAPE_SC def test_hsaf_sc_colormap_dataset(sc_h5_file): """Test the H-SAF SC_pal dataset.""" loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file) - assert loaded_scene['SC_pal'].shape == SHAPE_SC_COLORMAP + assert loaded_scene["SC_pal"].shape == SHAPE_SC_COLORMAP def test_hsaf_sc_datetime(sc_h5_file): """Test the H-SAF reference time.""" loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file) fname = os.path.basename(sc_h5_file) - dtstr = fname.split('_')[1] + dtstr = fname.split("_")[1] obs_time = datetime.strptime(dtstr, "%Y%m%d") - assert loaded_scene['SC'].attrs['data_time'] == obs_time + assert loaded_scene["SC"].attrs["data_time"] == obs_time def test_hsaf_sc_areadef(sc_h5_file): """Test the H-SAF SC area definition.""" loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file) - fd_def = get_area_def('msg_seviri_fes_3km') + fd_def = get_area_def("msg_seviri_fes_3km") hsaf_def = fd_def[AREA_Y_OFFSET:AREA_Y_OFFSET+SHAPE_SC[0], AREA_X_OFFSET:AREA_X_OFFSET+SHAPE_SC[1]] - assert loaded_scene['SC'].area == hsaf_def + assert loaded_scene["SC"].area == hsaf_def diff --git a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py index b2a5d4d3e1..a0bca2143f 100644 --- a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py +++ b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py @@ -49,237 +49,237 @@ def __getitem__(self, key): def _get_geo_data(self, num_rows, num_cols): geo = { - 'wvc_lon': + "wvc_lon": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ - 'fill_value': 1.7e+38, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'degree', - 'valid range': [0, 359.99], + "fill_value": 1.7e+38, + "scale_factor": 1., + "add_offset": 0., + "units": "degree", + "valid range": [0, 359.99], }, - dims=('y', 'x')), - 'wvc_lat': + dims=("y", "x")), + "wvc_lat": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ - 'fill_value': 1.7e+38, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'degree', - 'valid range': [-90.0, 90.0], + "fill_value": 1.7e+38, + "scale_factor": 1., + "add_offset": 0., + "units": "degree", + "valid range": [-90.0, 90.0], }, - dims=('y', 'x')), + dims=("y", "x")), } return geo def _get_geo_data_nsoas(self, num_rows, num_cols): geo = { - 'wvc_lon': + "wvc_lon": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ - 'fill_value': 1.7e+38, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'degree', - 'valid_range': [0, 359.99], + "fill_value": 1.7e+38, + "scale_factor": 1., + "add_offset": 0., + "units": "degree", + "valid_range": [0, 359.99], }, - dims=('y', 'x')), - 'wvc_lat': + dims=("y", "x")), + "wvc_lat": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ - 'fill_value': 1.7e+38, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'degree', - 'valid_range': [-90.0, 90.0], + "fill_value": 1.7e+38, + "scale_factor": 1., + "add_offset": 0., + "units": "degree", + "valid_range": [-90.0, 90.0], }, - dims=('y', 'x')), + dims=("y", "x")), } return geo def _get_selection_data(self, num_rows, num_cols): selection = { - 'wvc_selection': + "wvc_selection": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ - 'fill_value': 0, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'count', - 'valid range': [1, 8], + "fill_value": 0, + "scale_factor": 1., + "add_offset": 0., + "units": "count", + "valid range": [1, 8], }, - dims=('y', 'x')), - 
'wind_speed_selection': + dims=("y", "x")), + "wind_speed_selection": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 0.1, - 'add_offset': 0., - 'units': 'deg', - 'valid range': [0, 3599], + "fill_value": -32767, + "scale_factor": 0.1, + "add_offset": 0., + "units": "deg", + "valid range": [0, 3599], }, - dims=('y', 'x')), - 'wind_dir_selection': + dims=("y", "x")), + "wind_dir_selection": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 0.01, - 'add_offset': 0., - 'units': 'm/s', - 'valid range': [0, 5000], + "fill_value": -32767, + "scale_factor": 0.01, + "add_offset": 0., + "units": "m/s", + "valid range": [0, 5000], }, - dims=('y', 'x')), - 'model_dir': + dims=("y", "x")), + "model_dir": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 0.01, - 'add_offset': 0., - 'units': 'm/s', - 'valid range': [0, 5000], + "fill_value": -32767, + "scale_factor": 0.01, + "add_offset": 0., + "units": "m/s", + "valid range": [0, 5000], }, - dims=('y', 'x')), - 'model_speed': + dims=("y", "x")), + "model_speed": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 0.1, - 'add_offset': 0., - 'units': 'deg', - 'valid range': [0, 3599], + "fill_value": -32767, + "scale_factor": 0.1, + "add_offset": 0., + "units": "deg", + "valid range": [0, 3599], }, - dims=('y', 'x')), - 'num_ambigs': + dims=("y", "x")), + "num_ambigs": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ - 'fill_value': 0, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'count', - 'valid range': [1, 8], + "fill_value": 0, + "scale_factor": 1., + "add_offset": 0., + "units": "count", + "valid range": [1, 8], }, - dims=('y', 'x')), - 'num_in_aft': + dims=("y", "x")), + "num_in_aft": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ - 'fill_value': 0, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'count', - 'valid range': [1, 127], + "fill_value": 0, + "scale_factor": 1., + "add_offset": 0., + "units": "count", + "valid range": [1, 127], }, - dims=('y', 'x')), - 'num_in_fore': + dims=("y", "x")), + "num_in_fore": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ - 'fill_value': 0, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'count', - 'valid range': [1, 127], + "fill_value": 0, + "scale_factor": 1., + "add_offset": 0., + "units": "count", + "valid range": [1, 127], }, - dims=('y', 'x')), - 'num_out_aft': + dims=("y", "x")), + "num_out_aft": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ - 'fill_value': 0, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'count', - 'valid range': [1, 127], + "fill_value": 0, + "scale_factor": 1., + "add_offset": 0., + "units": "count", + "valid range": [1, 127], }, - dims=('y', 'x')), - 'num_out_fore': + dims=("y", "x")), + "num_out_fore": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ - 'fill_value': 0, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'count', - 'valid range': [1, 127], + "fill_value": 0, + "scale_factor": 1., + "add_offset": 0., + "units": "count", + "valid range": [1, 127], }, - dims=('y', 'x')), - 'wvc_quality_flag': + dims=("y", "x")), + "wvc_quality_flag": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, 
dtype=np.uint16), attrs={ - 'fill_value': 2.14748e+09, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'na', - 'valid range': [1, 2.14748e+09], + "fill_value": 2.14748e+09, + "scale_factor": 1., + "add_offset": 0., + "units": "na", + "valid range": [1, 2.14748e+09], }, - dims=('y', 'x')), + dims=("y", "x")), } return selection def _get_all_ambiguities_data(self, num_rows, num_cols, num_amb): all_amb = { - 'max_likelihood_est': + "max_likelihood_est": xr.DataArray( da.ones((num_rows, num_cols, num_amb), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'na', - 'valid range': [0, 32767], + "fill_value": -32767, + "scale_factor": 1., + "add_offset": 0., + "units": "na", + "valid range": [0, 32767], }, - dims=('y', 'x', 'selection')), - 'wind_dir': + dims=("y", "x", "selection")), + "wind_dir": xr.DataArray( da.ones((num_rows, num_cols, num_amb), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 0.1, - 'add_offset': 0., - 'units': 'deg', - 'valid range': [0, 3599], + "fill_value": -32767, + "scale_factor": 0.1, + "add_offset": 0., + "units": "deg", + "valid range": [0, 3599], }, - dims=('y', 'x', 'selection')), - 'wind_speed': + dims=("y", "x", "selection")), + "wind_speed": xr.DataArray( da.ones((num_rows, num_cols, num_amb), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 0.01, - 'add_offset': 0., - 'units': 'm/s', - 'valid range': [0, 5000], + "fill_value": -32767, + "scale_factor": 0.01, + "add_offset": 0., + "units": "m/s", + "valid range": [0, 5000], }, - dims=('y', 'x', 'selection')), + dims=("y", "x", "selection")), } return all_amb @@ -295,49 +295,49 @@ def _get_wvc_row_time(self, num_rows): "20200326T01:11:38.074", "20200326T01:11:41.887"] wvc_row_time = { - 'wvc_row_time': + "wvc_row_time": xr.DataArray(data, attrs={ - 'fill_value': "", + "fill_value": "", }, - dims=('y',)), + dims=("y",)), } return wvc_row_time def _get_global_attrs(self, num_rows, num_cols): return { - '/attr/Equator_Crossing_Longitude': '246.408397', - '/attr/Equator_Crossing_Time': '20200326T01:37:15.875', - '/attr/HDF_Version_Id': 'HDF5-1.8.16', - '/attr/Input_L2A_Filename': 'H2B_OPER_SCA_L2A_OR_20200326T010839_20200326T025757_07076_dps_250_20.h5', - '/attr/Instrument_ShorName': 'HSCAT-B', - '/attr/L2A_Inputdata_Version': '10', - '/attr/L2B_Actual_WVC_Rows': np.int32(num_rows), - '/attr/L2B_Algorithm_Descriptor': ('Wind retrieval processing uses the multiple solution scheme (MSS) for ' - 'wind inversion with the NSCAT-4 GMF,and a circular median filter ' - 'method (CMF) for ambiguity removal. 
The ECMWF/NCEP forescate data are ' - 'used as background winds in the CMF'), - '/attr/L2B_Data_Version': '10', - '/attr/L2B_Expected_WVC_Rows': np.int32(num_rows), - '/attr/L2B_Processing_Type': 'OPER', - '/attr/L2B_Processor_Name': 'hy2_sca_l2b_pro', - '/attr/L2B_Processor_Version': '01.00', - '/attr/Long_Name': 'HY-2B/SCAT Level 2B Ocean Wind Vectors in 25.0 km Swath Grid', - '/attr/Orbit_Inclination': np.float32(99.3401), - '/attr/Orbit_Number': '07076', - '/attr/Output_L2B_Filename': 'H2B_OPER_SCA_L2B_OR_20200326T011107_20200326T025540_07076_dps_250_20_owv.h5', - '/attr/Platform_LongName': 'Haiyang 2B Ocean Observing Satellite', - '/attr/Platform_ShortName': 'HY-2B', - '/attr/Platform_Type': 'spacecraft', - '/attr/Producer_Agency': 'Ministry of Natural Resources of the People\'s Republic of China', - '/attr/Producer_Institution': 'NSOAS', - '/attr/Production_Date_Time': '20200326T06:23:10', - '/attr/Range_Beginning_Time': '20200326T01:11:07', - '/attr/Range_Ending_Time': '20200326T02:55:40', - '/attr/Rev_Orbit_Period': '14 days', - '/attr/Short_Name': 'HY-2B SCAT-L2B-25km', - '/attr/Sigma0_Granularity': 'whole pulse', - '/attr/WVC_Size': '25000m*25000m', + "/attr/Equator_Crossing_Longitude": "246.408397", + "/attr/Equator_Crossing_Time": "20200326T01:37:15.875", + "/attr/HDF_Version_Id": "HDF5-1.8.16", + "/attr/Input_L2A_Filename": "H2B_OPER_SCA_L2A_OR_20200326T010839_20200326T025757_07076_dps_250_20.h5", + "/attr/Instrument_ShorName": "HSCAT-B", + "/attr/L2A_Inputdata_Version": "10", + "/attr/L2B_Actual_WVC_Rows": np.int32(num_rows), + "/attr/L2B_Algorithm_Descriptor": ("Wind retrieval processing uses the multiple solution scheme (MSS) for " + "wind inversion with the NSCAT-4 GMF,and a circular median filter " + "method (CMF) for ambiguity removal. 
The ECMWF/NCEP forescate data are " + "used as background winds in the CMF"), + "/attr/L2B_Data_Version": "10", + "/attr/L2B_Expected_WVC_Rows": np.int32(num_rows), + "/attr/L2B_Processing_Type": "OPER", + "/attr/L2B_Processor_Name": "hy2_sca_l2b_pro", + "/attr/L2B_Processor_Version": "01.00", + "/attr/Long_Name": "HY-2B/SCAT Level 2B Ocean Wind Vectors in 25.0 km Swath Grid", + "/attr/Orbit_Inclination": np.float32(99.3401), + "/attr/Orbit_Number": "07076", + "/attr/Output_L2B_Filename": "H2B_OPER_SCA_L2B_OR_20200326T011107_20200326T025540_07076_dps_250_20_owv.h5", + "/attr/Platform_LongName": "Haiyang 2B Ocean Observing Satellite", + "/attr/Platform_ShortName": "HY-2B", + "/attr/Platform_Type": "spacecraft", + "/attr/Producer_Agency": "Ministry of Natural Resources of the People\'s Republic of China", + "/attr/Producer_Institution": "NSOAS", + "/attr/Production_Date_Time": "20200326T06:23:10", + "/attr/Range_Beginning_Time": "20200326T01:11:07", + "/attr/Range_Ending_Time": "20200326T02:55:40", + "/attr/Rev_Orbit_Period": "14 days", + "/attr/Short_Name": "HY-2B SCAT-L2B-25km", + "/attr/Sigma0_Granularity": "whole pulse", + "/attr/WVC_Size": "25000m*25000m", } def get_test_content(self, filename, filename_info, filetype_info): @@ -349,11 +349,11 @@ def get_test_content(self, filename, filename_info, filetype_info): test_content = {} test_content.update(self._get_global_attrs(num_rows, num_cols)) data = {} - if 'OPER_SCA_L2B' in filename: - test_content.update({'/attr/L2B_Expected_WVC_Cells': np.int32(num_cols)}) + if "OPER_SCA_L2B" in filename: + test_content.update({"/attr/L2B_Expected_WVC_Cells": np.int32(num_cols)}) data = self._get_geo_data_nsoas(num_rows, num_cols) else: - test_content.update({'/attr/L2B_Number_WVC_cells': np.int32(num_cols)}) + test_content.update({"/attr/L2B_Number_WVC_cells": np.int32(num_cols)}) data = self._get_geo_data(num_rows, num_cols) test_content.update(data) @@ -377,9 +377,9 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.hy2_scat_l2b_h5 import HY2SCATL2BH5FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(HY2SCATL2BH5FileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(HY2SCATL2BH5FileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -391,7 +391,7 @@ def test_load_geo(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] + "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) @@ -400,14 +400,14 @@ def test_load_geo(self): # Make sure we have some files self.assertTrue(reader.file_handlers) - res = reader.load(['wvc_lon', 'wvc_lat']) + res = reader.load(["wvc_lon", "wvc_lat"]) self.assertEqual(2, len(res)) def test_load_geo_nsoas(self): """Test loading data from nsoas file.""" from satpy.readers import load_reader filenames = [ - 'H2B_OPER_SCA_L2B_OR_20210803T100304_20210803T104601_13905_pwp_250_07_owv.h5', ] + 
"H2B_OPER_SCA_L2B_OR_20210803T100304_20210803T104601_13905_pwp_250_07_owv.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) @@ -416,14 +416,14 @@ def test_load_geo_nsoas(self): # Make sure we have some files self.assertTrue(reader.file_handlers) - res = reader.load(['wvc_lon', 'wvc_lat']) + res = reader.load(["wvc_lon", "wvc_lat"]) self.assertEqual(2, len(res)) def test_load_data_selection(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] + "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) @@ -431,16 +431,16 @@ def test_load_data_selection(self): reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) - res = reader.load(['wind_speed_selection', - 'wind_dir_selection', - 'wvc_selection']) + res = reader.load(["wind_speed_selection", + "wind_dir_selection", + "wvc_selection"]) self.assertEqual(3, len(res)) def test_load_data_all_ambiguities(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] + "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) @@ -448,24 +448,24 @@ def test_load_data_all_ambiguities(self): reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) - res = reader.load(['wind_speed', - 'wind_dir', - 'max_likelihood_est', - 'model_dir', - 'model_speed', - 'num_ambigs', - 'num_in_aft', - 'num_in_fore', - 'num_out_aft', - 'num_out_fore', - 'wvc_quality_flag']) + res = reader.load(["wind_speed", + "wind_dir", + "max_likelihood_est", + "model_dir", + "model_speed", + "num_ambigs", + "num_in_aft", + "num_in_fore", + "num_out_aft", + "num_out_fore", + "wvc_quality_flag"]) self.assertEqual(11, len(res)) def test_load_data_row_times(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] + "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) @@ -473,38 +473,38 @@ def test_load_data_row_times(self): reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) - res = reader.load(['wvc_row_time']) + res = reader.load(["wvc_row_time"]) self.assertEqual(1, len(res)) def test_reading_attrs(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] + "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(files) # Make sure we have some files - res = reader.load(['wvc_lon']) - self.assertEqual(res['wvc_lon'].attrs['L2B_Number_WVC_cells'], 10) + res = reader.load(["wvc_lon"]) + 
self.assertEqual(res["wvc_lon"].attrs["L2B_Number_WVC_cells"], 10) with self.assertRaises(KeyError): - self.assertEqual(res['wvc_lon'].attrs['L2B_Expected_WVC_Cells'], 10) + self.assertEqual(res["wvc_lon"].attrs["L2B_Expected_WVC_Cells"], 10) def test_reading_attrs_nsoas(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ - 'H2B_OPER_SCA_L2B_OR_20210803T100304_20210803T104601_13905_pwp_250_07_owv.h5', ] + "H2B_OPER_SCA_L2B_OR_20210803T100304_20210803T104601_13905_pwp_250_07_owv.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(files) # Make sure we have some files - res = reader.load(['wvc_lon']) + res = reader.load(["wvc_lon"]) with self.assertRaises(KeyError): - self.assertEqual(res['wvc_lon'].attrs['L2B_Number_WVC_cells'], 10) - self.assertEqual(res['wvc_lon'].attrs['L2B_Expected_WVC_Cells'], 10) + self.assertEqual(res["wvc_lon"].attrs["L2B_Number_WVC_cells"], 10) + self.assertEqual(res["wvc_lon"].attrs["L2B_Expected_WVC_Cells"], 10) def test_properties(self): """Test platform_name.""" @@ -512,13 +512,13 @@ def test_properties(self): from satpy.readers import load_reader filenames = [ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] + "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(files) # Make sure we have some files - res = reader.load(['wvc_lon']) - self.assertEqual(res['wvc_lon'].platform_name, 'HY-2B') - self.assertEqual(res['wvc_lon'].start_time, datetime(2020, 3, 26, 1, 11, 7)) - self.assertEqual(res['wvc_lon'].end_time, datetime(2020, 3, 26, 2, 55, 40)) + res = reader.load(["wvc_lon"]) + self.assertEqual(res["wvc_lon"].platform_name, "HY-2B") + self.assertEqual(res["wvc_lon"].start_time, datetime(2020, 3, 26, 1, 11, 7)) + self.assertEqual(res["wvc_lon"].end_time, datetime(2020, 3, 26, 2, 55, 40)) diff --git a/satpy/tests/reader_tests/test_iasi_l2.py b/satpy/tests/reader_tests/test_iasi_l2.py index 9dbfa7eef0..81ca28a5b6 100644 --- a/satpy/tests/reader_tests/test_iasi_l2.py +++ b/satpy/tests/reader_tests/test_iasi_l2.py @@ -32,85 +32,85 @@ # Structure for the test data, to be written to HDF5 file TEST_DATA = { # Not implemented in the reader - 'Amsu': { - 'FLG_AMSUBAD': {'data': np.zeros((NUM_SCANLINES, 30), dtype=np.uint8), - 'attrs': {}} + "Amsu": { + "FLG_AMSUBAD": {"data": np.zeros((NUM_SCANLINES, 30), dtype=np.uint8), + "attrs": {}} }, # Not implemented in the reader - 'INFO': { - 'OmC': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'long_name': "Cloud signal. Predicted average window channel 'Obs minus Calc", - 'units': 'K'}}, - 'mdist': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}} + "INFO": { + "OmC": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"long_name": "Cloud signal. 
Predicted average window channel 'Obs minus Calc", + "units": "K"}}, + "mdist": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}} }, - 'L1C': { - 'Latitude': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'degrees_north'}}, - 'Longitude': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'degrees_north'}}, - 'SatAzimuth': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'degrees'}}, - 'SatZenith': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'degrees'}}, - 'SensingTime_day': {'data': np.array([6472], dtype=np.uint16), - 'attrs': {}}, - 'SensingTime_msec': {'data': np.array([37337532], dtype=np.uint32), - 'attrs': {}}, - 'SunAzimuth': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'degrees'}}, - 'SunZenith': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'degrees'}}, + "L1C": { + "Latitude": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "degrees_north"}}, + "Longitude": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "degrees_north"}}, + "SatAzimuth": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "degrees"}}, + "SatZenith": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "degrees"}}, + "SensingTime_day": {"data": np.array([6472], dtype=np.uint16), + "attrs": {}}, + "SensingTime_msec": {"data": np.array([37337532], dtype=np.uint32), + "attrs": {}}, + "SunAzimuth": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "degrees"}}, + "SunZenith": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "degrees"}}, }, # Not implemented in the reader - 'Maps': { - 'Height': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'm'}}, - 'HeightStd': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'm'}}, + "Maps": { + "Height": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "m"}}, + "HeightStd": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "m"}}, }, # Not implemented in the reader - 'Mhs': { - 'FLG_MHSBAD': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.uint8), - 'attrs': {}} + "Mhs": { + "FLG_MHSBAD": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.uint8), + "attrs": {}} }, - 'PWLR': { - 'E': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, 10), dtype=np.float32), - 'attrs': {'emissivity_wavenumbers': np.array([699.3, 826.4, + "PWLR": { + "E": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, 10), dtype=np.float32), + "attrs": {"emissivity_wavenumbers": np.array([699.3, 826.4, 925.9, 1075.2, 1204.8, 1315.7, 1724.1, 2000.0, 2325.5, 2702.7], dtype=np.float32)}}, - 'O': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), - 'attrs': {'long_name': 'Ozone mixing ratio vertical profile', - 'units': 'kg/kg'}}, - 'OC': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'P': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), - 'attrs': {'long_name': 'Atmospheric pressures at which the vertical profiles are given. 
' - 'Last value is the surface pressure', - 'units': 'hpa'}}, - 'QE': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'QO': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'QP': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'QT': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'QTs': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'QW': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'T': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), - 'attrs': {'long_name': 'Temperature vertical profile', 'units': 'K'}}, - 'Ts': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'long_name': 'Surface skin temperature', 'units': 'K'}}, - 'W': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), - 'attrs': {'long_name': 'Water vapour mixing ratio vertical profile', 'units': 'kg/kg'}}, - 'WC': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'long_name': 'Water vapour total columnar amount', 'units': 'mm'}}, + "O": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), + "attrs": {"long_name": "Ozone mixing ratio vertical profile", + "units": "kg/kg"}}, + "OC": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "P": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), + "attrs": {"long_name": "Atmospheric pressures at which the vertical profiles are given. " + "Last value is the surface pressure", + "units": "hpa"}}, + "QE": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "QO": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "QP": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "QT": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "QTs": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "QW": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "T": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), + "attrs": {"long_name": "Temperature vertical profile", "units": "K"}}, + "Ts": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"long_name": "Surface skin temperature", "units": "K"}}, + "W": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), + "attrs": {"long_name": "Water vapour mixing ratio vertical profile", "units": "kg/kg"}}, + "WC": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"long_name": "Water vapour total columnar amount", "units": "mm"}}, } } @@ -118,17 +118,17 @@ def save_test_data(path): """Save the test to the indicated directory.""" import h5py - with h5py.File(os.path.join(path, FNAME), 'w') as fid: + with h5py.File(os.path.join(path, FNAME), "w") as fid: # Create groups for grp in TEST_DATA: fid.create_group(grp) # Write datasets for dset in TEST_DATA[grp]: - fid[grp][dset] = TEST_DATA[grp][dset]['data'] + fid[grp][dset] = TEST_DATA[grp][dset]["data"] # Write dataset attributes - for attr in TEST_DATA[grp][dset]['attrs']: + for attr in TEST_DATA[grp][dset]["attrs"]: fid[grp][dset].attrs[attr] = \ - TEST_DATA[grp][dset]['attrs'][attr] + TEST_DATA[grp][dset]["attrs"][attr] 
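[Illustrative note, not part of the patch] The save_test_data() helper above mirrors the nested TEST_DATA dict into HDF5 groups, datasets and attributes for the IASI L2 tests. A minimal sketch only, assuming the module-level FNAME constant and a directory already populated by save_test_data(); the helper name is hypothetical and not part of the test suite:

import os

import h5py


def inspect_test_file(path):
    # Walk the file written by save_test_data() and collect, for every group,
    # each dataset name together with its shape and attribute dict, so the
    # group/dataset/attribute layout can be eyeballed against TEST_DATA.
    entries = []
    with h5py.File(os.path.join(path, FNAME), "r") as fid:
        for grp in fid:
            for dset in fid[grp]:
                node = fid[grp][dset]
                entries.append((grp, dset, node.shape, dict(node.attrs)))
    return entries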
class TestIasiL2(unittest.TestCase): @@ -144,16 +144,16 @@ def setUp(self): self.base_dir = tempfile.mkdtemp() save_test_data(self.base_dir) self.fname = os.path.join(self.base_dir, FNAME) - self.fname_info = {'start_time': dt.datetime(2017, 9, 20, 10, 22, 17), - 'end_time': dt.datetime(2017, 9, 20, 10, 29, 12), - 'processing_time': dt.datetime(2017, 9, 20, 10, 35, 59), - 'processing_location': 'kan', - 'long_platform_id': 'metopb', - 'instrument': 'iasi', - 'platform_id': 'M01'} - self.ftype_info = {'file_reader': IASIL2HDF5, - 'file_patterns': ['{fname}.hdf'], - 'file_type': 'iasi_l2_hdf5'} + self.fname_info = {"start_time": dt.datetime(2017, 9, 20, 10, 22, 17), + "end_time": dt.datetime(2017, 9, 20, 10, 29, 12), + "processing_time": dt.datetime(2017, 9, 20, 10, 35, 59), + "processing_location": "kan", + "long_platform_id": "metopb", + "instrument": "iasi", + "platform_id": "M01"} + self.ftype_info = {"file_reader": IASIL2HDF5, + "file_patterns": ["{fname}.hdf"], + "file_type": "iasi_l2_hdf5"} self.reader = IASIL2HDF5(self.fname, self.fname_info, self.ftype_info) def tearDown(self): @@ -168,44 +168,44 @@ def test_scene(self): """Test scene creation.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) - scn = Scene(reader='iasi_l2', filenames=[fname]) + scn = Scene(reader="iasi_l2", filenames=[fname]) assert scn.start_time is not None assert scn.end_time is not None assert scn.sensor_names - assert 'iasi' in scn.sensor_names + assert "iasi" in scn.sensor_names def test_scene_load_available_datasets(self): """Test that all datasets are available.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) - scn = Scene(reader='iasi_l2', filenames=[fname]) + scn = Scene(reader="iasi_l2", filenames=[fname]) scn.load(scn.available_dataset_names()) def test_scene_load_pressure(self): """Test loading pressure data.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) - scn = Scene(reader='iasi_l2', filenames=[fname]) - scn.load(['pressure']) - pres = scn['pressure'].compute() + scn = Scene(reader="iasi_l2", filenames=[fname]) + scn.load(["pressure"]) + pres = scn["pressure"].compute() self.check_pressure(pres, scn.attrs) def test_scene_load_emissivity(self): """Test loading emissivity data.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) - scn = Scene(reader='iasi_l2', filenames=[fname]) - scn.load(['emissivity']) - emis = scn['emissivity'].compute() + scn = Scene(reader="iasi_l2", filenames=[fname]) + scn.load(["emissivity"]) + emis = scn["emissivity"].compute() self.check_emissivity(emis) def test_scene_load_sensing_times(self): """Test loading sensing times.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) - scn = Scene(reader='iasi_l2', filenames=[fname]) - scn.load(['sensing_time']) - times = scn['sensing_time'].compute() + scn = Scene(reader="iasi_l2", filenames=[fname]) + scn.load(["sensing_time"]) + times = scn["sensing_time"].compute() self.check_sensing_times(times) def test_init(self): @@ -214,8 +214,8 @@ def test_init(self): self.assertEqual(self.reader.finfo, self.fname_info) self.assertTrue(self.reader.lons is None) self.assertTrue(self.reader.lats is None) - self.assertEqual(self.reader.mda['platform_name'], 'Metop-B') - self.assertEqual(self.reader.mda['sensor'], 'iasi') + self.assertEqual(self.reader.mda["platform_name"], "Metop-B") + self.assertEqual(self.reader.mda["sensor"], "iasi") def test_time_properties(self): """Test time properties.""" @@ -226,16 +226,16 @@ def 
test_time_properties(self): def test_get_dataset(self): """Test get_dataset() for different datasets.""" from satpy.tests.utils import make_dataid - info = {'eggs': 'spam'} - key = make_dataid(name='pressure') + info = {"eggs": "spam"} + key = make_dataid(name="pressure") data = self.reader.get_dataset(key, info).compute() self.check_pressure(data) - self.assertTrue('eggs' in data.attrs) - self.assertEqual(data.attrs['eggs'], 'spam') - key = make_dataid(name='emissivity') + self.assertTrue("eggs" in data.attrs) + self.assertEqual(data.attrs["eggs"], "spam") + key = make_dataid(name="emissivity") data = self.reader.get_dataset(key, info).compute() self.check_emissivity(data) - key = make_dataid(name='sensing_time') + key = make_dataid(name="sensing_time") data = self.reader.get_dataset(key, info).compute() self.assertEqual(data.shape, (NUM_SCANLINES, SCAN_WIDTH)) @@ -249,10 +249,10 @@ def check_pressure(self, pres, attrs=None): self.assertEqual(pres.y.size, NUM_SCANLINES) self.assertEqual(pres.level.size, NUM_LEVELS) if attrs: - self.assertEqual(pres.attrs['start_time'], attrs['start_time']) - self.assertEqual(pres.attrs['end_time'], attrs['end_time']) - self.assertTrue('long_name' in pres.attrs) - self.assertTrue('units' in pres.attrs) + self.assertEqual(pres.attrs["start_time"], attrs["start_time"]) + self.assertEqual(pres.attrs["end_time"], attrs["end_time"]) + self.assertTrue("long_name" in pres.attrs) + self.assertTrue("units" in pres.attrs) def check_emissivity(self, emis): """Test reading emissivity dataset. @@ -262,7 +262,7 @@ def check_emissivity(self, emis): self.assertTrue(np.all(emis == 0.0)) self.assertEqual(emis.x.size, SCAN_WIDTH) self.assertEqual(emis.y.size, NUM_SCANLINES) - self.assertTrue('emissivity_wavenumbers' in emis.attrs) + self.assertTrue("emissivity_wavenumbers" in emis.attrs) def check_sensing_times(self, times): """Test reading sensing times. 
@@ -281,15 +281,15 @@ def test_read_dataset(self): from satpy.readers.iasi_l2 import read_dataset from satpy.tests.utils import make_dataid - with h5py.File(self.fname, 'r') as fid: - key = make_dataid(name='pressure') + with h5py.File(self.fname, "r") as fid: + key = make_dataid(name="pressure") data = read_dataset(fid, key).compute() self.check_pressure(data) - key = make_dataid(name='emissivity') + key = make_dataid(name="emissivity") data = read_dataset(fid, key).compute() self.check_emissivity(data) # This dataset doesn't have any attributes - key = make_dataid(name='ozone_total_column') + key = make_dataid(name="ozone_total_column") data = read_dataset(fid, key).compute() self.assertEqual(len(data.attrs), 0) @@ -299,19 +299,19 @@ def test_read_geo(self): from satpy.readers.iasi_l2 import read_geo from satpy.tests.utils import make_dataid - with h5py.File(self.fname, 'r') as fid: - key = make_dataid(name='sensing_time') + with h5py.File(self.fname, "r") as fid: + key = make_dataid(name="sensing_time") data = read_geo(fid, key).compute() self.assertEqual(data.shape, (NUM_SCANLINES, SCAN_WIDTH)) - key = make_dataid(name='latitude') + key = make_dataid(name="latitude") data = read_geo(fid, key).compute() self.assertEqual(data.shape, (NUM_SCANLINES, SCAN_WIDTH)) def test_form_datetimes(self): """Test _form_datetimes() function.""" from satpy.readers.iasi_l2 import _form_datetimes - days = TEST_DATA['L1C']['SensingTime_day']['data'] - msecs = TEST_DATA['L1C']['SensingTime_msec']['data'] + days = TEST_DATA["L1C"]["SensingTime_day"]["data"] + msecs = TEST_DATA["L1C"]["SensingTime_msec"]["data"] times = _form_datetimes(days, msecs) self.check_sensing_times(times) diff --git a/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py b/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py index 85df6b64ed..58aee7d1ba 100644 --- a/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py +++ b/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py @@ -29,46 +29,46 @@ # bufr file distributed over EUMETCAST msg = { - 'unpack': 1, - 'inputDelayedDescriptorReplicationFactor': 5, - 'edition': 4, - 'masterTableNumber': 0, - 'bufrHeaderCentre': 254, - 'bufrHeaderSubCentre': 0, - 'updateSequenceNumber': 0, - 'dataCategory': 3, - 'internationalDataSubCategory': 255, - 'dataSubCategory': 230, - 'masterTablesVersionNumber': 31, - 'localTablesVersionNumber': 0, - 'typicalYear': 2020, - 'typicalMonth': 2, - 'typicalDay': 4, - 'typicalHour': 8, - 'typicalMinute': 59, - 'typicalSecond': 0, - 'numberOfSubsets': 120, - 'observedData': 1, - 'compressedData': 1, - 'unexpandedDescriptors': np.array([ + "unpack": 1, + "inputDelayedDescriptorReplicationFactor": 5, + "edition": 4, + "masterTableNumber": 0, + "bufrHeaderCentre": 254, + "bufrHeaderSubCentre": 0, + "updateSequenceNumber": 0, + "dataCategory": 3, + "internationalDataSubCategory": 255, + "dataSubCategory": 230, + "masterTablesVersionNumber": 31, + "localTablesVersionNumber": 0, + "typicalYear": 2020, + "typicalMonth": 2, + "typicalDay": 4, + "typicalHour": 8, + "typicalMinute": 59, + "typicalSecond": 0, + "numberOfSubsets": 120, + "observedData": 1, + "compressedData": 1, + "unexpandedDescriptors": np.array([ 1007, 1031, 25060, 2019, 2020, 4001, 4002, 4003, 4004, 4005, 4006, 5040, 201133, 5041, 201000, 5001, 6001, 5043, 7024, 5021, 7025, 5022, 7007, 40068, 7002, 15045, 12080, 102000, 31001, 7007, 15045], dtype=int), - '#1#satelliteIdentifier': 4, - '#1#centre': 254, - '#1#softwareIdentification': 605, - '#1#satelliteInstruments': 221, - '#1#satelliteClassification': 61, - 
'#1#year': 2020, - '#1#month': 2, - '#1#day': 4, - '#1#hour': 9, - '#1#minute': 1, - '#1#second': 11, - '#1#orbitNumber': 68984, - '#1#scanLineNumber': 447, - '#1#latitude': np.array([ + "#1#satelliteIdentifier": 4, + "#1#centre": 254, + "#1#softwareIdentification": 605, + "#1#satelliteInstruments": 221, + "#1#satelliteClassification": 61, + "#1#year": 2020, + "#1#month": 2, + "#1#day": 4, + "#1#hour": 9, + "#1#minute": 1, + "#1#second": 11, + "#1#orbitNumber": 68984, + "#1#scanLineNumber": 447, + "#1#latitude": np.array([ -33.4055, -33.6659, -33.738, -33.4648, -33.263, -33.5027, -33.5673, -33.3172, -33.1332, -33.3564, -33.4151, -33.1832, -33.0132, -33.2232, -33.2771, -33.0596, -32.903, -33.1021, -33.1522, -32.9466, -32.7982, -32.9884, -33.0354, -32.8395, @@ -85,7 +85,7 @@ -30.4071, -30.6153, -30.7036, -30.4967, -30.146, -30.3672, -30.4712, -30.2521, -29.8276, -30.0649, -30.1911, -29.9569, -29.4268, -29.6844, -29.8436, -29.5903]), - '#1#longitude': np.array([ + "#1#longitude": np.array([ 2.53790e+00, 2.49440e+00, 3.08690e+00, 3.12690e+00, 1.15600e+00, 1.11230e+00, 1.59640e+00, 1.63750e+00, -3.70000e-03, -4.73000e-02, 3.61900e-01, 4.03500e-01, -1.00010e+00, -1.04340e+00, -6.88300e-01, @@ -111,7 +111,7 @@ -1.59045e+01, -1.58264e+01, -1.73549e+01, -1.74460e+01, -1.69944e+01, -1.69085e+01, -1.87277e+01, -1.88302e+01, -1.82832e+01, -1.81873e+01]), - '#1#fieldOfViewNumber': np.array([ + "#1#fieldOfViewNumber": np.array([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, @@ -120,7 +120,7 @@ 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120]), - '#1#satelliteZenithAngle': np.array([ + "#1#satelliteZenithAngle": np.array([ 56.64, 56.64, 58.38, 58.37, 52.15, 52.15, 53.8, 53.79, 47.84, 47.84, 49.42, 49.42, 43.67, 43.67, 45.21, 45.2, 39.59, 39.59, 41.1, 41.09, 35.59, 35.59, 37.08, 37.07, 31.65, 31.65, 33.11, 33.1, 27.75, 27.75, 29.2, 29.19, 23.89, 23.89, 25.33, 25.32, @@ -133,7 +133,7 @@ 49.52, 49.53, 47.94, 47.94, 53.89, 53.9, 52.25, 52.25, 58.48, 58.48, 56.74, 56.75]), - '#1#bearingOrAzimuth': np.array([ + "#1#bearingOrAzimuth": np.array([ 276.93, 278.61, 278.27, 276.61, 277.64, 279.42, 279.14, 277.38, 278.22, 280.11, 279.88, 278.01, 278.69, 280.72, 280.51, 278.51, 279.09, 281.3, 281.11, 278.94, 279.41, 281.83, 281.64, 279.28, 279.68, 282.36, 282.18, 279.58, 279.88, 282.9, @@ -147,7 +147,7 @@ 107.74, 105.67, 105.47, 107.64, 108.11, 106.2, 105.99, 107.98, 108.54, 106.76, 106.53, 108.38, 109.06, 107.39, 107.14, 108.87, 109.7, 108.13, 107.83, 109.46]), - '#1#solarZenithAngle': np.array([ + "#1#solarZenithAngle": np.array([ 44.36, 44.44, 43.98, 43.89, 45.47, 45.54, 45.16, 45.08, 46.4, 46.47, 46.14, 46.07, 47.21, 47.27, 46.99, 46.92, 47.92, 47.98, 47.73, 47.67, 48.56, 48.62, 48.39, 48.33, 49.15, 49.21, 49., 48.94, 49.7, 49.75, 49.55, 49.5, 50.21, 50.26, 50.07, 50.02, @@ -160,7 +160,7 @@ 59.98, 60.04, 59.70, 59.64, 60.98, 61.05, 60.65, 60.59, 62.20, 62.27, 61.78, 61.72]), - '#1#solarAzimuth': np.array([ + "#1#solarAzimuth": np.array([ 78.89, 78.66, 78.16, 78.41, 80.00, 79.80, 79.40, 79.62, 80.92, 80.74, 80.40, 80.6, 81.69, 81.53, 81.24, 81.42, 82.36, 82.21, 81.96, 82.12, 82.96, 82.82, 82.60, 82.74, 83.49, 83.36, 83.16, 83.3, 83.98, 83.86, 83.68, 83.80, 84.43, 84.32, 84.15, 84.27, @@ -172,11 +172,11 @@ 90.58, 90.49, 90.31, 90.4, 91.09, 91., 90.81, 90.89, 91.66, 
91.57, 91.35, 91.44, 92.29, 92.20, 91.95, 92.04, 93.02, 92.93, 92.64, 92.73, 93.87, 93.79, 93.45, 93.54]), - '#1#height': 83270, - '#1#generalRetrievalQualityFlagForSo2': 9, - '#2#height': -1e+100, - '#1#sulphurDioxide': -1e+100, - '#1#brightnessTemperatureRealPart': np.array([ + "#1#height": 83270, + "#1#generalRetrievalQualityFlagForSo2": 9, + "#2#height": -1e+100, + "#1#sulphurDioxide": -1e+100, + "#1#brightnessTemperatureRealPart": np.array([ 0.11, 0.11, -0.07, 0.08, 0.13, 0.15, 0.10, 0.06, -0.02, -0.03, 0.08, 0.17, -0.05, 0.12, 0.08, -0.06, 0.15, 0.08, -0.04, -0.01, 0.06, 0.17, -0.01, 0.15, 0.18, 0.05, 0.11, -0.03, 0.09, 0.02, 0.04, 0.10, 0.00, 0.00, 0.01, 0.18, @@ -188,8 +188,8 @@ 0.08, -0.05, -0.08, 0.41, -0.19, -0.22, -0.03, 0.11, -0.26, -0.33, -0.08, 0.03, -0.05, 0.02, 0.17, -0.10, 0.01, 0.01, 0.05, 0.01, 0.15, -0.06, -0.14, 0.38]), - '#3#height': 7000, - '#2#sulphurDioxide': np.array([ + "#3#height": 7000, + "#2#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, @@ -209,8 +209,8 @@ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), - '#4#height': 10000, - '#3#sulphurDioxide': np.array([ + "#4#height": 10000, + "#3#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, @@ -227,8 +227,8 @@ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), - '#5#height': 13000, - '#4#sulphurDioxide': np.array([ + "#5#height": 13000, + "#4#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, @@ -245,8 +245,8 @@ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), - '#6#height': 16000, - '#5#sulphurDioxide': np.array([ + "#6#height": 16000, + "#5#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, @@ -263,8 +263,8 @@ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), - '#7#height': 25000, - '#6#sulphurDioxide': np.array([ + "#7#height": 25000, + "#6#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, @@ -283,23 +283,23 @@ } # the notional filename that would contain the above test message data -FILENAME = 
'W_XX-EUMETSAT-Darmstadt,SOUNDING+SATELLITE,METOPA+IASI_C_EUMC_20200204091455_68977_eps_o_so2_l2.bin' +FILENAME = "W_XX-EUMETSAT-Darmstadt,SOUNDING+SATELLITE,METOPA+IASI_C_EUMC_20200204091455_68977_eps_o_so2_l2.bin" # the information that would be extracted from the above filename according to the pattern in the .yaml FILENAME_INFO = { - 'reception_location': 'EUMETSAT-Darmstadt', - 'platform': 'METOPA', - 'instrument': 'IASI', - 'start_time': '20200204091455', - 'perigee': '68977', - 'species': 'so2', - 'level': 'l2' + "reception_location": "EUMETSAT-Darmstadt", + "platform": "METOPA", + "instrument": "IASI", + "start_time": "20200204091455", + "perigee": "68977", + "species": "so2", + "level": "l2" } # file type info for the above file that is defined in the .yaml FILETYPE_INFO = { - 'file_type': 'iasi_l2_so2_bufr', - 'file_reader': 'IASIL2SO2BUFR' + "file_type": "iasi_l2_so2_bufr", + "file_reader": "IASIL2SO2BUFR" } # number of cross track samples in one IASI scan @@ -314,7 +314,7 @@ def save_test_data(path): for m in [msg]: - buf = ec.codes_bufr_new_from_samples('BUFR4_local_satellite') + buf = ec.codes_bufr_new_from_samples("BUFR4_local_satellite") for key in m: val = m[key] @@ -325,7 +325,7 @@ def save_test_data(path): else: ec.codes_set_array(buf, key, val) - ec.codes_set(buf, 'pack', 1) + ec.codes_set(buf, "pack", 1) ec.codes_write(buf, f) ec.codes_release(buf) @@ -354,36 +354,36 @@ def tearDown(self): except OSError: pass - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene(self): """Test scene creation.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) - scn = Scene(reader='iasi_l2_so2_bufr', filenames=[fname]) + scn = Scene(reader="iasi_l2_so2_bufr", filenames=[fname]) assert scn.start_time is not None assert scn.end_time is not None assert scn.sensor_names - assert 'iasi' in scn.sensor_names + assert "iasi" in scn.sensor_names - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_load_available_datasets(self): """Test that all datasets are available.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) - scn = Scene(reader='iasi_l2_so2_bufr', filenames=[fname]) + scn = Scene(reader="iasi_l2_so2_bufr", filenames=[fname]) scn.load(scn.available_dataset_names()) - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_dataset_values(self): """Test loading data.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) - scn = Scene(reader='iasi_l2_so2_bufr', filenames=[fname]) + scn = Scene(reader="iasi_l2_so2_bufr", filenames=[fname]) for name in scn.available_dataset_names(): @@ -391,13 +391,13 @@ def test_scene_dataset_values(self): loaded_values = scn[name].values - fill_value = scn[name].attrs['fill_value'] + fill_value = scn[name].attrs["fill_value"] # replace nans in data loaded from file with the fill value defined in the .yaml # to make them comparable loaded_values_nan_filled = np.nan_to_num(loaded_values, nan=fill_value) - key = scn[name].attrs['key'] + key = scn[name].attrs["key"] original_values = msg[key] @@ -407,7 +407,7 @@ def test_scene_dataset_values(self): 
self.assertTrue(np.allclose(original_values, loaded_values_nan_filled)) - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_init(self): """Test reader initialization.""" self.assertTrue(True) diff --git a/satpy/tests/reader_tests/test_ici_l1b_nc.py b/satpy/tests/reader_tests/test_ici_l1b_nc.py index d98da24f31..96c1ac2354 100644 --- a/satpy/tests/reader_tests/test_ici_l1b_nc.py +++ b/satpy/tests/reader_tests/test_ici_l1b_nc.py @@ -49,21 +49,21 @@ def reader(fake_file): return IciL1bNCFileHandler( filename=fake_file, filename_info={ - 'sensing_start_time': ( - datetime.fromisoformat('2000-01-01T01:00:00') + "sensing_start_time": ( + datetime.fromisoformat("2000-01-01T01:00:00") ), - 'sensing_end_time': ( - datetime.fromisoformat('2000-01-01T02:00:00') + "sensing_end_time": ( + datetime.fromisoformat("2000-01-01T02:00:00") ), - 'creation_time': ( - datetime.fromisoformat('2000-01-01T03:00:00') + "creation_time": ( + datetime.fromisoformat("2000-01-01T03:00:00") ), }, filetype_info={ - 'longitude': 'data/navigation_data/longitude', - 'latitude': 'data/navigation_data/latitude', - 'solar_azimuth': 'data/navigation_data/ici_solar_azimuth_angle', - 'solar_zenith': 'data/navigation_data/ici_solar_zenith_angle', + "longitude": "data/navigation_data/longitude", + "latitude": "data/navigation_data/latitude", + "solar_azimuth": "data/navigation_data/ici_solar_azimuth_angle", + "solar_zenith": "data/navigation_data/ici_solar_zenith_angle", } ) @@ -71,7 +71,7 @@ def reader(fake_file): @pytest.fixture def fake_file(tmp_path): """Return file path to level1b file.""" - file_path = tmp_path / 'test_file_ici_l1b_nc.nc' + file_path = tmp_path / "test_file_ici_l1b_nc.nc" writer = IciL1bFakeFileWriter(file_path) writer.write() yield file_path @@ -81,13 +81,13 @@ def fake_file(tmp_path): def dataset_info(): """Return dataset info.""" return { - 'name': '1', - 'file_type': 'nc_ici_l1b_rad', - 'file_key': 'data/measurement_data/ici_radiance_183', - 'coordinates': ['lat_pixels_horn_1', 'lon_pixels_horn_1'], - 'n_183': 0, - 'chan_index': 0, - 'calibration': 'brightness_temperature', + "name": "1", + "file_type": "nc_ici_l1b_rad", + "file_key": "data/measurement_data/ici_radiance_183", + "coordinates": ["lat_pixels_horn_1", "lon_pixels_horn_1"], + "n_183": 0, + "chan_index": 0, + "calibration": "brightness_temperature", } @@ -100,10 +100,10 @@ def __init__(self, file_path): def write(self): """Write fake data to file.""" - with Dataset(self.file_path, 'w') as dataset: + with Dataset(self.file_path, "w") as dataset: self._write_attributes(dataset) self._write_quality_group(dataset) - data_group = dataset.createGroup('data') + data_group = dataset.createGroup("data") self._write_measurement_data_group(data_group) self._write_navigation_data_group(data_group) @@ -118,59 +118,59 @@ def _write_attributes(dataset): @staticmethod def _write_quality_group(dataset): """Write the quality group.""" - group = dataset.createGroup('quality') + group = dataset.createGroup("quality") group.overall_quality_flag = 0 duration_of_product = group.createVariable( - 'duration_of_product', "f4" + "duration_of_product", "f4" ) duration_of_product[:] = 1000. 
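[Illustrative note, not part of the patch] The quality group written just above by IciL1bFakeFileWriter carries one group attribute (overall_quality_flag) and one scalar variable (duration_of_product) that the handler tests check later. A hedged sketch, assuming such a fake file exists on disk, of reading that group back with xarray; the function name is hypothetical:

import xarray as xr


def read_fake_quality_group(path):
    # Open only the nested "quality" group of the fake netCDF file and return
    # the flag attribute plus the scalar duration, i.e. the values the
    # _get_quality_attributes() tests expect (0 and 1000.0).
    with xr.open_dataset(path, group="quality") as quality:
        return quality.attrs["overall_quality_flag"], float(quality["duration_of_product"])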
@staticmethod def _write_navigation_data_group(dataset): """Write the navigation data group.""" - group = dataset.createGroup('navigation_data') - group.createDimension('n_scan', N_SCAN) - group.createDimension('n_samples', N_SAMPLES) - group.createDimension('n_subs', N_SUBS) - group.createDimension('n_horns', N_HORNS) - subs = group.createVariable('n_subs', "i4", dimensions=('n_subs',)) + group = dataset.createGroup("navigation_data") + group.createDimension("n_scan", N_SCAN) + group.createDimension("n_samples", N_SAMPLES) + group.createDimension("n_subs", N_SUBS) + group.createDimension("n_horns", N_HORNS) + subs = group.createVariable("n_subs", "i4", dimensions=("n_subs",)) subs[:] = np.arange(N_SUBS) - dimensions = ('n_scan', 'n_subs', 'n_horns') + dimensions = ("n_scan", "n_subs", "n_horns") shape = (N_SCAN, N_SUBS, N_HORNS) longitude = group.createVariable( - 'longitude', + "longitude", np.float32, dimensions=dimensions, ) longitude[:] = np.ones(shape) latitude = group.createVariable( - 'latitude', + "latitude", np.float32, dimensions=dimensions, ) latitude[:] = 2. * np.ones(shape) azimuth = group.createVariable( - 'ici_solar_azimuth_angle', + "ici_solar_azimuth_angle", np.float32, dimensions=dimensions, ) azimuth[:] = 3. * np.ones(shape) zenith = group.createVariable( - 'ici_solar_zenith_angle', + "ici_solar_zenith_angle", np.float32, dimensions=dimensions, ) zenith[:] = 4. * np.ones(shape) - dimensions = ('n_scan', 'n_samples', 'n_horns') + dimensions = ("n_scan", "n_samples", "n_horns") shape = (N_SCAN, N_SAMPLES, N_HORNS) delta_longitude = group.createVariable( - 'delta_longitude', + "delta_longitude", np.float32, dimensions=dimensions, ) delta_longitude[:] = 1000. * np.ones(shape) delta_latitude = group.createVariable( - 'delta_latitude', + "delta_latitude", np.float32, dimensions=dimensions, ) @@ -179,35 +179,35 @@ def _write_navigation_data_group(dataset): @staticmethod def _write_measurement_data_group(dataset): """Write the measurement data group.""" - group = dataset.createGroup('measurement_data') - group.createDimension('n_scan', N_SCAN) - group.createDimension('n_samples', N_SAMPLES) - group.createDimension('n_channels', N_CHANNELS) - group.createDimension('n_183', N_183) - scan = group.createVariable('n_scan', "i4", dimensions=('n_scan',)) + group = dataset.createGroup("measurement_data") + group.createDimension("n_scan", N_SCAN) + group.createDimension("n_samples", N_SAMPLES) + group.createDimension("n_channels", N_CHANNELS) + group.createDimension("n_183", N_183) + scan = group.createVariable("n_scan", "i4", dimensions=("n_scan",)) scan[:] = np.arange(N_SCAN) samples = group.createVariable( - 'n_samples', "i4", dimensions=('n_samples',) + "n_samples", "i4", dimensions=("n_samples",) ) samples[:] = np.arange(N_SAMPLES) bt_a = group.createVariable( - 'bt_conversion_a', np.float32, dimensions=('n_channels',) + "bt_conversion_a", np.float32, dimensions=("n_channels",) ) bt_a[:] = np.ones(N_CHANNELS) bt_b = group.createVariable( - 'bt_conversion_b', np.float32, dimensions=('n_channels',) + "bt_conversion_b", np.float32, dimensions=("n_channels",) ) bt_b[:] = np.zeros(N_CHANNELS) cw = group.createVariable( - 'centre_wavenumber', np.float32, dimensions=('n_channels',) + "centre_wavenumber", np.float32, dimensions=("n_channels",) ) cw[:] = np.array( [6.0] * 3 + [8.0] * 2 + [11.0] * 3 + [15.0] * 3 + [22.0] * 2 ) ici_radiance_183 = group.createVariable( - 'ici_radiance_183', + "ici_radiance_183", np.float32, - dimensions=('n_scan', 'n_samples', 'n_183'), + dimensions=("n_scan", 
"n_samples", "n_183"), ) ici_radiance_183[:] = 0.08 * np.ones((N_SCAN, N_SAMPLES, N_183)) @@ -254,11 +254,11 @@ def test_solar_zenith(self, reader): def test_calibrate_raises_for_unknown_calibration_method(self, reader): """Test perform calibration raises for unknown calibration method.""" variable = xr.DataArray(np.ones(3)) - dataset_info = {'calibration': 'unknown', 'name': 'radiance'} - with pytest.raises(ValueError, match='Unknown calibration'): + dataset_info = {"calibration": "unknown", "name": "radiance"} + with pytest.raises(ValueError, match="Unknown calibration"): reader._calibrate(variable, dataset_info) - @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt') + @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt") def test_calibrate_does_not_call_calibrate_bt_if_not_needed( self, mocked_calibrate, @@ -270,13 +270,13 @@ def test_calibrate_does_not_call_calibrate_bt_if_not_needed( [0.060, 0.065, 0.070, 0.075], [0.080, 0.085, 0.090, 0.095], ]), - dims=('n_scan', 'n_samples'), + dims=("n_scan", "n_samples"), ) - dataset_info = {'calibration': 'radiance'} + dataset_info = {"calibration": "radiance"} reader._calibrate(variable, dataset_info) mocked_calibrate.assert_not_called() - @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt') + @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt") def test_calibrate_calls_calibrate_bt( self, mocked_calibrate_bt, @@ -288,11 +288,11 @@ def test_calibrate_calls_calibrate_bt( [0.060, 0.065, 0.070, 0.075], [0.080, 0.085, 0.090, 0.095], ]), - dims=('n_scan', 'n_samples'), + dims=("n_scan", "n_samples"), ) dataset_info = { - 'calibration': 'brightness_temperature', - 'chan_index': 2, + "calibration": "brightness_temperature", + "chan_index": 2, } reader._calibrate(variable, dataset_info) mocked_calibrate_bt.assert_called_once_with( @@ -320,9 +320,9 @@ def test_calibrate_bt(self, reader): ]) np.testing.assert_allclose(bt, expected_bt) - @pytest.mark.parametrize('dims', ( - ('n_scan', 'n_samples'), - ('x', 'y'), + @pytest.mark.parametrize("dims", ( + ("n_scan", "n_samples"), + ("x", "y"), )) def test_standardize_dims(self, reader, dims): """Test standardize dims.""" @@ -331,11 +331,11 @@ def test_standardize_dims(self, reader, dims): dims=dims, ) standardized = reader._standardize_dims(variable) - assert standardized.dims == ('y', 'x') + assert standardized.dims == ("y", "x") - @pytest.mark.parametrize('dims,data_info,expect', ( - (('y', 'x', 'n_horns'), {"n_horns": 1}, 1), - (('y', 'x', 'n_183'), {"n_183": 2}, 2), + @pytest.mark.parametrize("dims,data_info,expect", ( + (("y", "x", "n_horns"), {"n_horns": 1}, 1), + (("y", "x", "n_183"), {"n_183": 2}, 2), )) def test_filter_variable(self, reader, dims, data_info, expect): """Test filter variable.""" @@ -345,7 +345,7 @@ def test_filter_variable(self, reader, dims, data_info, expect): dims=dims, ) filtered = reader._filter_variable(variable, data_info) - assert filtered.dims == ('y', 'x') + assert filtered.dims == ("y", "x") assert (filtered == data[:, :, expect]).all() def test_drop_coords(self, reader): @@ -353,7 +353,7 @@ def test_drop_coords(self, reader): coords = "dummy" data = xr.DataArray( np.ones(10), - dims=('y'), + dims=("y"), coords={coords: 0}, ) assert coords in data.coords @@ -362,22 +362,22 @@ def test_drop_coords(self, reader): def test_get_third_dimension_name(self, reader): """Test get third dimension name.""" - data = xr.DataArray(np.ones((1, 1, 1)), dims=('x', 'y', 'z')) - assert reader._get_third_dimension_name(data) == 'z' + 
data = xr.DataArray(np.ones((1, 1, 1)), dims=("x", "y", "z")) + assert reader._get_third_dimension_name(data) == "z" def test_get_third_dimension_name_return_none_for_2d_data(self, reader): """Test get third dimension name return none for 2d data.""" - data = xr.DataArray(np.ones((1, 1)), dims=('x', 'y')) + data = xr.DataArray(np.ones((1, 1)), dims=("x", "y")) assert reader._get_third_dimension_name(data) is None def test_get_dataset_return_none_if_data_not_exist(self, reader): """Tes get dataset return none if data does not exist.""" - dataset_id = {'name': 'unknown'} - dataset_info = {'file_key': 'non/existing/data'} + dataset_id = {"name": "unknown"} + dataset_info = {"file_key": "non/existing/data"} dataset = reader.get_dataset(dataset_id, dataset_info) assert dataset is None - @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt') + @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt") def test_get_dataset_does_not_calibrate_if_not_desired( self, mocked_calibrate, @@ -385,10 +385,10 @@ def test_get_dataset_does_not_calibrate_if_not_desired( dataset_info, ): """Test get dataset does not calibrate if not desired.""" - dataset_id = {'name': '1'} - dataset_info.pop('calibration') + dataset_id = {"name": "1"} + dataset_info.pop("calibration") dataset = reader.get_dataset(dataset_id, dataset_info) - assert dataset.dims == ('y', 'x') + assert dataset.dims == ("y", "x") mocked_calibrate.assert_not_called() assert isinstance(dataset, xr.DataArray) @@ -397,15 +397,15 @@ def test_get_dataset_orthorectifies_if_orthorect_data_defined( reader, ): """Test get dataset orthorectifies if orthorect data is defined.""" - dataset_id = {'name': 'lon_pixels_horn_1'} + dataset_id = {"name": "lon_pixels_horn_1"} dataset_info = { - 'name': 'lon_pixels_horn_1', - 'file_type': 'nc_ici_l1b_rad', - 'file_key': 'longitude', - 'orthorect_data': 'data/navigation_data/delta_longitude', - 'standard_name': 'longitude', - 'n_horns': 0, - 'modifiers': (), + "name": "lon_pixels_horn_1", + "file_type": "nc_ici_l1b_rad", + "file_key": "longitude", + "orthorect_data": "data/navigation_data/delta_longitude", + "standard_name": "longitude", + "n_horns": 0, + "modifiers": (), } dataset = reader.get_dataset(dataset_id, dataset_info) np.testing.assert_allclose(dataset, 1.009139, atol=1e-6) @@ -416,7 +416,7 @@ def test_get_dataset_handles_calibration( dataset_info, ): """Test get dataset handles calibration.""" - dataset_id = {'name': '1'} + dataset_id = {"name": "1"} dataset = reader.get_dataset(dataset_id, dataset_info) assert dataset.attrs["calibration"] == "brightness_temperature" np.testing.assert_allclose(dataset, 272.73734) @@ -428,13 +428,13 @@ def test_interpolate_returns_none_if_dataset_not_exist(self, reader): ) assert azimuth is None and zenith is None - @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._interpolate_geo') + @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._interpolate_geo") def test_interpolate_calls_interpolate_geo(self, mock, reader): """Test interpolate calls interpolate_geo.""" reader._interpolate(InterpolationType.LONLAT) mock.assert_called_once() - @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._interpolate_viewing_angle') # noqa: E501 + @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._interpolate_viewing_angle") # noqa: E501 def test_interpolate_calls_interpolate_viewing_angles(self, mock, reader): """Test interpolate calls interpolate viewing_angles.""" reader._interpolate(InterpolationType.SOLAR_ANGLES) @@ -443,13 +443,13 @@ def 
test_interpolate_calls_interpolate_viewing_angles(self, mock, reader): def test_interpolate_geo(self, reader): """Test interpolate geographic coordinates.""" shape = (N_SCAN, N_SUBS, N_HORNS) - dims = ('n_scan', 'n_subs', 'n_horns') + dims = ("n_scan", "n_subs", "n_horns") longitude = xr.DataArray( 2. * np.ones(shape), dims=dims, coords={ - 'n_horns': np.arange(N_HORNS), - 'n_subs': np.arange(N_SUBS), + "n_horns": np.arange(N_HORNS), + "n_subs": np.arange(N_SUBS), }, ) latitude = xr.DataArray(np.ones(shape), dims=dims) @@ -467,13 +467,13 @@ def test_interpolate_geo(self, reader): def test_interpolate_viewing_angle(self, reader): """Test interpolate viewing angle.""" shape = (N_SCAN, N_SUBS, N_HORNS) - dims = ('n_scan', 'n_subs', 'n_horns') + dims = ("n_scan", "n_subs", "n_horns") azimuth = xr.DataArray( np.ones(shape), dims=dims, coords={ - 'n_horns': np.arange(N_HORNS), - 'n_subs': np.arange(N_SUBS), + "n_horns": np.arange(N_HORNS), + "n_subs": np.arange(N_SUBS), }, ) zenith = xr.DataArray(100. * np.ones(shape), dims=dims) @@ -492,11 +492,11 @@ def test_orthorectify(self, reader): """Test orthorectify.""" variable = xr.DataArray( np.ones((N_SCAN, N_SAMPLES, N_HORNS)), - dims=('y', 'x', 'n_horns'), - coords={'n_horns': np.arange(N_HORNS)} + dims=("y", "x", "n_horns"), + coords={"n_horns": np.arange(N_HORNS)} ) - variable = variable.sel({'n_horns': 0}) - orthorect_data_name = 'data/navigation_data/delta_longitude' + variable = variable.sel({"n_horns": 0}) + orthorect_data_name = "data/navigation_data/delta_longitude" orthorectified = reader._orthorectify( variable, orthorect_data_name, @@ -507,18 +507,18 @@ def test_get_global_attributes(self, reader): """Test get global attributes.""" attributes = reader._get_global_attributes() assert attributes == { - 'filename': reader.filename, - 'start_time': datetime(2000, 1, 2, 3, 4, 5), - 'end_time': datetime(2000, 1, 2, 4, 5, 6), - 'spacecraft_name': 'SGB', - 'ssp_lon': None, - 'sensor': 'ICI', - 'filename_start_time': datetime(2000, 1, 1, 1, 0), - 'filename_end_time': datetime(2000, 1, 1, 2, 0), - 'platform_name': 'SGB', - 'quality_group': { - 'duration_of_product': np.array(1000., dtype=np.float32), - 'overall_quality_flag': 0, + "filename": reader.filename, + "start_time": datetime(2000, 1, 2, 3, 4, 5), + "end_time": datetime(2000, 1, 2, 4, 5, 6), + "spacecraft_name": "SGB", + "ssp_lon": None, + "sensor": "ICI", + "filename_start_time": datetime(2000, 1, 1, 1, 0), + "filename_end_time": datetime(2000, 1, 1, 2, 0), + "platform_name": "SGB", + "quality_group": { + "duration_of_product": np.array(1000., dtype=np.float32), + "overall_quality_flag": 0, } } @@ -526,12 +526,12 @@ def test_get_quality_attributes(self, reader): """Test get quality attributes.""" attributes = reader._get_quality_attributes() assert attributes == { - 'duration_of_product': np.array(1000., dtype=np.float32), - 'overall_quality_flag': 0, + "duration_of_product": np.array(1000., dtype=np.float32), + "overall_quality_flag": 0, } @patch( - 'satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._get_global_attributes', + "satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._get_global_attributes", return_value={"mocked_global_attributes": True}, ) def test_manage_attributes(self, mock, reader): @@ -540,11 +540,11 @@ def test_manage_attributes(self, mock, reader): np.ones(N_SCAN), attrs={"season": "summer"}, ) - dataset_info = {'name': 'ici_1', 'units': 'K'} + dataset_info = {"name": "ici_1", "units": "K"} variable = reader._manage_attributes(variable, dataset_info) assert variable.attrs == { - 
'season': 'summer', - 'units': 'K', - 'name': 'ici_1', - 'mocked_global_attributes': True, + "season": "summer", + "units": "K", + "name": "ici_1", + "mocked_global_attributes": True, } diff --git a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py index 1c22ef515d..26c2a9a7f8 100644 --- a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py +++ b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py @@ -228,7 +228,7 @@ def test_filehandler_returns_masked_data_in_space(insat_filehandler): fh = insat_filehandler ds_info = None - ds_id = make_dataid(name="VIS", resolution=1000, calibration='reflectance') + ds_id = make_dataid(name="VIS", resolution=1000, calibration="reflectance") darr = fh.get_dataset(ds_id, ds_info) assert np.isnan(darr[0, 0]) @@ -238,7 +238,7 @@ def test_insat3d_has_orbital_parameters(insat_filehandler): fh = insat_filehandler ds_info = None - ds_id = make_dataid(name="VIS", resolution=1000, calibration='reflectance') + ds_id = make_dataid(name="VIS", resolution=1000, calibration="reflectance") darr = fh.get_dataset(ds_id, ds_info) assert "orbital_parameters" in darr.attrs diff --git a/satpy/tests/reader_tests/test_li_l2_nc.py b/satpy/tests/reader_tests/test_li_l2_nc.py index 40042aa1de..62eff6d18f 100644 --- a/satpy/tests/reader_tests/test_li_l2_nc.py +++ b/satpy/tests/reader_tests/test_li_l2_nc.py @@ -43,7 +43,7 @@ def std_filetype_infos(): cfg = load_yaml_configs(cpaths[0]) # get the li_l2 filetype: - ftypes = cfg['file_types'] + ftypes = cfg["file_types"] yield ftypes @@ -69,31 +69,31 @@ def _test_dataset_single_variable(self, vname, desc, settings, handler): """Check the validity of a given variable.""" dname = vname - dims = settings.get('dimensions', {}) + dims = settings.get("dimensions", {}) - var_path = settings.get('variable_path', '') + var_path = settings.get("variable_path", "") # Compute shape from dimensions: - if desc['shape'] == (): + if desc["shape"] == (): # scalar case, dim should have been added in the code by validate_array_dimensions shape = (1,) else: - shape = tuple([dims[dim_name] for dim_name in desc['shape']]) + shape = tuple([dims[dim_name] for dim_name in desc["shape"]]) dataset_info = { - 'name': dname, - 'variable_name': vname, - 'use_rescaling': False, + "name": dname, + "variable_name": vname, + "use_rescaling": False, } var_params = [dataset_info, desc, dname, handler, shape, var_path] self._test_dataset_variable(var_params) def _test_dataset_variables(self, settings, ds_desc, handler): """Check the loading of the non in sector variables.""" - assert 'variables' in ds_desc - all_vars = ds_desc['variables'] + assert "variables" in ds_desc + all_vars = ds_desc["variables"] - variables = settings.get('variables') + variables = settings.get("variables") for vname, desc in variables.items(): # variable should be in list of dataset: assert vname in all_vars @@ -105,17 +105,17 @@ def _test_dataset_single_sector_variable(self, names, desc, settings, handler): dname = f"{vname}_{sname}_sector" - dims = settings.get('dimensions', {}) + dims = settings.get("dimensions", {}) - var_path = settings.get('variable_path', '') + var_path = settings.get("variable_path", "") - shape = tuple([dims[dim_name] for dim_name in desc['shape']]) + shape = tuple([dims[dim_name] for dim_name in desc["shape"]]) dataset_info = { - 'name': dname, - 'variable_name': vname, - 'sector_name': sname, - 'use_rescaling': False, + "name": dname, + "variable_name": vname, + "sector_name": sname, + "use_rescaling": False, } 
var_params = [dataset_info, desc, vname, handler, shape, var_path] self._test_dataset_variable(var_params, sname=sname) @@ -125,7 +125,7 @@ def _test_dataset_variable(self, var_params, sname=""): dataset_info, desc, dname, handler, shape, var_path = var_params res = self.get_variable_dataset(dataset_info, dname, handler) assert res.shape == shape - assert res.dims[0] == 'y' + assert res.dims[0] == "y" # Should retrieve content with fullname key: full_name = self.create_fullname_key(desc, var_path, dname, sname=sname) # Note: 'content' is not recognized as a valid member of the class below @@ -140,23 +140,23 @@ def get_variable_dataset(self, dataset_info, dname, handler): res = handler.get_dataset(dataset_id, dataset_info) return res - def create_fullname_key(self, desc, var_path, vname, sname=''): + def create_fullname_key(self, desc, var_path, vname, sname=""): """Create full name key for sector/non-sector content retrieval.""" - vpath = desc.get('path', var_path) - if vpath != "" and vpath[-1] != '/': - vpath += '/' + vpath = desc.get("path", var_path) + if vpath != "" and vpath[-1] != "/": + vpath += "/" if sname != "": - sname += '/' + sname += "/" full_name = f"{vpath}{sname}{vname}" return full_name def _test_dataset_sector_variables(self, settings, ds_desc, handler): """Check the loading of the in sector variables.""" - sector_vars = settings.get('sector_variables') - sectors = settings.get('sectors', ['north', 'east', 'south', 'west']) + sector_vars = settings.get("sector_variables") + sectors = settings.get("sectors", ["north", "east", "south", "west"]) - assert 'sector_variables' in ds_desc - all_vars = ds_desc['sector_variables'] + assert "sector_variables" in ds_desc + all_vars = ds_desc["sector_variables"] for sname in sectors: for vname, desc in sector_vars.items(): @@ -168,33 +168,33 @@ def test_dataset_loading(self, filetype_infos): """Test loading of all datasets from all products.""" # Iterate on all the available product types: for ptype, pinfo in products_dict.items(): - ftype = pinfo['ftype'] + ftype = pinfo["ftype"] filename_info = { - 'start_time': "0000", - 'end_time': "1000" + "start_time": "0000", + "end_time": "1000" } - handler = LIL2NCFileHandler('filename', filename_info, extract_filetype_info(filetype_infos, ftype)) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, ftype)) ds_desc = handler.ds_desc # retrieve the schema that what used to generate the content for that product: settings = get_product_schema(ptype) # Now we check all the variables are available: - if 'variables' in settings: + if "variables" in settings: self._test_dataset_variables(settings, ds_desc, handler) # check the sector variables: - if 'sector_variables' in settings: + if "sector_variables" in settings: self._test_dataset_sector_variables(settings, ds_desc, handler) def test_unregistered_dataset_loading(self, filetype_infos): """Test loading of an unregistered dataset.""" # Iterate on all the available product types: - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc")) - dataset_id = make_dataid(name='test_dataset') + dataset_id = make_dataid(name="test_dataset") with pytest.raises(KeyError): handler.get_dataset(dataset_id) @@ -202,22 +202,22 @@ def test_dataset_not_in_provided_dataset(self, filetype_infos): """Test loading of a dataset that is not provided.""" # Iterate on all the available 
product types: - dataset_dict = {'name': 'test_dataset'} + dataset_dict = {"name": "test_dataset"} - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc")) - dataset_id = make_dataid(name='test_dataset') + dataset_id = make_dataid(name="test_dataset") assert handler.get_dataset(dataset_id, ds_info=dataset_dict) is None def test_filename_infos(self, filetype_infos): """Test settings retrieved from filename.""" filename_info = { - 'start_time': "20101112131415", - 'end_time': "20101112131416" + "start_time": "20101112131415", + "end_time": "20101112131416" } - handler = LIL2NCFileHandler('filename', filename_info, extract_filetype_info(filetype_infos, 'li_l2_af_nc')) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_af_nc")) # Start and end time should come from filename info: assert handler.start_time == "20101112131415" @@ -236,19 +236,19 @@ def test_filename_infos(self, filetype_infos): assert len(handler.provided_datasets) > 0 # Sensor names should be just 'li' - assert handler.sensor_names == {'li'} + assert handler.sensor_names == {"li"} # check product type: - assert handler.product_type == '2-AF' + assert handler.product_type == "2-AF" def test_var_path_exists(self, filetype_infos): """Test variable_path_exists from li reader.""" filename_info = { - 'start_time': "20101112131415", - 'end_time': "20101112131416", + "start_time": "20101112131415", + "end_time": "20101112131416", } - handler = LIL2NCFileHandler('filename', filename_info, extract_filetype_info(filetype_infos, 'li_l2_lef_nc')) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_lef_nc")) # Check variable paths: assert handler.variable_path_exists("dummy") is False @@ -265,11 +265,11 @@ def test_var_path_exists(self, filetype_infos): def test_get_first_valid_variable(self, filetype_infos): """Test get_first_valid_variable from li reader.""" filename_info = { - 'start_time': "20101112131415", - 'end_time': "20101112131416", + "start_time": "20101112131415", + "end_time": "20101112131416", } - handler = LIL2NCFileHandler('filename', filename_info, extract_filetype_info(filetype_infos, 'li_l2_lef_nc')) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_lef_nc")) # Check variable paths: var1 = handler.get_first_valid_variable(["dummy/path", "data/north/event_id"]) @@ -311,30 +311,30 @@ def test_get_first_valid_variable(self, filetype_infos): assert id(meas2) == id(var3) # We should have a fill value on those variables: - assert var1.attrs.get('_FillValue') == 65535 - assert var2.attrs.get('_FillValue') == 65535 + assert var1.attrs.get("_FillValue") == 65535 + assert var2.attrs.get("_FillValue") == 65535 def test_get_first_valid_variable_not_found(self, filetype_infos): """Test get_first_valid_variable from li reader if the variable is not found.""" filename_info = { - 'start_time': "20101112131415", - 'end_time': "20101112131416", + "start_time": "20101112131415", + "end_time": "20101112131416", } - handler = LIL2NCFileHandler('filename', filename_info, extract_filetype_info(filetype_infos, 'li_l2_lef_nc')) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_lef_nc")) with pytest.raises(KeyError): handler.get_first_valid_variable(["dummy/path", "data/test/test_var"]) def 
test_available_datasets(self, filetype_infos): """Test available_datasets from li reader.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_lef_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_lef_nc")) # get current ds_infos. These should all be returned by the available_datasets ds_infos_to_compare = handler.dataset_infos.copy() # now add a dummy configured dataset to make sure that it is included in the available_datasets output - ds_info_dummy = {'test': 'test'} + ds_info_dummy = {"test": "test"} conf_ds_dummy = [(True, ds_info_dummy)] ds_infos_to_compare.insert(0, ds_info_dummy) @@ -343,11 +343,11 @@ def test_available_datasets(self, filetype_infos): def test_variable_scaling(self, filetype_infos): """Test automatic rescaling with offset and scale attributes.""" filename_info = { - 'start_time': "20101112131415", - 'end_time': "20101112131416" + "start_time": "20101112131415", + "end_time": "20101112131416" } - handler = LIL2NCFileHandler('filename', filename_info, extract_filetype_info(filetype_infos, 'li_l2_lfl_nc')) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_lfl_nc")) # Get the raw variable without rescaling: vname = "latitude" @@ -355,9 +355,9 @@ def test_variable_scaling(self, filetype_infos): # Get the dataset without rescaling: dataset_info = { - 'name': vname, - 'variable_name': vname, - 'use_rescaling': False, + "name": vname, + "variable_name": vname, + "use_rescaling": False, } dataset_id = make_dataid(name=vname) @@ -365,7 +365,7 @@ def test_variable_scaling(self, filetype_infos): assert np.all(lat_noscale.values == rawlat) # Now get the dataset with scaling: - dataset_info['use_rescaling'] = True + dataset_info["use_rescaling"] = True lat_scaled = handler.get_dataset(dataset_id, dataset_info) # By default we write data in the ranges [-88.3/0.0027, 88.3/0.0027] for latitude and longitude: @@ -374,12 +374,12 @@ def test_variable_scaling(self, filetype_infos): def test_swath_coordinates(self, filetype_infos): """Test that swath coordinates are used correctly to assign coordinates to some datasets.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_lfl_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_lfl_nc")) # Check latitude: dsid = make_dataid(name="latitude") dset = handler.get_dataset(dsid) - assert 'coordinates' not in dset.attrs + assert "coordinates" not in dset.attrs # get_area_def should raise exception: with pytest.raises(NotImplementedError): @@ -388,21 +388,21 @@ def test_swath_coordinates(self, filetype_infos): # Check radiance: dsid = make_dataid(name="radiance") dset = handler.get_dataset(dsid) - assert 'coordinates' in dset.attrs - assert dset.attrs['coordinates'][0] == "longitude" - assert dset.attrs['coordinates'][1] == "latitude" + assert "coordinates" in dset.attrs + assert dset.attrs["coordinates"][0] == "longitude" + assert dset.attrs["coordinates"][1] == "latitude" with pytest.raises(NotImplementedError): handler.get_area_def(dsid) def test_report_datetimes(self, filetype_infos): """Should report time variables as numpy datetime64 type and time durations as timedelta64.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_le_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_le_nc")) # Check epoch_time: dsid = 
make_dataid(name="epoch_time_north_sector") dset = handler.get_dataset(dsid) - assert dset.values.dtype == np.dtype('datetime64[ns]') + assert dset.values.dtype == np.dtype("datetime64[ns]") # The default epoch_time should be 1.234 seconds after epoch: ref_time = np.datetime64(datetime(2000, 1, 1, 0, 0, 1, 234000)) @@ -411,14 +411,14 @@ def test_report_datetimes(self, filetype_infos): # Check time_offset: dsid = make_dataid(name="time_offset_east_sector") dset = handler.get_dataset(dsid) - assert dset.values.dtype == np.dtype('timedelta64[ns]') + assert dset.values.dtype == np.dtype("timedelta64[ns]") # The default time_offset should be: np.linspace(0.0, 1000.0, nobs) # but then we first multiply by 1e6 to generate us times: # Note that below no automatic transform to np.float64 is happening: nobs = dset.shape[0] ref_data = np.linspace(0.0, 1000.0, nobs).astype(np.float32) - ref_data = (ref_data * 1e9).astype('timedelta64[ns]') + ref_data = (ref_data * 1e9).astype("timedelta64[ns]") # And not absolutely sure why, but we always get the timedelta in ns from the dataset: # ref_data = (ref_data).astype('timedelta64[ns]') @@ -427,33 +427,33 @@ def test_report_datetimes(self, filetype_infos): def test_milliseconds_to_timedelta(self, filetype_infos): """Should covert milliseconds to timedelta.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_lfl_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_lfl_nc")) # Check flash_duration: dsid = make_dataid(name="flash_duration") dset = handler.get_dataset(dsid) - assert dset.values.dtype == np.dtype('timedelta64[ns]') + assert dset.values.dtype == np.dtype("timedelta64[ns]") nobs = dset.shape[0] - ref_data = np.linspace(0, 1000, nobs).astype('u2') - ref_data = (ref_data * 1e6).astype('timedelta64[ns]') + ref_data = np.linspace(0, 1000, nobs).astype("u2") + ref_data = (ref_data * 1e6).astype("timedelta64[ns]") assert np.all(dset.values == ref_data) def test_apply_accumulate_index_offset(self, filetype_infos): """Should accumulate index offsets.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_le_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_le_nc")) # Check time offset: dsid = make_dataid(name="l1b_chunk_offsets_north_sector") dset = handler.get_dataset(dsid) nobs = dset.shape[0] - ref_data = (np.arange(nobs)).astype('u4') + ref_data = (np.arange(nobs)).astype("u4") # check first execution without offset assert np.all(dset.values == ref_data) # check that the offset is being stored - assert handler.current_ds_info['__index_offset'] == 123 + assert handler.current_ds_info["__index_offset"] == 123 # check execution with offset value # this simulates the case where we are loading this variable from multiple files and concatenating it @@ -462,62 +462,62 @@ def test_apply_accumulate_index_offset(self, filetype_infos): def test_combine_info(self, filetype_infos): """Test overridden combine_info.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_le_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_le_nc")) # get a dataset including the index_offset in the ds_info dsid = make_dataid(name="l1b_chunk_offsets_north_sector") - ds_info = {'name': 'l1b_chunk_offsets_north_sector', - 'variable_name': 'l1b_chunk_offsets', - 'sector_name': 'north', - '__index_offset': 1000, - 
'accumulate_index_offset': "{sector_name}/l1b_window"} + ds_info = {"name": "l1b_chunk_offsets_north_sector", + "variable_name": "l1b_chunk_offsets", + "sector_name": "north", + "__index_offset": 1000, + "accumulate_index_offset": "{sector_name}/l1b_window"} dset = handler.get_dataset(dsid, ds_info=ds_info) handler.combine_info([dset.attrs]) # combine_info should have removed the index_offset key from the ds_info passed to get_dataset - assert '__index_offset' not in ds_info + assert "__index_offset" not in ds_info # and reset the current_ds_info dict, in order to avoid failures if we call combine_info again assert handler.current_ds_info is None def test_coordinates_projection(self, filetype_infos): """Should automatically generate lat/lon coords from projection data.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc")) dsid = make_dataid(name="flash_accumulation") dset = handler.get_dataset(dsid) - assert 'coordinates' in dset.attrs + assert "coordinates" in dset.attrs - assert dset.attrs['coordinates'][0] == "longitude" - assert dset.attrs['coordinates'][1] == "latitude" + assert dset.attrs["coordinates"][0] == "longitude" + assert dset.attrs["coordinates"][1] == "latitude" with pytest.raises(NotImplementedError): handler.get_area_def(dsid) - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_afr_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_afr_nc")) dsid = make_dataid(name="flash_radiance") dset = handler.get_dataset(dsid) - assert 'coordinates' in dset.attrs + assert "coordinates" in dset.attrs - assert dset.attrs['coordinates'][0] == "longitude" - assert dset.attrs['coordinates'][1] == "latitude" + assert dset.attrs["coordinates"][0] == "longitude" + assert dset.attrs["coordinates"][1] == "latitude" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_afa_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_afa_nc")) dsid = make_dataid(name="accumulated_flash_area") dset = handler.get_dataset(dsid) - assert 'coordinates' in dset.attrs + assert "coordinates" in dset.attrs - assert dset.attrs['coordinates'][0] == "longitude" - assert dset.attrs['coordinates'][1] == "latitude" + assert dset.attrs["coordinates"][0] == "longitude" + assert dset.attrs["coordinates"][1] == "latitude" def test_generate_coords_on_accumulated_prods(self, filetype_infos): """Test daskified generation of coords.""" - accumulated_products = ['li_l2_af_nc', 'li_l2_afr_nc', 'li_l2_afa_nc'] - coordinate_datasets = ['longitude', 'latitude'] + accumulated_products = ["li_l2_af_nc", "li_l2_afr_nc", "li_l2_afa_nc"] + coordinate_datasets = ["longitude", "latitude"] for accum_prod in accumulated_products: for ds_name in coordinate_datasets: - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, accum_prod)) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, accum_prod)) dsid = make_dataid(name=ds_name) dset = handler.get_dataset(dsid) # Check dataset type @@ -527,12 +527,12 @@ def test_generate_coords_on_accumulated_prods(self, filetype_infos): def test_generate_coords_on_lon_lat(self, filetype_infos): """Test getting lon/lat dataset on accumulated product.""" - accumulated_products = ['li_l2_af_nc', 'li_l2_afr_nc', 'li_l2_afa_nc'] - 
coordinate_datasets = ['longitude', 'latitude'] + accumulated_products = ["li_l2_af_nc", "li_l2_afr_nc", "li_l2_afa_nc"] + coordinate_datasets = ["longitude", "latitude"] for accum_prod in accumulated_products: for ds_name in coordinate_datasets: - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, accum_prod)) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, accum_prod)) dsid = make_dataid(name=ds_name) handler.generate_coords_from_scan_angles = mock.MagicMock( side_effect=handler.generate_coords_from_scan_angles) @@ -541,12 +541,12 @@ def test_generate_coords_on_lon_lat(self, filetype_infos): def test_generate_coords_inverse_proj(self, filetype_infos): """Test inverse_projection execution delayed until .values is called on the dataset.""" - accumulated_products = ['li_l2_af_nc', 'li_l2_afr_nc', 'li_l2_afa_nc'] - coordinate_datasets = ['longitude', 'latitude'] + accumulated_products = ["li_l2_af_nc", "li_l2_afr_nc", "li_l2_afa_nc"] + coordinate_datasets = ["longitude", "latitude"] for accum_prod in accumulated_products: for ds_name in coordinate_datasets: - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, accum_prod)) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, accum_prod)) dsid = make_dataid(name=ds_name) handler.inverse_projection = mock.MagicMock(side_effect=handler.inverse_projection) dset = handler.get_dataset(dsid) @@ -557,17 +557,17 @@ def test_generate_coords_inverse_proj(self, filetype_infos): def test_generate_coords_not_called_on_non_coord_dataset(self, filetype_infos): """Test that the method is not called when getting non-coord dataset.""" - handler = self.generate_coords(filetype_infos, 'li_l2_af_nc', 'flash_accumulation') + handler = self.generate_coords(filetype_infos, "li_l2_af_nc", "flash_accumulation") assert not handler.generate_coords_from_scan_angles.called def test_generate_coords_not_called_on_non_accum_dataset(self, filetype_infos): """Test that the method is not called when getting non-accum dataset.""" - handler = self.generate_coords(filetype_infos, 'li_l2_lef_nc', 'latitude_north_sector') + handler = self.generate_coords(filetype_infos, "li_l2_lef_nc", "latitude_north_sector") assert not handler.generate_coords_from_scan_angles.called def generate_coords(self, filetype_infos, file_type_name, variable_name): """Generate file handler and mimic coordinate generator call.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, file_type_name)) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, file_type_name)) dsid = make_dataid(name=variable_name) handler.generate_coords_from_scan_angles = mock.MagicMock( side_effect=handler.generate_coords_from_scan_angles) @@ -576,10 +576,10 @@ def generate_coords(self, filetype_infos, file_type_name, variable_name): def test_generate_coords_called_once(Self, filetype_infos): """Test that the method is called only once.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc")) # check internal variable is empty assert len(handler.internal_variables) == 0 - coordinate_datasets = ['longitude', 'latitude'] + coordinate_datasets = ["longitude", "latitude"] handler.generate_coords_from_scan_angles = mock.MagicMock(side_effect=handler.generate_coords_from_scan_angles) for ds_name in 
coordinate_datasets: @@ -593,34 +593,34 @@ def test_generate_coords_called_once(Self, filetype_infos): def test_coords_generation(self, filetype_infos): """Compare daskified coords generation results with non-daskified.""" # Prepare dummy (but somewhat realistic) arrays of azimuth/elevation values. - products = ['li_l2_af_nc', - 'li_l2_afr_nc', - 'li_l2_afa_nc'] + products = ["li_l2_af_nc", + "li_l2_afr_nc", + "li_l2_afa_nc"] for prod in products: - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, prod)) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, prod)) # Get azimuth/elevation arrays from handler - azimuth = handler.get_measured_variable(handler.swath_coordinates['azimuth']) + azimuth = handler.get_measured_variable(handler.swath_coordinates["azimuth"]) azimuth = handler.apply_use_rescaling(azimuth) - elevation = handler.get_measured_variable(handler.swath_coordinates['elevation']) + elevation = handler.get_measured_variable(handler.swath_coordinates["elevation"]) elevation = handler.apply_use_rescaling(elevation) # Initialize proj_dict - proj_var = handler.swath_coordinates['projection'] + proj_var = handler.swath_coordinates["projection"] geos_proj = handler.get_measured_variable(proj_var, fill_value=None) major_axis = float(geos_proj.attrs["semi_major_axis"]) point_height = 35786400.0 # float(geos_proj.attrs["perspective_point_height"]) inv_flattening = float(geos_proj.attrs["inverse_flattening"]) lon_0 = float(geos_proj.attrs["longitude_of_projection_origin"]) sweep = str(geos_proj.attrs["sweep_angle_axis"]) - proj_dict = {'a': major_axis, - 'lon_0': lon_0, - 'h': point_height, + proj_dict = {"a": major_axis, + "lon_0": lon_0, + "h": point_height, "rf": inv_flattening, - 'proj': 'geos', - 'units': 'm', + "proj": "geos", + "units": "m", "sweep": sweep} # Compute reference values @@ -633,8 +633,8 @@ def test_coords_generation(self, filetype_infos): lat_ref = lat_ref.astype(np.float32) handler.generate_coords_from_scan_angles() - lon = handler.internal_variables['longitude'].values - lat = handler.internal_variables['latitude'].values + lon = handler.internal_variables["longitude"].values + lat = handler.internal_variables["latitude"].values # Compare the arrays, should be the same: np.testing.assert_equal(lon, lon_ref) @@ -642,7 +642,7 @@ def test_coords_generation(self, filetype_infos): def test_get_area_def_acc_products(self, filetype_infos): """Test retrieval of area def for accumulated products.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc'), + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), with_area_definition=True) dsid = make_dataid(name="flash_accumulation") @@ -656,7 +656,7 @@ def test_get_area_def_acc_products(self, filetype_infos): def test_get_area_def_non_acc_products(self, filetype_infos): """Test retrieval of area def for non-accumulated products.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_lgr_nc'), + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_lgr_nc"), with_area_definition=True) # Should throw for non-accum products: with pytest.raises(NotImplementedError): @@ -672,9 +672,9 @@ def write_flash_accum(_vname, _ocname, _settings): # We return the settings we want to use here to generate our custom/fixed product content: return { - 'num_obs': 1234, - 'providers': { - 'flash_accumulation': 
write_flash_accum, + "num_obs": 1234, + "providers": { + "flash_accumulation": write_flash_accum, } } @@ -682,7 +682,7 @@ def test_without_area_def(self, filetype_infos): """Test accumulated products data array without area definition.""" # without area definition handler_without_area_def = LIL2NCFileHandler( - 'filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc'), with_area_definition=False) + "filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), with_area_definition=False) dsid = make_dataid(name="flash_accumulation") @@ -692,7 +692,7 @@ def test_without_area_def(self, filetype_infos): def test_with_area_def(self, filetype_infos): """Test accumulated products data array with area definition.""" - handler = self.handler_with_area(filetype_infos, 'li_l2_af_nc') + handler = self.handler_with_area(filetype_infos, "li_l2_af_nc") dsid = make_dataid(name="flash_accumulation") # Retrieve the 2D array: arr = handler.get_dataset(dsid).values @@ -700,7 +700,7 @@ def test_with_area_def(self, filetype_infos): def test_get_on_fci_grid_exc(self, filetype_infos): """Test the execution of the get_on_fci_grid function for an accumulated gridded variable.""" - handler = self.handler_with_area(filetype_infos, 'li_l2_af_nc') + handler = self.handler_with_area(filetype_infos, "li_l2_af_nc") handler.get_array_on_fci_grid = mock.MagicMock(side_effect=handler.get_array_on_fci_grid) dsid = make_dataid(name="flash_accumulation") handler.get_dataset(dsid) @@ -708,7 +708,7 @@ def test_get_on_fci_grid_exc(self, filetype_infos): def test_get_on_fci_grid_exc_non_grid(self, filetype_infos): """Test the non-execution of the get_on_fci_grid function for an accumulated non-gridded variable.""" - handler = self.handler_with_area(filetype_infos, 'li_l2_af_nc') + handler = self.handler_with_area(filetype_infos, "li_l2_af_nc") handler.get_array_on_fci_grid = mock.MagicMock(side_effect=handler.get_array_on_fci_grid) dsid = make_dataid(name="accumulation_offsets") handler.get_dataset(dsid) @@ -716,7 +716,7 @@ def test_get_on_fci_grid_exc_non_grid(self, filetype_infos): def test_get_on_fci_grid_exc_non_accum(self, filetype_infos): """Test the non-execution of the get_on_fci_grid function for a non-accumulated variable.""" - handler = self.handler_with_area(filetype_infos, 'li_l2_lef_nc') + handler = self.handler_with_area(filetype_infos, "li_l2_lef_nc") handler.get_array_on_fci_grid = mock.MagicMock(side_effect=handler.get_array_on_fci_grid) dsid = make_dataid(name="radiance_north_sector") handler.get_dataset(dsid) @@ -724,7 +724,7 @@ def test_get_on_fci_grid_exc_non_accum(self, filetype_infos): def test_with_area_def_vars_with_no_pattern(self, filetype_infos): """Test accumulated products variable with no patterns and with area definition.""" - handler = self.handler_with_area(filetype_infos, 'li_l2_af_nc') + handler = self.handler_with_area(filetype_infos, "li_l2_af_nc") # variable with no patterns dsid = make_dataid(name="accumulation_offsets") assert handler.get_dataset(dsid).shape == (1,) @@ -734,7 +734,7 @@ def handler_with_area(self, filetype_infos, product_name): # Note: we need a test param provider here to ensure we write the same values for both handlers below: FakeLIFileHandlerBase.schema_parameters = TestLIL2.param_provider # with area definition - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, product_name), + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, product_name), with_area_definition=True) return handler @@ 
-743,7 +743,7 @@ def test_with_area_def_pixel_placement(self, filetype_infos): # with area definition FakeLIFileHandlerBase.schema_parameters = TestLIL2.param_provider - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc'), + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), with_area_definition=True) dsid = make_dataid(name="flash_accumulation") @@ -751,11 +751,11 @@ def test_with_area_def_pixel_placement(self, filetype_infos): arr = handler.get_dataset(dsid).values # Retrieve the x/y coordinates: - xarr = handler.get_measured_variable('x').values.astype(int) - yarr = handler.get_measured_variable('y').values.astype(int) + xarr = handler.get_measured_variable("x").values.astype(int) + yarr = handler.get_measured_variable("y").values.astype(int) handler_without_area_def = LIL2NCFileHandler( - 'filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc'), with_area_definition=False) + "filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), with_area_definition=False) FakeLIFileHandlerBase.schema_parameters = None diff --git a/satpy/tests/reader_tests/test_meris_nc.py b/satpy/tests/reader_tests/test_meris_nc.py index 926eccc672..0ab28b1fef 100644 --- a/satpy/tests/reader_tests/test_meris_nc.py +++ b/satpy/tests/reader_tests/test_meris_nc.py @@ -21,49 +21,49 @@ class TestMERISReader(unittest.TestCase): """Test various meris_nc_sen3 filehandlers.""" - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_instantiate(self, mocked_dataset): """Test initialization of file handlers.""" from satpy.readers.meris_nc_sen3 import NCMERIS2, NCMERISCal, NCMERISGeo from satpy.tests.utils import make_dataid - ds_id = make_dataid(name='M01', calibration='reflectance') - ds_id2 = make_dataid(name='wsqf', calibration='reflectance') - filename_info = {'mission_id': 'ENV', 'dataset_name': 'M01', 'start_time': 0, 'end_time': 0} + ds_id = make_dataid(name="M01", calibration="reflectance") + ds_id2 = make_dataid(name="wsqf", calibration="reflectance") + filename_info = {"mission_id": "ENV", "dataset_name": "M01", "start_time": 0, "end_time": 0} - test = NCMERISCal('somedir/somefile.nc', filename_info, 'c') + test = NCMERISCal("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCMERISGeo('somedir/somefile.nc', filename_info, 'c') + test = NCMERISGeo("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCMERIS2('somedir/somefile.nc', filename_info, 'c') - test.get_dataset(ds_id, {'nc_key': 'the_key'}) - test.get_dataset(ds_id2, {'nc_key': 'the_key'}) + test = NCMERIS2("somedir/somefile.nc", filename_info, "c") + test.get_dataset(ds_id, {"nc_key": "the_key"}) + test.get_dataset(ds_id2, {"nc_key": "the_key"}) mocked_dataset.assert_called() mocked_dataset.reset_mock() - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_open_file_objects(self, mocked_open_dataset): """Test initialization of file handlers.""" from satpy.readers.olci_nc import NCOLCIBase - filename_info = {'mission_id': 'ENV', 'dataset_name': 'M01', 'start_time': 0, 'end_time': 0} + filename_info = {"mission_id": "ENV", "dataset_name": "M01", "start_time": 0, "end_time": 0} open_file = mock.MagicMock() - file_handler = NCOLCIBase(open_file, filename_info, 'c') + file_handler = 
NCOLCIBase(open_file, filename_info, "c") # deepcode ignore W0104: This is a property that is actually a function call. file_handler.nc # pylint: disable=W0104 mocked_open_dataset.assert_called() open_file.open.assert_called() assert (open_file.open.return_value in mocked_open_dataset.call_args[0] or - open_file.open.return_value == mocked_open_dataset.call_args[1].get('filename_or_obj')) + open_file.open.return_value == mocked_open_dataset.call_args[1].get("filename_or_obj")) - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_get_dataset(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -71,17 +71,17 @@ def test_get_dataset(self, mocked_dataset): from satpy.readers.meris_nc_sen3 import NCMERIS2 from satpy.tests.utils import make_dataid - mocked_dataset.return_value = xr.Dataset({'mask': (['rows', 'columns'], + mocked_dataset.return_value = xr.Dataset({"mask": (["rows", "columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))}, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}) - ds_id = make_dataid(name='mask') - filename_info = {'mission_id': 'ENV', 'dataset_name': 'mask', 'start_time': 0, 'end_time': 0} - test = NCMERIS2('somedir/somefile.nc', filename_info, 'c') - res = test.get_dataset(ds_id, {'nc_key': 'mask'}) - self.assertEqual(res.dtype, np.dtype('bool')) - - @mock.patch('xarray.open_dataset') + coords={"rows": np.arange(5), + "columns": np.arange(6)}) + ds_id = make_dataid(name="mask") + filename_info = {"mission_id": "ENV", "dataset_name": "mask", "start_time": 0, "end_time": 0} + test = NCMERIS2("somedir/somefile.nc", filename_info, "c") + res = test.get_dataset(ds_id, {"nc_key": "mask"}) + self.assertEqual(res.dtype, np.dtype("bool")) + + @mock.patch("xarray.open_dataset") def test_meris_angles(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -90,31 +90,31 @@ def test_meris_angles(self, mocked_dataset): from satpy.readers.meris_nc_sen3 import NCMERISAngles from satpy.tests.utils import make_dataid attr_dict = { - 'ac_subsampling_factor': 1, - 'al_subsampling_factor': 2, + "ac_subsampling_factor": 1, + "al_subsampling_factor": 2, } - mocked_dataset.return_value = xr.Dataset({'SAA': (['tie_rows', 'tie_columns'], + mocked_dataset.return_value = xr.Dataset({"SAA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'SZA': (['tie_rows', 'tie_columns'], + "SZA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'OAA': (['tie_rows', 'tie_columns'], + "OAA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'OZA': (['tie_rows', 'tie_columns'], + "OZA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))}, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}, + coords={"rows": np.arange(5), + "columns": np.arange(6)}, attrs=attr_dict) - filename_info = {'mission_id': 'ENV', 'dataset_name': 'M01', 'start_time': 0, 'end_time': 0} + filename_info = {"mission_id": "ENV", "dataset_name": "M01", "start_time": 0, "end_time": 0} - ds_id = make_dataid(name='solar_azimuth_angle') - ds_id2 = make_dataid(name='satellite_zenith_angle') - test = NCMERISAngles('somedir/somefile.nc', filename_info, 'c') + ds_id = make_dataid(name="solar_azimuth_angle") + ds_id2 = make_dataid(name="satellite_zenith_angle") + test = NCMERISAngles("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) 
mocked_dataset.assert_called() mocked_dataset.reset_mock() - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_meris_meteo(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -123,26 +123,26 @@ def test_meris_meteo(self, mocked_dataset): from satpy.readers.meris_nc_sen3 import NCMERISMeteo from satpy.tests.utils import make_dataid attr_dict = { - 'ac_subsampling_factor': 1, - 'al_subsampling_factor': 2, + "ac_subsampling_factor": 1, + "al_subsampling_factor": 2, } - data = {'humidity': (['tie_rows', 'tie_columns'], + data = {"humidity": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'total_ozone': (['tie_rows', 'tie_columns'], + "total_ozone": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'sea_level_pressure': (['tie_rows', 'tie_columns'], + "sea_level_pressure": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'total_columnar_water_vapour': (['tie_rows', 'tie_columns'], + "total_columnar_water_vapour": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))} mocked_dataset.return_value = xr.Dataset(data, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}, + coords={"rows": np.arange(5), + "columns": np.arange(6)}, attrs=attr_dict) - filename_info = {'mission_id': 'ENV', 'dataset_name': 'humidity', 'start_time': 0, 'end_time': 0} + filename_info = {"mission_id": "ENV", "dataset_name": "humidity", "start_time": 0, "end_time": 0} - ds_id = make_dataid(name='humidity') - ds_id2 = make_dataid(name='total_ozone') - test = NCMERISMeteo('somedir/somefile.nc', filename_info, 'c') + ds_id = make_dataid(name="humidity") + ds_id2 = make_dataid(name="total_ozone") + test = NCMERISMeteo("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) mocked_dataset.assert_called() @@ -160,21 +160,21 @@ def test_bitflags(self): from satpy.readers.olci_nc import BitFlags - flag_list = ['SEA_ICE', 'MEGLINT', 'HIGHGLINT', 'CASE2_S', 'CASE2_ANOM', - 'HAZE_OVER_WATER', 'WHITECAPS', 'AC_FAIL', 'BPAC_ON', 'WHITE_SCATT', - 'LOWRW', 'HIGHRW', 'OUT_OF_RANGE_AAC', 'OUT_OF_SCOPE_AAC', - 'OUT_OF_RANGE_OC_NN', 'OUT_OF_SCOPE_OC_NN', - 'OUT_OF_RANGE_CHL_OC4ME_INPUT', 'OUT_OF_RANGE_CHL_OC4ME'] + flag_list = ["SEA_ICE", "MEGLINT", "HIGHGLINT", "CASE2_S", "CASE2_ANOM", + "HAZE_OVER_WATER", "WHITECAPS", "AC_FAIL", "BPAC_ON", "WHITE_SCATT", + "LOWRW", "HIGHRW", "OUT_OF_RANGE_AAC", "OUT_OF_SCOPE_AAC", + "OUT_OF_RANGE_OC_NN", "OUT_OF_SCOPE_OC_NN", + "OUT_OF_RANGE_CHL_OC4ME_INPUT", "OUT_OF_RANGE_CHL_OC4ME"] bits = np.array([1 << x for x in range(len(flag_list))]) bflags = BitFlags(bits, flag_list=flag_list) - items = ['SEA_ICE', 'MEGLINT', 'HIGHGLINT', - 'HAZE_OVER_WATER', 'WHITECAPS', 'AC_FAIL', 'WHITE_SCATT', - 'LOWRW', 'HIGHRW', 'OUT_OF_RANGE_AAC', 'OUT_OF_SCOPE_AAC', - 'OUT_OF_RANGE_OC_NN', 'OUT_OF_SCOPE_OC_NN', - 'OUT_OF_RANGE_CHL_OC4ME_INPUT', 'OUT_OF_RANGE_CHL_OC4ME'] + items = ["SEA_ICE", "MEGLINT", "HIGHGLINT", + "HAZE_OVER_WATER", "WHITECAPS", "AC_FAIL", "WHITE_SCATT", + "LOWRW", "HIGHRW", "OUT_OF_RANGE_AAC", "OUT_OF_SCOPE_AAC", + "OUT_OF_RANGE_OC_NN", "OUT_OF_SCOPE_OC_NN", + "OUT_OF_RANGE_CHL_OC4ME_INPUT", "OUT_OF_RANGE_CHL_OC4ME"] mask = reduce(np.logical_or, [bflags[item] for item in items]) expected = np.array([True, True, True, False, False, True, True, diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py 
index acccb7a28d..1df0d41f12 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -29,185 +29,185 @@ def _get_calibration(num_scans): calibration = { - 'Calibration/VIS_Cal_Coeff': + "Calibration/VIS_Cal_Coeff": xr.DataArray( da.ones((19, 3), chunks=1024), - attrs={'Slope': np.array([1.] * 19), 'Intercept': np.array([0.] * 19)}, - dims=('_bands', '_coeffs')), - 'Calibration/IR_Cal_Coeff': + attrs={"Slope": np.array([1.] * 19), "Intercept": np.array([0.] * 19)}, + dims=("_bands", "_coeffs")), + "Calibration/IR_Cal_Coeff": xr.DataArray( da.ones((6, 4, num_scans), chunks=1024), - attrs={'Slope': np.array([1.] * 6), 'Intercept': np.array([0.] * 6)}, - dims=('_bands', '_coeffs', '_scans')), + attrs={"Slope": np.array([1.] * 6), "Intercept": np.array([0.] * 6)}, + dims=("_bands", "_coeffs", "_scans")), } return calibration def _get_250m_data(num_scans, rows_per_scan, num_cols): # Set some default attributes - def_attrs = {'FillValue': 65535, - 'valid_range': [0, 4095], - 'Slope': np.array([1.] * 1), 'Intercept': np.array([0.] * 1) + def_attrs = {"FillValue": 65535, + "valid_range": [0, 4095], + "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1) } - nounits_attrs = {**def_attrs, **{'units': 'NO'}} - radunits_attrs = {**def_attrs, **{'units': 'mW/ (m2 cm-1 sr)'}} + nounits_attrs = {**def_attrs, **{"units": "NO"}} + radunits_attrs = {**def_attrs, **{"units": "mW/ (m2 cm-1 sr)"}} data = { - 'Data/EV_250_RefSB_b1': + "Data/EV_250_RefSB_b1": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, - dims=('_rows', '_cols')), - 'Data/EV_250_RefSB_b2': + dims=("_rows", "_cols")), + "Data/EV_250_RefSB_b2": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, - dims=('_rows', '_cols')), - 'Data/EV_250_RefSB_b3': + dims=("_rows", "_cols")), + "Data/EV_250_RefSB_b3": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, - dims=('_rows', '_cols')), - 'Data/EV_250_RefSB_b4': + dims=("_rows", "_cols")), + "Data/EV_250_RefSB_b4": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, - dims=('_rows', '_cols')), - 'Data/EV_250_Emissive_b24': + dims=("_rows", "_cols")), + "Data/EV_250_Emissive_b24": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=radunits_attrs, - dims=('_rows', '_cols')), - 'Data/EV_250_Emissive_b25': + dims=("_rows", "_cols")), + "Data/EV_250_Emissive_b25": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=radunits_attrs, - dims=('_rows', '_cols')), + dims=("_rows", "_cols")), } return data def _get_1km_data(num_scans, rows_per_scan, num_cols): data = { - 'Data/EV_1KM_LL': + "Data/EV_1KM_LL": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'Slope': np.array([1.]), 'Intercept': np.array([0.]), - 'FillValue': 65535, - 'units': 'NO', - 'valid_range': [0, 4095], - 'long_name': b'1km Earth View Science Data', + "Slope": np.array([1.]), "Intercept": np.array([0.]), + "FillValue": 65535, + "units": "NO", + "valid_range": [0, 4095], + "long_name": b"1km Earth View Science Data", }, - dims=('_rows', '_cols')), - 'Data/EV_1KM_RefSB': + dims=("_rows", "_cols")), + "Data/EV_1KM_RefSB": xr.DataArray( da.ones((15, num_scans * rows_per_scan, num_cols), chunks=1024, 
dtype=np.uint16), attrs={ - 'Slope': np.array([1.] * 15), 'Intercept': np.array([0.] * 15), - 'FillValue': 65535, - 'units': 'NO', - 'valid_range': [0, 4095], - 'long_name': b'1km Earth View Science Data', + "Slope": np.array([1.] * 15), "Intercept": np.array([0.] * 15), + "FillValue": 65535, + "units": "NO", + "valid_range": [0, 4095], + "long_name": b"1km Earth View Science Data", }, - dims=('_ref_bands', '_rows', '_cols')), - 'Data/EV_1KM_Emissive': + dims=("_ref_bands", "_rows", "_cols")), + "Data/EV_1KM_Emissive": xr.DataArray( da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'Slope': np.array([1.] * 4), 'Intercept': np.array([0.] * 4), - 'FillValue': 65535, - 'units': 'mW/ (m2 cm-1 sr)', - 'valid_range': [0, 25000], - 'long_name': b'1km Emissive Bands Earth View ' - b'Science Data', + "Slope": np.array([1.] * 4), "Intercept": np.array([0.] * 4), + "FillValue": 65535, + "units": "mW/ (m2 cm-1 sr)", + "valid_range": [0, 25000], + "long_name": b"1km Emissive Bands Earth View " + b"Science Data", }, - dims=('_ir_bands', '_rows', '_cols')), - 'Data/EV_250_Aggr.1KM_RefSB': + dims=("_ir_bands", "_rows", "_cols")), + "Data/EV_250_Aggr.1KM_RefSB": xr.DataArray( da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'Slope': np.array([1.] * 4), 'Intercept': np.array([0.] * 4), - 'FillValue': 65535, - 'units': 'NO', - 'valid_range': [0, 4095], - 'long_name': b'250m Reflective Bands Earth View ' - b'Science Data Aggregated to 1 km' + "Slope": np.array([1.] * 4), "Intercept": np.array([0.] * 4), + "FillValue": 65535, + "units": "NO", + "valid_range": [0, 4095], + "long_name": b"250m Reflective Bands Earth View " + b"Science Data Aggregated to 1 km" }, - dims=('_ref250_bands', '_rows', '_cols')), - 'Data/EV_250_Aggr.1KM_Emissive': + dims=("_ref250_bands", "_rows", "_cols")), + "Data/EV_250_Aggr.1KM_Emissive": xr.DataArray( da.ones((2, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'Slope': np.array([1.] * 2), 'Intercept': np.array([0.] * 2), - 'FillValue': 65535, - 'units': 'mW/ (m2 cm-1 sr)', - 'valid_range': [0, 4095], - 'long_name': b'250m Emissive Bands Earth View ' - b'Science Data Aggregated to 1 km' + "Slope": np.array([1.] * 2), "Intercept": np.array([0.] 
* 2), + "FillValue": 65535, + "units": "mW/ (m2 cm-1 sr)", + "valid_range": [0, 4095], + "long_name": b"250m Emissive Bands Earth View " + b"Science Data Aggregated to 1 km" }, - dims=('_ir250_bands', '_rows', '_cols')), + dims=("_ir250_bands", "_rows", "_cols")), } return data def _get_250m_ll_data(num_scans, rows_per_scan, num_cols): # Set some default attributes - def_attrs = {'FillValue': 65535, - 'valid_range': [0, 4095], - 'Slope': np.array([1.]), 'Intercept': np.array([0.]), - 'long_name': b'250m Earth View Science Data', - 'units': 'mW/ (m2 cm-1 sr)', + def_attrs = {"FillValue": 65535, + "valid_range": [0, 4095], + "Slope": np.array([1.]), "Intercept": np.array([0.]), + "long_name": b"250m Earth View Science Data", + "units": "mW/ (m2 cm-1 sr)", } data = { - 'Data/EV_250_Emissive_b6': + "Data/EV_250_Emissive_b6": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=def_attrs, - dims=('_rows', '_cols')), - 'Data/EV_250_Emissive_b7': + dims=("_rows", "_cols")), + "Data/EV_250_Emissive_b7": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=def_attrs, - dims=('_rows', '_cols')), + dims=("_rows", "_cols")), } return data def _get_geo_data(num_scans, rows_per_scan, num_cols, prefix): geo = { - prefix + 'Longitude': + prefix + "Longitude": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), attrs={ - 'Slope': np.array([1.] * 1), 'Intercept': np.array([0.] * 1), - 'units': 'degree', - 'valid_range': [-90, 90], + "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1), + "units": "degree", + "valid_range": [-90, 90], }, - dims=('_rows', '_cols')), - prefix + 'Latitude': + dims=("_rows", "_cols")), + prefix + "Latitude": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), attrs={ - 'Slope': np.array([1.] * 1), 'Intercept': np.array([0.] * 1), - 'units': 'degree', - 'valid_range': [-180, 180], + "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1), + "units": "degree", + "valid_range": [-180, 180], }, - dims=('_rows', '_cols')), - prefix + 'SensorZenith': + dims=("_rows", "_cols")), + prefix + "SensorZenith": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), attrs={ - 'Slope': np.array([.01] * 1), 'Intercept': np.array([0.] * 1), - 'units': 'degree', - 'valid_range': [0, 28000], + "Slope": np.array([.01] * 1), "Intercept": np.array([0.] 
* 1), + "units": "degree", + "valid_range": [0, 28000], }, - dims=('_rows', '_cols')), + dims=("_rows", "_cols")), } return geo @@ -225,15 +225,15 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): @property def _rows_per_scan(self): - return self.filetype_info.get('rows_per_scan', 10) + return self.filetype_info.get("rows_per_scan", 10) def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" global_attrs = { - '/attr/Observing Beginning Date': '2019-01-01', - '/attr/Observing Ending Date': '2019-01-01', - '/attr/Observing Beginning Time': '18:27:39.720', - '/attr/Observing Ending Time': '18:38:36.728', + "/attr/Observing Beginning Date": "2019-01-01", + "/attr/Observing Ending Date": "2019-01-01", + "/attr/Observing Beginning Time": "18:27:39.720", + "/attr/Observing Ending Time": "18:38:36.728", } global_attrs = self._set_sensor_attrs(global_attrs) @@ -247,12 +247,12 @@ def get_test_content(self, filename, filename_info, filetype_info): return test_content def _set_sensor_attrs(self, global_attrs): - if 'mersi2_l1b' in self.filetype_info['file_type']: - global_attrs['/attr/Satellite Name'] = 'FY-3D' - global_attrs['/attr/Sensor Identification Code'] = 'MERSI' - elif 'mersi_ll' in self.filetype_info['file_type']: - global_attrs['/attr/Satellite Name'] = 'FY-3E' - global_attrs['/attr/Sensor Identification Code'] = 'MERSI LL' + if "mersi2_l1b" in self.filetype_info["file_type"]: + global_attrs["/attr/Satellite Name"] = "FY-3D" + global_attrs["/attr/Sensor Identification Code"] = "MERSI" + elif "mersi_ll" in self.filetype_info["file_type"]: + global_attrs["/attr/Satellite Name"] = "FY-3E" + global_attrs["/attr/Sensor Identification Code"] = "MERSI LL" return global_attrs def _get_data_file_content(self): @@ -272,7 +272,7 @@ def _add_band_data_file_content(self): num_scans = self.num_scans rows_per_scan = self._rows_per_scan is_mersi2 = self.filetype_info["file_type"].startswith("mersi2_") - is_1km = "_1000" in self.filetype_info['file_type'] + is_1km = "_1000" in self.filetype_info["file_type"] data_func = _get_1km_data if is_1km else (_get_250m_data if is_mersi2 else _get_250m_ll_data) return data_func(num_scans, rows_per_scan, num_cols) @@ -280,12 +280,12 @@ def _add_tbb_coefficients(self, global_attrs): if not self.filetype_info["file_type"].startswith("mersi2_"): return - if "_1000" in self.filetype_info['file_type']: - global_attrs['/attr/TBB_Trans_Coefficient_A'] = np.array([1.0] * 6) - global_attrs['/attr/TBB_Trans_Coefficient_B'] = np.array([0.0] * 6) + if "_1000" in self.filetype_info["file_type"]: + global_attrs["/attr/TBB_Trans_Coefficient_A"] = np.array([1.0] * 6) + global_attrs["/attr/TBB_Trans_Coefficient_B"] = np.array([0.0] * 6) else: - global_attrs['/attr/TBB_Trans_Coefficient_A'] = np.array([0.0] * 6) - global_attrs['/attr/TBB_Trans_Coefficient_B'] = np.array([0.0] * 6) + global_attrs["/attr/TBB_Trans_Coefficient_A"] = np.array([0.0] * 6) + global_attrs["/attr/TBB_Trans_Coefficient_B"] = np.array([0.0] * 6) @property def _num_cols_for_file_type(self): @@ -298,18 +298,18 @@ def _geo_prefix_for_file_type(self): def _test_helper(res): """Remove test code duplication.""" - assert (2 * 40, 2048 * 2) == res['1'].shape - assert 'reflectance' == res['1'].attrs['calibration'] - assert '%' == res['1'].attrs['units'] - assert (2 * 40, 2048 * 2) == res['2'].shape - assert 'reflectance' == res['2'].attrs['calibration'] - assert '%' == res['2'].attrs['units'] - assert (2 * 40, 2048 * 2) == res['3'].shape - assert 'reflectance' == 
res['3'].attrs['calibration'] - assert '%' == res['3'].attrs['units'] - assert (2 * 40, 2048 * 2) == res['4'].shape - assert 'reflectance' == res['4'].attrs['calibration'] - assert '%' == res['4'].attrs['units'] + assert (2 * 40, 2048 * 2) == res["1"].shape + assert "reflectance" == res["1"].attrs["calibration"] + assert "%" == res["1"].attrs["units"] + assert (2 * 40, 2048 * 2) == res["2"].shape + assert "reflectance" == res["2"].attrs["calibration"] + assert "%" == res["2"].attrs["units"] + assert (2 * 40, 2048 * 2) == res["3"].shape + assert "reflectance" == res["3"].attrs["calibration"] + assert "%" == res["3"].attrs["units"] + assert (2 * 40, 2048 * 2) == res["4"].shape + assert "reflectance" == res["4"].attrs["calibration"] + assert "%" == res["4"].attrs["units"] class MERSIL1BTester: @@ -319,9 +319,9 @@ def setup_method(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.mersi_l1b import MERSIL1B - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(MERSIL1B, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(MERSIL1B, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -334,8 +334,8 @@ class TestMERSI2L1B(MERSIL1BTester): """Test the FY3D MERSI2 L1B reader.""" yaml_file = "mersi2_l1b.yaml" - filenames_1000m = ['tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF'] - filenames_250m = ['tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF'] + filenames_1000m = ["tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF", "tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF"] + filenames_250m = ["tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF", "tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF"] filenames_all = filenames_1000m + filenames_250m def test_all_resolutions(self): @@ -355,8 +355,8 @@ def test_all_resolutions(self): # - Bands 1-4 (visible) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids - for band_name in ('1', '2', '3', '4', '24', '25'): - if band_name in ('24', '25'): + for band_name in ("1", "2", "3", "4", "24", "25"): + if band_name in ("24", "25"): # don't know how to get radiance for IR bands num_results = 2 else: @@ -370,20 +370,20 @@ def test_all_resolutions(self): num_results=num_results, best=False) assert num_results == len(res) - res = reader.load(['1', '2', '3', '4', '5', '20', '24', '25']) + res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) assert len(res) == 8 - assert res['5'].shape == (2 * 10, 2048) - assert res['5'].attrs['calibration'] == 'reflectance' - assert res['5'].attrs['units'] == '%' - assert res['20'].shape == (2 * 10, 2048) - assert res['20'].attrs['calibration'] == 'brightness_temperature' - assert res['20'].attrs['units'] == 'K' - assert res['24'].shape == (2 * 40, 2048 * 2) - assert res['24'].attrs['calibration'] == 'brightness_temperature' - assert res['24'].attrs['units'] == 'K' - assert res['25'].shape == (2 * 40, 2048 * 2) - assert res['25'].attrs['calibration'] == 'brightness_temperature' - assert res['25'].attrs['units'] == 'K' + assert res["5"].shape == (2 * 10, 2048) + assert res["5"].attrs["calibration"] == "reflectance" + assert res["5"].attrs["units"] == "%" + assert res["20"].shape == 
(2 * 10, 2048) + assert res["20"].attrs["calibration"] == "brightness_temperature" + assert res["20"].attrs["units"] == "K" + assert res["24"].shape == (2 * 40, 2048 * 2) + assert res["24"].attrs["calibration"] == "brightness_temperature" + assert res["24"].attrs["units"] == "K" + assert res["25"].shape == (2 * 40, 2048 * 2) + assert res["25"].attrs["calibration"] == "brightness_temperature" + assert res["25"].attrs["units"] == "K" def test_counts_calib(self): """Test loading data at counts calibration.""" @@ -398,43 +398,43 @@ def test_counts_calib(self): assert reader.file_handlers ds_ids = [] - for band_name in ['1', '2', '3', '4', '5', '20', '24', '25']: - ds_ids.append(make_dataid(name=band_name, calibration='counts')) - ds_ids.append(make_dataid(name='satellite_zenith_angle')) + for band_name in ["1", "2", "3", "4", "5", "20", "24", "25"]: + ds_ids.append(make_dataid(name=band_name, calibration="counts")) + ds_ids.append(make_dataid(name="satellite_zenith_angle")) res = reader.load(ds_ids) assert len(res) == 9 - assert res['1'].shape == (2 * 40, 2048 * 2) - assert res['1'].attrs['calibration'] == 'counts' - assert res['1'].dtype == np.uint16 - assert res['1'].attrs['units'] == '1' - assert res['2'].shape == (2 * 40, 2048 * 2) - assert res['2'].attrs['calibration'] == 'counts' - assert res['2'].dtype == np.uint16 - assert res['2'].attrs['units'] == '1' - assert res['3'].shape == (2 * 40, 2048 * 2) - assert res['3'].attrs['calibration'] == 'counts' - assert res['3'].dtype == np.uint16 - assert res['3'].attrs['units'] == '1' - assert res['4'].shape == (2 * 40, 2048 * 2) - assert res['4'].attrs['calibration'] == 'counts' - assert res['4'].dtype == np.uint16 - assert res['4'].attrs['units'] == '1' - assert res['5'].shape == (2 * 10, 2048) - assert res['5'].attrs['calibration'] == 'counts' - assert res['5'].dtype == np.uint16 - assert res['5'].attrs['units'] == '1' - assert res['20'].shape == (2 * 10, 2048) - assert res['20'].attrs['calibration'] == 'counts' - assert res['20'].dtype == np.uint16 - assert res['20'].attrs['units'] == '1' - assert res['24'].shape == (2 * 40, 2048 * 2) - assert res['24'].attrs['calibration'] == 'counts' - assert res['24'].dtype == np.uint16 - assert res['24'].attrs['units'] == '1' - assert res['25'].shape == (2 * 40, 2048 * 2) - assert res['25'].attrs['calibration'] == 'counts' - assert res['25'].dtype == np.uint16 - assert res['25'].attrs['units'] == '1' + assert res["1"].shape == (2 * 40, 2048 * 2) + assert res["1"].attrs["calibration"] == "counts" + assert res["1"].dtype == np.uint16 + assert res["1"].attrs["units"] == "1" + assert res["2"].shape == (2 * 40, 2048 * 2) + assert res["2"].attrs["calibration"] == "counts" + assert res["2"].dtype == np.uint16 + assert res["2"].attrs["units"] == "1" + assert res["3"].shape == (2 * 40, 2048 * 2) + assert res["3"].attrs["calibration"] == "counts" + assert res["3"].dtype == np.uint16 + assert res["3"].attrs["units"] == "1" + assert res["4"].shape == (2 * 40, 2048 * 2) + assert res["4"].attrs["calibration"] == "counts" + assert res["4"].dtype == np.uint16 + assert res["4"].attrs["units"] == "1" + assert res["5"].shape == (2 * 10, 2048) + assert res["5"].attrs["calibration"] == "counts" + assert res["5"].dtype == np.uint16 + assert res["5"].attrs["units"] == "1" + assert res["20"].shape == (2 * 10, 2048) + assert res["20"].attrs["calibration"] == "counts" + assert res["20"].dtype == np.uint16 + assert res["20"].attrs["units"] == "1" + assert res["24"].shape == (2 * 40, 2048 * 2) + assert res["24"].attrs["calibration"] 
== "counts" + assert res["24"].dtype == np.uint16 + assert res["24"].attrs["units"] == "1" + assert res["25"].shape == (2 * 40, 2048 * 2) + assert res["25"].attrs["calibration"] == "counts" + assert res["25"].dtype == np.uint16 + assert res["25"].attrs["units"] == "1" def test_rad_calib(self): """Test loading data at radiance calibration.""" @@ -449,25 +449,25 @@ def test_rad_calib(self): assert reader.file_handlers ds_ids = [] - for band_name in ['1', '2', '3', '4', '5']: - ds_ids.append(make_dataid(name=band_name, calibration='radiance')) + for band_name in ["1", "2", "3", "4", "5"]: + ds_ids.append(make_dataid(name=band_name, calibration="radiance")) res = reader.load(ds_ids) assert len(res) == 5 - assert res['1'].shape == (2 * 40, 2048 * 2) - assert res['1'].attrs['calibration'] == 'radiance' - assert res['1'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['2'].shape == (2 * 40, 2048 * 2) - assert res['2'].attrs['calibration'] == 'radiance' - assert res['2'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['3'].shape == (2 * 40, 2048 * 2) - assert res['3'].attrs['calibration'] == 'radiance' - assert res['3'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['4'].shape == (2 * 40, 2048 * 2) - assert res['4'].attrs['calibration'] == 'radiance' - assert res['4'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['5'].shape == (2 * 10, 2048) - assert res['5'].attrs['calibration'] == 'radiance' - assert res['5'].attrs['units'] == 'mW/ (m2 cm-1 sr)' + assert res["1"].shape == (2 * 40, 2048 * 2) + assert res["1"].attrs["calibration"] == "radiance" + assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["2"].shape == (2 * 40, 2048 * 2) + assert res["2"].attrs["calibration"] == "radiance" + assert res["2"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["3"].shape == (2 * 40, 2048 * 2) + assert res["3"].attrs["calibration"] == "radiance" + assert res["3"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["4"].shape == (2 * 40, 2048 * 2) + assert res["4"].attrs["calibration"] == "radiance" + assert res["4"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["5"].shape == (2 * 10, 2048) + assert res["5"].attrs["calibration"] == "radiance" + assert res["5"].attrs["units"] == "mW/ (m2 cm-1 sr)" def test_1km_resolutions(self): """Test loading data when only 1km resolutions are available.""" @@ -486,8 +486,8 @@ def test_1km_resolutions(self): # - Bands 1-4 (visible) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids - for band_name in ('1', '2', '3', '4', '24', '25'): - if band_name in ('24', '25'): + for band_name in ("1", "2", "3", "4", "24", "25"): + if band_name in ("24", "25"): # don't know how to get radiance for IR bands num_results = 2 else: @@ -500,32 +500,32 @@ def test_1km_resolutions(self): num_results=num_results, best=False) assert num_results == len(res) - res = reader.load(['1', '2', '3', '4', '5', '20', '24', '25']) + res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) assert len(res) == 8 - assert res['1'].shape == (2 * 10, 2048) - assert res['1'].attrs['calibration'] == 'reflectance' - assert res['1'].attrs['units'] == '%' - assert res['2'].shape == (2 * 10, 2048) - assert res['2'].attrs['calibration'] == 'reflectance' - assert res['2'].attrs['units'] == '%' - assert res['3'].shape == (2 * 10, 2048) - assert res['3'].attrs['calibration'] == 'reflectance' - assert res['3'].attrs['units'] == '%' - assert res['4'].shape == (2 * 10, 2048) - assert res['4'].attrs['calibration'] == 'reflectance' - assert res['4'].attrs['units'] == '%' - 
assert res['5'].shape == (2 * 10, 2048) - assert res['5'].attrs['calibration'] == 'reflectance' - assert res['5'].attrs['units'] == '%' - assert res['20'].shape == (2 * 10, 2048) - assert res['20'].attrs['calibration'] == 'brightness_temperature' - assert res['20'].attrs['units'] == 'K' - assert res['24'].shape == (2 * 10, 2048) - assert res['24'].attrs['calibration'] == 'brightness_temperature' - assert res['24'].attrs['units'] == 'K' - assert res['25'].shape == (2 * 10, 2048) - assert res['25'].attrs['calibration'] == 'brightness_temperature' - assert res['25'].attrs['units'] == 'K' + assert res["1"].shape == (2 * 10, 2048) + assert res["1"].attrs["calibration"] == "reflectance" + assert res["1"].attrs["units"] == "%" + assert res["2"].shape == (2 * 10, 2048) + assert res["2"].attrs["calibration"] == "reflectance" + assert res["2"].attrs["units"] == "%" + assert res["3"].shape == (2 * 10, 2048) + assert res["3"].attrs["calibration"] == "reflectance" + assert res["3"].attrs["units"] == "%" + assert res["4"].shape == (2 * 10, 2048) + assert res["4"].attrs["calibration"] == "reflectance" + assert res["4"].attrs["units"] == "%" + assert res["5"].shape == (2 * 10, 2048) + assert res["5"].attrs["calibration"] == "reflectance" + assert res["5"].attrs["units"] == "%" + assert res["20"].shape == (2 * 10, 2048) + assert res["20"].attrs["calibration"] == "brightness_temperature" + assert res["20"].attrs["units"] == "K" + assert res["24"].shape == (2 * 10, 2048) + assert res["24"].attrs["calibration"] == "brightness_temperature" + assert res["24"].attrs["units"] == "K" + assert res["25"].shape == (2 * 10, 2048) + assert res["25"].attrs["calibration"] == "brightness_temperature" + assert res["25"].attrs["units"] == "K" def test_250_resolutions(self): """Test loading data when only 250m resolutions are available.""" @@ -544,8 +544,8 @@ def test_250_resolutions(self): # - Bands 1-4 (visible) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids - for band_name in ('1', '2', '3', '4', '24', '25'): - if band_name in ('24', '25'): + for band_name in ("1", "2", "3", "4", "24", "25"): + if band_name in ("24", "25"): # don't know how to get radiance for IR bands num_results = 2 else: @@ -558,27 +558,27 @@ def test_250_resolutions(self): with pytest.raises(KeyError): get_key(ds_id, available_datasets, num_results=num_results, best=False) - res = reader.load(['1', '2', '3', '4', '5', '20', '24', '25']) + res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) assert len(res) == 6 with pytest.raises(KeyError): - res.__getitem__('5') + res.__getitem__("5") with pytest.raises(KeyError): - res.__getitem__('20') + res.__getitem__("20") _test_helper(res) - assert res['24'].shape == (2 * 40, 2048 * 2) - assert res['24'].attrs['calibration'] == 'brightness_temperature' - assert res['24'].attrs['units'] == 'K' - assert res['25'].shape == (2 * 40, 2048 * 2) - assert res['25'].attrs['calibration'] == 'brightness_temperature' - assert res['25'].attrs['units'] == 'K' + assert res["24"].shape == (2 * 40, 2048 * 2) + assert res["24"].attrs["calibration"] == "brightness_temperature" + assert res["24"].attrs["units"] == "K" + assert res["25"].shape == (2 * 40, 2048 * 2) + assert res["25"].attrs["calibration"] == "brightness_temperature" + assert res["25"].attrs["units"] == "K" class TestMERSILLL1B(MERSIL1BTester): """Test the FY3E MERSI-LL L1B reader.""" yaml_file = "mersi_ll_l1b.yaml" - filenames_1000m = ['FY3E_MERSI_GRAN_L1_20230410_1910_1000M_V0.HDF', 'FY3E_MERSI_GRAN_L1_20230410_1910_GEO1K_V0.HDF'] 
- filenames_250m = ['FY3E_MERSI_GRAN_L1_20230410_1910_0250M_V0.HDF', 'FY3E_MERSI_GRAN_L1_20230410_1910_GEOQK_V0.HDF'] + filenames_1000m = ["FY3E_MERSI_GRAN_L1_20230410_1910_1000M_V0.HDF", "FY3E_MERSI_GRAN_L1_20230410_1910_GEO1K_V0.HDF"] + filenames_250m = ["FY3E_MERSI_GRAN_L1_20230410_1910_0250M_V0.HDF", "FY3E_MERSI_GRAN_L1_20230410_1910_GEOQK_V0.HDF"] filenames_all = filenames_1000m + filenames_250m def test_all_resolutions(self): @@ -598,7 +598,7 @@ def test_all_resolutions(self): # - Bands 1-4 (visible) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids - for band_name in ('6', '7'): + for band_name in ("6", "7"): num_results = 2 ds_id = make_dataid(name=band_name, resolution=250) res = get_key(ds_id, available_datasets, @@ -609,17 +609,17 @@ def test_all_resolutions(self): num_results=num_results, best=False) assert num_results == len(res) - res = reader.load(['1', '2', '4', '7']) + res = reader.load(["1", "2", "4", "7"]) assert len(res) == 4 - assert res['4'].shape == (2 * 10, 2048) - assert res['1'].attrs['calibration'] == 'radiance' - assert res['1'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['2'].shape == (2 * 10, 2048) - assert res['2'].attrs['calibration'] == 'brightness_temperature' - assert res['2'].attrs['units'] == 'K' - assert res['7'].shape == (2 * 40, 2048 * 2) - assert res['7'].attrs['calibration'] == 'brightness_temperature' - assert res['7'].attrs['units'] == 'K' + assert res["4"].shape == (2 * 10, 2048) + assert res["1"].attrs["calibration"] == "radiance" + assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["2"].shape == (2 * 10, 2048) + assert res["2"].attrs["calibration"] == "brightness_temperature" + assert res["2"].attrs["units"] == "K" + assert res["7"].shape == (2 * 40, 2048 * 2) + assert res["7"].attrs["calibration"] == "brightness_temperature" + assert res["7"].attrs["units"] == "K" def test_rad_calib(self): """Test loading data at radiance calibration.""" @@ -634,25 +634,25 @@ def test_rad_calib(self): assert reader.file_handlers ds_ids = [] - for band_name in ['1', '3', '4', '6', '7']: - ds_ids.append(make_dataid(name=band_name, calibration='radiance')) + for band_name in ["1", "3", "4", "6", "7"]: + ds_ids.append(make_dataid(name=band_name, calibration="radiance")) res = reader.load(ds_ids) assert len(res) == 5 - assert res['1'].shape == (2 * 10, 2048) - assert res['1'].attrs['calibration'] == 'radiance' - assert res['1'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['3'].shape == (2 * 10, 2048) - assert res['3'].attrs['calibration'] == 'radiance' - assert res['3'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['4'].shape == (2 * 10, 2048) - assert res['4'].attrs['calibration'] == 'radiance' - assert res['4'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['6'].shape == (2 * 40, 2048 * 2) - assert res['6'].attrs['calibration'] == 'radiance' - assert res['6'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['7'].shape == (2 * 40, 2048 * 2) - assert res['7'].attrs['calibration'] == 'radiance' - assert res['7'].attrs['units'] == 'mW/ (m2 cm-1 sr)' + assert res["1"].shape == (2 * 10, 2048) + assert res["1"].attrs["calibration"] == "radiance" + assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["3"].shape == (2 * 10, 2048) + assert res["3"].attrs["calibration"] == "radiance" + assert res["3"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["4"].shape == (2 * 10, 2048) + assert res["4"].attrs["calibration"] == "radiance" + assert res["4"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert 
res["6"].shape == (2 * 40, 2048 * 2) + assert res["6"].attrs["calibration"] == "radiance" + assert res["6"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["7"].shape == (2 * 40, 2048 * 2) + assert res["7"].attrs["calibration"] == "radiance" + assert res["7"].attrs["units"] == "mW/ (m2 cm-1 sr)" def test_1km_resolutions(self): """Test loading data when only 1km resolutions are available.""" @@ -671,8 +671,8 @@ def test_1km_resolutions(self): # - Band 6-7 (IR) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids - for band_name in ('1', '2', '3', '4', '6', '7'): - if band_name == '1': + for band_name in ("1", "2", "3", "4", "6", "7"): + if band_name == "1": # don't know how to get anything apart from radiance for LL band num_results = 1 else: @@ -683,31 +683,31 @@ def test_1km_resolutions(self): ds_id = make_dataid(name=band_name, resolution=1000) res = get_key(ds_id, available_datasets, num_results=num_results, best=False) - if band_name == '1': + if band_name == "1": assert num_results == len([res]) else: assert num_results == len(res) - res = reader.load(['1', '2', '3', '5', '6', '7']) + res = reader.load(["1", "2", "3", "5", "6", "7"]) assert len(res) == 6 - assert res['1'].shape == (2 * 10, 2048) - assert 'radiance' == res['1'].attrs['calibration'] - assert res['1'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['2'].shape == (2 * 10, 2048) - assert 'brightness_temperature' == res['2'].attrs['calibration'] - assert res['2'].attrs['units'] == 'K' - assert res['3'].shape == (2 * 10, 2048) - assert 'brightness_temperature' == res['3'].attrs['calibration'] - assert res['3'].attrs['units'] == 'K' - assert res['5'].shape == (2 * 10, 2048) - assert 'brightness_temperature' == res['5'].attrs['calibration'] - assert res['5'].attrs['units'] == 'K' - assert res['6'].shape == (2 * 10, 2048) - assert 'brightness_temperature' == res['6'].attrs['calibration'] - assert res['6'].attrs['units'] == 'K' - assert res['7'].shape == (2 * 10, 2048) - assert 'brightness_temperature' == res['7'].attrs['calibration'] - assert res['7'].attrs['units'] == 'K' + assert res["1"].shape == (2 * 10, 2048) + assert "radiance" == res["1"].attrs["calibration"] + assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["2"].shape == (2 * 10, 2048) + assert "brightness_temperature" == res["2"].attrs["calibration"] + assert res["2"].attrs["units"] == "K" + assert res["3"].shape == (2 * 10, 2048) + assert "brightness_temperature" == res["3"].attrs["calibration"] + assert res["3"].attrs["units"] == "K" + assert res["5"].shape == (2 * 10, 2048) + assert "brightness_temperature" == res["5"].attrs["calibration"] + assert res["5"].attrs["units"] == "K" + assert res["6"].shape == (2 * 10, 2048) + assert "brightness_temperature" == res["6"].attrs["calibration"] + assert res["6"].attrs["units"] == "K" + assert res["7"].shape == (2 * 10, 2048) + assert "brightness_temperature" == res["7"].attrs["calibration"] + assert res["7"].attrs["units"] == "K" def test_250_resolutions(self): """Test loading data when only 250m resolutions are available.""" @@ -725,7 +725,7 @@ def test_250_resolutions(self): # Verify that we have multiple resolutions for: # - Bands 6-7 available_datasets = reader.available_dataset_ids - for band_name in ('6', '7'): + for band_name in ("6", "7"): num_results = 2 ds_id = make_dataid(name=band_name, resolution=250) res = get_key(ds_id, available_datasets, @@ -735,13 +735,13 @@ def test_250_resolutions(self): with pytest.raises(KeyError): get_key(ds_id, available_datasets, 
num_results=num_results, best=False) - res = reader.load(['1', '6', '7']) + res = reader.load(["1", "6", "7"]) assert 2 == len(res) with pytest.raises(KeyError): - res.__getitem__('1') - assert (2 * 40, 2048 * 2) == res['6'].shape - assert 'brightness_temperature' == res['6'].attrs['calibration'] - assert 'K' == res['6'].attrs['units'] - assert (2 * 40, 2048 * 2) == res['7'].shape - assert 'brightness_temperature' == res['7'].attrs['calibration'] - assert 'K' == res['7'].attrs['units'] + res.__getitem__("1") + assert (2 * 40, 2048 * 2) == res["6"].shape + assert "brightness_temperature" == res["6"].attrs["calibration"] + assert "K" == res["6"].attrs["units"] + assert (2 * 40, 2048 * 2) == res["7"].shape + assert "brightness_temperature" == res["7"].attrs["calibration"] + assert "K" == res["7"].attrs["units"] diff --git a/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py b/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py index cafadf9e77..77344e6856 100644 --- a/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py +++ b/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py @@ -39,9 +39,9 @@ DEFAULT_FILE_DATE_DATA = np.clip(DEFAULT_FILE_FLOAT_DATA, 0, 1049) DEFAULT_FILE_UBYTE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=np.ubyte) -float_variables = ['tpwGrid', 'tpwGridPrior', 'tpwGridSubseq', 'footGridPrior', 'footGridSubseq'] -date_variables = ['timeAwayGridPrior', 'timeAwayGridSubseq'] -ubyte_variables = ['satGridPrior', 'satGridSubseq'] +float_variables = ["tpwGrid", "tpwGridPrior", "tpwGridSubseq", "footGridPrior", "footGridSubseq"] +date_variables = ["timeAwayGridPrior", "timeAwayGridSubseq"] +ubyte_variables = ["satGridPrior", "satGridSubseq"] file_content_attr = dict() @@ -50,57 +50,57 @@ class FakeNetCDF4FileHandlerMimicLow(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content for lower resolution files.""" - dt_s = filename_info.get('start_time', DEFAULT_DATE) - dt_e = filename_info.get('end_time', DEFAULT_DATE) + dt_s = filename_info.get("start_time", DEFAULT_DATE) + dt_e = filename_info.get("end_time", DEFAULT_DATE) - if filetype_info['file_type'] == 'mimicTPW2_comp': + if filetype_info["file_type"] == "mimicTPW2_comp": file_content = { - '/attr/start_time': dt_s.strftime('%Y%m%d.%H%M%S'), - '/attr/end_time': dt_e.strftime('%Y%m%d.%H%M%S'), - '/attr/platform_shortname': 'aggregated microwave', - '/attr/sensor': 'mimic', + "/attr/start_time": dt_s.strftime("%Y%m%d.%H%M%S"), + "/attr/end_time": dt_e.strftime("%Y%m%d.%H%M%S"), + "/attr/platform_shortname": "aggregated microwave", + "/attr/sensor": "mimic", } - file_content['latArr'] = DEFAULT_LAT - file_content['latArr/shape'] = (DEFAULT_FILE_SHAPE[0],) - file_content['latArr/attr/units'] = 'degress_north' + file_content["latArr"] = DEFAULT_LAT + file_content["latArr/shape"] = (DEFAULT_FILE_SHAPE[0],) + file_content["latArr/attr/units"] = "degress_north" - file_content['lonArr'] = DEFAULT_LON - file_content['lonArr/shape'] = (DEFAULT_FILE_SHAPE[1],) - file_content['lonArr/attr/units'] = 'degrees_east' + file_content["lonArr"] = DEFAULT_LON + file_content["lonArr/shape"] = (DEFAULT_FILE_SHAPE[1],) + file_content["lonArr/attr/units"] = "degrees_east" - file_content['/dimension/lat'] = DEFAULT_FILE_SHAPE[0] - file_content['/dimension/lon'] = DEFAULT_FILE_SHAPE[1] + file_content["/dimension/lat"] = DEFAULT_FILE_SHAPE[0] + file_content["/dimension/lon"] = DEFAULT_FILE_SHAPE[1] for float_var in float_variables: file_content[float_var] = 
DEFAULT_FILE_FLOAT_DATA.reshape(DEFAULT_FILE_SHAPE) - file_content['{}/shape'.format(float_var)] = DEFAULT_FILE_SHAPE + file_content["{}/shape".format(float_var)] = DEFAULT_FILE_SHAPE file_content_attr[float_var] = {"units": "mm"} for date_var in date_variables: file_content[date_var] = DEFAULT_FILE_DATE_DATA.reshape(DEFAULT_FILE_SHAPE) - file_content['{}/shape'.format(date_var)] = DEFAULT_FILE_SHAPE + file_content["{}/shape".format(date_var)] = DEFAULT_FILE_SHAPE file_content_attr[date_var] = {"units": "minutes"} for ubyte_var in ubyte_variables: file_content[ubyte_var] = DEFAULT_FILE_UBYTE_DATA.reshape(DEFAULT_FILE_SHAPE) - file_content['{}/shape'.format(ubyte_var)] = DEFAULT_FILE_SHAPE + file_content["{}/shape".format(ubyte_var)] = DEFAULT_FILE_SHAPE file_content_attr[ubyte_var] = {"source_key": "Key: 0: None, 1: NOAA-N, 2: NOAA-P, 3: Metop-A, \ 4: Metop-B, 5: SNPP, 6: SSMI-17, 7: SSMI-18"} # convert to xarrays for key, val in file_content.items(): - if key == 'lonArr' or key == 'latArr': + if key == "lonArr" or key == "latArr": file_content[key] = xr.DataArray(val) elif isinstance(val, np.ndarray): if val.ndim > 1: - file_content[key] = xr.DataArray(val, dims=('y', 'x'), attrs=file_content_attr[key]) + file_content[key] = xr.DataArray(val, dims=("y", "x"), attrs=file_content_attr[key]) else: file_content[key] = xr.DataArray(val) for key in itertools.chain(float_variables, ubyte_variables): - file_content[key].attrs['_FillValue'] = -999.0 - file_content[key].attrs['name'] = key - file_content[key].attrs['file_key'] = key - file_content[key].attrs['file_type'] = self.filetype_info['file_type'] + file_content[key].attrs["_FillValue"] = -999.0 + file_content[key].attrs["name"] = key + file_content[key].attrs["file_key"] = key + file_content[key].attrs["file_type"] = self.filetype_info["file_type"] else: - msg = 'Wrong Test Reader for file_type {}'.format(filetype_info['file_type']) + msg = "Wrong Test Reader for file_type {}".format(filetype_info["file_type"]) raise AssertionError(msg) return file_content @@ -115,9 +115,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.mimic_TPW2_nc import MimicTPW2FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(MimicTPW2FileHandler, '__bases__', (FakeNetCDF4FileHandlerMimicLow,)) + self.p = mock.patch.object(MimicTPW2FileHandler, "__bases__", (FakeNetCDF4FileHandlerMimicLow,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -130,7 +130,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'comp20190619.130000.nc', + "comp20190619.130000.nc", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -141,54 +141,54 @@ def test_load_mimic_float(self): """Load TPW mimic float data.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.mimic_TPW2_nc.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.mimic_TPW2_nc.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'comp20190619.130000.nc', + "comp20190619.130000.nc", ]) r.create_filehandlers(loadables) ds = r.load(float_variables) 
self.assertEqual(len(ds), len(float_variables)) for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'aggregated microwave') - self.assertEqual(d.attrs['sensor'], 'mimic') - self.assertEqual(d.attrs['units'], 'mm') - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + self.assertEqual(d.attrs["platform_shortname"], "aggregated microwave") + self.assertEqual(d.attrs["sensor"], "mimic") + self.assertEqual(d.attrs["units"], "mm") + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) def test_load_mimic_timedelta(self): """Load TPW mimic timedelta data (data latency variables).""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.mimic_TPW2_nc.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.mimic_TPW2_nc.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'comp20190619.130000.nc', + "comp20190619.130000.nc", ]) r.create_filehandlers(loadables) ds = r.load(date_variables) self.assertEqual(len(ds), len(date_variables)) for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'aggregated microwave') - self.assertEqual(d.attrs['sensor'], 'mimic') - self.assertEqual(d.attrs['units'], 'minutes') - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + self.assertEqual(d.attrs["platform_shortname"], "aggregated microwave") + self.assertEqual(d.attrs["sensor"], "mimic") + self.assertEqual(d.attrs["units"], "minutes") + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) self.assertEqual(d.dtype, DEFAULT_FILE_DTYPE) def test_load_mimic_ubyte(self): """Load TPW mimic sensor grids.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.mimic_TPW2_nc.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.mimic_TPW2_nc.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'comp20190619.130000.nc', + "comp20190619.130000.nc", ]) r.create_filehandlers(loadables) ds = r.load(ubyte_variables) self.assertEqual(len(ds), len(ubyte_variables)) for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'aggregated microwave') - self.assertEqual(d.attrs['sensor'], 'mimic') - self.assertIn('source_key', d.attrs) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + self.assertEqual(d.attrs["platform_shortname"], "aggregated microwave") + self.assertEqual(d.attrs["sensor"], "mimic") + self.assertIn("source_key", d.attrs) + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) self.assertEqual(d.dtype, np.uint8) diff --git a/satpy/tests/reader_tests/test_mimic_TPW2_nc.py b/satpy/tests/reader_tests/test_mimic_TPW2_nc.py index 544c805e70..9c6c24b5a7 100644 --- a/satpy/tests/reader_tests/test_mimic_TPW2_nc.py +++ b/satpy/tests/reader_tests/test_mimic_TPW2_nc.py @@ -43,42 +43,42 @@ class FakeNetCDF4FileHandlerMimic(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" from xarray import DataArray - dt_s = filename_info.get('start_time', datetime(2019, 6, 19, 13, 0)) - dt_e = filename_info.get('end_time', datetime(2019, 6, 19, 13, 0)) + dt_s = filename_info.get("start_time", datetime(2019, 6, 19, 13, 0)) + dt_e = filename_info.get("end_time", datetime(2019, 6, 19, 13, 0)) - if filetype_info['file_type'] == 'mimicTPW2_comp': + if filetype_info["file_type"] == "mimicTPW2_comp": file_content = { 
- '/attr/start_time': dt_s.strftime('%Y%m%d.%H%M%S'), - '/attr/end_time': dt_e.strftime('%Y%m%d.%H%M%S'), - '/attr/platform_shortname': 'aggregated microwave', - '/attr/sensor': 'mimic', + "/attr/start_time": dt_s.strftime("%Y%m%d.%H%M%S"), + "/attr/end_time": dt_e.strftime("%Y%m%d.%H%M%S"), + "/attr/platform_shortname": "aggregated microwave", + "/attr/sensor": "mimic", } - file_content['latArr'] = DEFAULT_LAT - file_content['latArr/shape'] = (DEFAULT_FILE_SHAPE[0],) - file_content['latArr/attr/units'] = 'degress_north' + file_content["latArr"] = DEFAULT_LAT + file_content["latArr/shape"] = (DEFAULT_FILE_SHAPE[0],) + file_content["latArr/attr/units"] = "degress_north" - file_content['lonArr'] = DEFAULT_LON - file_content['lonArr/shape'] = (DEFAULT_FILE_SHAPE[1],) - file_content['lonArr/attr/units'] = 'degrees_east' + file_content["lonArr"] = DEFAULT_LON + file_content["lonArr/shape"] = (DEFAULT_FILE_SHAPE[1],) + file_content["lonArr/attr/units"] = "degrees_east" - file_content['tpwGrid'] = DEFAULT_FILE_DATA - file_content['tpwGrid/shape'] = DEFAULT_FILE_SHAPE - file_content_units['tpwGrid'] = 'mm' + file_content["tpwGrid"] = DEFAULT_FILE_DATA + file_content["tpwGrid/shape"] = DEFAULT_FILE_SHAPE + file_content_units["tpwGrid"] = "mm" - file_content['/dimension/lat'] = DEFAULT_FILE_SHAPE[0] - file_content['/dimension/lon'] = DEFAULT_FILE_SHAPE[1] + file_content["/dimension/lat"] = DEFAULT_FILE_SHAPE[0] + file_content["/dimension/lon"] = DEFAULT_FILE_SHAPE[1] # convert to xarrays for key, val in file_content.items(): - if key == 'lonArr' or key == 'latArr': + if key == "lonArr" or key == "latArr": file_content[key] = DataArray(val) elif isinstance(val, np.ndarray): if val.ndim > 1: - file_content[key] = DataArray(val, dims=('y', 'x'), attrs={"units": file_content_units[key]}) + file_content[key] = DataArray(val, dims=("y", "x"), attrs={"units": file_content_units[key]}) else: file_content[key] = DataArray(val) else: - msg = 'Wrong Test Reader for file_type {}'.format(filetype_info['file_type']) + msg = "Wrong Test Reader for file_type {}".format(filetype_info["file_type"]) raise AssertionError(msg) return file_content @@ -93,9 +93,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.mimic_TPW2_nc import MimicTPW2FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(MimicTPW2FileHandler, '__bases__', (FakeNetCDF4FileHandlerMimic,)) + self.p = mock.patch.object(MimicTPW2FileHandler, "__bases__", (FakeNetCDF4FileHandlerMimic,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -108,7 +108,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'comp20190619.130000.nc', + "comp20190619.130000.nc", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -119,16 +119,16 @@ def test_load_mimic(self): """Load Mimic data.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.mimic_TPW2_nc.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.mimic_TPW2_nc.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'comp20190619.130000.nc', + 
"comp20190619.130000.nc", ]) r.create_filehandlers(loadables) - ds = r.load(['tpwGrid']) + ds = r.load(["tpwGrid"]) self.assertEqual(len(ds), 1) for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'aggregated microwave') - self.assertEqual(d.attrs['sensor'], 'mimic') - self.assertIn('area', d.attrs) - self.assertIn('units', d.attrs) - self.assertIsNotNone(d.attrs['area']) + self.assertEqual(d.attrs["platform_shortname"], "aggregated microwave") + self.assertEqual(d.attrs["sensor"], "mimic") + self.assertIn("area", d.attrs) + self.assertIn("units", d.attrs) + self.assertIsNotNone(d.attrs["area"]) diff --git a/satpy/tests/reader_tests/test_mirs.py b/satpy/tests/reader_tests/test_mirs.py index b726a519e5..69f5543411 100644 --- a/satpy/tests/reader_tests/test_mirs.py +++ b/satpy/tests/reader_tests/test_mirs.py @@ -46,17 +46,17 @@ FREQ = xr.DataArray([23.8, 31.4, 50.3, 51.76, 52.8, 53.596, 54.4, 54.94, 55.5, 57.29, 57.29, 57.29, 57.29, 57.29, 57.29, 88.2, 165.5, 183.31, 183.31, 183.31, 183.31, 183.31][:N_CHANNEL], - dims='Channel', - attrs={'description': "Central Frequencies (GHz)"}) + dims="Channel", + attrs={"description": "Central Frequencies (GHz)"}) POLO = xr.DataArray([2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, - 3, 3, 3][:N_CHANNEL], dims='Channel', - attrs={'description': "Polarizations"}) - -DS_IDS = ['RR', 'longitude', 'latitude'] -TEST_VARS = ['btemp_88v', 'btemp_165h', - 'btemp_23v', 'RR', 'Sfc_type'] -DEFAULT_UNITS = {'btemp_88v': 'K', 'btemp_165h': 'K', - 'btemp_23v': 'K', 'RR': 'mm/hr', 'Sfc_type': "1"} + 3, 3, 3][:N_CHANNEL], dims="Channel", + attrs={"description": "Polarizations"}) + +DS_IDS = ["RR", "longitude", "latitude"] +TEST_VARS = ["btemp_88v", "btemp_165h", + "btemp_23v", "RR", "Sfc_type"] +DEFAULT_UNITS = {"btemp_88v": "K", "btemp_165h": "K", + "btemp_23v": "K", "RR": "mm/hr", "Sfc_type": "1"} PLATFORM = {"M2": "metop-a", "NPP": "npp", "GPM": "gpm"} SENSOR = {"m2": "amsu-mhs", "npp": "atms", "gpm": "GPI"} @@ -96,20 +96,20 @@ def fake_coeff_from_fn(fn): coeff_str = [] for idx in range(1, N_CHANNEL + 1): nx = idx - 1 - coeff_str.append('\n') - next_line = ' {} {} {}\n'.format(idx, all_nchx[nx], ameans[nx]) + coeff_str.append("\n") + next_line = " {} {} {}\n".format(idx, all_nchx[nx], ameans[nx]) coeff_str.append(next_line) - next_line = ' {}\n'.format(" ".join([str(x) for x in locations[idx - 1]])) + next_line = " {}\n".format(" ".join([str(x) for x in locations[idx - 1]])) coeff_str.append(next_line) for fov in range(1, N_FOV+1): random_coeff = np.random.rand(all_nchx[nx]) random_coeff = np.ones(all_nchx[nx]) - str_coeff = ' '.join([str(x) for x in random_coeff]) + str_coeff = " ".join([str(x) for x in random_coeff]) random_means = np.random.uniform(261, 267, all_nchx[nx]) random_means = np.zeros(all_nchx[nx]) - str_means = ' '.join([str(x) for x in random_means]) + str_means = " ".join([str(x) for x in random_means]) error_val = np.random.uniform(0, 4) - coeffs_line = ' {:>2} {:>2} {} {} {}\n'.format(idx, fov, + coeffs_line = " {:>2} {:>2} {} {} {}\n".format(idx, fov, str_coeff, str_means, error_val) @@ -122,50 +122,50 @@ def _get_datasets_with_attributes(**kwargs): """Represent files with two resolution of variables in them (ex. OCEAN).""" bt = xr.DataArray(np.linspace(1830, 3930, N_SCANLINE * N_FOV * N_CHANNEL). 
reshape(N_SCANLINE, N_FOV, N_CHANNEL), - attrs={'long_name': "Channel Temperature (K)", - 'units': "Kelvin", - 'coordinates': "Longitude Latitude Freq", - 'scale_factor': 0.01, - '_FillValue': -999, - 'valid_range': [0, 50000]}, - dims=('Scanline', 'Field_of_view', 'Channel')) + attrs={"long_name": "Channel Temperature (K)", + "units": "Kelvin", + "coordinates": "Longitude Latitude Freq", + "scale_factor": 0.01, + "_FillValue": -999, + "valid_range": [0, 50000]}, + dims=("Scanline", "Field_of_view", "Channel")) rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV)), - attrs={'long_name': "Rain Rate (mm/hr)", - 'units': "mm/hr", - 'coordinates': "Longitude Latitude", - 'scale_factor': 0.1, - '_FillValue': -999, - 'valid_range': [0, 1000]}, - dims=('Scanline', 'Field_of_view')) + attrs={"long_name": "Rain Rate (mm/hr)", + "units": "mm/hr", + "coordinates": "Longitude Latitude", + "scale_factor": 0.1, + "_FillValue": -999, + "valid_range": [0, 1000]}, + dims=("Scanline", "Field_of_view")) sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV)), - attrs={'description': "type of surface:0-ocean," + + attrs={"description": "type of surface:0-ocean," + "1-sea ice,2-land,3-snow", - 'units': "1", - 'coordinates': "Longitude Latitude", - '_FillValue': -999, - 'valid_range': [0, 3] + "units": "1", + "coordinates": "Longitude Latitude", + "_FillValue": -999, + "valid_range": [0, 3] }, - dims=('Scanline', 'Field_of_view')) + dims=("Scanline", "Field_of_view")) latitude = xr.DataArray(DEFAULT_LAT.reshape(DEFAULT_2D_SHAPE), - attrs={'long_name': + attrs={"long_name": "Latitude of the view (-90,90)"}, - dims=('Scanline', 'Field_of_view')) + dims=("Scanline", "Field_of_view")) longitude = xr.DataArray(DEFAULT_LON.reshape(DEFAULT_2D_SHAPE), - attrs={'long_name': + attrs={"long_name": "Longitude of the view (-180,180)"}, - dims=('Scanline', 'Field_of_view')) + dims=("Scanline", "Field_of_view")) ds_vars = { - 'Freq': FREQ, - 'Polo': POLO, - 'BT': bt, - 'RR': rr, - 'Sfc_type': sfc_type, - 'Latitude': latitude, - 'Longitude': longitude + "Freq": FREQ, + "Polo": POLO, + "BT": bt, + "RR": rr, + "Sfc_type": sfc_type, + "Latitude": latitude, + "Longitude": longitude } - attrs = {'missing_value': -999.} + attrs = {"missing_value": -999.} ds = xr.Dataset(ds_vars, attrs=attrs) ds = ds.assign_coords({"Freq": FREQ, "Latitude": latitude, "Longitude": longitude}) return ds @@ -175,38 +175,38 @@ def _get_datasets_with_less_attributes(): """Represent files with two resolution of variables in them (ex. OCEAN).""" bt = xr.DataArray(np.linspace(1830, 3930, N_SCANLINE * N_FOV * N_CHANNEL). 
reshape(N_SCANLINE, N_FOV, N_CHANNEL), - attrs={'long_name': "Channel Temperature (K)", - 'scale_factor': 0.01}, - dims=('Scanline', 'Field_of_view', 'Channel')) + attrs={"long_name": "Channel Temperature (K)", + "scale_factor": 0.01}, + dims=("Scanline", "Field_of_view", "Channel")) rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV)), - attrs={'long_name': "Rain Rate (mm/hr)", - 'scale_factor': 0.1}, - dims=('Scanline', 'Field_of_view')) + attrs={"long_name": "Rain Rate (mm/hr)", + "scale_factor": 0.1}, + dims=("Scanline", "Field_of_view")) sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV)), - attrs={'description': "type of surface:0-ocean," + + attrs={"description": "type of surface:0-ocean," + "1-sea ice,2-land,3-snow"}, - dims=('Scanline', 'Field_of_view')) + dims=("Scanline", "Field_of_view")) latitude = xr.DataArray(DEFAULT_LAT.reshape(DEFAULT_2D_SHAPE), - attrs={'long_name': + attrs={"long_name": "Latitude of the view (-90,90)"}, - dims=('Scanline', 'Field_of_view')) + dims=("Scanline", "Field_of_view")) longitude = xr.DataArray(DEFAULT_LON.reshape(DEFAULT_2D_SHAPE), attrs={"long_name": "Longitude of the view (-180,180)"}, - dims=('Scanline', 'Field_of_view')) + dims=("Scanline", "Field_of_view")) ds_vars = { - 'Freq': FREQ, - 'Polo': POLO, - 'BT': bt, - 'RR': rr, - 'Sfc_type': sfc_type, - 'Longitude': longitude, - 'Latitude': latitude + "Freq": FREQ, + "Polo": POLO, + "BT": bt, + "RR": rr, + "Sfc_type": sfc_type, + "Longitude": longitude, + "Latitude": latitude } - attrs = {'missing_value': -999.} + attrs = {"missing_value": -999.} ds = xr.Dataset(ds_vars, attrs=attrs) ds = ds.assign_coords({"Freq": FREQ, "Latitude": latitude, "Longitude": longitude}) return ds @@ -227,7 +227,7 @@ class TestMirsL2_NcReader: def setup_method(self): """Read fake data.""" from satpy._config import config_search_paths - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) @pytest.mark.parametrize( ("filenames", "expected_loadables"), @@ -240,7 +240,7 @@ def setup_method(self): def test_reader_creation(self, filenames, expected_loadables): """Test basic initialization.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.mirs.xr.open_dataset') as od: + with mock.patch("satpy.readers.mirs.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) @@ -260,7 +260,7 @@ def test_reader_creation(self, filenames, expected_loadables): def test_available_datasets(self, filenames, expected_datasets): """Test that variables are dynamically discovered.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.mirs.xr.open_dataset') as od: + with mock.patch("satpy.readers.mirs.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) @@ -272,12 +272,12 @@ def test_available_datasets(self, filenames, expected_datasets): @staticmethod def _check_area(data_arr): from pyresample.geometry import SwathDefinition - area = data_arr.attrs['area'] + area = data_arr.attrs["area"] assert isinstance(area, SwathDefinition) @staticmethod def _check_fill(data_arr): - assert '_FillValue' not in data_arr.attrs + assert "_FillValue" not in data_arr.attrs if np.issubdtype(data_arr.dtype, np.floating): # we started with float32, it should stay that way assert 
data_arr.dtype.type == np.float64 @@ -285,23 +285,23 @@ def _check_fill(data_arr): @staticmethod def _check_valid_range(data_arr, test_valid_range): # valid_range is popped out of data_arr.attrs when it is applied - assert 'valid_range' not in data_arr.attrs + assert "valid_range" not in data_arr.attrs assert data_arr.data.min() >= test_valid_range[0] assert data_arr.data.max() <= test_valid_range[1] @staticmethod def _check_fill_value(data_arr, test_fill_value): - assert '_FillValue' not in data_arr.attrs + assert "_FillValue" not in data_arr.attrs assert not (data_arr.data == test_fill_value).any() @staticmethod def _check_attrs(data_arr, platform_name): attrs = data_arr.attrs - assert 'scale_factor' not in attrs - assert 'platform_name' in attrs - assert attrs['platform_name'] == platform_name - assert attrs['start_time'] == START_TIME - assert attrs['end_time'] == END_TIME + assert "scale_factor" not in attrs + assert "platform_name" in attrs + assert attrs["platform_name"] == platform_name + assert attrs["start_time"] == START_TIME + assert attrs["end_time"] == END_TIME @pytest.mark.parametrize( ("filenames", "loadable_ids", "platform_name"), @@ -312,18 +312,18 @@ def _check_attrs(data_arr, platform_name): ([OTHER_MIRS_L2_SWATH], TEST_VARS, "gpm"), ] ) - @pytest.mark.parametrize('reader_kw', [{}, {'limb_correction': False}]) + @pytest.mark.parametrize("reader_kw", [{}, {"limb_correction": False}]) def test_basic_load(self, filenames, loadable_ids, platform_name, reader_kw): """Test that variables are loaded properly.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.mirs.xr.open_dataset') as od: + with mock.patch("satpy.readers.mirs.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables, fh_kwargs=reader_kw) - with mock.patch('satpy.readers.mirs.read_atms_coeff_to_string') as \ - fd, mock.patch('satpy.readers.mirs.retrieve'): + with mock.patch("satpy.readers.mirs.read_atms_coeff_to_string") as \ + fd, mock.patch("satpy.readers.mirs.retrieve"): fd.side_effect = fake_coeff_from_fn loaded_data_arrs = r.load(loadable_ids) assert len(loaded_data_arrs) == len(loadable_ids) @@ -332,12 +332,12 @@ def test_basic_load(self, filenames, loadable_ids, for _data_id, data_arr in loaded_data_arrs.items(): data_arr = data_arr.compute() var_name = data_arr.attrs["name"] - if var_name not in ['latitude', 'longitude']: + if var_name not in ["latitude", "longitude"]: self._check_area(data_arr) self._check_fill(data_arr) self._check_attrs(data_arr, platform_name) - input_fake_data = test_data['BT'] if "btemp" in var_name \ + input_fake_data = test_data["BT"] if "btemp" in var_name \ else test_data[var_name] if "valid_range" in input_fake_data.attrs: valid_range = input_fake_data.attrs["valid_range"] @@ -346,9 +346,9 @@ def test_basic_load(self, filenames, loadable_ids, fill_value = input_fake_data.attrs["_FillValue"] self._check_fill_value(data_arr, fill_value) - sensor = data_arr.attrs['sensor'] - if reader_kw.get('limb_correction', True) and sensor == 'atms': + sensor = data_arr.attrs["sensor"] + if reader_kw.get("limb_correction", True) and sensor == "atms": fd.assert_called() else: fd.assert_not_called() - assert data_arr.attrs['units'] == DEFAULT_UNITS[var_name] + assert data_arr.attrs["units"] == DEFAULT_UNITS[var_name] diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 57ae3dfc31..5f8490151f 100644 
--- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -955,7 +955,7 @@ def test_xml_calibration_to_radiance(self): def test_xml_navigation(self): """Test the navigation.""" from pyproj import CRS - crs = CRS('EPSG:32616') + crs = CRS("EPSG:32616") dsid = make_dataid(name="B01", resolution=60) result = self.xml_tile_fh.get_area_def(dsid) diff --git a/satpy/tests/reader_tests/test_msu_gsa_l1b.py b/satpy/tests/reader_tests/test_msu_gsa_l1b.py index a5efc52be6..f55c9638c8 100644 --- a/satpy/tests/reader_tests/test_msu_gsa_l1b.py +++ b/satpy/tests/reader_tests/test_msu_gsa_l1b.py @@ -27,7 +27,7 @@ from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler from satpy.tests.utils import make_dataid -SOLCONST = '273.59' +SOLCONST = "273.59" class FakeHDF5FileHandler2(FakeHDF5FileHandler): @@ -35,70 +35,70 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): def _get_data(self, num_scans, num_cols): data = { - 'Data/resolution_1km/Solar_Zenith_Angle': + "Data/resolution_1km/Solar_Zenith_Angle": xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. }, - dims=('x', 'y')), - 'Geolocation/resolution_1km/Latitude': + dims=("x", "y")), + "Geolocation/resolution_1km/Latitude": xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. }, - dims=('x', 'y')), - 'Geolocation/resolution_1km/Longitude': + dims=("x", "y")), + "Geolocation/resolution_1km/Longitude": xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. }, - dims=('x', 'y')), - 'Data/resolution_1km/Radiance_01': + dims=("x", "y")), + "Data/resolution_1km/Radiance_01": xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999., 'F_solar_constant': SOLCONST + "scale": 0.01, "offset": 0., "fill_value": -999., "F_solar_constant": SOLCONST }, - dims=('x', 'y')), - 'Data/resolution_4km/Solar_Zenith_Angle': + dims=("x", "y")), + "Data/resolution_4km/Solar_Zenith_Angle": xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. }, - dims=('x', 'y')), - 'Geolocation/resolution_4km/Latitude': + dims=("x", "y")), + "Geolocation/resolution_4km/Latitude": xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. }, - dims=('x', 'y')), - 'Geolocation/resolution_4km/Longitude': + dims=("x", "y")), + "Geolocation/resolution_4km/Longitude": xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. }, - dims=('x', 'y')), - 'Data/resolution_4km/Brightness_Temperature_09': + dims=("x", "y")), + "Data/resolution_4km/Brightness_Temperature_09": xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. 
}, - dims=('x', 'y')), + dims=("x", "y")), } return data @@ -107,10 +107,10 @@ def get_test_content(self, filename, filename_info, filetype_info): num_scans = 20 num_cols = 2048 global_attrs = { - '/attr/timestamp_without_timezone': '2022-01-13T12:45:00', - '/attr/satellite_observation_point_height': '38500.0', - '/attr/satellite_observation_point_latitude': '71.25', - '/attr/satellite_observation_point_longitude': '21.44', + "/attr/timestamp_without_timezone": "2022-01-13T12:45:00", + "/attr/satellite_observation_point_height": "38500.0", + "/attr/satellite_observation_point_latitude": "71.25", + "/attr/satellite_observation_point_longitude": "21.44", } data = self._get_data(num_scans, num_cols) @@ -131,13 +131,13 @@ def setup_method(self): from satpy._config import config_search_paths from satpy.readers import load_reader from satpy.readers.msu_gsa_l1b import MSUGSAFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(MSUGSAFileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(MSUGSAFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True - filenames = ['ArcticaM1_202201131245.h5'] + filenames = ["ArcticaM1_202201131245.h5"] self.reader = load_reader(self.reader_configs) files = self.reader.select_files_from_pathnames(filenames) self.reader.create_filehandlers(files) @@ -148,34 +148,34 @@ def teardown_method(self): def test_irbt(self): """Test retrieval in brightness temperature.""" - ds_ids = [make_dataid(name='C09', calibration='brightness_temperature')] + ds_ids = [make_dataid(name="C09", calibration="brightness_temperature")] res = self.reader.load(ds_ids) - assert 'C09' in res - assert res['C09'].attrs['calibration'] == 'brightness_temperature' - assert res['C09'].attrs['platform_name'] == 'Arctica-M-N1' - assert res['C09'].attrs['sat_latitude'] == 71.25 - assert res['C09'].attrs['sat_longitude'] == 21.44 - assert res['C09'].attrs['sat_altitude'] == 38500. - assert res['C09'].attrs['resolution'] == 4000 + assert "C09" in res + assert res["C09"].attrs["calibration"] == "brightness_temperature" + assert res["C09"].attrs["platform_name"] == "Arctica-M-N1" + assert res["C09"].attrs["sat_latitude"] == 71.25 + assert res["C09"].attrs["sat_longitude"] == 21.44 + assert res["C09"].attrs["sat_altitude"] == 38500. 
+ assert res["C09"].attrs["resolution"] == 4000 def test_nocounts(self): """Test we can't get IR or VIS data as counts.""" - ds_ids = [make_dataid(name='C01', calibration='counts')] + ds_ids = [make_dataid(name="C01", calibration="counts")] with pytest.raises(KeyError): self.reader.load(ds_ids) - ds_ids = [make_dataid(name='C09', calibration='counts')] + ds_ids = [make_dataid(name="C09", calibration="counts")] with pytest.raises(KeyError): self.reader.load(ds_ids) def test_vis_cal(self): """Test that we can retrieve VIS data as both radiance and reflectance.""" - ds_ids = [make_dataid(name='C01', calibration='radiance')] + ds_ids = [make_dataid(name="C01", calibration="radiance")] res = self.reader.load(ds_ids) - rad = res['C01'].data - ds_ids = [make_dataid(name='C01', calibration='reflectance')] + rad = res["C01"].data + ds_ids = [make_dataid(name="C01", calibration="reflectance")] res = self.reader.load(ds_ids) - refl = res['C01'].data + refl = res["C01"].data # Check the RAD->REFL conversion np.testing.assert_allclose(100 * np.pi * rad / float(SOLCONST), refl) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 749386bb98..8a57507141 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -44,27 +44,27 @@ # - request attrs_exp: dict = { - 'platform': 'MET7', - 'raw_metadata': {'foo': 'bar'}, - 'sensor': 'MVIRI', - 'orbital_parameters': { - 'projection_longitude': 57.0, - 'projection_latitude': 0.0, - 'projection_altitude': 35785860.0, - 'satellite_actual_longitude': 57.1, - 'satellite_actual_latitude': 0.1, + "platform": "MET7", + "raw_metadata": {"foo": "bar"}, + "sensor": "MVIRI", + "orbital_parameters": { + "projection_longitude": 57.0, + "projection_latitude": 0.0, + "projection_altitude": 35785860.0, + "satellite_actual_longitude": 57.1, + "satellite_actual_latitude": 0.1, } } attrs_refl_exp = attrs_exp.copy() attrs_refl_exp.update( - {'sun_earth_distance_correction_applied': True, - 'sun_earth_distance_correction_factor': 1.} + {"sun_earth_distance_correction_applied": True, + "sun_earth_distance_correction_factor": 1.} ) -acq_time_vis_exp = [np.datetime64('1970-01-01 00:30'), - np.datetime64('1970-01-01 00:30'), - np.datetime64('1970-01-01 02:30'), - np.datetime64('1970-01-01 02:30')] +acq_time_vis_exp = [np.datetime64("1970-01-01 00:30"), + np.datetime64("1970-01-01 00:30"), + np.datetime64("1970-01-01 02:30"), + np.datetime64("1970-01-01 02:30")] vis_counts_exp = xr.DataArray( np.array( [[0., 17., 34., 51.], @@ -73,9 +73,9 @@ [204., 221., 238., 255]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_vis_exp), + "acq_time": ("y", acq_time_vis_exp), }, attrs=attrs_exp ) @@ -87,9 +87,9 @@ [235.48, 255.2, 274.92, 294.64]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_vis_exp), + "acq_time": ("y", acq_time_vis_exp), }, attrs=attrs_exp ) @@ -104,9 +104,9 @@ # (0, 0) and (2, 2) are NaN because radiance is NaN # (0, 2) is NaN because SZA >= 90 degrees # Last row/col is NaN due to SZA interpolation - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_vis_exp), + "acq_time": ("y", acq_time_vis_exp), }, attrs=attrs_refl_exp ) @@ -118,23 +118,23 @@ [1.3, 1.4, 1.5, 1.6]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_vis_exp), + "acq_time": ("y", acq_time_vis_exp), }, 
attrs=attrs_exp ) -acq_time_ir_wv_exp = [np.datetime64('1970-01-01 00:30'), - np.datetime64('1970-01-01 02:30')] +acq_time_ir_wv_exp = [np.datetime64("1970-01-01 00:30"), + np.datetime64("1970-01-01 02:30")] wv_counts_exp = xr.DataArray( np.array( [[0, 85], [170, 255]], dtype=np.uint8 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_ir_wv_exp), + "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) @@ -144,9 +144,9 @@ [8, 12.25]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_ir_wv_exp), + "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) @@ -156,9 +156,9 @@ [252.507448, 266.863289]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_ir_wv_exp), + "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) @@ -168,9 +168,9 @@ [170, 255]], dtype=np.uint8 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_ir_wv_exp), + "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) @@ -180,9 +180,9 @@ [165, 250]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_ir_wv_exp), + "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) @@ -192,9 +192,9 @@ [204.32955838, 223.28709913]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_ir_wv_exp), + "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) @@ -206,9 +206,9 @@ [0, 0, 0, 0]], dtype=np.uint8 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_vis_exp), + "acq_time": ("y", acq_time_vis_exp), }, attrs=attrs_exp ) @@ -220,7 +220,7 @@ [np.nan, np.nan, np.nan, np.nan]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), attrs=attrs_exp ) sza_ir_wv_exp = xr.DataArray( @@ -229,33 +229,33 @@ [0, 45]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), attrs=attrs_exp ) area_vis_exp = AreaDefinition( - area_id='geos_mviri_4x4', - proj_id='geos_mviri_4x4', - description='MVIRI Geostationary Projection', + area_id="geos_mviri_4x4", + proj_id="geos_mviri_4x4", + description="MVIRI Geostationary Projection", projection={ - 'proj': 'geos', - 'lon_0': 57.0, - 'h': ALTITUDE, - 'a': EQUATOR_RADIUS, - 'b': POLE_RADIUS + "proj": "geos", + "lon_0": 57.0, + "h": ALTITUDE, + "a": EQUATOR_RADIUS, + "b": POLE_RADIUS }, width=4, height=4, area_extent=[5621229.74392, 5621229.74392, -5621229.74392, -5621229.74392] ) area_ir_wv_exp = area_vis_exp.copy( - area_id='geos_mviri_2x2', - proj_id='geos_mviri_2x2', + area_id="geos_mviri_2x2", + proj_id="geos_mviri_2x2", width=2, height=2 ) -@pytest.fixture(name='fake_dataset') +@pytest.fixture(name="fake_dataset") def fixture_fake_dataset(): """Create fake dataset.""" count_ir = da.linspace(0, 255, 4, dtype=np.uint8).reshape(2, 2) @@ -277,55 +277,55 @@ def fixture_fake_dataset(): dtype=np.uint8 ) ) - time = np.arange(4).astype('datetime64[h]').reshape(2, 2) + time = np.arange(4).astype("datetime64[h]").reshape(2, 2) ds = xr.Dataset( data_vars={ - 'count_vis': (('y', 'x'), count_vis), - 'count_wv': (('y_ir_wv', 'x_ir_wv'), count_wv), - 'count_ir': (('y_ir_wv', 'x_ir_wv'), count_ir), - 'toa_bidirectional_reflectance_vis': vis_refl_exp / 100, - 'u_independent_toa_bidirectional_reflectance': u_vis_refl_exp / 100, - 'quality_pixel_bitmask': (('y', 'x'), mask), - 'solar_zenith_angle': (('y_tie', 'x_tie'), sza), - 'time_ir_wv': (('y_ir_wv', 'x_ir_wv'), time), - 'a_ir': -5.0, - 'b_ir': 1.0, - 'bt_a_ir': 10.0, - 'bt_b_ir': 
-1000.0, - 'a_wv': -0.5, - 'b_wv': 0.05, - 'bt_a_wv': 10.0, - 'bt_b_wv': -2000.0, - 'years_since_launch': 20.0, - 'a0_vis': 1.0, - 'a1_vis': 0.01, - 'a2_vis': -0.0001, - 'mean_count_space_vis': 1.0, - 'distance_sun_earth': 1.0, - 'solar_irradiance_vis': 650.0, - 'sub_satellite_longitude_start': 57.1, - 'sub_satellite_longitude_end': np.nan, - 'sub_satellite_latitude_start': np.nan, - 'sub_satellite_latitude_end': 0.1, + "count_vis": (("y", "x"), count_vis), + "count_wv": (("y_ir_wv", "x_ir_wv"), count_wv), + "count_ir": (("y_ir_wv", "x_ir_wv"), count_ir), + "toa_bidirectional_reflectance_vis": vis_refl_exp / 100, + "u_independent_toa_bidirectional_reflectance": u_vis_refl_exp / 100, + "quality_pixel_bitmask": (("y", "x"), mask), + "solar_zenith_angle": (("y_tie", "x_tie"), sza), + "time_ir_wv": (("y_ir_wv", "x_ir_wv"), time), + "a_ir": -5.0, + "b_ir": 1.0, + "bt_a_ir": 10.0, + "bt_b_ir": -1000.0, + "a_wv": -0.5, + "b_wv": 0.05, + "bt_a_wv": 10.0, + "bt_b_wv": -2000.0, + "years_since_launch": 20.0, + "a0_vis": 1.0, + "a1_vis": 0.01, + "a2_vis": -0.0001, + "mean_count_space_vis": 1.0, + "distance_sun_earth": 1.0, + "solar_irradiance_vis": 650.0, + "sub_satellite_longitude_start": 57.1, + "sub_satellite_longitude_end": np.nan, + "sub_satellite_latitude_start": np.nan, + "sub_satellite_latitude_end": 0.1, }, coords={ - 'y': [1, 2, 3, 4], - 'x': [1, 2, 3, 4], - 'y_ir_wv': [1, 2], - 'x_ir_wv': [1, 2], - 'y_tie': [1, 2], - 'x_tie': [1, 2] + "y": [1, 2, 3, 4], + "x": [1, 2, 3, 4], + "y_ir_wv": [1, 2], + "x_ir_wv": [1, 2], + "y_tie": [1, 2], + "x_tie": [1, 2] }, - attrs={'foo': 'bar'} + attrs={"foo": "bar"} ) - ds['count_ir'].attrs['ancillary_variables'] = 'a_ir b_ir' - ds['count_wv'].attrs['ancillary_variables'] = 'a_wv b_wv' + ds["count_ir"].attrs["ancillary_variables"] = "a_ir b_ir" + ds["count_wv"].attrs["ancillary_variables"] = "a_wv b_wv" return ds @pytest.fixture( - name='file_handler', + name="file_handler", params=[FiduceoMviriEasyFcdrFileHandler, FiduceoMviriFullFcdrFileHandler] ) @@ -334,21 +334,21 @@ def fixture_file_handler(fake_dataset, request): marker = request.node.get_closest_marker("file_handler_data") mask_bad_quality = True if marker: - mask_bad_quality = marker.kwargs['mask_bad_quality'] + mask_bad_quality = marker.kwargs["mask_bad_quality"] fh_class = request.param - with mock.patch('satpy.readers.mviri_l1b_fiduceo_nc.xr.open_dataset') as open_dataset: + with mock.patch("satpy.readers.mviri_l1b_fiduceo_nc.xr.open_dataset") as open_dataset: open_dataset.return_value = fake_dataset return fh_class( - filename='filename', - filename_info={'platform': 'MET7', - 'sensor': 'MVIRI', - 'projection_longitude': '57.0'}, - filetype_info={'foo': 'bar'}, + filename="filename", + filename_info={"platform": "MET7", + "sensor": "MVIRI", + "projection_longitude": "57.0"}, + filetype_info={"foo": "bar"}, mask_bad_quality=mask_bad_quality ) -@pytest.fixture(name='reader') +@pytest.fixture(name="reader") def fixture_reader(): """Return MVIRI FIDUCEO FCDR reader.""" from satpy._config import config_search_paths @@ -369,35 +369,35 @@ def test_init(self, file_handler): assert file_handler.mask_bad_quality is True @pytest.mark.parametrize( - ('name', 'calibration', 'resolution', 'expected'), + ("name", "calibration", "resolution", "expected"), [ - ('VIS', 'counts', 2250, vis_counts_exp), - ('VIS', 'radiance', 2250, vis_rad_exp), - ('VIS', 'reflectance', 2250, vis_refl_exp), - ('WV', 'counts', 4500, wv_counts_exp), - ('WV', 'radiance', 4500, wv_rad_exp), - ('WV', 'brightness_temperature', 4500, 
wv_bt_exp), - ('IR', 'counts', 4500, ir_counts_exp), - ('IR', 'radiance', 4500, ir_rad_exp), - ('IR', 'brightness_temperature', 4500, ir_bt_exp), - ('quality_pixel_bitmask', None, 2250, quality_pixel_bitmask_exp), - ('solar_zenith_angle', None, 2250, sza_vis_exp), - ('solar_zenith_angle', None, 4500, sza_ir_wv_exp), - ('u_independent_toa_bidirectional_reflectance', None, 4500, u_vis_refl_exp) + ("VIS", "counts", 2250, vis_counts_exp), + ("VIS", "radiance", 2250, vis_rad_exp), + ("VIS", "reflectance", 2250, vis_refl_exp), + ("WV", "counts", 4500, wv_counts_exp), + ("WV", "radiance", 4500, wv_rad_exp), + ("WV", "brightness_temperature", 4500, wv_bt_exp), + ("IR", "counts", 4500, ir_counts_exp), + ("IR", "radiance", 4500, ir_rad_exp), + ("IR", "brightness_temperature", 4500, ir_bt_exp), + ("quality_pixel_bitmask", None, 2250, quality_pixel_bitmask_exp), + ("solar_zenith_angle", None, 2250, sza_vis_exp), + ("solar_zenith_angle", None, 4500, sza_ir_wv_exp), + ("u_independent_toa_bidirectional_reflectance", None, 4500, u_vis_refl_exp) ] ) def test_get_dataset(self, file_handler, name, calibration, resolution, expected): """Test getting datasets.""" - id_keys = {'name': name, 'resolution': resolution} + id_keys = {"name": name, "resolution": resolution} if calibration: - id_keys['calibration'] = calibration + id_keys["calibration"] = calibration dataset_id = make_dataid(**id_keys) - dataset_info = {'platform': 'MET7'} + dataset_info = {"platform": "MET7"} is_easy = isinstance(file_handler, FiduceoMviriEasyFcdrFileHandler) - is_vis = name == 'VIS' - is_refl = calibration == 'reflectance' + is_vis = name == "VIS" + is_refl = calibration == "reflectance" if is_easy and is_vis and not is_refl: # VIS counts/radiance not available in easy FCDR with pytest.raises(ValueError): @@ -412,34 +412,34 @@ def test_get_dataset_corrupt(self, file_handler): """Test getting datasets with known corruptions.""" # Time may have different names and satellite position might be missing file_handler.nc.nc = file_handler.nc.nc.rename( - {'time_ir_wv': 'time'} + {"time_ir_wv": "time"} ) file_handler.nc.nc = file_handler.nc.nc.drop_vars( - ['sub_satellite_longitude_start'] + ["sub_satellite_longitude_start"] ) dataset_id = make_dataid( - name='VIS', - calibration='reflectance', + name="VIS", + calibration="reflectance", resolution=2250 ) - ds = file_handler.get_dataset(dataset_id, {'platform': 'MET7'}) - assert 'actual_satellite_longitude' not in ds.attrs['orbital_parameters'] - assert 'actual_satellite_latitude' not in ds.attrs['orbital_parameters'] + ds = file_handler.get_dataset(dataset_id, {"platform": "MET7"}) + assert "actual_satellite_longitude" not in ds.attrs["orbital_parameters"] + assert "actual_satellite_latitude" not in ds.attrs["orbital_parameters"] xr.testing.assert_allclose(ds, vis_refl_exp) @mock.patch( - 'satpy.readers.mviri_l1b_fiduceo_nc.Interpolator.interp_acq_time' + "satpy.readers.mviri_l1b_fiduceo_nc.Interpolator.interp_acq_time" ) def test_time_cache(self, interp_acq_time, file_handler): """Test caching of acquisition times.""" dataset_id = make_dataid( - name='VIS', + name="VIS", resolution=2250, - calibration='reflectance' + calibration="reflectance" ) info = {} - interp_acq_time.return_value = xr.DataArray([1, 2, 3, 4], dims='y') + interp_acq_time.return_value = xr.DataArray([1, 2, 3, 4], dims="y") # Cache init file_handler.get_dataset(dataset_id, info) @@ -451,22 +451,22 @@ def test_time_cache(self, interp_acq_time, file_handler): interp_acq_time.assert_not_called() # Cache miss - 
interp_acq_time.return_value = xr.DataArray([1, 2], dims='y') + interp_acq_time.return_value = xr.DataArray([1, 2], dims="y") another_id = make_dataid( - name='IR', + name="IR", resolution=4500, - calibration='brightness_temperature' + calibration="brightness_temperature" ) interp_acq_time.reset_mock() file_handler.get_dataset(another_id, info) interp_acq_time.assert_called() @mock.patch( - 'satpy.readers.mviri_l1b_fiduceo_nc.Interpolator.interp_tiepoints' + "satpy.readers.mviri_l1b_fiduceo_nc.Interpolator.interp_tiepoints" ) def test_angle_cache(self, interp_tiepoints, file_handler): """Test caching of angle datasets.""" - dataset_id = make_dataid(name='solar_zenith_angle', + dataset_id = make_dataid(name="solar_zenith_angle", resolution=2250) info = {} @@ -480,21 +480,21 @@ def test_angle_cache(self, interp_tiepoints, file_handler): interp_tiepoints.assert_not_called() # Cache miss - another_id = make_dataid(name='solar_zenith_angle', + another_id = make_dataid(name="solar_zenith_angle", resolution=4500) interp_tiepoints.reset_mock() file_handler.get_dataset(another_id, info) interp_tiepoints.assert_called() @pytest.mark.parametrize( - ('name', 'resolution', 'area_exp'), + ("name", "resolution", "area_exp"), [ - ('VIS', 2250, area_vis_exp), - ('WV', 4500, area_ir_wv_exp), - ('IR', 4500, area_ir_wv_exp), - ('quality_pixel_bitmask', 2250, area_vis_exp), - ('solar_zenith_angle', 2250, area_vis_exp), - ('solar_zenith_angle', 4500, area_ir_wv_exp) + ("VIS", 2250, area_vis_exp), + ("WV", 4500, area_ir_wv_exp), + ("IR", 4500, area_ir_wv_exp), + ("quality_pixel_bitmask", 2250, area_vis_exp), + ("solar_zenith_angle", 2250, area_vis_exp), + ("solar_zenith_angle", 4500, area_ir_wv_exp) ] ) def test_get_area_definition(self, file_handler, name, resolution, @@ -508,7 +508,7 @@ def test_get_area_definition(self, file_handler, name, resolution, assert b == b_exp assert area.width == area_exp.width assert area.height == area_exp.height - for key in ['h', 'lon_0', 'proj', 'units']: + for key in ["h", "lon_0", "proj", "units"]: assert area.proj_dict[key] == area_exp.proj_dict[key] np.testing.assert_allclose(area.area_extent, area_exp.area_extent) @@ -516,38 +516,38 @@ def test_calib_exceptions(self, file_handler): """Test calibration exceptions.""" with pytest.raises(KeyError): file_handler.get_dataset( - make_dataid(name='solar_zenith_angle', calibration='counts'), + make_dataid(name="solar_zenith_angle", calibration="counts"), {} ) with pytest.raises(KeyError): file_handler.get_dataset( make_dataid( - name='VIS', + name="VIS", resolution=2250, - calibration='brightness_temperature'), + calibration="brightness_temperature"), {} ) with pytest.raises(KeyError): file_handler.get_dataset( make_dataid( - name='IR', + name="IR", resolution=4500, - calibration='reflectance'), + calibration="reflectance"), {} ) if isinstance(file_handler, FiduceoMviriEasyFcdrFileHandler): with pytest.raises(KeyError): file_handler.get_dataset( - {'name': 'VIS', 'calibration': 'counts'}, + {"name": "VIS", "calibration": "counts"}, {} ) # not available in easy FCDR @pytest.mark.file_handler_data(mask_bad_quality=False) def test_bad_quality_warning(self, file_handler): """Test warning about bad VIS quality.""" - file_handler.nc.nc['quality_pixel_bitmask'] = 2 - vis = make_dataid(name='VIS', resolution=2250, - calibration='reflectance') + file_handler.nc.nc["quality_pixel_bitmask"] = 2 + vis = make_dataid(name="VIS", resolution=2250, + calibration="reflectance") with pytest.warns(UserWarning): file_handler.get_dataset(vis, {}) @@ 
-579,25 +579,25 @@ def test_reassign_coords(self): """ nc = mock.MagicMock( coords={ - 'y': [.1, .2], - 'x': [.3, .4] + "y": [.1, .2], + "x": [.3, .4] }, - dims=('y', 'x') + dims=("y", "x") ) nc.__getitem__.return_value = xr.DataArray( [[1, 2], [3, 4]], - dims=('y', 'x') + dims=("y", "x") ) foo_exp = xr.DataArray( [[1, 2], [3, 4]], - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'y': [.1, .2], - 'x': [.3, .4] + "y": [.1, .2], + "x": [.3, .4] } ) ds = DatasetWrapper(nc) - foo = ds['foo'] + foo = ds["foo"] xr.testing.assert_equal(foo, foo_exp) diff --git a/satpy/tests/reader_tests/test_mws_l1b_nc.py b/satpy/tests/reader_tests/test_mws_l1b_nc.py index a304b18782..89a6eb4700 100644 --- a/satpy/tests/reader_tests/test_mws_l1b_nc.py +++ b/satpy/tests/reader_tests/test_mws_l1b_nc.py @@ -49,23 +49,23 @@ def reader(fake_file): return MWSL1BFile( filename=fake_file, filename_info={ - 'start_time': ( - datetime.fromisoformat('2000-01-01T01:00:00') + "start_time": ( + datetime.fromisoformat("2000-01-01T01:00:00") ), - 'end_time': ( - datetime.fromisoformat('2000-01-01T02:00:00') + "end_time": ( + datetime.fromisoformat("2000-01-01T02:00:00") ), - 'creation_time': ( - datetime.fromisoformat('2000-01-01T03:00:00') + "creation_time": ( + datetime.fromisoformat("2000-01-01T03:00:00") ), }, filetype_info={ - 'longitude': 'data/navigation_data/mws_lon', - 'latitude': 'data/navigation_data/mws_lat', - 'solar_azimuth': 'data/navigation/mws_solar_azimuth_angle', - 'solar_zenith': 'data/navigation/mws_solar_zenith_angle', - 'satellite_azimuth': 'data/navigation/mws_satellite_azimuth_angle', - 'satellite_zenith': 'data/navigation/mws_satellite_zenith_angle', + "longitude": "data/navigation_data/mws_lon", + "latitude": "data/navigation_data/mws_lat", + "solar_azimuth": "data/navigation/mws_solar_azimuth_angle", + "solar_zenith": "data/navigation/mws_solar_zenith_angle", + "satellite_azimuth": "data/navigation/mws_satellite_azimuth_angle", + "satellite_zenith": "data/navigation/mws_satellite_zenith_angle", } ) @@ -73,7 +73,7 @@ def reader(fake_file): @pytest.fixture def fake_file(tmp_path): """Return file path to level-1b file.""" - file_path = tmp_path / 'test_file_mws_l1b.nc' + file_path = tmp_path / "test_file_mws_l1b.nc" writer = MWSL1BFakeFileWriter(file_path) writer.write() yield file_path @@ -88,11 +88,11 @@ def __init__(self, file_path): def write(self): """Write fake data to file.""" - with Dataset(self.file_path, 'w') as dataset: + with Dataset(self.file_path, "w") as dataset: self._write_attributes(dataset) self._write_status_group(dataset) self._write_quality_group(dataset) - data_group = dataset.createGroup('data') + data_group = dataset.createGroup("data") self._create_scan_dimensions(data_group) self._write_navigation_data_group(data_group) self._write_calibration_data_group(data_group) @@ -109,45 +109,45 @@ def _write_attributes(dataset): @staticmethod def _write_status_group(dataset): """Write the status group.""" - group = dataset.createGroup('/status/satellite') + group = dataset.createGroup("/status/satellite") subsat_latitude_start = group.createVariable( - 'subsat_latitude_start', "f4" + "subsat_latitude_start", "f4" ) subsat_latitude_start[:] = 52.19 subsat_longitude_start = group.createVariable( - 'subsat_longitude_start', "f4" + "subsat_longitude_start", "f4" ) subsat_longitude_start[:] = 23.26 subsat_latitude_end = group.createVariable( - 'subsat_latitude_end', "f4" + "subsat_latitude_end", "f4" ) subsat_latitude_end[:] = 60.00 subsat_longitude_end = group.createVariable( - 
'subsat_longitude_end', "f4" + "subsat_longitude_end", "f4" ) subsat_longitude_end[:] = 2.47 @staticmethod def _write_quality_group(dataset): """Write the quality group.""" - group = dataset.createGroup('quality') + group = dataset.createGroup("quality") group.overall_quality_flag = 0 duration_of_product = group.createVariable( - 'duration_of_product', "f4" + "duration_of_product", "f4" ) duration_of_product[:] = 5944. @staticmethod def _write_navigation_data_group(dataset): """Write the navigation data group.""" - group = dataset.createGroup('navigation') - dimensions = ('n_scans', 'n_fovs') + group = dataset.createGroup("navigation") + dimensions = ("n_scans", "n_fovs") shape = (N_SCANS, N_FOVS) longitude = group.createVariable( - 'mws_lon', + "mws_lon", np.int32, dimensions=dimensions, ) @@ -157,14 +157,14 @@ def _write_navigation_data_group(dataset): longitude[:] = 35.7535 * np.ones(shape) latitude = group.createVariable( - 'mws_lat', + "mws_lat", np.float32, dimensions=dimensions, ) latitude[:] = 2. * np.ones(shape) azimuth = group.createVariable( - 'mws_solar_azimuth_angle', + "mws_solar_azimuth_angle", np.float32, dimensions=dimensions, ) @@ -173,19 +173,19 @@ def _write_navigation_data_group(dataset): @staticmethod def _create_scan_dimensions(dataset): """Create the scan/fovs dimensions.""" - dataset.createDimension('n_channels', N_CHANNELS) - dataset.createDimension('n_channels_os', N_CHANNELS_OS) - dataset.createDimension('n_scans', N_SCANS) - dataset.createDimension('n_fovs', N_FOVS) - dataset.createDimension('n_prts', N_PRTS) - dataset.createDimension('n_fovs_cal', N_FOVS_CAL) + dataset.createDimension("n_channels", N_CHANNELS) + dataset.createDimension("n_channels_os", N_CHANNELS_OS) + dataset.createDimension("n_scans", N_SCANS) + dataset.createDimension("n_fovs", N_FOVS) + dataset.createDimension("n_prts", N_PRTS) + dataset.createDimension("n_fovs_cal", N_FOVS_CAL) @staticmethod def _write_calibration_data_group(dataset): """Write the calibration data group.""" - group = dataset.createGroup('calibration') + group = dataset.createGroup("calibration") toa_bt = group.createVariable( - 'mws_toa_brightness_temperature', np.float32, dimensions=('n_scans', 'n_fovs', 'n_channels',) + "mws_toa_brightness_temperature", np.float32, dimensions=("n_scans", "n_fovs", "n_channels",) ) toa_bt.scale_factor = 1.0 # 1.0E-8 toa_bt.add_offset = 0.0 @@ -195,9 +195,9 @@ def _write_calibration_data_group(dataset): @staticmethod def _write_measurement_data_group(dataset): """Write the measurement data group.""" - group = dataset.createGroup('measurement') + group = dataset.createGroup("measurement") counts = group.createVariable( - 'mws_earth_view_counts', np.int32, dimensions=('n_scans', 'n_fovs', 'n_channels',) + "mws_earth_view_counts", np.int32, dimensions=("n_scans", "n_fovs", "n_channels",) ) counts[:] = 24100 * np.ones((N_SCANS, N_FOVS, N_CHANNELS), dtype=np.int32) @@ -239,9 +239,9 @@ def test_sub_satellite_latitude_end(self, reader): def test_get_dataset_get_channeldata_counts(self, reader): """Test getting channel data.""" - dataset_id = {'name': '1', 'units': None, - 'calibration': 'counts'} - dataset_info = {'file_key': 'data/measurement/mws_earth_view_counts'} + dataset_id = {"name": "1", "units": None, + "calibration": "counts"} + dataset_info = {"file_key": "data/measurement/mws_earth_view_counts"} dataset = reader.get_dataset(dataset_id, dataset_info) expected_bt = np.array([[24100, 24100], @@ -251,9 +251,9 @@ def test_get_dataset_get_channeldata_counts(self, reader): def 
test_get_dataset_get_channeldata_bts(self, reader): """Test getting channel data.""" - dataset_id = {'name': '1', 'units': 'K', - 'calibration': 'brightness_temperature'} - dataset_info = {'file_key': 'data/calibration/mws_toa_brightness_temperature'} + dataset_id = {"name": "1", "units": "K", + "calibration": "brightness_temperature"} + dataset_info = {"file_key": "data/calibration/mws_toa_brightness_temperature"} dataset = reader.get_dataset(dataset_id, dataset_info) @@ -268,15 +268,15 @@ def test_get_dataset_get_channeldata_bts(self, reader): def test_get_dataset_return_none_if_data_not_exist(self, reader): """Test get dataset return none if data does not exist.""" - dataset_id = {'name': 'unknown'} - dataset_info = {'file_key': 'non/existing/data'} + dataset_id = {"name": "unknown"} + dataset_info = {"file_key": "non/existing/data"} dataset = reader.get_dataset(dataset_id, dataset_info) assert dataset is None def test_get_navigation_longitudes(self, caplog, fake_file, reader): """Test get the longitudes.""" - dataset_id = {'name': 'mws_lon'} - dataset_info = {'file_key': 'data/navigation_data/mws_lon'} + dataset_id = {"name": "mws_lon"} + dataset_info = {"file_key": "data/navigation_data/mws_lon"} dataset = reader.get_dataset(dataset_id, dataset_info) @@ -291,8 +291,8 @@ def test_get_navigation_longitudes(self, caplog, fake_file, reader): def test_get_dataset_logs_debug_message(self, caplog, fake_file, reader): """Test get dataset return none if data does not exist.""" - dataset_id = {'name': 'mws_lon'} - dataset_info = {'file_key': 'data/navigation_data/mws_lon'} + dataset_id = {"name": "mws_lon"} + dataset_info = {"file_key": "data/navigation_data/mws_lon"} with caplog.at_level(logging.DEBUG): _ = reader.get_dataset(dataset_id, dataset_info) @@ -302,8 +302,8 @@ def test_get_dataset_logs_debug_message(self, caplog, fake_file, reader): def test_get_dataset_aux_data_not_supported(self, reader): """Test get auxillary dataset not supported.""" - dataset_id = {'name': 'scantime_utc'} - dataset_info = {'file_key': 'non/existing'} + dataset_id = {"name": "scantime_utc"} + dataset_info = {"file_key": "non/existing"} with pytest.raises(NotImplementedError) as exec_info: _ = reader.get_dataset(dataset_id, dataset_info) @@ -312,8 +312,8 @@ def test_get_dataset_aux_data_not_supported(self, reader): def test_get_dataset_aux_data_expected_data_missing(self, caplog, reader): """Test get auxillary dataset which is not present but supposed to be in file.""" - dataset_id = {'name': 'surface_type'} - dataset_info = {'file_key': 'non/existing'} + dataset_id = {"name": "surface_type"} + dataset_info = {"file_key": "non/existing"} with caplog.at_level(logging.ERROR): with pytest.raises(KeyError) as exec_info: @@ -325,9 +325,9 @@ def test_get_dataset_aux_data_expected_data_missing(self, caplog, reader): " no valid Dataset created") assert log_output in caplog.text - @pytest.mark.parametrize('dims', ( - ('n_scans', 'n_fovs'), - ('x', 'y'), + @pytest.mark.parametrize("dims", ( + ("n_scans", "n_fovs"), + ("x", "y"), )) def test_standardize_dims(self, reader, dims): """Test standardize dims.""" @@ -336,7 +336,7 @@ def test_standardize_dims(self, reader, dims): dims=dims, ) standardized = reader._standardize_dims(variable) - assert standardized.dims == ('y', 'x') + assert standardized.dims == ("y", "x") @staticmethod def test_drop_coords(reader): @@ -344,7 +344,7 @@ def test_drop_coords(reader): coords = "dummy" data = xr.DataArray( np.ones(10), - dims=('y'), + dims=("y"), coords={coords: 0}, ) assert coords in 
data.coords @@ -355,22 +355,22 @@ def test_get_global_attributes(self, reader): """Test get global attributes.""" attributes = reader._get_global_attributes() assert attributes == { - 'filename': reader.filename, - 'start_time': datetime(2000, 1, 2, 3, 4, 5), - 'end_time': datetime(2000, 1, 2, 4, 5, 6), - 'spacecraft_name': 'Metop-SG-A1', - 'sensor': 'MWS', - 'filename_start_time': datetime(2000, 1, 1, 1, 0), - 'filename_end_time': datetime(2000, 1, 1, 2, 0), - 'platform_name': 'Metop-SG-A1', - 'quality_group': { - 'duration_of_product': np.array(5944., dtype=np.float32), - 'overall_quality_flag': 0, + "filename": reader.filename, + "start_time": datetime(2000, 1, 2, 3, 4, 5), + "end_time": datetime(2000, 1, 2, 4, 5, 6), + "spacecraft_name": "Metop-SG-A1", + "sensor": "MWS", + "filename_start_time": datetime(2000, 1, 1, 1, 0), + "filename_end_time": datetime(2000, 1, 1, 2, 0), + "platform_name": "Metop-SG-A1", + "quality_group": { + "duration_of_product": np.array(5944., dtype=np.float32), + "overall_quality_flag": 0, } } @patch( - 'satpy.readers.mws_l1b.MWSL1BFile._get_global_attributes', + "satpy.readers.mws_l1b.MWSL1BFile._get_global_attributes", return_value={"mocked_global_attributes": True}, ) def test_manage_attributes(self, mock, reader): @@ -379,17 +379,17 @@ def test_manage_attributes(self, mock, reader): np.ones(N_SCANS), attrs={"season": "summer"}, ) - dataset_info = {'name': '1', 'units': 'K'} + dataset_info = {"name": "1", "units": "K"} variable = reader._manage_attributes(variable, dataset_info) assert variable.attrs == { - 'season': 'summer', - 'units': 'K', - 'name': '1', - 'mocked_global_attributes': True, + "season": "summer", + "units": "K", + "name": "1", + "mocked_global_attributes": True, } -@pytest.mark.parametrize("name, index", [('1', 0), ('2', 1), ('24', 23)]) +@pytest.mark.parametrize("name, index", [("1", 0), ("2", 1), ("24", 23)]) def test_get_channel_index_from_name(name, index): """Test getting the MWS channel index from the channel name.""" ch_idx = get_channel_index_from_name(name) @@ -399,7 +399,7 @@ def test_get_channel_index_from_name(name, index): def test_get_channel_index_from_name_throw_exception(): """Test that an excpetion is thrown when getting the MWS channel index from an unsupported name.""" with pytest.raises(Exception) as excinfo: - _ = get_channel_index_from_name('channel 1') + _ = get_channel_index_from_name("channel 1") assert str(excinfo.value) == "Channel name 'channel 1' not supported" assert excinfo.type == AttributeError diff --git a/satpy/tests/reader_tests/test_netcdf_utils.py b/satpy/tests/reader_tests/test_netcdf_utils.py index 8570d8bd34..16dfc57a83 100644 --- a/satpy/tests/reader_tests/test_netcdf_utils.py +++ b/satpy/tests/reader_tests/test_netcdf_utils.py @@ -76,26 +76,26 @@ class TestNetCDF4FileHandler(unittest.TestCase): def setUp(self): """Create a test NetCDF4 file.""" from netCDF4 import Dataset - with Dataset('test.nc', 'w') as nc: + with Dataset("test.nc", "w") as nc: # Create dimensions - nc.createDimension('rows', 10) - nc.createDimension('cols', 100) + nc.createDimension("rows", 10) + nc.createDimension("cols", 100) # Create Group - g1 = nc.createGroup('test_group') + g1 = nc.createGroup("test_group") # Add datasets - ds1_f = g1.createVariable('ds1_f', np.float32, - dimensions=('rows', 'cols')) + ds1_f = g1.createVariable("ds1_f", np.float32, + dimensions=("rows", "cols")) ds1_f[:] = np.arange(10. 
* 100).reshape((10, 100)) - ds1_i = g1.createVariable('ds1_i', np.int32, - dimensions=('rows', 'cols')) + ds1_i = g1.createVariable("ds1_i", np.int32, + dimensions=("rows", "cols")) ds1_i[:] = np.arange(10 * 100).reshape((10, 100)) - ds2_f = nc.createVariable('ds2_f', np.float32, - dimensions=('rows', 'cols')) + ds2_f = nc.createVariable("ds2_f", np.float32, + dimensions=("rows", "cols")) ds2_f[:] = np.arange(10. * 100).reshape((10, 100)) - ds2_i = nc.createVariable('ds2_i', np.int32, - dimensions=('rows', 'cols')) + ds2_i = nc.createVariable("ds2_i", np.int32, + dimensions=("rows", "cols")) ds2_i[:] = np.arange(10 * 100).reshape((10, 100)) ds2_s = nc.createVariable("ds2_s", np.int8, dimensions=("rows",)) @@ -104,63 +104,63 @@ def setUp(self): ds2_sc[:] = 42 # Add attributes - nc.test_attr_str = 'test_string' + nc.test_attr_str = "test_string" nc.test_attr_int = 0 nc.test_attr_float = 1.2 nc.test_attr_str_arr = np.array(b"test_string2") - g1.test_attr_str = 'test_string' + g1.test_attr_str = "test_string" g1.test_attr_int = 0 g1.test_attr_float = 1.2 for d in [ds1_f, ds1_i, ds2_f, ds2_i]: - d.test_attr_str = 'test_string' + d.test_attr_str = "test_string" d.test_attr_int = 0 d.test_attr_float = 1.2 def tearDown(self): """Remove the previously created test file.""" - os.remove('test.nc') + os.remove("test.nc") def test_all_basic(self): """Test everything about the NetCDF4 class.""" import xarray as xr from satpy.readers.netcdf_utils import NetCDF4FileHandler - file_handler = NetCDF4FileHandler('test.nc', {}, {}) + file_handler = NetCDF4FileHandler("test.nc", {}, {}) - self.assertEqual(file_handler['/dimension/rows'], 10) - self.assertEqual(file_handler['/dimension/cols'], 100) + self.assertEqual(file_handler["/dimension/rows"], 10) + self.assertEqual(file_handler["/dimension/cols"], 100) - for ds in ('test_group/ds1_f', 'test_group/ds1_i', 'ds2_f', 'ds2_i'): - self.assertEqual(file_handler[ds].dtype, np.float32 if ds.endswith('f') else np.int32) - self.assertTupleEqual(file_handler[ds + '/shape'], (10, 100)) - self.assertEqual(file_handler[ds + '/dimensions'], ("rows", "cols")) - self.assertEqual(file_handler[ds + '/attr/test_attr_str'], 'test_string') - self.assertEqual(file_handler[ds + '/attr/test_attr_int'], 0) - self.assertEqual(file_handler[ds + '/attr/test_attr_float'], 1.2) + for ds in ("test_group/ds1_f", "test_group/ds1_i", "ds2_f", "ds2_i"): + self.assertEqual(file_handler[ds].dtype, np.float32 if ds.endswith("f") else np.int32) + self.assertTupleEqual(file_handler[ds + "/shape"], (10, 100)) + self.assertEqual(file_handler[ds + "/dimensions"], ("rows", "cols")) + self.assertEqual(file_handler[ds + "/attr/test_attr_str"], "test_string") + self.assertEqual(file_handler[ds + "/attr/test_attr_int"], 0) + self.assertEqual(file_handler[ds + "/attr/test_attr_float"], 1.2) - test_group = file_handler['test_group'] - self.assertTupleEqual(test_group['ds1_i'].shape, (10, 100)) - self.assertTupleEqual(test_group['ds1_i'].dims, ('rows', 'cols')) + test_group = file_handler["test_group"] + self.assertTupleEqual(test_group["ds1_i"].shape, (10, 100)) + self.assertTupleEqual(test_group["ds1_i"].dims, ("rows", "cols")) - self.assertEqual(file_handler['/attr/test_attr_str'], 'test_string') - self.assertEqual(file_handler['/attr/test_attr_str_arr'], 'test_string2') - self.assertEqual(file_handler['/attr/test_attr_int'], 0) - self.assertEqual(file_handler['/attr/test_attr_float'], 1.2) + self.assertEqual(file_handler["/attr/test_attr_str"], "test_string") + 
self.assertEqual(file_handler["/attr/test_attr_str_arr"], "test_string2") + self.assertEqual(file_handler["/attr/test_attr_int"], 0) + self.assertEqual(file_handler["/attr/test_attr_float"], 1.2) global_attrs = { - 'test_attr_str': 'test_string', - 'test_attr_str_arr': 'test_string2', - 'test_attr_int': 0, - 'test_attr_float': 1.2 + "test_attr_str": "test_string", + "test_attr_str_arr": "test_string2", + "test_attr_int": 0, + "test_attr_float": 1.2 } - self.assertEqual(file_handler['/attrs'], global_attrs) + self.assertEqual(file_handler["/attrs"], global_attrs) - self.assertIsInstance(file_handler.get('ds2_f')[:], xr.DataArray) - self.assertIsNone(file_handler.get('fake_ds')) - self.assertEqual(file_handler.get('fake_ds', 'test'), 'test') + self.assertIsInstance(file_handler.get("ds2_f")[:], xr.DataArray) + self.assertIsNone(file_handler.get("fake_ds")) + self.assertEqual(file_handler.get("fake_ds", "test"), "test") - self.assertTrue('ds2_f' in file_handler) - self.assertFalse('fake_ds' in file_handler) + self.assertTrue("ds2_f" in file_handler) + self.assertFalse("fake_ds" in file_handler) self.assertIsNone(file_handler.file_handle) self.assertEqual(file_handler["ds2_sc"], 42) @@ -169,43 +169,43 @@ def test_listed_variables(self): from satpy.readers.netcdf_utils import NetCDF4FileHandler filetype_info = { - 'required_netcdf_variables': [ - 'test_group/attr/test_attr_str', - 'attr/test_attr_str', + "required_netcdf_variables": [ + "test_group/attr/test_attr_str", + "attr/test_attr_str", ] } - file_handler = NetCDF4FileHandler('test.nc', {}, filetype_info) + file_handler = NetCDF4FileHandler("test.nc", {}, filetype_info) assert len(file_handler.file_content) == 2 - assert 'test_group/attr/test_attr_str' in file_handler.file_content - assert 'attr/test_attr_str' in file_handler.file_content + assert "test_group/attr/test_attr_str" in file_handler.file_content + assert "attr/test_attr_str" in file_handler.file_content def test_listed_variables_with_composing(self): """Test that composing for listed variables is performed.""" from satpy.readers.netcdf_utils import NetCDF4FileHandler filetype_info = { - 'required_netcdf_variables': [ - 'test_group/{some_parameter}/attr/test_attr_str', - 'test_group/attr/test_attr_str', + "required_netcdf_variables": [ + "test_group/{some_parameter}/attr/test_attr_str", + "test_group/attr/test_attr_str", ], - 'variable_name_replacements': { - 'some_parameter': [ - 'ds1_f', - 'ds1_i', + "variable_name_replacements": { + "some_parameter": [ + "ds1_f", + "ds1_i", ], - 'another_parameter': [ - 'not_used' + "another_parameter": [ + "not_used" ], } } - file_handler = NetCDF4FileHandler('test.nc', {}, filetype_info) + file_handler = NetCDF4FileHandler("test.nc", {}, filetype_info) assert len(file_handler.file_content) == 3 - assert 'test_group/ds1_f/attr/test_attr_str' in file_handler.file_content - assert 'test_group/ds1_i/attr/test_attr_str' in file_handler.file_content - assert not any('not_used' in var for var in file_handler.file_content) - assert not any('some_parameter' in var for var in file_handler.file_content) - assert not any('another_parameter' in var for var in file_handler.file_content) - assert 'test_group/attr/test_attr_str' in file_handler.file_content + assert "test_group/ds1_f/attr/test_attr_str" in file_handler.file_content + assert "test_group/ds1_i/attr/test_attr_str" in file_handler.file_content + assert not any("not_used" in var for var in file_handler.file_content) + assert not any("some_parameter" in var for var in file_handler.file_content) 
+ assert not any("another_parameter" in var for var in file_handler.file_content) + assert "test_group/attr/test_attr_str" in file_handler.file_content def test_caching(self): """Test that caching works as intended.""" @@ -241,21 +241,21 @@ def test_get_and_cache_npxr_is_xr(self): import xarray as xr from satpy.readers.netcdf_utils import NetCDF4FileHandler - file_handler = NetCDF4FileHandler('test.nc', {}, {}, cache_handle=True) + file_handler = NetCDF4FileHandler("test.nc", {}, {}, cache_handle=True) - data = file_handler.get_and_cache_npxr('test_group/ds1_f') + data = file_handler.get_and_cache_npxr("test_group/ds1_f") assert isinstance(data, xr.DataArray) def test_get_and_cache_npxr_data_is_cached(self): """Test that the data are cached when get_and_cache_npxr() is called.""" from satpy.readers.netcdf_utils import NetCDF4FileHandler - file_handler = NetCDF4FileHandler('test.nc', {}, {}, cache_handle=True) - data = file_handler.get_and_cache_npxr('test_group/ds1_f') + file_handler = NetCDF4FileHandler("test.nc", {}, {}, cache_handle=True) + data = file_handler.get_and_cache_npxr("test_group/ds1_f") # Delete the dataset from the file content dict, it should be available from the cache del file_handler.file_content["test_group/ds1_f"] - data2 = file_handler.get_and_cache_npxr('test_group/ds1_f') + data2 = file_handler.get_and_cache_npxr("test_group/ds1_f") assert np.all(data == data2) diff --git a/satpy/tests/reader_tests/test_nucaps.py b/satpy/tests/reader_tests/test_nucaps.py index 2f7b0c97a5..5b1c061798 100644 --- a/satpy/tests/reader_tests/test_nucaps.py +++ b/satpy/tests/reader_tests/test_nucaps.py @@ -61,93 +61,93 @@ class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { - '/attr/time_coverage_start': "2020-10-20T12:00:00.5Z", - '/attr/time_coverage_end': "2020-10-20T12:00:36Z", - '/attr/start_orbit_number': 1, - '/attr/end_orbit_number': 2, - '/attr/platform_name': 'NPP', - '/attr/instrument_name': 'CrIS, ATMS, VIIRS', + "/attr/time_coverage_start": "2020-10-20T12:00:00.5Z", + "/attr/time_coverage_end": "2020-10-20T12:00:36Z", + "/attr/start_orbit_number": 1, + "/attr/end_orbit_number": 2, + "/attr/platform_name": "NPP", + "/attr/instrument_name": "CrIS, ATMS, VIIRS", } for k, units, standard_name in [ - ('Solar_Zenith', 'degrees', 'solar_zenith_angle'), - ('Topography', 'meters', ''), - ('Land_Fraction', '1', ''), - ('Surface_Pressure', 'mb', ''), - ('Skin_Temperature', 'Kelvin', 'surface_temperature'), + ("Solar_Zenith", "degrees", "solar_zenith_angle"), + ("Topography", "meters", ""), + ("Land_Fraction", "1", ""), + ("Surface_Pressure", "mb", ""), + ("Skin_Temperature", "Kelvin", "surface_temperature"), ]: file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/units'] = units - file_content[k + '/attr/valid_range'] = (0., 120.) - file_content[k + '/attr/_FillValue'] = -9999. + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/units"] = units + file_content[k + "/attr/valid_range"] = (0., 120.) + file_content[k + "/attr/_FillValue"] = -9999. 
if standard_name: - file_content[k + '/attr/standard_name'] = standard_name + file_content[k + "/attr/standard_name"] = standard_name for k, units, standard_name in [ - ('Temperature', 'Kelvin', 'air_temperature'), - ('Effective_Pressure', 'mb', ''), - ('H2O', '1', ''), - ('H2O_MR', 'g/g', ''), - ('O3', '1', ''), - ('O3_MR', '1', ''), - ('Liquid_H2O', '1', ''), - ('Liquid_H2O_MR', 'g/g', 'cloud_liquid_water_mixing_ratio'), - ('CO', '1', ''), - ('CO_MR', '1', ''), - ('CH4', '1', ''), - ('CH4_MR', '1', ''), - ('CO2', '1', ''), - ('HNO3', '1', ''), - ('HNO3_MR', '1', ''), - ('N2O', '1', ''), - ('N2O_MR', '1', ''), - ('SO2', '1', ''), - ('SO2_MR', '1', ''), + ("Temperature", "Kelvin", "air_temperature"), + ("Effective_Pressure", "mb", ""), + ("H2O", "1", ""), + ("H2O_MR", "g/g", ""), + ("O3", "1", ""), + ("O3_MR", "1", ""), + ("Liquid_H2O", "1", ""), + ("Liquid_H2O_MR", "g/g", "cloud_liquid_water_mixing_ratio"), + ("CO", "1", ""), + ("CO_MR", "1", ""), + ("CH4", "1", ""), + ("CH4_MR", "1", ""), + ("CO2", "1", ""), + ("HNO3", "1", ""), + ("HNO3_MR", "1", ""), + ("N2O", "1", ""), + ("N2O_MR", "1", ""), + ("SO2", "1", ""), + ("SO2_MR", "1", ""), ]: file_content[k] = DEFAULT_PRES_FILE_DATA - file_content[k + '/shape'] = DEFAULT_PRES_FILE_SHAPE - file_content[k + '/attr/units'] = units - file_content[k + '/attr/valid_range'] = (0., 120.) - file_content[k + '/attr/_FillValue'] = -9999. + file_content[k + "/shape"] = DEFAULT_PRES_FILE_SHAPE + file_content[k + "/attr/units"] = units + file_content[k + "/attr/valid_range"] = (0., 120.) + file_content[k + "/attr/_FillValue"] = -9999. if standard_name: - file_content[k + '/attr/standard_name'] = standard_name - k = 'Pressure' + file_content[k + "/attr/standard_name"] = standard_name + k = "Pressure" file_content[k] = ALL_PRESSURE_LEVELS - file_content[k + '/shape'] = DEFAULT_PRES_FILE_SHAPE - file_content[k + '/attr/units'] = 'mb' - file_content[k + '/attr/valid_range'] = (0., 2000.) - file_content[k + '/attr/_FillValue'] = -9999. + file_content[k + "/shape"] = DEFAULT_PRES_FILE_SHAPE + file_content[k + "/attr/units"] = "mb" + file_content[k + "/attr/valid_range"] = (0., 2000.) + file_content[k + "/attr/_FillValue"] = -9999. - k = 'Quality_Flag' + k = "Quality_Flag" file_content[k] = DEFAULT_FILE_DATA.astype(np.int32) - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/valid_range'] = (0, 31) - file_content[k + '/attr/_FillValue'] = -9999. + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/valid_range"] = (0, 31) + file_content[k + "/attr/_FillValue"] = -9999. - k = 'Longitude' + k = "Longitude" file_content[k] = DEFAULT_LON_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/units'] = 'degrees_east' - file_content[k + '/attr/valid_range'] = (-180., 180.) - file_content[k + '/attr/standard_name'] = 'longitude' - file_content[k + '/attr/_FillValue'] = -9999. + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/units"] = "degrees_east" + file_content[k + "/attr/valid_range"] = (-180., 180.) + file_content[k + "/attr/standard_name"] = "longitude" + file_content[k + "/attr/_FillValue"] = -9999. - k = 'Latitude' + k = "Latitude" file_content[k] = DEFAULT_LAT_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/units'] = 'degrees_north' - file_content[k + '/attr/valid_range'] = (-90., 90.) - file_content[k + '/attr/standard_name'] = 'latitude' - file_content[k + '/attr/_FillValue'] = -9999. 
- - attrs = ('_FillValue', 'flag_meanings', 'flag_values', 'units') - cris_fors_dim_name = 'Number_of_CrIS_FORs' - pressure_levels_dim_name = 'Number_of_P_Levels' - if ('_v1' in filename): - cris_fors_dim_name = 'number_of_FORs' - pressure_levels_dim_name = 'number_of_p_levels' + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/units"] = "degrees_north" + file_content[k + "/attr/valid_range"] = (-90., 90.) + file_content[k + "/attr/standard_name"] = "latitude" + file_content[k + "/attr/_FillValue"] = -9999. + + attrs = ("_FillValue", "flag_meanings", "flag_values", "units") + cris_fors_dim_name = "Number_of_CrIS_FORs" + pressure_levels_dim_name = "Number_of_P_Levels" + if ("_v1" in filename): + cris_fors_dim_name = "number_of_FORs" + pressure_levels_dim_name = "number_of_p_levels" convert_file_content_to_data_array( file_content, attrs=attrs, - dims=('z', cris_fors_dim_name, pressure_levels_dim_name)) + dims=("z", cris_fors_dim_name, pressure_levels_dim_name)) return file_content @@ -160,9 +160,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.nucaps import NUCAPSFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(NUCAPSFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) + self.p = mock.patch.object(NUCAPSFileHandler, "__bases__", (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -175,7 +175,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -187,10 +187,10 @@ def test_init_with_kwargs(self): from satpy.readers import load_reader r = load_reader(self.reader_configs, mask_surface=False) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) self.assertEqual(len(loadables), 1) - r.create_filehandlers(loadables, fh_kwargs={'mask_surface': False}) + r.create_filehandlers(loadables, fh_kwargs={"mask_surface": False}) # make sure we have some files self.assertTrue(r.file_handlers) @@ -199,70 +199,70 @@ def test_load_nonpressure_based(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Solar_Zenith', - 'Topography', - 'Land_Fraction', - 'Surface_Pressure', - 'Skin_Temperature', - 'Quality_Flag', + datasets = r.load(["Solar_Zenith", + "Topography", + "Land_Fraction", + "Surface_Pressure", + "Skin_Temperature", + "Quality_Flag", ]) self.assertEqual(len(datasets), 6) for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) # self.assertEqual(v.info['units'], 'degrees') self.assertEqual(v.ndim, 1) - 
self.assertEqual(v.attrs['sensor'], set(['cris', 'atms', 'viirs'])) - self.assertEqual(type(v.attrs['start_time']), datetime.datetime) - self.assertEqual(type(v.attrs['end_time']), datetime.datetime) + self.assertEqual(v.attrs["sensor"], set(["cris", "atms", "viirs"])) + self.assertEqual(type(v.attrs["start_time"]), datetime.datetime) + self.assertEqual(type(v.attrs["end_time"]), datetime.datetime) def test_load_pressure_based(self): """Test loading all channels based on pressure.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature', - 'Effective_Pressure', - 'H2O', - 'H2O_MR', - 'O3', - 'O3_MR', - 'Liquid_H2O', - 'Liquid_H2O_MR', - 'CO', - 'CO_MR', - 'CH4', - 'CH4_MR', - 'CO2', - 'HNO3', - 'HNO3_MR', - 'N2O', - 'N2O_MR', - 'SO2', - 'SO2_MR', + datasets = r.load(["Temperature", + "Effective_Pressure", + "H2O", + "H2O_MR", + "O3", + "O3_MR", + "Liquid_H2O", + "Liquid_H2O_MR", + "CO", + "CO_MR", + "CH4", + "CH4_MR", + "CO2", + "HNO3", + "HNO3_MR", + "N2O", + "N2O_MR", + "SO2", + "SO2_MR", ]) self.assertEqual(len(datasets), 19) for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) self.assertEqual(v.ndim, 2) if np.issubdtype(v.dtype, np.floating): - assert '_FillValue' not in v.attrs + assert "_FillValue" not in v.attrs def test_load_multiple_files_pressure(self): """Test loading Temperature from multiple input files.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', - 'NUCAPS-EDR_v1r0_npp_s201603011159009_e201603011159307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", + "NUCAPS-EDR_v1r0_npp_s201603011159009_e201603011159307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=True) + datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=True) self.assertEqual(len(datasets), 100) for v in datasets.values(): self.assertEqual(v.ndim, 1) @@ -272,10 +272,10 @@ def test_load_individual_pressure_levels_true(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=True) + datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=True) self.assertEqual(len(datasets), 100) for v in datasets.values(): self.assertEqual(v.ndim, 1) @@ -285,10 +285,10 @@ def test_load_individual_pressure_levels_min_max(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(100., 150.)) + datasets = 
r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(100., 150.)) self.assertEqual(len(datasets), 6) for v in datasets.values(): self.assertEqual(v.ndim, 1) @@ -298,10 +298,10 @@ def test_load_individual_pressure_levels_single(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(103.017,)) + datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 1) @@ -311,10 +311,10 @@ def test_load_pressure_levels_true(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature'], pressure_levels=True) + datasets = r.load(["Temperature"], pressure_levels=True) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) @@ -325,10 +325,10 @@ def test_load_pressure_levels_min_max(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature'], pressure_levels=(100., 150.)) + datasets = r.load(["Temperature"], pressure_levels=(100., 150.)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) @@ -340,10 +340,10 @@ def test_load_pressure_levels_single(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature'], pressure_levels=(103.017,)) + datasets = r.load(["Temperature"], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) @@ -355,16 +355,16 @@ def test_load_pressure_levels_single_and_pressure_levels(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature', 'Pressure_Levels'], pressure_levels=(103.017,)) + datasets = r.load(["Temperature", "Pressure_Levels"], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 2) - t_ds = datasets['Temperature'] + t_ds = datasets["Temperature"] self.assertEqual(t_ds.ndim, 2) self.assertTupleEqual(t_ds.shape, (DEFAULT_PRES_FILE_SHAPE[0], 1)) - pl_ds = datasets['Pressure_Levels'] + pl_ds = datasets["Pressure_Levels"] self.assertTupleEqual(pl_ds.shape, (1,)) @@ -377,9 +377,9 @@ def setUp(self): """Wrap NetCDF4 
file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.nucaps import NUCAPSFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(NUCAPSFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) + self.p = mock.patch.object(NUCAPSFileHandler, "__bases__", (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -392,7 +392,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -404,46 +404,46 @@ def test_load_nonpressure_based(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Topography', - 'Land_Fraction', - 'Surface_Pressure', - 'Skin_Temperature', - 'Quality_Flag', + datasets = r.load(["Topography", + "Land_Fraction", + "Surface_Pressure", + "Skin_Temperature", + "Quality_Flag", ]) self.assertEqual(len(datasets), 5) for v in datasets.values(): self.assertEqual(v.ndim, 1) - self.assertEqual(v.attrs['sensor'], set(['cris', 'atms', 'viirs'])) - self.assertEqual(type(v.attrs['start_time']), datetime.datetime) - self.assertEqual(type(v.attrs['end_time']), datetime.datetime) + self.assertEqual(v.attrs["sensor"], set(["cris", "atms", "viirs"])) + self.assertEqual(type(v.attrs["start_time"]), datetime.datetime) + self.assertEqual(type(v.attrs["end_time"]), datetime.datetime) def test_load_pressure_based(self): """Test loading all channels based on pressure.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature', - 'H2O', - 'H2O_MR', - 'O3', - 'O3_MR', - 'CO', - 'CO_MR', - 'CH4', - 'CH4_MR', - 'CO2', - 'HNO3', - 'HNO3_MR', - 'N2O', - 'N2O_MR', - 'SO2', - 'SO2_MR', + datasets = r.load(["Temperature", + "H2O", + "H2O_MR", + "O3", + "O3_MR", + "CO", + "CO_MR", + "CH4", + "CH4_MR", + "CO2", + "HNO3", + "HNO3_MR", + "N2O", + "N2O_MR", + "SO2", + "SO2_MR", ]) self.assertEqual(len(datasets), 16) for v in datasets.values(): @@ -455,10 +455,10 @@ def test_load_individual_pressure_levels_true(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=True) + datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=True) self.assertEqual(len(datasets), 100) for v in datasets.values(): self.assertEqual(v.ndim, 1) @@ 
-468,10 +468,10 @@ def test_load_individual_pressure_levels_min_max(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(100., 150.)) + datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(100., 150.)) self.assertEqual(len(datasets), 6) for v in datasets.values(): self.assertEqual(v.ndim, 1) @@ -481,10 +481,10 @@ def test_load_individual_pressure_levels_single(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(103.017,)) + datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 1) @@ -494,10 +494,10 @@ def test_load_pressure_levels_true(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature'], pressure_levels=True) + datasets = r.load(["Temperature"], pressure_levels=True) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) @@ -508,10 +508,10 @@ def test_load_pressure_levels_min_max(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature'], pressure_levels=(100., 150.)) + datasets = r.load(["Temperature"], pressure_levels=(100., 150.)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) @@ -523,10 +523,10 @@ def test_load_pressure_levels_single(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature'], pressure_levels=(103.017,)) + datasets = r.load(["Temperature"], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) @@ -538,14 +538,14 @@ def test_load_pressure_levels_single_and_pressure_levels(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature', 'Pressure_Levels'], pressure_levels=(103.017,)) + datasets = r.load(["Temperature", "Pressure_Levels"], 
pressure_levels=(103.017,)) self.assertEqual(len(datasets), 2) - t_ds = datasets['Temperature'] + t_ds = datasets["Temperature"] self.assertEqual(t_ds.ndim, 2) self.assertTupleEqual(t_ds.shape, (DEFAULT_PRES_FILE_SHAPE[0], 1)) - pl_ds = datasets['Pressure_Levels'] + pl_ds = datasets["Pressure_Levels"] self.assertTupleEqual(pl_ds.shape, (1,)) diff --git a/satpy/tests/reader_tests/test_nwcsaf_msg.py b/satpy/tests/reader_tests/test_nwcsaf_msg.py index 5e3053058e..e323baeb20 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_msg.py +++ b/satpy/tests/reader_tests/test_nwcsaf_msg.py @@ -428,9 +428,9 @@ } AREA_DEF_DICT = { - "proj_dict": {'proj': 'geos', 'lon_0': 0, 'h': 35785831, 'x_0': 0, 'y_0': 0, - 'a': 6378169, 'b': 6356583.8, 'units': 'm', 'no_defs': None, 'type': 'crs'}, - "area_id": 'MSG-N', + "proj_dict": {"proj": "geos", "lon_0": 0, "h": 35785831, "x_0": 0, "y_0": 0, + "a": 6378169, "b": 6356583.8, "units": "m", "no_defs": None, "type": "crs"}, + "area_id": "MSG-N", "x_size": 3712, "y_size": 1856, "area_extent": (-5570248.2825, 1501.0099, 5567247.8793, 5570247.8784) @@ -481,20 +481,20 @@ def test_get_area_def(self): area_def = test.get_area_def(dsid) - aext_res = AREA_DEF_DICT['area_extent'] + aext_res = AREA_DEF_DICT["area_extent"] for i in range(4): self.assertAlmostEqual(area_def.area_extent[i], aext_res[i], 4) - proj_dict = AREA_DEF_DICT['proj_dict'] - self.assertEqual(proj_dict['proj'], area_def.proj_dict['proj']) + proj_dict = AREA_DEF_DICT["proj_dict"] + self.assertEqual(proj_dict["proj"], area_def.proj_dict["proj"]) # Not all elements passed on Appveyor, so skip testing every single element of the proj-dict: # for key in proj_dict: # self.assertEqual(proj_dict[key], area_def.proj_dict[key]) - self.assertEqual(AREA_DEF_DICT['x_size'], area_def.width) - self.assertEqual(AREA_DEF_DICT['y_size'], area_def.height) + self.assertEqual(AREA_DEF_DICT["x_size"], area_def.width) + self.assertEqual(AREA_DEF_DICT["y_size"], area_def.height) - self.assertEqual(AREA_DEF_DICT['area_id'], area_def.area_id) + self.assertEqual(AREA_DEF_DICT["area_id"], area_def.area_id) def test_get_dataset(self): """Retrieve datasets from a NWCSAF msgv2013 hdf5 file.""" diff --git a/satpy/tests/reader_tests/test_nwcsaf_nc.py b/satpy/tests/reader_tests/test_nwcsaf_nc.py index 095533d959..fb7187af1f 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_nc.py +++ b/satpy/tests/reader_tests/test_nwcsaf_nc.py @@ -23,19 +23,19 @@ from satpy.readers.nwcsaf_nc import NcNWCSAF, read_nwcsaf_time -PROJ_KM = {'gdal_projection': '+proj=geos +a=6378.137000 +b=6356.752300 +lon_0=0.000000 +h=35785.863000', - 'gdal_xgeo_up_left': -5569500.0, - 'gdal_ygeo_up_left': 5437500.0, - 'gdal_xgeo_low_right': 5566500.0, - 'gdal_ygeo_low_right': 2653500.0} +PROJ_KM = {"gdal_projection": "+proj=geos +a=6378.137000 +b=6356.752300 +lon_0=0.000000 +h=35785.863000", + "gdal_xgeo_up_left": -5569500.0, + "gdal_ygeo_up_left": 5437500.0, + "gdal_xgeo_low_right": 5566500.0, + "gdal_ygeo_low_right": 2653500.0} NOMINAL_ALTITUDE = 35785863.0 -PROJ = {'gdal_projection': f'+proj=geos +a=6378137.000 +b=6356752.300 +lon_0=0.000000 +h={NOMINAL_ALTITUDE:.3f}', - 'gdal_xgeo_up_left': -5569500.0, - 'gdal_ygeo_up_left': 5437500.0, - 'gdal_xgeo_low_right': 5566500.0, - 'gdal_ygeo_low_right': 2653500.0} +PROJ = {"gdal_projection": f"+proj=geos +a=6378137.000 +b=6356752.300 +lon_0=0.000000 +h={NOMINAL_ALTITUDE:.3f}", + "gdal_xgeo_up_left": -5569500.0, + "gdal_ygeo_up_left": 5437500.0, + "gdal_xgeo_low_right": 5566500.0, + "gdal_ygeo_low_right": 2653500.0} dimensions = 
{"nx": 1530, @@ -263,13 +263,13 @@ def test_sensor_name_sat_id(self, nwcsaf_geo_ct_filehandler, platform, instrumen def test_get_area_def(self, nwcsaf_geo_ct_filehandler): """Test that get_area_def() returns proper area.""" - dsid = {'name': 'ct'} + dsid = {"name": "ct"} _check_area_def(nwcsaf_geo_ct_filehandler.get_area_def(dsid)) def test_get_area_def_km(self, nwcsaf_old_geo_ct_filehandler): """Test that get_area_def() returns proper area when the projection is in km.""" - dsid = {'name': 'ct'} + dsid = {"name": "ct"} _check_area_def(nwcsaf_old_geo_ct_filehandler.get_area_def(dsid)) def test_scale_dataset_attr_removal(self, nwcsaf_geo_ct_filehandler): @@ -277,58 +277,58 @@ def test_scale_dataset_attr_removal(self, nwcsaf_geo_ct_filehandler): import numpy as np import xarray as xr - attrs = {'scale_factor': np.array(10), - 'add_offset': np.array(20)} + attrs = {"scale_factor": np.array(10), + "add_offset": np.array(20)} var = xr.DataArray([1, 2, 3], attrs=attrs) - var = nwcsaf_geo_ct_filehandler.scale_dataset(var, 'dummy') + var = nwcsaf_geo_ct_filehandler.scale_dataset(var, "dummy") np.testing.assert_allclose(var, [30, 40, 50]) - assert 'scale_factor' not in var.attrs - assert 'add_offset' not in var.attrs + assert "scale_factor" not in var.attrs + assert "add_offset" not in var.attrs - @pytest.mark.parametrize("attrs, expected", [({'scale_factor': np.array(1.5), - 'add_offset': np.array(2.5), - '_FillValue': 1}, + @pytest.mark.parametrize("attrs, expected", [({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "_FillValue": 1}, [np.nan, 5.5, 7]), - ({'scale_factor': np.array(1.5), - 'add_offset': np.array(2.5), - 'valid_min': 1.1}, + ({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "valid_min": 1.1}, [np.nan, 5.5, 7]), - ({'scale_factor': np.array(1.5), - 'add_offset': np.array(2.5), - 'valid_max': 2.1}, + ({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "valid_max": 2.1}, [4, 5.5, np.nan]), - ({'scale_factor': np.array(1.5), - 'add_offset': np.array(2.5), - 'valid_range': (1.1, 2.1)}, + ({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "valid_range": (1.1, 2.1)}, [np.nan, 5.5, np.nan])]) def test_scale_dataset_floating(self, nwcsaf_geo_ct_filehandler, attrs, expected): """Test the scaling of the dataset with floating point values.""" var = xr.DataArray([1, 2, 3], attrs=attrs) - var = nwcsaf_geo_ct_filehandler.scale_dataset(var, 'dummy') + var = nwcsaf_geo_ct_filehandler.scale_dataset(var, "dummy") np.testing.assert_allclose(var, expected) - assert 'scale_factor' not in var.attrs - assert 'add_offset' not in var.attrs + assert "scale_factor" not in var.attrs + assert "add_offset" not in var.attrs def test_scale_dataset_floating_nwcsaf_geo_ctth(self, nwcsaf_geo_ct_filehandler): """Test the scaling of the dataset with floating point values for CTTH NWCSAF/Geo v2016/v2018.""" - attrs = {'scale_factor': np.array(1.), - 'add_offset': np.array(-2000.), - 'valid_range': (0., 27000.)} + attrs = {"scale_factor": np.array(1.), + "add_offset": np.array(-2000.), + "valid_range": (0., 27000.)} var = xr.DataArray([1, 2, 3], attrs=attrs) - var = nwcsaf_geo_ct_filehandler.scale_dataset(var, 'dummy') + var = nwcsaf_geo_ct_filehandler.scale_dataset(var, "dummy") np.testing.assert_allclose(var, [-1999., -1998., -1997.]) - assert 'scale_factor' not in var.attrs - assert 'add_offset' not in var.attrs - np.testing.assert_equal(var.attrs['valid_range'], (-2000., 25000.)) + assert "scale_factor" not in var.attrs + assert "add_offset" not in 
var.attrs + np.testing.assert_equal(var.attrs["valid_range"], (-2000., 25000.)) def test_orbital_parameters_are_correct(self, nwcsaf_geo_ct_filehandler): """Test that orbital parameters are present in the dataset attributes.""" - dsid = {'name': 'ct'} + dsid = {"name": "ct"} var = nwcsaf_geo_ct_filehandler.get_dataset(dsid, {}) assert "orbital_parameters" in var.attrs - for param in var.attrs['orbital_parameters']: - assert isinstance(var.attrs['orbital_parameters'][param], (float, int)) + for param in var.attrs["orbital_parameters"]: + assert isinstance(var.attrs["orbital_parameters"][param], (float, int)) assert var.attrs["orbital_parameters"]["satellite_nominal_altitude"] == NOMINAL_ALTITUDE assert var.attrs["orbital_parameters"]["satellite_nominal_longitude"] == NOMINAL_LONGITUDE @@ -336,7 +336,7 @@ def test_orbital_parameters_are_correct(self, nwcsaf_geo_ct_filehandler): def test_times_are_in_dataset_attributes(self, nwcsaf_geo_ct_filehandler): """Check that start/end times are in the attributes of datasets.""" - dsid = {'name': 'ct'} + dsid = {"name": "ct"} var = nwcsaf_geo_ct_filehandler.get_dataset(dsid, {}) assert "start_time" in var.attrs assert "end_time" in var.attrs @@ -363,29 +363,29 @@ def test_end_time(self, nwcsaf_pps_cmic_filehandler): def test_drop_xycoords(self, nwcsaf_pps_cmic_filehandler): """Test the drop of x and y coords.""" - y_line = xr.DataArray(list(range(5)), dims=('y'), attrs={"long_name": "scan line number"}) - x_pixel = xr.DataArray(list(range(10)), dims=('x'), attrs={"long_name": "pixel number"}) + y_line = xr.DataArray(list(range(5)), dims=("y"), attrs={"long_name": "scan line number"}) + x_pixel = xr.DataArray(list(range(10)), dims=("x"), attrs={"long_name": "pixel number"}) lat = xr.DataArray(np.ones((5, 10)), - dims=('y', 'x'), - coords={'y': y_line, 'x': x_pixel}, - attrs={'name': 'lat', - 'standard_name': 'latitude'}) + dims=("y", "x"), + coords={"y": y_line, "x": x_pixel}, + attrs={"name": "lat", + "standard_name": "latitude"}) lon = xr.DataArray(np.ones((5, 10)), - dims=('y', 'x'), - coords={'y': y_line, 'x': x_pixel}, - attrs={'name': 'lon', - 'standard_name': 'longitude'}) + dims=("y", "x"), + coords={"y": y_line, "x": x_pixel}, + attrs={"name": "lon", + "standard_name": "longitude"}) data_array_in = xr.DataArray(np.ones((5, 10)), attrs={"scale_factor": np.array(0, dtype=float), "add_offset": np.array(1, dtype=float)}, - dims=('y', 'x'), - coords={'lon': lon, 'lat': lat, 'y': y_line, 'x': x_pixel}) + dims=("y", "x"), + coords={"lon": lon, "lat": lat, "y": y_line, "x": x_pixel}) data_array_out = nwcsaf_pps_cmic_filehandler.drop_xycoords(data_array_in) - assert 'y' not in data_array_out.coords + assert "y" not in data_array_out.coords def test_get_dataset_scales_and_offsets(self, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() returns scaled and offseted data.""" - dsid = {'name': 'cpp_cot'} + dsid = {"name": "cpp_cot"} info = dict(name="cpp_cot", file_type="nc_nwcsaf_cpp") @@ -395,7 +395,7 @@ def test_get_dataset_scales_and_offsets(self, nwcsaf_pps_cpp_filehandler): def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(self, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() returns scaled palette_meanings with another dataset as scaling source.""" - dsid = {'name': 'cpp_cot_pal'} + dsid = {"name": "cpp_cot_pal"} info = dict(name="cpp_cot_pal", file_type="nc_nwcsaf_cpp", @@ -407,7 +407,7 @@ def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(sel def 
test_get_palette_fill_value_color_added(self, nwcsaf_pps_ctth_filehandler): """Test that get_dataset() returns scaled palette_meanings with fill_value_color added.""" - dsid = {'name': 'ctth_alti_pal'} + dsid = {"name": "ctth_alti_pal"} info = dict(name="ctth_alti_pal", file_type="nc_nwcsaf_ctth", @@ -420,7 +420,7 @@ def test_get_palette_fill_value_color_added(self, nwcsaf_pps_ctth_filehandler): def test_get_dataset_raises_when_dataset_missing(self, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() raises an error when the requested dataset is missing.""" - dsid = {'name': 'cpp_phase'} + dsid = {"name": "cpp_phase"} info = dict(name="cpp_phase", file_type="nc_nwcsaf_cpp") with pytest.raises(KeyError): @@ -428,8 +428,8 @@ def test_get_dataset_raises_when_dataset_missing(self, nwcsaf_pps_cpp_filehandle def test_get_dataset_uses_file_key_if_present(self, nwcsaf_pps_cmic_filehandler, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() uses a file_key if present.""" - dsid_cpp = {'name': 'cpp_cot'} - dsid_cmic = {'name': 'cmic_cot'} + dsid_cpp = {"name": "cpp_cot"} + dsid_cmic = {"name": "cmic_cot"} file_key = "cmic_cot" @@ -449,17 +449,17 @@ def test_get_dataset_uses_file_key_if_present(self, nwcsaf_pps_cmic_filehandler, def test_get_dataset_can_handle_file_key_list(self, nwcsaf_pps_cmic_filehandler, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() can handle a list of file_keys.""" - dsid_cpp = {'name': 'cpp_reff'} - dsid_cmic = {'name': 'cmic_cre'} + dsid_cpp = {"name": "cpp_reff"} + dsid_cmic = {"name": "cmic_cre"} info_cpp = dict(name="cmic_reff", - file_key=['reff', 'cre'], + file_key=["reff", "cre"], file_type="nc_nwcsaf_cpp") res_cpp = nwcsaf_pps_cpp_filehandler.get_dataset(dsid_cpp, info_cpp) info_cmic = dict(name="cmic_reff", - file_key=['reff', 'cre'], + file_key=["reff", "cre"], file_type="nc_nwcsaf_cpp") res_cmic = nwcsaf_pps_cmic_filehandler.get_dataset(dsid_cmic, info_cmic) @@ -471,8 +471,8 @@ class TestNcNWCSAFFileKeyPrefix: def test_get_dataset_uses_file_key_prefix(self, nwcsaf_pps_cmic_filehandler): """Test that get_dataset() uses a file_key_prefix.""" - dsid_cpp = {'name': 'cpp_cot'} - dsid_cmic = {'name': 'cmic_cot'} + dsid_cpp = {"name": "cpp_cot"} + dsid_cmic = {"name": "cmic_cot"} file_key = "cot" @@ -490,7 +490,7 @@ def test_get_dataset_uses_file_key_prefix(self, nwcsaf_pps_cmic_filehandler): def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(self, nwcsaf_pps_cmic_filehandler): """Test that get_dataset() returns scaled palette_meanings using another dataset as scaling source.""" - dsid = {'name': 'cpp_cot_pal'} + dsid = {"name": "cpp_cot_pal"} info = dict(name="cpp_cot_pal", file_key="cot_pal", @@ -503,11 +503,11 @@ def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(sel def _check_area_def(area_definition): - correct_h = float(PROJ['gdal_projection'].split('+h=')[-1]) - correct_a = float(PROJ['gdal_projection'].split('+a=')[-1].split()[0]) - assert area_definition.proj_dict['h'] == correct_h - assert area_definition.proj_dict['a'] == correct_a - assert area_definition.proj_dict['units'] == 'm' + correct_h = float(PROJ["gdal_projection"].split("+h=")[-1]) + correct_a = float(PROJ["gdal_projection"].split("+a=")[-1].split()[0]) + assert area_definition.proj_dict["h"] == correct_h + assert area_definition.proj_dict["a"] == correct_a + assert area_definition.proj_dict["units"] == "m" correct_extent = (PROJ["gdal_xgeo_up_left"], PROJ["gdal_ygeo_low_right"], PROJ["gdal_xgeo_low_right"], diff --git 
a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py index 8575c337cb..f0ed47f4f8 100644 --- a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py +++ b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py @@ -49,12 +49,12 @@ def fake_dataset(): nobs = xr.DataArray( [[5, 118, 5, 100], [0, 15, 0, 1]], dims=("lat", "lon"), - attrs={'_FillValue': 0} + attrs={"_FillValue": 0} ) nobs_filt = xr.DataArray( [[5, 118, 5, 100], [np.nan, 15, np.nan, 1]], dims=("lat", "lon"), - attrs={'_FillValue': 0} + attrs={"_FillValue": 0} ) watcls = xr.DataArray( [[12.2, 0.01, 6.754, 5.33], [12.5, 101.5, 103.5, 204.]], @@ -83,15 +83,15 @@ def fake_dataset(): ) -ds_dict = {'adg_490': 'adg_490', - 'water_class10': 'water_class10', - 'seawifs_nobs_sum': 'test_nobs', - 'kd_490': 'kd_490', - 'atot_665': 'atot_665'} +ds_dict = {"adg_490": "adg_490", + "water_class10": "water_class10", + "seawifs_nobs_sum": "test_nobs", + "kd_490": "kd_490", + "atot_665": "atot_665"} -ds_list_all = ['adg_490', 'water_class10', 'seawifs_nobs_sum', 'kd_490', 'atot_665'] -ds_list_iop = ['adg_490', 'water_class10', 'seawifs_nobs_sum', 'atot_665'] -ds_list_kd = ['kd_490', 'water_class10', 'seawifs_nobs_sum'] +ds_list_all = ["adg_490", "water_class10", "seawifs_nobs_sum", "kd_490", "atot_665"] +ds_list_iop = ["adg_490", "water_class10", "seawifs_nobs_sum", "atot_665"] +ds_list_kd = ["kd_490", "water_class10", "seawifs_nobs_sum"] @pytest.fixture @@ -100,31 +100,31 @@ def fake_file_dict(fake_dataset, tmp_path): fdict = {} filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-10M_MONTHLY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['bad_month'] = filename + fdict["bad_month"] = filename filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-2D_DAILY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['bad_day'] = filename + fdict["bad_day"] = filename filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-1M_MONTHLY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['ocprod_1m'] = filename + fdict["ocprod_1m"] = filename filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-5D_DAILY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['ocprod_5d'] = filename + fdict["ocprod_5d"] = filename filename = tmp_path / "ESACCI-OC-L3S-IOP-MERGED-8D_DAILY_4km_GEO_PML_RRS-20211117-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['iop_8d'] = filename + fdict["iop_8d"] = filename filename = tmp_path / "ESACCI-OC-L3S-IOP-MERGED-1D_DAILY_4km_GEO_PML_OCx-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['iop_1d'] = filename + fdict["iop_1d"] = filename filename = tmp_path / "ESACCI-OC-L3S-K_490-MERGED-1D_DAILY_4km_GEO_PML_RRS-20210113-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['k490_1d'] = filename + fdict["k490_1d"] = filename yield fdict @@ -137,7 +137,7 @@ def setup_method(self): from satpy._config import config_search_paths self.yaml_file = "oceancolorcci_l3_nc.yaml" - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) def _create_reader_for_resolutions(self, filename): from satpy.readers import load_reader @@ -152,7 +152,7 @@ def _create_reader_for_resolutions(self, filename): @pytest.fixture def area_exp(self): """Get expected area definition.""" - proj_dict = {'datum': 'WGS84', 'no_defs': 'None', 'proj': 'longlat', 'type': 'crs'} + proj_dict = 
{"datum": "WGS84", "no_defs": "None", "proj": "longlat", "type": "crs"} return AreaDefinition( area_id="gridded_occci", @@ -166,9 +166,9 @@ def area_exp(self): def test_get_area_def(self, area_exp, fake_file_dict): """Test area definition.""" - reader = self._create_reader_for_resolutions([fake_file_dict['ocprod_1m']]) + reader = self._create_reader_for_resolutions([fake_file_dict["ocprod_1m"]]) res = reader.load([ds_list_all[0]]) - area = res[ds_list_all[0]].attrs['area'] + area = res[ds_list_all[0]].attrs["area"] assert area.area_id == area_exp.area_id assert area.area_extent == area_exp.area_extent @@ -178,75 +178,75 @@ def test_get_area_def(self, area_exp, fake_file_dict): def test_bad_fname(self, fake_dataset, fake_file_dict): """Test case where an incorrect composite period is given.""" - reader = self._create_reader_for_resolutions([fake_file_dict['bad_month']]) + reader = self._create_reader_for_resolutions([fake_file_dict["bad_month"]]) res = reader.load([ds_list_all[0]]) assert len(res) == 0 - reader = self._create_reader_for_resolutions([fake_file_dict['bad_day']]) + reader = self._create_reader_for_resolutions([fake_file_dict["bad_day"]]) res = reader.load([ds_list_all[0]]) assert len(res) == 0 def test_get_dataset_monthly_allprods(self, fake_dataset, fake_file_dict): """Test dataset loading.""" - reader = self._create_reader_for_resolutions([fake_file_dict['ocprod_1m']]) + reader = self._create_reader_for_resolutions([fake_file_dict["ocprod_1m"]]) # Check how many datasets are available. This file contains all of them. assert len(list(reader.available_dataset_names)) == 94 res = reader.load(ds_list_all) assert len(res) == len(ds_list_all) for curds in ds_list_all: np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values) - assert res[curds].attrs['sensor'] == 'merged' - assert res[curds].attrs['composite_period'] == 'monthly' + assert res[curds].attrs["sensor"] == "merged" + assert res[curds].attrs["composite_period"] == "monthly" def test_get_dataset_8d_iopprods(self, fake_dataset, fake_file_dict): """Test dataset loading.""" - reader = self._create_reader_for_resolutions([fake_file_dict['iop_8d']]) + reader = self._create_reader_for_resolutions([fake_file_dict["iop_8d"]]) # Check how many datasets are available. This file contains all of them. assert len(list(reader.available_dataset_names)) == 70 res = reader.load(ds_list_iop) assert len(res) == len(ds_list_iop) for curds in ds_list_iop: np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values) - assert res[curds].attrs['sensor'] == 'merged' - assert res[curds].attrs['composite_period'] == '8-day' + assert res[curds].attrs["sensor"] == "merged" + assert res[curds].attrs["composite_period"] == "8-day" def test_get_dataset_1d_kprods(self, fake_dataset, fake_file_dict): """Test dataset loading.""" - reader = self._create_reader_for_resolutions([fake_file_dict['k490_1d']]) + reader = self._create_reader_for_resolutions([fake_file_dict["k490_1d"]]) # Check how many datasets are available. This file contains all of them. 
assert len(list(reader.available_dataset_names)) == 25 res = reader.load(ds_list_kd) assert len(res) == len(ds_list_kd) for curds in ds_list_kd: np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values) - assert res[curds].attrs['sensor'] == 'merged' - assert res[curds].attrs['composite_period'] == 'daily' + assert res[curds].attrs["sensor"] == "merged" + assert res[curds].attrs["composite_period"] == "daily" def test_get_dataset_5d_allprods(self, fake_dataset, fake_file_dict): """Test dataset loading.""" - reader = self._create_reader_for_resolutions([fake_file_dict['ocprod_5d']]) + reader = self._create_reader_for_resolutions([fake_file_dict["ocprod_5d"]]) # Check how many datasets are available. This file contains all of them. assert len(list(reader.available_dataset_names)) == 94 res = reader.load(ds_list_all) assert len(res) == len(ds_list_all) for curds in ds_list_all: np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values) - assert res[curds].attrs['sensor'] == 'merged' - assert res[curds].attrs['composite_period'] == '5-day' + assert res[curds].attrs["sensor"] == "merged" + assert res[curds].attrs["composite_period"] == "5-day" def test_start_time(self, fake_file_dict): """Test start time property.""" - reader = self._create_reader_for_resolutions([fake_file_dict['k490_1d']]) + reader = self._create_reader_for_resolutions([fake_file_dict["k490_1d"]]) assert reader.start_time == datetime(2021, 8, 1, 0, 0, 0) def test_end_time(self, fake_file_dict): """Test end time property.""" - reader = self._create_reader_for_resolutions([fake_file_dict['iop_8d']]) + reader = self._create_reader_for_resolutions([fake_file_dict["iop_8d"]]) assert reader.end_time == datetime(2021, 8, 31, 23, 59, 0) def test_correct_dimnames(self, fake_file_dict): """Check that the loaded dimension names are correct.""" - reader = self._create_reader_for_resolutions([fake_file_dict['ocprod_5d']]) + reader = self._create_reader_for_resolutions([fake_file_dict["ocprod_5d"]]) res = reader.load(ds_list_all) for dsname in ds_list_all: - assert res[dsname].dims[0] == 'y' - assert res[dsname].dims[1] == 'x' + assert res[dsname].dims[0] == "y" + assert res[dsname].dims[1] == "x" diff --git a/satpy/tests/reader_tests/test_olci_nc.py b/satpy/tests/reader_tests/test_olci_nc.py index 6761511cf5..b6f5863a25 100644 --- a/satpy/tests/reader_tests/test_olci_nc.py +++ b/satpy/tests/reader_tests/test_olci_nc.py @@ -24,7 +24,7 @@ class TestOLCIReader(unittest.TestCase): """Test various olci_nc filehandlers.""" - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_instantiate(self, mocked_dataset): """Test initialization of file handlers.""" import xarray as xr @@ -34,66 +34,66 @@ def test_instantiate(self, mocked_dataset): cal_data = xr.Dataset( { - 'solar_flux': (('bands'), [0, 1, 2]), - 'detector_index': (('bands'), [0, 1, 2]), + "solar_flux": (("bands"), [0, 1, 2]), + "detector_index": (("bands"), [0, 1, 2]), }, - {'bands': [0, 1, 2], }, + {"bands": [0, 1, 2], }, ) - ds_id = make_dataid(name='Oa01', calibration='reflectance') - ds_id2 = make_dataid(name='wsqf', calibration='reflectance') - filename_info = {'mission_id': 'S3A', 'dataset_name': 'Oa01', 'start_time': 0, 'end_time': 0} + ds_id = make_dataid(name="Oa01", calibration="reflectance") + ds_id2 = make_dataid(name="wsqf", calibration="reflectance") + filename_info = {"mission_id": "S3A", "dataset_name": "Oa01", "start_time": 0, "end_time": 0} - test = NCOLCIBase('somedir/somefile.nc', 
filename_info, 'c') + test = NCOLCIBase("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCOLCICal('somedir/somefile.nc', filename_info, 'c') + test = NCOLCICal("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCOLCIGeo('somedir/somefile.nc', filename_info, 'c') + test = NCOLCIGeo("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCOLCIChannelBase('somedir/somefile.nc', filename_info, 'c') + test = NCOLCIChannelBase("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() cal = mock.Mock() cal.nc = cal_data - test = NCOLCI1B('somedir/somefile.nc', filename_info, 'c', cal) + test = NCOLCI1B("somedir/somefile.nc", filename_info, "c", cal) test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCOLCI2('somedir/somefile.nc', filename_info, 'c') - test.get_dataset(ds_id, {'nc_key': 'the_key'}) - test.get_dataset(ds_id2, {'nc_key': 'the_key'}) + test = NCOLCI2("somedir/somefile.nc", filename_info, "c") + test.get_dataset(ds_id, {"nc_key": "the_key"}) + test.get_dataset(ds_id2, {"nc_key": "the_key"}) mocked_dataset.assert_called() mocked_dataset.reset_mock() - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_open_file_objects(self, mocked_open_dataset): """Test initialization of file handlers.""" from satpy.readers.olci_nc import NCOLCIBase - filename_info = {'mission_id': 'S3A', 'dataset_name': 'Oa01', 'start_time': 0, 'end_time': 0} + filename_info = {"mission_id": "S3A", "dataset_name": "Oa01", "start_time": 0, "end_time": 0} open_file = mock.MagicMock() - file_handler = NCOLCIBase(open_file, filename_info, 'c') + file_handler = NCOLCIBase(open_file, filename_info, "c") # deepcode ignore W0104: This is a property that is actually a function call. 
file_handler.nc # pylint: disable=W0104 mocked_open_dataset.assert_called() open_file.open.assert_called() assert (open_file.open.return_value in mocked_open_dataset.call_args[0] or - open_file.open.return_value == mocked_open_dataset.call_args[1].get('filename_or_obj')) + open_file.open.return_value == mocked_open_dataset.call_args[1].get("filename_or_obj")) - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_get_mask(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -101,15 +101,15 @@ def test_get_mask(self, mocked_dataset): from satpy.readers.olci_nc import NCOLCI2 from satpy.tests.utils import make_dataid - mocked_dataset.return_value = xr.Dataset({'mask': (['rows', 'columns'], + mocked_dataset.return_value = xr.Dataset({"mask": (["rows", "columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))}, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}) - ds_id = make_dataid(name='mask') - filename_info = {'mission_id': 'S3A', 'dataset_name': 'mask', 'start_time': 0, 'end_time': 0} - test = NCOLCI2('somedir/somefile.nc', filename_info, 'c') - res = test.get_dataset(ds_id, {'nc_key': 'mask'}) - self.assertEqual(res.dtype, np.dtype('bool')) + coords={"rows": np.arange(5), + "columns": np.arange(6)}) + ds_id = make_dataid(name="mask") + filename_info = {"mission_id": "S3A", "dataset_name": "mask", "start_time": 0, "end_time": 0} + test = NCOLCI2("somedir/somefile.nc", filename_info, "c") + res = test.get_dataset(ds_id, {"nc_key": "mask"}) + self.assertEqual(res.dtype, np.dtype("bool")) expected = np.array([[True, False, True, True, True, True], [False, False, True, True, False, False], [False, False, False, False, False, True], @@ -117,7 +117,7 @@ def test_get_mask(self, mocked_dataset): [True, False, False, True, False, False]]) np.testing.assert_array_equal(res.values, expected) - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_get_mask_with_alternative_items(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -125,19 +125,19 @@ def test_get_mask_with_alternative_items(self, mocked_dataset): from satpy.readers.olci_nc import NCOLCI2 from satpy.tests.utils import make_dataid - mocked_dataset.return_value = xr.Dataset({'mask': (['rows', 'columns'], + mocked_dataset.return_value = xr.Dataset({"mask": (["rows", "columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))}, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}) - ds_id = make_dataid(name='mask') - filename_info = {'mission_id': 'S3A', 'dataset_name': 'mask', 'start_time': 0, 'end_time': 0} - test = NCOLCI2('somedir/somefile.nc', filename_info, 'c', mask_items=["INVALID"]) - res = test.get_dataset(ds_id, {'nc_key': 'mask'}) - self.assertEqual(res.dtype, np.dtype('bool')) + coords={"rows": np.arange(5), + "columns": np.arange(6)}) + ds_id = make_dataid(name="mask") + filename_info = {"mission_id": "S3A", "dataset_name": "mask", "start_time": 0, "end_time": 0} + test = NCOLCI2("somedir/somefile.nc", filename_info, "c", mask_items=["INVALID"]) + res = test.get_dataset(ds_id, {"nc_key": "mask"}) + self.assertEqual(res.dtype, np.dtype("bool")) expected = np.array([True] + [False] * 29).reshape(5, 6) np.testing.assert_array_equal(res.values, expected) - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_olci_angles(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -146,31 +146,31 @@ def test_olci_angles(self, mocked_dataset): from satpy.readers.olci_nc 
import NCOLCIAngles from satpy.tests.utils import make_dataid attr_dict = { - 'ac_subsampling_factor': 1, - 'al_subsampling_factor': 2, + "ac_subsampling_factor": 1, + "al_subsampling_factor": 2, } - mocked_dataset.return_value = xr.Dataset({'SAA': (['tie_rows', 'tie_columns'], + mocked_dataset.return_value = xr.Dataset({"SAA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'SZA': (['tie_rows', 'tie_columns'], + "SZA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'OAA': (['tie_rows', 'tie_columns'], + "OAA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'OZA': (['tie_rows', 'tie_columns'], + "OZA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))}, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}, + coords={"rows": np.arange(5), + "columns": np.arange(6)}, attrs=attr_dict) - filename_info = {'mission_id': 'S3A', 'dataset_name': 'Oa01', 'start_time': 0, 'end_time': 0} + filename_info = {"mission_id": "S3A", "dataset_name": "Oa01", "start_time": 0, "end_time": 0} - ds_id = make_dataid(name='solar_azimuth_angle') - ds_id2 = make_dataid(name='satellite_zenith_angle') - test = NCOLCIAngles('somedir/somefile.nc', filename_info, 'c') + ds_id = make_dataid(name="solar_azimuth_angle") + ds_id2 = make_dataid(name="satellite_zenith_angle") + test = NCOLCIAngles("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_olci_meteo(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -179,26 +179,26 @@ def test_olci_meteo(self, mocked_dataset): from satpy.readers.olci_nc import NCOLCIMeteo from satpy.tests.utils import make_dataid attr_dict = { - 'ac_subsampling_factor': 1, - 'al_subsampling_factor': 2, + "ac_subsampling_factor": 1, + "al_subsampling_factor": 2, } - data = {'humidity': (['tie_rows', 'tie_columns'], + data = {"humidity": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'total_ozone': (['tie_rows', 'tie_columns'], + "total_ozone": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'sea_level_pressure': (['tie_rows', 'tie_columns'], + "sea_level_pressure": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'total_columnar_water_vapour': (['tie_rows', 'tie_columns'], + "total_columnar_water_vapour": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))} mocked_dataset.return_value = xr.Dataset(data, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}, + coords={"rows": np.arange(5), + "columns": np.arange(6)}, attrs=attr_dict) - filename_info = {'mission_id': 'S3A', 'dataset_name': 'humidity', 'start_time': 0, 'end_time': 0} + filename_info = {"mission_id": "S3A", "dataset_name": "humidity", "start_time": 0, "end_time": 0} - ds_id = make_dataid(name='humidity') - ds_id2 = make_dataid(name='total_ozone') - test = NCOLCIMeteo('somedir/somefile.nc', filename_info, 'c') + ds_id = make_dataid(name="humidity") + ds_id2 = make_dataid(name="total_ozone") + test = NCOLCIMeteo("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) mocked_dataset.assert_called() @@ -213,28 +213,28 @@ def test_chl_nn(self, 
mocked_dataset): from satpy.readers.olci_nc import NCOLCI2 from satpy.tests.utils import make_dataid attr_dict = { - 'ac_subsampling_factor': 64, - 'al_subsampling_factor': 1, + "ac_subsampling_factor": 64, + "al_subsampling_factor": 1, } - data = {'CHL_NN': (['rows', 'columns'], + data = {"CHL_NN": (["rows", "columns"], np.arange(30).reshape(5, 6).astype(float), {"units": "lg(re mg.m-3)"})} mocked_dataset.return_value = xr.Dataset(data, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}, + coords={"rows": np.arange(5), + "columns": np.arange(6)}, attrs=attr_dict) - ds_info = {'name': 'chl_nn', 'sensor': 'olci', 'resolution': 300, - 'standard_name': 'algal_pigment_concentration', 'units': 'lg(re mg.m-3)', - 'coordinates': ('longitude', 'latitude'), 'file_type': 'esa_l2_chl_nn', 'nc_key': 'CHL_NN', - 'modifiers': ()} - filename_info = {'mission_id': 'S3A', 'datatype_id': 'WFR', - 'start_time': datetime.datetime(2019, 9, 24, 9, 29, 39), - 'end_time': datetime.datetime(2019, 9, 24, 9, 32, 39), - 'creation_time': datetime.datetime(2019, 9, 24, 11, 40, 26), 'duration': 179, 'cycle': 49, - 'relative_orbit': 307, 'frame': 1800, 'centre': 'MAR', 'mode': 'O', 'timeliness': 'NR', - 'collection': '002'} - ds_id = make_dataid(name='chl_nn') - file_handler = NCOLCI2('somedir/somefile.nc', filename_info, None, unlog=True) + ds_info = {"name": "chl_nn", "sensor": "olci", "resolution": 300, + "standard_name": "algal_pigment_concentration", "units": "lg(re mg.m-3)", + "coordinates": ("longitude", "latitude"), "file_type": "esa_l2_chl_nn", "nc_key": "CHL_NN", + "modifiers": ()} + filename_info = {"mission_id": "S3A", "datatype_id": "WFR", + "start_time": datetime.datetime(2019, 9, 24, 9, 29, 39), + "end_time": datetime.datetime(2019, 9, 24, 9, 32, 39), + "creation_time": datetime.datetime(2019, 9, 24, 11, 40, 26), "duration": 179, "cycle": 49, + "relative_orbit": 307, "frame": 1800, "centre": "MAR", "mode": "O", "timeliness": "NR", + "collection": "002"} + ds_id = make_dataid(name="chl_nn") + file_handler = NCOLCI2("somedir/somefile.nc", filename_info, None, unlog=True) res = file_handler.get_dataset(ds_id, ds_info) assert res.attrs["units"] == "mg.m-3" @@ -251,13 +251,13 @@ def test_bitflags(self): import numpy as np from satpy.readers.olci_nc import BitFlags - flag_list = ['INVALID', 'WATER', 'LAND', 'CLOUD', 'SNOW_ICE', - 'INLAND_WATER', 'TIDAL', 'COSMETIC', 'SUSPECT', 'HISOLZEN', - 'SATURATED', 'MEGLINT', 'HIGHGLINT', 'WHITECAPS', - 'ADJAC', 'WV_FAIL', 'PAR_FAIL', 'AC_FAIL', 'OC4ME_FAIL', - 'OCNN_FAIL', 'Extra_1', 'KDM_FAIL', 'Extra_2', - 'CLOUD_AMBIGUOUS', 'CLOUD_MARGIN', 'BPAC_ON', - 'WHITE_SCATT', 'LOWRW', 'HIGHRW'] + flag_list = ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE", + "INLAND_WATER", "TIDAL", "COSMETIC", "SUSPECT", "HISOLZEN", + "SATURATED", "MEGLINT", "HIGHGLINT", "WHITECAPS", + "ADJAC", "WV_FAIL", "PAR_FAIL", "AC_FAIL", "OC4ME_FAIL", + "OCNN_FAIL", "Extra_1", "KDM_FAIL", "Extra_2", + "CLOUD_AMBIGUOUS", "CLOUD_MARGIN", "BPAC_ON", + "WHITE_SCATT", "LOWRW", "HIGHRW"] bits = np.array([1 << x for x in range(len(flag_list))]) diff --git a/satpy/tests/reader_tests/test_omps_edr.py b/satpy/tests/reader_tests/test_omps_edr.py index 2c211013cc..f89e41f5d0 100644 --- a/satpy/tests/reader_tests/test_omps_edr.py +++ b/satpy/tests/reader_tests/test_omps_edr.py @@ -44,120 +44,120 @@ def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = {} attrs = [] - if 'SO2NRT' in filename: - k = 'HDFEOS/SWATHS/OMPS Column Amount SO2/Data 
Fields/ColumnAmountSO2_TRM' + if "SO2NRT" in filename: + k = "HDFEOS/SWATHS/OMPS Column Amount SO2/Data Fields/ColumnAmountSO2_TRM" file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/ScaleFactor'] = 1.1 - file_content[k + '/attr/Offset'] = 0.1 - file_content[k + '/attr/MissingValue'] = -1 - file_content[k + '/attr/Title'] = 'Vertical Column Amount SO2 (TRM)' - file_content[k + '/attr/Units'] = 'D.U.' - file_content[k + '/attr/ValidRange'] = (-10, 2000) - k = 'HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Longitude' + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/ScaleFactor"] = 1.1 + file_content[k + "/attr/Offset"] = 0.1 + file_content[k + "/attr/MissingValue"] = -1 + file_content[k + "/attr/Title"] = "Vertical Column Amount SO2 (TRM)" + file_content[k + "/attr/Units"] = "D.U." + file_content[k + "/attr/ValidRange"] = (-10, 2000) + k = "HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Longitude" file_content[k] = DEFAULT_LON_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/ScaleFactor'] = 1.1 - file_content[k + '/attr/Offset'] = 0.1 - file_content[k + '/attr/Units'] = 'deg' - file_content[k + '/attr/MissingValue'] = -1 - file_content[k + '/attr/Title'] = 'Geodetic Longitude' - file_content[k + '/attr/ValidRange'] = (-180, 180) - k = 'HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Latitude' + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/ScaleFactor"] = 1.1 + file_content[k + "/attr/Offset"] = 0.1 + file_content[k + "/attr/Units"] = "deg" + file_content[k + "/attr/MissingValue"] = -1 + file_content[k + "/attr/Title"] = "Geodetic Longitude" + file_content[k + "/attr/ValidRange"] = (-180, 180) + k = "HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Latitude" file_content[k] = DEFAULT_LAT_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/ScaleFactor'] = 1.1 - file_content[k + '/attr/Offset'] = 0.1 - file_content[k + '/attr/Units'] = 'deg' - file_content[k + '/attr/MissingValue'] = -1 - file_content[k + '/attr/Title'] = 'Geodetic Latitude' - file_content[k + '/attr/ValidRange'] = (-90, 90) - elif 'NMSO2' in filename: - file_content['GEOLOCATION_DATA/Longitude'] = DEFAULT_LON_DATA - file_content['GEOLOCATION_DATA/Longitude/shape'] = DEFAULT_FILE_SHAPE - file_content['GEOLOCATION_DATA/Longitude/attr/valid_max'] = 180 - file_content['GEOLOCATION_DATA/Longitude/attr/valid_min'] = -180 - file_content['GEOLOCATION_DATA/Longitude/attr/_FillValue'] = -1.26765e+30 - file_content['GEOLOCATION_DATA/Longitude/attr/long_name'] = 'Longitude' - file_content['GEOLOCATION_DATA/Longitude/attr/standard_name'] = 'longitude' - file_content['GEOLOCATION_DATA/Longitude/attr/units'] = 'degrees_east' - file_content['GEOLOCATION_DATA/Latitude'] = DEFAULT_LAT_DATA - file_content['GEOLOCATION_DATA/Latitude/shape'] = DEFAULT_FILE_SHAPE - file_content['GEOLOCATION_DATA/Latitude/attr/valid_max'] = 90 - file_content['GEOLOCATION_DATA/Latitude/attr/valid_min'] = -90 - file_content['GEOLOCATION_DATA/Latitude/attr/_FillValue'] = -1.26765e+30 - file_content['GEOLOCATION_DATA/Latitude/attr/long_name'] = 'Latitude' - file_content['GEOLOCATION_DATA/Latitude/attr/standard_name'] = 'latitude' - file_content['GEOLOCATION_DATA/Latitude/attr/units'] = 'degress_north' + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/ScaleFactor"] = 1.1 + file_content[k + "/attr/Offset"] = 0.1 + file_content[k + 
"/attr/Units"] = "deg" + file_content[k + "/attr/MissingValue"] = -1 + file_content[k + "/attr/Title"] = "Geodetic Latitude" + file_content[k + "/attr/ValidRange"] = (-90, 90) + elif "NMSO2" in filename: + file_content["GEOLOCATION_DATA/Longitude"] = DEFAULT_LON_DATA + file_content["GEOLOCATION_DATA/Longitude/shape"] = DEFAULT_FILE_SHAPE + file_content["GEOLOCATION_DATA/Longitude/attr/valid_max"] = 180 + file_content["GEOLOCATION_DATA/Longitude/attr/valid_min"] = -180 + file_content["GEOLOCATION_DATA/Longitude/attr/_FillValue"] = -1.26765e+30 + file_content["GEOLOCATION_DATA/Longitude/attr/long_name"] = "Longitude" + file_content["GEOLOCATION_DATA/Longitude/attr/standard_name"] = "longitude" + file_content["GEOLOCATION_DATA/Longitude/attr/units"] = "degrees_east" + file_content["GEOLOCATION_DATA/Latitude"] = DEFAULT_LAT_DATA + file_content["GEOLOCATION_DATA/Latitude/shape"] = DEFAULT_FILE_SHAPE + file_content["GEOLOCATION_DATA/Latitude/attr/valid_max"] = 90 + file_content["GEOLOCATION_DATA/Latitude/attr/valid_min"] = -90 + file_content["GEOLOCATION_DATA/Latitude/attr/_FillValue"] = -1.26765e+30 + file_content["GEOLOCATION_DATA/Latitude/attr/long_name"] = "Latitude" + file_content["GEOLOCATION_DATA/Latitude/attr/standard_name"] = "latitude" + file_content["GEOLOCATION_DATA/Latitude/attr/units"] = "degress_north" - k = 'SCIENCE_DATA/ColumnAmountSO2_TRM' + k = "SCIENCE_DATA/ColumnAmountSO2_TRM" file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/_FillValue'] = -1.26765e+30 - file_content[k + '/attr/long_name'] = 'Column Amount SO2 (TRM)' - file_content[k + '/attr/units'] = 'DU' - file_content[k + '/attr/valid_max'] = 2000 - file_content[k + '/attr/valid_min'] = -10 + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/_FillValue"] = -1.26765e+30 + file_content[k + "/attr/long_name"] = "Column Amount SO2 (TRM)" + file_content[k + "/attr/units"] = "DU" + file_content[k + "/attr/valid_max"] = 2000 + file_content[k + "/attr/valid_min"] = -10 - k = 'SCIENCE_DATA/ColumnAmountSO2_STL' + k = "SCIENCE_DATA/ColumnAmountSO2_STL" file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/_FillValue'] = -1.26765e+30 - file_content[k + '/attr/long_name'] = 'Column Amount SO2 (STL)' - file_content[k + '/attr/units'] = 'DU' + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/_FillValue"] = -1.26765e+30 + file_content[k + "/attr/long_name"] = "Column Amount SO2 (STL)" + file_content[k + "/attr/units"] = "DU" - k = 'SCIENCE_DATA/ColumnAmountSO2_TRL' + k = "SCIENCE_DATA/ColumnAmountSO2_TRL" file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/_FillValue'] = -1.26765e+30 - file_content[k + '/attr/long_name'] = 'Column Amount SO2 (TRL)' - file_content[k + '/attr/units'] = 'DU' - file_content[k + '/attr/valid_max'] = 2000 - file_content[k + '/attr/valid_min'] = -10 - file_content[k + '/attr/DIMENSION_LIST'] = [10, 10] - attrs = ['_FillValue', 'long_name', 'units', 'valid_max', 'valid_min', 'DIMENSION_LIST'] + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/_FillValue"] = -1.26765e+30 + file_content[k + "/attr/long_name"] = "Column Amount SO2 (TRL)" + file_content[k + "/attr/units"] = "DU" + file_content[k + "/attr/valid_max"] = 2000 + file_content[k + "/attr/valid_min"] = -10 + file_content[k + "/attr/DIMENSION_LIST"] = [10, 10] + attrs = ["_FillValue", "long_name", "units", 
"valid_max", "valid_min", "DIMENSION_LIST"] - k = 'SCIENCE_DATA/ColumnAmountSO2_TRU' + k = "SCIENCE_DATA/ColumnAmountSO2_TRU" file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/long_name'] = 'Column Amount SO2 (TRU)' - file_content[k + '/attr/units'] = 'DU' - file_content[k + '/attr/valid_max'] = 2000 - file_content[k + '/attr/valid_min'] = -10 + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/long_name"] = "Column Amount SO2 (TRU)" + file_content[k + "/attr/units"] = "DU" + file_content[k + "/attr/valid_max"] = 2000 + file_content[k + "/attr/valid_min"] = -10 # Dataset with out unit - k = 'SCIENCE_DATA/ColumnAmountSO2_PBL' + k = "SCIENCE_DATA/ColumnAmountSO2_PBL" file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/_FillValue'] = -1.26765e+30 - file_content[k + '/attr/long_name'] = 'Column Amount SO2 (PBL)' - file_content[k + '/attr/valid_max'] = 2000 - file_content[k + '/attr/valid_min'] = -10 + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/_FillValue"] = -1.26765e+30 + file_content[k + "/attr/long_name"] = "Column Amount SO2 (PBL)" + file_content[k + "/attr/valid_max"] = 2000 + file_content[k + "/attr/valid_min"] = -10 else: - for k in ['Reflectivity331', 'UVAerosolIndex']: - k = 'SCIENCE_DATA/' + k + for k in ["Reflectivity331", "UVAerosolIndex"]: + k = "SCIENCE_DATA/" + k file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/Units'] = 'Unitless' - if k == 'UVAerosolIndex': - file_content[k + '/attr/ValidRange'] = (-30, 30) - file_content[k + '/attr/Title'] = 'UV Aerosol Index' + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/Units"] = "Unitless" + if k == "UVAerosolIndex": + file_content[k + "/attr/ValidRange"] = (-30, 30) + file_content[k + "/attr/Title"] = "UV Aerosol Index" else: - file_content[k + '/attr/ValidRange'] = (-0.15, 1.15) - file_content[k + '/attr/Title'] = 'Effective Surface Reflectivity at 331 nm' - file_content[k + '/attr/_FillValue'] = -1. - file_content['GEOLOCATION_DATA/Longitude'] = DEFAULT_LON_DATA - file_content['GEOLOCATION_DATA/Longitude/shape'] = DEFAULT_FILE_SHAPE - file_content['GEOLOCATION_DATA/Longitude/attr/ValidRange'] = (-180, 180) - file_content['GEOLOCATION_DATA/Longitude/attr/_FillValue'] = -999. - file_content['GEOLOCATION_DATA/Longitude/attr/Title'] = 'Geodetic Longitude' - file_content['GEOLOCATION_DATA/Longitude/attr/Units'] = 'deg' - file_content['GEOLOCATION_DATA/Latitude'] = DEFAULT_LAT_DATA - file_content['GEOLOCATION_DATA/Latitude/shape'] = DEFAULT_FILE_SHAPE - file_content['GEOLOCATION_DATA/Latitude/attr/ValidRange'] = (-90, 90) - file_content['GEOLOCATION_DATA/Latitude/attr/_FillValue'] = -999. - file_content['GEOLOCATION_DATA/Latitude/attr/Title'] = 'Geodetic Latitude' - file_content['GEOLOCATION_DATA/Latitude/attr/Units'] = 'deg' + file_content[k + "/attr/ValidRange"] = (-0.15, 1.15) + file_content[k + "/attr/Title"] = "Effective Surface Reflectivity at 331 nm" + file_content[k + "/attr/_FillValue"] = -1. + file_content["GEOLOCATION_DATA/Longitude"] = DEFAULT_LON_DATA + file_content["GEOLOCATION_DATA/Longitude/shape"] = DEFAULT_FILE_SHAPE + file_content["GEOLOCATION_DATA/Longitude/attr/ValidRange"] = (-180, 180) + file_content["GEOLOCATION_DATA/Longitude/attr/_FillValue"] = -999. 
+ file_content["GEOLOCATION_DATA/Longitude/attr/Title"] = "Geodetic Longitude" + file_content["GEOLOCATION_DATA/Longitude/attr/Units"] = "deg" + file_content["GEOLOCATION_DATA/Latitude"] = DEFAULT_LAT_DATA + file_content["GEOLOCATION_DATA/Latitude/shape"] = DEFAULT_FILE_SHAPE + file_content["GEOLOCATION_DATA/Latitude/attr/ValidRange"] = (-90, 90) + file_content["GEOLOCATION_DATA/Latitude/attr/_FillValue"] = -999. + file_content["GEOLOCATION_DATA/Latitude/attr/Title"] = "Geodetic Latitude" + file_content["GEOLOCATION_DATA/Latitude/attr/Units"] = "deg" convert_file_content_to_data_array(file_content, attrs) return file_content @@ -172,12 +172,12 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.omps_edr import EDREOSFileHandler, EDRFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(EDRFileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(EDRFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True - self.p2 = mock.patch.object(EDREOSFileHandler, '__bases__', (EDRFileHandler,)) + self.p2 = mock.patch.object(EDREOSFileHandler, "__bases__", (EDRFileHandler,)) self.fake_handler2 = self.p2.start() self.p2.is_local = True @@ -191,9 +191,9 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5', - 'OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5', - 'OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5', + "OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5", + "OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5", + "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) self.assertEqual(len(loadables), 3) r.create_filehandlers(loadables) @@ -205,35 +205,35 @@ def test_basic_load_so2(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5', - 'OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5', - 'OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5', + "OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5", + "OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5", + "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) self.assertEqual(len(loadables), 3) r.create_filehandlers(loadables) - ds = r.load(['so2_trm']) + ds = r.load(["so2_trm"]) self.assertEqual(len(ds), 1) for d in ds.values(): - self.assertEqual(d.attrs['resolution'], 50000) + self.assertEqual(d.attrs["resolution"], 50000) self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) - ds = r.load(['tcso2_trm_sampo']) + ds = r.load(["tcso2_trm_sampo"]) self.assertEqual(len(ds), 1) for d in ds.values(): - self.assertEqual(d.attrs['resolution'], 50000) + self.assertEqual(d.attrs["resolution"], 50000) 
self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) - ds = r.load(['tcso2_stl_sampo']) + ds = r.load(["tcso2_stl_sampo"]) self.assertEqual(len(ds), 0) # Dataset without _FillValue - ds = r.load(['tcso2_tru_sampo']) + ds = r.load(["tcso2_tru_sampo"]) self.assertEqual(len(ds), 1) # Dataset without unit - ds = r.load(['tcso2_pbl_sampo']) + ds = r.load(["tcso2_pbl_sampo"]) self.assertEqual(len(ds), 0) def test_basic_load_to3(self): @@ -241,22 +241,22 @@ def test_basic_load_to3(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5', - 'OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5', - 'OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5', + "OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5", + "OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5", + "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) self.assertEqual(len(loadables), 3) r.create_filehandlers(loadables) - ds = r.load(['reflectivity_331', 'uvaerosol_index']) + ds = r.load(["reflectivity_331", "uvaerosol_index"]) self.assertEqual(len(ds), 2) for d in ds.values(): - self.assertEqual(d.attrs['resolution'], 50000) + self.assertEqual(d.attrs["resolution"], 50000) self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) - @mock.patch('satpy.readers.hdf5_utils.HDF5FileHandler._get_reference') - @mock.patch('h5py.File') + @mock.patch("satpy.readers.hdf5_utils.HDF5FileHandler._get_reference") + @mock.patch("h5py.File") def test_load_so2_DIMENSION_LIST(self, mock_h5py_file, mock_hdf5_utils_get_reference): """Test load of so2 datasets with DIMENSION_LIST.""" from satpy.readers import load_reader @@ -264,9 +264,9 @@ def test_load_so2_DIMENSION_LIST(self, mock_h5py_file, mock_hdf5_utils_get_refer mock_hdf5_utils_get_reference.return_value = [[[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]]] r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5', + "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) r.create_filehandlers(loadables) - ds = r.load(['tcso2_trl_sampo']) + ds = r.load(["tcso2_trl_sampo"]) self.assertEqual(len(ds), 1) diff --git a/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py b/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py index 937470724f..a529ae9f50 100644 --- a/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py +++ b/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py @@ -28,40 +28,40 @@ class TestSAFENC(unittest.TestCase): """Test various SAFE SAR L2 OCN file handlers.""" - @mock.patch('satpy.readers.safe_sar_l2_ocn.xr') + @mock.patch("satpy.readers.safe_sar_l2_ocn.xr") def setUp(self, xr_): """Set up the tests.""" from satpy.readers.safe_sar_l2_ocn import SAFENC - self.channels = ['owiWindSpeed', 'owiLon', 'owiLat', 'owiHs', 'owiNrcs', 'foo', - 'owiPolarisationName', 'owiCalConstObsi'] + self.channels = ["owiWindSpeed", "owiLon", "owiLat", "owiHs", "owiNrcs", "foo", + "owiPolarisationName", "owiCalConstObsi"] # Mock file access to return a fake dataset. 
self.dummy3d = np.zeros((2, 2, 1)) self.dummy2d = np.zeros((2, 2)) self.dummy1d = np.zeros((2)) self.band = 1 self.nc = xr.Dataset( - {'owiWindSpeed': xr.DataArray(self.dummy2d, dims=('owiAzSize', 'owiRaSize'), attrs={'_FillValue': np.nan}), - 'owiLon': xr.DataArray(data=self.dummy2d, dims=('owiAzSize', 'owiRaSize')), - 'owiLat': xr.DataArray(data=self.dummy2d, dims=('owiAzSize', 'owiRaSize')), - 'owiHs': xr.DataArray(data=self.dummy3d, dims=('owiAzSize', 'owiRaSize', 'oswPartition')), - 'owiNrcs': xr.DataArray(data=self.dummy3d, dims=('owiAzSize', 'owiRaSize', 'oswPolarization')), - 'foo': xr.DataArray(self.dummy2d, dims=('owiAzSize', 'owiRaSize')), - 'owiPolarisationName': xr.DataArray(self.dummy1d, dims=('owiPolarisation')), - 'owiCalConstObsi': xr.DataArray(self.dummy1d, dims=('owiIncSize')) + {"owiWindSpeed": xr.DataArray(self.dummy2d, dims=("owiAzSize", "owiRaSize"), attrs={"_FillValue": np.nan}), + "owiLon": xr.DataArray(data=self.dummy2d, dims=("owiAzSize", "owiRaSize")), + "owiLat": xr.DataArray(data=self.dummy2d, dims=("owiAzSize", "owiRaSize")), + "owiHs": xr.DataArray(data=self.dummy3d, dims=("owiAzSize", "owiRaSize", "oswPartition")), + "owiNrcs": xr.DataArray(data=self.dummy3d, dims=("owiAzSize", "owiRaSize", "oswPolarization")), + "foo": xr.DataArray(self.dummy2d, dims=("owiAzSize", "owiRaSize")), + "owiPolarisationName": xr.DataArray(self.dummy1d, dims=("owiPolarisation")), + "owiCalConstObsi": xr.DataArray(self.dummy1d, dims=("owiIncSize")) }, - attrs={'_FillValue': np.nan, - 'missionName': 'S1A'}) + attrs={"_FillValue": np.nan, + "missionName": "S1A"}) xr_.open_dataset.return_value = self.nc # Instantiate reader using the mocked open_dataset() method. Also, make # the reader believe all abstract methods have been implemented. - self.reader = SAFENC(filename='dummy', - filename_info={'start_time': 0, - 'end_time': 0, - 'fstart_time': 0, - 'fend_time': 0, - 'polarization': 'vv'}, + self.reader = SAFENC(filename="dummy", + filename_info={"start_time": 0, + "end_time": 0, + "fstart_time": 0, + "fend_time": 0, + "polarization": "vv"}, filetype_info={}) def test_init(self): @@ -78,5 +78,5 @@ def test_get_dataset(self): key=make_dataid(name=ch), info={}) # ... 
this only compares the valid (unmasked) elements self.assertTrue(np.all(self.nc[ch] == dt.to_masked_array()), - msg='get_dataset() returns invalid data for ' - 'dataset {}'.format(ch)) + msg="get_dataset() returns invalid data for " + "dataset {}".format(ch)) diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index e796c11b77..4ac4d97cfe 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -33,54 +33,54 @@ class TestSAFEGRD(unittest.TestCase): """Test the SAFE GRD file handler.""" - @mock.patch('rasterio.open') + @mock.patch("rasterio.open") def setUp(self, mocked_rio_open): """Set up the test case.""" from satpy.readers.sar_c_safe import SAFEGRD - filename_info = {'mission_id': 'S1A', 'dataset_name': 'foo', 'start_time': 0, 'end_time': 0, - 'polarization': 'vv'} - filetype_info = 'bla' + filename_info = {"mission_id": "S1A", "dataset_name": "foo", "start_time": 0, "end_time": 0, + "polarization": "vv"} + filetype_info = "bla" self.noisefh = mock.MagicMock() - self.noisefh.get_noise_correction.return_value = xr.DataArray(np.zeros((2, 2)), dims=['y', 'x']) + self.noisefh.get_noise_correction.return_value = xr.DataArray(np.zeros((2, 2)), dims=["y", "x"]) self.calfh = mock.MagicMock() self.calfh.get_calibration_constant.return_value = 1 - self.calfh.get_calibration.return_value = xr.DataArray(np.ones((2, 2)), dims=['y', 'x']) + self.calfh.get_calibration.return_value = xr.DataArray(np.ones((2, 2)), dims=["y", "x"]) self.annotationfh = mock.MagicMock() - self.test_fh = SAFEGRD('S1A_IW_GRDH_1SDV_20190201T024655_20190201T024720_025730_02DC2A_AE07.SAFE/measurement/' - 's1a-iw-grd-vv-20190201t024655-20190201t024720-025730-02dc2a-001.tiff', + self.test_fh = SAFEGRD("S1A_IW_GRDH_1SDV_20190201T024655_20190201T024720_025730_02DC2A_AE07.SAFE/measurement/" + "s1a-iw-grd-vv-20190201t024655-20190201t024720-025730-02dc2a-001.tiff", filename_info, filetype_info, self.calfh, self.noisefh, self.annotationfh) self.mocked_rio_open = mocked_rio_open def test_instantiate(self): """Test initialization of file handlers.""" - assert self.test_fh._polarization == 'vv' + assert self.test_fh._polarization == "vv" assert self.test_fh.calibration == self.calfh assert self.test_fh.noise == self.noisefh self.mocked_rio_open.assert_called() - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_read_calibrated_natural(self, mocked_xarray_open): """Test the calibration routines.""" calibration = mock.MagicMock() calibration.name = "sigma_nought" mocked_xarray_open.return_value.__getitem__.return_value = xr.DataArray(da.from_array(np.array([[0, 1], [2, 3]])), - dims=['y', 'x']) + dims=["y", "x"]) xarr = self.test_fh.get_dataset(DataQuery(name="measurement", polarization="vv", - calibration=calibration, quantity='natural'), info=dict()) + calibration=calibration, quantity="natural"), info=dict()) np.testing.assert_allclose(xarr, [[np.nan, 2], [5, 10]]) - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_read_calibrated_dB(self, mocked_xarray_open): """Test the calibration routines.""" calibration = mock.MagicMock() calibration.name = "sigma_nought" mocked_xarray_open.return_value.__getitem__.return_value = xr.DataArray(da.from_array(np.array([[0, 1], [2, 3]])), - dims=['y', 'x']) + dims=["y", "x"]) xarr = self.test_fh.get_dataset(DataQuery(name="measurement", polarization="vv", - calibration=calibration, quantity='dB'), info=dict()) + calibration=calibration, 
quantity="dB"), info=dict()) np.testing.assert_allclose(xarr, [[np.nan, 3.0103], [6.9897, 10]]) def test_read_lon_lats(self): @@ -109,7 +109,7 @@ def __init__(self, *args): FakeGCP(15, 0, 0, 3, 0), ] - crs = dict(init='epsg:4326') + crs = dict(init="epsg:4326") self.mocked_rio_open.return_value.gcps = [gcps, crs] self.mocked_rio_open.return_value.shape = [16, 16] diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index 605f595e1f..b335fd09c8 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -42,46 +42,46 @@ def _create_test_netcdf(filename, resolution=742): lon = -13.0 * data_visir lat = xr.DataArray(lat, - dims=('y', 'x'), - attrs={'name': 'lat', - 'standard_name': 'latitude', - 'modifiers': np.array([])}) + dims=("y", "x"), + attrs={"name": "lat", + "standard_name": "latitude", + "modifiers": np.array([])}) lon = xr.DataArray(lon, - dims=('y', 'x'), - attrs={'name': 'lon', - 'standard_name': 'longitude', - 'modifiers': np.array([])}) + dims=("y", "x"), + attrs={"name": "lon", + "standard_name": "longitude", + "modifiers": np.array([])}) solar_zenith_angle_i = xr.DataArray(data_visir, - dims=('y', 'x'), - attrs={'name': 'solar_zenith_angle', - 'coordinates': 'lat lon', - 'resolution': resolution}) + dims=("y", "x"), + attrs={"name": "solar_zenith_angle", + "coordinates": "lat lon", + "resolution": resolution}) scene = Scene() - scene.attrs['sensor'] = ['viirs'] + scene.attrs["sensor"] = ["viirs"] scene_dict = { - 'lat': lat, - 'lon': lon, - 'solar_zenith_angle': solar_zenith_angle_i + "lat": lat, + "lon": lon, + "solar_zenith_angle": solar_zenith_angle_i } tstart = datetime(2019, 4, 1, 12, 0) tend = datetime(2019, 4, 1, 12, 15) common_attrs = { - 'start_time': tstart, - 'end_time': tend, - 'platform_name': 'NOAA 20', - 'orbit_number': 99999 + "start_time": tstart, + "end_time": tend, + "platform_name": "NOAA 20", + "orbit_number": 99999 } for key in scene_dict: scene[key] = scene_dict[key] - if key != 'swath_data': + if key != "swath_data": scene[key].attrs.update(common_attrs) - scene.save_datasets(writer='cf', + scene.save_datasets(writer="cf", filename=filename, - engine='h5netcdf', + engine="h5netcdf", flatten_attrs=True, pretty=True) return filename @@ -100,15 +100,15 @@ def _cf_scene(): lon = -13.0 * np.array([[1, 2], [3, 4]]) proj_dict = { - 'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, - 'lon_0': 0.0, 'proj': 'geos', 'units': 'm' + "a": 6378169.0, "b": 6356583.8, "h": 35785831.0, + "lon_0": 0.0, "proj": "geos", "units": "m" } x_size, y_size = data_visir.shape area_extent = (339045.5577, 4365586.6063, 1068143.527, 4803645.4685) area = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, x_size, y_size, @@ -120,94 +120,94 @@ def _cf_scene(): x_visir = x[0, :] common_attrs = { - 'start_time': tstart, - 'end_time': tend, - 'platform_name': 'tirosn', - 'orbit_number': 99999, - 'area': area + "start_time": tstart, + "end_time": tend, + "platform_name": "tirosn", + "orbit_number": 99999, + "area": area } vis006 = xr.DataArray(data_visir, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_vis006)}, + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_vis006)}, attrs={ - 'name': 'image0', 'id_tag': 'ch_r06', - 'coordinates': 'lat lon', 'resolution': 1000, 'calibration': 'reflectance', - 'wavelength': WavelengthRange(min=0.58, central=0.63, max=0.68, unit='µm'), - 'orbital_parameters': 
{ - 'projection_longitude': 1, - 'projection_latitude': 1, - 'projection_altitude': 1, - 'satellite_nominal_longitude': 1, - 'satellite_nominal_latitude': 1, - 'satellite_actual_longitude': 1, - 'satellite_actual_latitude': 1, - 'satellite_actual_altitude': 1, - 'nadir_longitude': 1, - 'nadir_latitude': 1, - 'only_in_1': False + "name": "image0", "id_tag": "ch_r06", + "coordinates": "lat lon", "resolution": 1000, "calibration": "reflectance", + "wavelength": WavelengthRange(min=0.58, central=0.63, max=0.68, unit="µm"), + "orbital_parameters": { + "projection_longitude": 1, + "projection_latitude": 1, + "projection_altitude": 1, + "satellite_nominal_longitude": 1, + "satellite_nominal_latitude": 1, + "satellite_actual_longitude": 1, + "satellite_actual_latitude": 1, + "satellite_actual_altitude": 1, + "nadir_longitude": 1, + "nadir_latitude": 1, + "only_in_1": False } }) ir_108 = xr.DataArray(data_visir, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_vis006)}, - attrs={'name': 'image1', 'id_tag': 'ch_tb11', 'coordinates': 'lat lon'}) + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_vis006)}, + attrs={"name": "image1", "id_tag": "ch_tb11", "coordinates": "lat lon"}) qual_f = xr.DataArray(qual_data, - dims=('y', 'z'), - coords={'y': y_visir, 'z': z_visir, 'acq_time': ('y', time_vis006)}, + dims=("y", "z"), + coords={"y": y_visir, "z": z_visir, "acq_time": ("y", time_vis006)}, attrs={ - 'name': 'qual_flags', - 'id_tag': 'qual_flags' + "name": "qual_flags", + "id_tag": "qual_flags" }) lat = xr.DataArray(lat, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir}, + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir}, attrs={ - 'name': 'lat', - 'standard_name': 'latitude', - 'modifiers': np.array([]) + "name": "lat", + "standard_name": "latitude", + "modifiers": np.array([]) }) lon = xr.DataArray(lon, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir}, + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir}, attrs={ - 'name': 'lon', - 'standard_name': 'longitude', - 'modifiers': np.array([]) + "name": "lon", + "standard_name": "longitude", + "modifiers": np.array([]) }) # for prefix testing prefix_data = xr.DataArray(data_visir, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir}, + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir}, attrs={ - 'name': '1', 'id_tag': 'ch_r06', - 'coordinates': 'lat lon', 'resolution': 1000, 'calibration': 'reflectance', - 'wavelength': WavelengthRange(min=0.58, central=0.63, max=0.68, unit='µm'), - 'area': area + "name": "1", "id_tag": "ch_r06", + "coordinates": "lat lon", "resolution": 1000, "calibration": "reflectance", + "wavelength": WavelengthRange(min=0.58, central=0.63, max=0.68, unit="µm"), + "area": area }) # for swath testing area = SwathDefinition(lons=lon, lats=lat) swath_data = prefix_data.copy() - swath_data.attrs.update({'name': 'swath_data', 'area': area}) + swath_data.attrs.update({"name": "swath_data", "area": area}) scene = Scene() - scene.attrs['sensor'] = ['avhrr-1', 'avhrr-2', 'avhrr-3'] + scene.attrs["sensor"] = ["avhrr-1", "avhrr-2", "avhrr-3"] scene_dict = { - 'image0': vis006, - 'image1': ir_108, - 'swath_data': swath_data, - '1': prefix_data, - 'lat': lat, - 'lon': lon, - 'qual_flags': qual_f + "image0": vis006, + "image1": ir_108, + "swath_data": swath_data, + "1": prefix_data, + "lat": lat, + "lon": lon, + "qual_flags": qual_f } for key in scene_dict: scene[key] = scene_dict[key] - if key != 'swath_data': + if key != "swath_data": 
scene[key].attrs.update(common_attrs) return scene @@ -215,14 +215,14 @@ def _cf_scene(): @pytest.fixture def _nc_filename(tmp_path): now = datetime.utcnow() - filename = f'testingcfwriter{now:%Y%j%H%M%S}-viirs-mband-20201007075915-20201007080744.nc' + filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-mband-20201007075915-20201007080744.nc" return str(tmp_path / filename) @pytest.fixture def _nc_filename_i(tmp_path): now = datetime.utcnow() - filename = f'testingcfwriter{now:%Y%j%H%M%S}-viirs-iband-20201007075915-20201007080744.nc' + filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-iband-20201007075915-20201007080744.nc" return str(tmp_path / filename) @@ -231,20 +231,20 @@ class TestCFReader: def test_write_and_read_with_area_definition(self, _cf_scene, _nc_filename): """Save a dataset with an area definition to file with cf_writer and read the data again.""" - _cf_scene.save_datasets(writer='cf', + _cf_scene.save_datasets(writer="cf", filename=_nc_filename, - engine='h5netcdf', + engine="h5netcdf", flatten_attrs=True, pretty=True) - scn_ = Scene(reader='satpy_cf_nc', + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename]) - scn_.load(['image0', 'image1', 'lat']) - np.testing.assert_array_equal(scn_['image0'].data, _cf_scene['image0'].data) - np.testing.assert_array_equal(scn_['lat'].data, _cf_scene['lat'].data) # lat loaded as dataset - np.testing.assert_array_equal(scn_['image0'].coords['lon'], _cf_scene['lon'].data) # lon loded as coord - assert isinstance(scn_['image0'].attrs['wavelength'], WavelengthRange) - expected_area = _cf_scene['image0'].attrs['area'] - actual_area = scn_['image0'].attrs['area'] + scn_.load(["image0", "image1", "lat"]) + np.testing.assert_array_equal(scn_["image0"].data, _cf_scene["image0"].data) + np.testing.assert_array_equal(scn_["lat"].data, _cf_scene["lat"].data) # lat loaded as dataset + np.testing.assert_array_equal(scn_["image0"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord + assert isinstance(scn_["image0"].attrs["wavelength"], WavelengthRange) + expected_area = _cf_scene["image0"].attrs["area"] + actual_area = scn_["image0"].attrs["area"] assert pytest.approx(expected_area.area_extent, 0.000001) == actual_area.area_extent assert expected_area.proj_dict == actual_area.proj_dict assert expected_area.shape == actual_area.shape @@ -254,146 +254,146 @@ def test_write_and_read_with_area_definition(self, _cf_scene, _nc_filename): def test_write_and_read_with_swath_definition(self, _cf_scene, _nc_filename): """Save a dataset with a swath definition to file with cf_writer and read the data again.""" - _cf_scene.save_datasets(writer='cf', + _cf_scene.save_datasets(writer="cf", filename=_nc_filename, - engine='h5netcdf', + engine="h5netcdf", flatten_attrs=True, pretty=True, datasets=["swath_data"]) - scn_ = Scene(reader='satpy_cf_nc', + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename]) - scn_.load(['swath_data']) - expected_area = _cf_scene['swath_data'].attrs['area'] - actual_area = scn_['swath_data'].attrs['area'] + scn_.load(["swath_data"]) + expected_area = _cf_scene["swath_data"].attrs["area"] + actual_area = scn_["swath_data"].attrs["area"] assert expected_area.shape == actual_area.shape np.testing.assert_array_equal(expected_area.lons.data, actual_area.lons.data) np.testing.assert_array_equal(expected_area.lats.data, actual_area.lats.data) def test_fix_modifier_attr(self): """Check that fix modifier can handle empty list as modifier attribute.""" - reader = SatpyCFFileHandler('filename', + reader = 
SatpyCFFileHandler("filename", {}, - {'filetype': 'info'}) - ds_info = {'modifiers': []} + {"filetype": "info"}) + ds_info = {"modifiers": []} reader.fix_modifier_attr(ds_info) - assert ds_info['modifiers'] == () + assert ds_info["modifiers"] == () def test_read_prefixed_channels(self, _cf_scene, _nc_filename): """Check channels starting with digit is prefixed and read back correctly.""" - _cf_scene.save_datasets(writer='cf', + _cf_scene.save_datasets(writer="cf", filename=_nc_filename, - engine='netcdf4', + engine="netcdf4", flatten_attrs=True, pretty=True) - scn_ = Scene(reader='satpy_cf_nc', + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename]) - scn_.load(['1']) - np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loaded as coord + scn_.load(["1"]) + np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loaded as coord - scn_ = Scene(reader='satpy_cf_nc', + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename], reader_kwargs={}) - scn_.load(['1']) - np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loaded as coord + scn_.load(["1"]) + np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loaded as coord # Check that variables starting with a digit is written to filename variable prefixed with xr.open_dataset(_nc_filename) as ds_disk: - np.testing.assert_array_equal(ds_disk['CHANNEL_1'].data, _cf_scene['1'].data) + np.testing.assert_array_equal(ds_disk["CHANNEL_1"].data, _cf_scene["1"].data) def test_read_prefixed_channels_include_orig_name(self, _cf_scene, _nc_filename): """Check channels starting with digit and includeed orig name is prefixed and read back correctly.""" - _cf_scene.save_datasets(writer='cf', + _cf_scene.save_datasets(writer="cf", filename=_nc_filename, - engine='netcdf4', + engine="netcdf4", flatten_attrs=True, pretty=True, include_orig_name=True) - scn_ = Scene(reader='satpy_cf_nc', + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename]) - scn_.load(['1']) - np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loaded as coord + scn_.load(["1"]) + np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loaded as coord - assert scn_['1'].attrs['original_name'] == '1' + assert scn_["1"].attrs["original_name"] == "1" # Check that variables starting with a digit is written to filename variable prefixed with xr.open_dataset(_nc_filename) as ds_disk: - np.testing.assert_array_equal(ds_disk['CHANNEL_1'].data, _cf_scene['1'].data) + np.testing.assert_array_equal(ds_disk["CHANNEL_1"].data, _cf_scene["1"].data) def test_read_prefixed_channels_by_user(self, _cf_scene, _nc_filename): """Check channels starting with digit is prefixed by user and read back correctly.""" - _cf_scene.save_datasets(writer='cf', + _cf_scene.save_datasets(writer="cf", filename=_nc_filename, - engine='netcdf4', + engine="netcdf4", flatten_attrs=True, pretty=True, - numeric_name_prefix='USER') - scn_ = Scene(reader='satpy_cf_nc', - filenames=[_nc_filename], reader_kwargs={'numeric_name_prefix': 
'USER'}) - scn_.load(['1']) - np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loded as coord + numeric_name_prefix="USER") + scn_ = Scene(reader="satpy_cf_nc", + filenames=[_nc_filename], reader_kwargs={"numeric_name_prefix": "USER"}) + scn_.load(["1"]) + np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord # Check that variables starting with a digit is written to filename variable prefixed with xr.open_dataset(_nc_filename) as ds_disk: - np.testing.assert_array_equal(ds_disk['USER1'].data, _cf_scene['1'].data) + np.testing.assert_array_equal(ds_disk["USER1"].data, _cf_scene["1"].data) def test_read_prefixed_channels_by_user2(self, _cf_scene, _nc_filename): """Check channels starting with digit is prefixed by user when saving and read back correctly without prefix.""" - _cf_scene.save_datasets(writer='cf', + _cf_scene.save_datasets(writer="cf", filename=_nc_filename, - engine='netcdf4', + engine="netcdf4", flatten_attrs=True, pretty=True, include_orig_name=False, - numeric_name_prefix='USER') - scn_ = Scene(reader='satpy_cf_nc', + numeric_name_prefix="USER") + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename]) - scn_.load(['USER1']) - np.testing.assert_array_equal(scn_['USER1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['USER1'].coords['lon'], _cf_scene['lon'].data) # lon loded as coord + scn_.load(["USER1"]) + np.testing.assert_array_equal(scn_["USER1"].data, _cf_scene["1"].data) + np.testing.assert_array_equal(scn_["USER1"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord def test_read_prefixed_channels_by_user_include_prefix(self, _cf_scene, _nc_filename): """Check channels starting with digit is prefixed by user and include original name when saving.""" - _cf_scene.save_datasets(writer='cf', + _cf_scene.save_datasets(writer="cf", filename=_nc_filename, - engine='netcdf4', + engine="netcdf4", flatten_attrs=True, pretty=True, include_orig_name=True, - numeric_name_prefix='USER') - scn_ = Scene(reader='satpy_cf_nc', + numeric_name_prefix="USER") + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename]) - scn_.load(['1']) - np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loded as coord + scn_.load(["1"]) + np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord def test_read_prefixed_channels_by_user_no_prefix(self, _cf_scene, _nc_filename): """Check channels starting with digit is not prefixed by user.""" with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=UserWarning, message=".*starts with a digit.*") - _cf_scene.save_datasets(writer='cf', + _cf_scene.save_datasets(writer="cf", filename=_nc_filename, - engine='netcdf4', + engine="netcdf4", flatten_attrs=True, pretty=True, - numeric_name_prefix='') - scn_ = Scene(reader='satpy_cf_nc', + numeric_name_prefix="") + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename]) - scn_.load(['1']) - np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loded as coord + scn_.load(["1"]) + np.testing.assert_array_equal(scn_["1"].data, 
_cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord def test_orbital_parameters(self, _cf_scene, _nc_filename): """Test that the orbital parameters in attributes are handled correctly.""" - _cf_scene.save_datasets(writer='cf', + _cf_scene.save_datasets(writer="cf", filename=_nc_filename) - scn_ = Scene(reader='satpy_cf_nc', + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename]) - scn_.load(['image0']) - orig_attrs = _cf_scene['image0'].attrs['orbital_parameters'] - new_attrs = scn_['image0'].attrs['orbital_parameters'] + scn_.load(["image0"]) + orig_attrs = _cf_scene["image0"].attrs["orbital_parameters"] + new_attrs = scn_["image0"].attrs["orbital_parameters"] assert isinstance(new_attrs, dict) for key in orig_attrs: assert orig_attrs[key] == new_attrs[key] @@ -402,30 +402,30 @@ def test_write_and_read_from_two_files(self, _nc_filename, _nc_filename_i): """Save two datasets with different resolution and read the solar_zenith_angle again.""" _create_test_netcdf(_nc_filename, resolution=742) _create_test_netcdf(_nc_filename_i, resolution=371) - scn_ = Scene(reader='satpy_cf_nc', + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename, _nc_filename_i]) - scn_.load(['solar_zenith_angle'], resolution=742) - assert scn_['solar_zenith_angle'].attrs['resolution'] == 742 + scn_.load(["solar_zenith_angle"], resolution=742) + assert scn_["solar_zenith_angle"].attrs["resolution"] == 742 scn_.unload() - scn_.load(['solar_zenith_angle'], resolution=371) - assert scn_['solar_zenith_angle'].attrs['resolution'] == 371 + scn_.load(["solar_zenith_angle"], resolution=371) + assert scn_["solar_zenith_angle"].attrs["resolution"] == 371 def test_dataid_attrs_equal_matching_dataset(self, _cf_scene, _nc_filename): """Check that get_dataset returns valid dataset when keys matches.""" from satpy.dataset.dataid import DataID, default_id_keys_config _create_test_netcdf(_nc_filename, resolution=742) - reader = SatpyCFFileHandler(_nc_filename, {}, {'filetype': 'info'}) - ds_id = DataID(default_id_keys_config, name='solar_zenith_angle', resolution=742, modifiers=()) + reader = SatpyCFFileHandler(_nc_filename, {}, {"filetype": "info"}) + ds_id = DataID(default_id_keys_config, name="solar_zenith_angle", resolution=742, modifiers=()) res = reader.get_dataset(ds_id, {}) - assert res.attrs['resolution'] == 742 + assert res.attrs["resolution"] == 742 def test_dataid_attrs_equal_not_matching_dataset(self, _cf_scene, _nc_filename): """Check that get_dataset returns None when key(s) are not matching.""" from satpy.dataset.dataid import DataID, default_id_keys_config _create_test_netcdf(_nc_filename, resolution=742) - reader = SatpyCFFileHandler(_nc_filename, {}, {'filetype': 'info'}) + reader = SatpyCFFileHandler(_nc_filename, {}, {"filetype": "info"}) not_existing_resolution = 9999999 - ds_id = DataID(default_id_keys_config, name='solar_zenith_angle', resolution=not_existing_resolution, + ds_id = DataID(default_id_keys_config, name="solar_zenith_angle", resolution=not_existing_resolution, modifiers=()) assert reader.get_dataset(ds_id, {}) is None @@ -433,8 +433,8 @@ def test_dataid_attrs_equal_contains_not_matching_key(self, _cf_scene, _nc_filen """Check that get_dataset returns valid dataset when dataid have key(s) not existing in data.""" from satpy.dataset.dataid import DataID, default_id_keys_config _create_test_netcdf(_nc_filename, resolution=742) - reader = SatpyCFFileHandler(_nc_filename, {}, {'filetype': 'info'}) - ds_id = 
DataID(default_id_keys_config, name='solar_zenith_angle', resolution=742, - modifiers=(), calibration='counts') + reader = SatpyCFFileHandler(_nc_filename, {}, {"filetype": "info"}) + ds_id = DataID(default_id_keys_config, name="solar_zenith_angle", resolution=742, + modifiers=(), calibration="counts") res = reader.get_dataset(ds_id, {}) - assert res.attrs['resolution'] == 742 + assert res.attrs["resolution"] == 742 diff --git a/satpy/tests/reader_tests/test_scmi.py b/satpy/tests/reader_tests/test_scmi.py index 6d42720c8d..45fcc9caee 100644 --- a/satpy/tests/reader_tests/test_scmi.py +++ b/satpy/tests/reader_tests/test_scmi.py @@ -56,7 +56,7 @@ def close(self): class TestSCMIFileHandler(unittest.TestCase): """Test the SCMIFileHandler reader.""" - @mock.patch('satpy.readers.scmi.xr') + @mock.patch("satpy.readers.scmi.xr") def setUp(self, xr_): """Set up for test.""" from satpy.readers.scmi import SCMIFileHandler @@ -68,37 +68,37 @@ def setUp(self, xr_): time = xr.DataArray(0.) rad = xr.DataArray( rad_data, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'scale_factor': 0.5, - 'add_offset': -1., - '_FillValue': 20, - 'standard_name': 'toa_bidirectional_reflectance', + "scale_factor": 0.5, + "add_offset": -1., + "_FillValue": 20, + "standard_name": "toa_bidirectional_reflectance", }, coords={ - 'time': time, + "time": time, } ) xr_.open_dataset.return_value = FakeDataset( { - 'Sectorized_CMI': rad, + "Sectorized_CMI": rad, "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02), }, { - 'start_date_time': "2017210120000", - 'satellite_id': 'GOES-16', - 'satellite_longitude': -90., - 'satellite_latitude': 0., - 'satellite_altitude': 35785831., + "start_date_time": "2017210120000", + "satellite_id": "GOES-16", + "satellite_longitude": -90., + "satellite_latitude": 0., + "satellite_altitude": 35785831., }, - {'y': 2, 'x': 5}, + {"y": 2, "x": 5}, ) - self.reader = SCMIFileHandler('filename', - {'platform_shortname': 'G16'}, - {'filetype': 'info'}) + self.reader = SCMIFileHandler("filename", + {"platform_shortname": "G16"}, + {"filetype": "info"}) def test_basic_attributes(self): """Test getting basic file attributes.""" @@ -109,74 +109,74 @@ def test_basic_attributes(self): datetime(2017, 7, 29, 12, 0, 0, 0)) self.assertEqual(self.reader.end_time, datetime(2017, 7, 29, 12, 0, 0, 0)) - self.assertEqual(self.reader.get_shape(make_dataid(name='C05'), {}), + self.assertEqual(self.reader.get_shape(make_dataid(name="C05"), {}), (2, 5)) def test_data_load(self): """Test data loading.""" from satpy.tests.utils import make_dataid res = self.reader.get_dataset( - make_dataid(name='C05', calibration='reflectance'), {}) + make_dataid(name="C05", calibration="reflectance"), {}) np.testing.assert_allclose(res.data, self.expected_rad, equal_nan=True) - self.assertNotIn('scale_factor', res.attrs) - self.assertNotIn('_FillValue', res.attrs) - self.assertEqual(res.attrs['standard_name'], - 'toa_bidirectional_reflectance') - assert 'orbital_parameters' in res.attrs - orb_params = res.attrs['orbital_parameters'] - assert orb_params['projection_longitude'] == -90.0 - assert orb_params['projection_latitude'] == 0.0 - assert orb_params['projection_altitude'] == 35785831.0 + self.assertNotIn("scale_factor", res.attrs) + self.assertNotIn("_FillValue", res.attrs) + self.assertEqual(res.attrs["standard_name"], + "toa_bidirectional_reflectance") + assert "orbital_parameters" in res.attrs + orb_params = res.attrs["orbital_parameters"] + 
assert orb_params["projection_longitude"] == -90.0 + assert orb_params["projection_latitude"] == 0.0 + assert orb_params["projection_altitude"] == 35785831.0 class TestSCMIFileHandlerArea(unittest.TestCase): """Test the SCMIFileHandler's area creation.""" - @mock.patch('satpy.readers.scmi.xr') + @mock.patch("satpy.readers.scmi.xr") def create_reader(self, proj_name, proj_attrs, xr_): """Create a fake reader.""" from satpy.readers.scmi import SCMIFileHandler proj = xr.DataArray([], attrs=proj_attrs) x__ = xr.DataArray( [0, 1], - attrs={'scale_factor': 2., 'add_offset': -1., 'units': 'meters'}, + attrs={"scale_factor": 2., "add_offset": -1., "units": "meters"}, ) y__ = xr.DataArray( [0, 1], - attrs={'scale_factor': -2., 'add_offset': 1., 'units': 'meters'}, + attrs={"scale_factor": -2., "add_offset": 1., "units": "meters"}, ) xr_.open_dataset.return_value = FakeDataset({ - 'goes_imager_projection': proj, - 'x': x__, - 'y': y__, - 'Sectorized_CMI': np.ones((2, 2))}, + "goes_imager_projection": proj, + "x": x__, + "y": y__, + "Sectorized_CMI": np.ones((2, 2))}, { - 'satellite_id': 'GOES-16', - 'grid_mapping': proj_name, + "satellite_id": "GOES-16", + "grid_mapping": proj_name, }, { - 'y': y__.size, - 'x': x__.size, + "y": y__.size, + "x": x__.size, } ) - return SCMIFileHandler('filename', - {'platform_shortname': 'G16'}, - {'filetype': 'info'}) + return SCMIFileHandler("filename", + {"platform_shortname": "G16"}, + {"filetype": "info"}) - @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') + @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_geos(self, adef): """Test the area generation for geos projection.""" reader = self.create_reader( - 'goes_imager_projection', + "goes_imager_projection", { - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'perspective_point_height': 1., - 'longitude_of_projection_origin': -90., - 'sweep_angle_axis': u'x', - 'grid_mapping_name': 'geostationary', + "semi_major_axis": 1., + "semi_minor_axis": 1., + "perspective_point_height": 1., + "longitude_of_projection_origin": -90., + "sweep_angle_axis": u"x", + "grid_mapping_name": "geostationary", } ) reader.get_area_def(None) @@ -184,24 +184,24 @@ def test_get_area_def_geos(self, adef): self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], { - 'a': 1.0, 'b': 1.0, 'h': 1.0, 'lon_0': -90.0, 'lat_0': 0.0, - 'proj': 'geos', 'sweep': 'x', 'units': 'm'}) + "a": 1.0, "b": 1.0, "h": 1.0, "lon_0": -90.0, "lat_0": 0.0, + "proj": "geos", "sweep": "x", "units": "m"}) self.assertEqual(call_args[4], reader.ncols) self.assertEqual(call_args[5], reader.nlines) np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) - @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') + @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_lcc(self, adef): """Test the area generation for lcc projection.""" reader = self.create_reader( - 'goes_imager_projection', + "goes_imager_projection", { - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'longitude_of_central_meridian': -90., - 'standard_parallel': 25., - 'latitude_of_projection_origin': 25., - 'grid_mapping_name': 'lambert_conformal_conic', + "semi_major_axis": 1., + "semi_minor_axis": 1., + "longitude_of_central_meridian": -90., + "standard_parallel": 25., + "latitude_of_projection_origin": 25., + "grid_mapping_name": "lambert_conformal_conic", } ) reader.get_area_def(None) @@ -209,24 +209,24 @@ def test_get_area_def_lcc(self, adef): 
self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], { - 'a': 1.0, 'b': 1.0, 'lon_0': -90.0, 'lat_0': 25.0, 'lat_1': 25.0, - 'proj': 'lcc', 'units': 'm'}) + "a": 1.0, "b": 1.0, "lon_0": -90.0, "lat_0": 25.0, "lat_1": 25.0, + "proj": "lcc", "units": "m"}) self.assertEqual(call_args[4], reader.ncols) self.assertEqual(call_args[5], reader.nlines) np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) - @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') + @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_stere(self, adef): """Test the area generation for stere projection.""" reader = self.create_reader( - 'goes_imager_projection', + "goes_imager_projection", { - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'straight_vertical_longitude_from_pole': -90., - 'standard_parallel': 60., - 'latitude_of_projection_origin': 90., - 'grid_mapping_name': 'polar_stereographic', + "semi_major_axis": 1., + "semi_minor_axis": 1., + "straight_vertical_longitude_from_pole": -90., + "standard_parallel": 60., + "latitude_of_projection_origin": 90., + "grid_mapping_name": "polar_stereographic", } ) reader.get_area_def(None) @@ -234,23 +234,23 @@ def test_get_area_def_stere(self, adef): self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], { - 'a': 1.0, 'b': 1.0, 'lon_0': -90.0, 'lat_0': 90.0, 'lat_ts': 60.0, - 'proj': 'stere', 'units': 'm'}) + "a": 1.0, "b": 1.0, "lon_0": -90.0, "lat_0": 90.0, "lat_ts": 60.0, + "proj": "stere", "units": "m"}) self.assertEqual(call_args[4], reader.ncols) self.assertEqual(call_args[5], reader.nlines) np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) - @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') + @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_merc(self, adef): """Test the area generation for merc projection.""" reader = self.create_reader( - 'goes_imager_projection', + "goes_imager_projection", { - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'longitude_of_projection_origin': -90., - 'standard_parallel': 0., - 'grid_mapping_name': 'mercator', + "semi_major_axis": 1., + "semi_minor_axis": 1., + "longitude_of_projection_origin": -90., + "standard_parallel": 0., + "grid_mapping_name": "mercator", } ) reader.get_area_def(None) @@ -258,23 +258,23 @@ def test_get_area_def_merc(self, adef): self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], { - 'a': 1.0, 'b': 1.0, 'lon_0': -90.0, 'lat_0': 0.0, 'lat_ts': 0.0, - 'proj': 'merc', 'units': 'm'}) + "a": 1.0, "b": 1.0, "lon_0": -90.0, "lat_0": 0.0, "lat_ts": 0.0, + "proj": "merc", "units": "m"}) self.assertEqual(call_args[4], reader.ncols) self.assertEqual(call_args[5], reader.nlines) np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) - @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') + @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_bad(self, adef): """Test the area generation for bad projection.""" reader = self.create_reader( - 'goes_imager_projection', + "goes_imager_projection", { - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'longitude_of_projection_origin': -90., - 'standard_parallel': 0., - 'grid_mapping_name': 'fake', + "semi_major_axis": 1., + "semi_minor_axis": 1., + "longitude_of_projection_origin": -90., + "standard_parallel": 0., + "grid_mapping_name": "fake", } ) 
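# --- Illustrative sketch, not part of this patch or the SCMI reader's actual code:
# why the area tests above all expect the extent (-2., -2., 2, 2.). The fake x/y
# coordinates are [0, 1] with scale_factor 2./-2. and add_offset -1./1., i.e.
# pixel centres at +/-1 m, and the extent spans half a pixel beyond the outermost
# centres. The helper name below is hypothetical.
import numpy as np

def extent_from_scaled_coords(raw, scale, offset):
    centers = np.asarray(raw) * scale + offset  # pixel-centre positions in metres
    half = abs(scale) / 2.0                     # half of one pixel spacing
    return float(centers.min() - half), float(centers.max() + half)

x_min, x_max = extent_from_scaled_coords([0, 1], 2., -1.)   # -> (-2.0, 2.0)
y_min, y_max = extent_from_scaled_coords([0, 1], -2., 1.)   # -> (-2.0, 2.0)
assert (x_min, y_min, x_max, y_max) == (-2.0, -2.0, 2.0, 2.0)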
self.assertRaises(ValueError, reader.get_area_def, None) diff --git a/satpy/tests/reader_tests/test_seadas_l2.py b/satpy/tests/reader_tests/test_seadas_l2.py index c56fe42b33..01de26e96b 100644 --- a/satpy/tests/reader_tests/test_seadas_l2.py +++ b/satpy/tests/reader_tests/test_seadas_l2.py @@ -112,13 +112,13 @@ def _create_seadas_chlor_a_hdf4_file(full_path, mission, sensor): def _add_variable_to_hdf4_file(h, var_name, var_info): - v = h.create(var_name, var_info['type'], var_info['data'].shape) - v[:] = var_info['data'] - for dim_count, dimension_name in enumerate(var_info['dim_labels']): + v = h.create(var_name, var_info["type"], var_info["data"].shape) + v[:] = var_info["data"] + for dim_count, dimension_name in enumerate(var_info["dim_labels"]): v.dim(dim_count).setname(dimension_name) - if var_info.get('fill_value'): - v.setfillvalue(var_info['fill_value']) - for attr_key, attr_val in var_info['attrs'].items(): + if var_info.get("fill_value"): + v.setfillvalue(var_info["fill_value"]) + for attr_key, attr_val in var_info["attrs"].items(): setattr(v, attr_key, attr_val) @@ -196,8 +196,8 @@ def _create_seadas_chlor_a_netcdf_file(full_path, mission, sensor): def _add_variable_to_netcdf_file(nc, var_name, var_info): v = nc.createVariable(var_name, var_info["data"].dtype.str[1:], dimensions=var_info["dim_labels"], fill_value=var_info.get("fill_value")) - v[:] = var_info['data'] - for attr_key, attr_val in var_info['attrs'].items(): + v[:] = var_info["data"] + for attr_key, attr_val in var_info["attrs"].items(): setattr(v, attr_key, attr_val) @@ -206,7 +206,7 @@ class TestSEADAS: def test_available_reader(self): """Test that SEADAS L2 reader is available.""" - assert 'seadas_l2' in available_readers() + assert "seadas_l2" in available_readers() @pytest.mark.parametrize( "input_files", @@ -217,10 +217,10 @@ def test_available_reader(self): ]) def test_scene_available_datasets(self, input_files): """Test that datasets are available.""" - scene = Scene(reader='seadas_l2', filenames=input_files) + scene = Scene(reader="seadas_l2", filenames=input_files) available_datasets = scene.all_dataset_names() assert len(available_datasets) > 0 - assert 'chlor_a' in available_datasets + assert "chlor_a" in available_datasets @pytest.mark.parametrize( ("input_files", "exp_plat", "exp_sensor", "exp_rps"), @@ -234,13 +234,13 @@ def test_scene_available_datasets(self, input_files): def test_load_chlor_a(self, input_files, exp_plat, exp_sensor, exp_rps, apply_quality_flags): """Test that we can load 'chlor_a'.""" reader_kwargs = {"apply_quality_flags": apply_quality_flags} - scene = Scene(reader='seadas_l2', filenames=input_files, reader_kwargs=reader_kwargs) - scene.load(['chlor_a']) - data_arr = scene['chlor_a'] + scene = Scene(reader="seadas_l2", filenames=input_files, reader_kwargs=reader_kwargs) + scene.load(["chlor_a"]) + data_arr = scene["chlor_a"] assert data_arr.dims == ("y", "x") - assert data_arr.attrs['platform_name'] == exp_plat - assert data_arr.attrs['sensor'] == exp_sensor - assert data_arr.attrs['units'] == 'mg m^-3' + assert data_arr.attrs["platform_name"] == exp_plat + assert data_arr.attrs["sensor"] == exp_sensor + assert data_arr.attrs["units"] == "mg m^-3" assert data_arr.dtype.type == np.float32 assert isinstance(data_arr.attrs["area"], SwathDefinition) assert data_arr.attrs["rows_per_scan"] == exp_rps diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index 157ed88bbf..b6540d8623 100644 --- 
a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -75,24 +75,24 @@ def test_get_cds_time(self): """Test the get_cds_time function.""" # Scalar self.assertEqual(get_cds_time(days=21246, msecs=12*3600*1000), - np.datetime64('2016-03-03 12:00')) + np.datetime64("2016-03-03 12:00")) # Array days = np.array([21246, 21247, 21248]) msecs = np.array([12*3600*1000, 13*3600*1000 + 1, 14*3600*1000 + 2]) - expected = np.array([np.datetime64('2016-03-03 12:00:00.000'), - np.datetime64('2016-03-04 13:00:00.001'), - np.datetime64('2016-03-05 14:00:00.002')]) + expected = np.array([np.datetime64("2016-03-03 12:00:00.000"), + np.datetime64("2016-03-04 13:00:00.001"), + np.datetime64("2016-03-05 14:00:00.002")]) np.testing.assert_equal(get_cds_time(days=days, msecs=msecs), expected) days = 21246 msecs = 12*3600*1000 - expected = np.datetime64('2016-03-03 12:00:00.000') + expected = np.datetime64("2016-03-03 12:00:00.000") np.testing.assert_equal(get_cds_time(days=days, msecs=msecs), expected) def test_pad_data_horizontally_bad_shape(self): """Test the error handling for the horizontal hrv padding.""" - data = xr.DataArray(data=np.zeros((1, 10)), dims=('y', 'x')) + data = xr.DataArray(data=np.zeros((1, 10)), dims=("y", "x")) east_bound = 5 west_bound = 10 final_size = (1, 20) @@ -101,7 +101,7 @@ def test_pad_data_horizontally_bad_shape(self): def test_pad_data_vertically_bad_shape(self): """Test the error handling for the vertical hrv padding.""" - data = xr.DataArray(data=np.zeros((10, 1)), dims=('y', 'x')) + data = xr.DataArray(data=np.zeros((10, 1)), dims=("y", "x")) south_bound = 5 north_bound = 10 final_size = (20, 1) @@ -134,7 +134,7 @@ def test_round_nom_time(self): @staticmethod def test_pad_data_horizontally(): """Test the horizontal hrv padding.""" - data = xr.DataArray(data=np.zeros((1, 10)), dims=('y', 'x')) + data = xr.DataArray(data=np.zeros((1, 10)), dims=("y", "x")) east_bound = 4 west_bound = 13 final_size = (1, 20) @@ -146,7 +146,7 @@ def test_pad_data_horizontally(): @staticmethod def test_pad_data_vertically(): """Test the vertical hrv padding.""" - data = xr.DataArray(data=np.zeros((10, 1)), dims=('y', 'x')) + data = xr.DataArray(data=np.zeros((10, 1)), dims=("y", "x")) south_bound = 4 north_bound = 13 final_size = (20, 1) @@ -176,30 +176,30 @@ def test_get_padding_area_int(): ORBIT_POLYNOMIALS = { - 'StartTime': np.array([ + "StartTime": np.array([ [ datetime(2006, 1, 1, 6), datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18), datetime(1958, 1, 1, 0)] ]), - 'EndTime': np.array([ + "EndTime": np.array([ [ datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18), datetime(2006, 1, 2, 0), datetime(1958, 1, 1, 0) ] ]), - 'X': [np.zeros(8), + "X": [np.zeros(8), [8.41607082e+04, 2.94319260e+00, 9.86748617e-01, -2.70135453e-01, -3.84364650e-02, 8.48718433e-03, 7.70548174e-04, -1.44262718e-04], np.zeros(8)], - 'Y': [np.zeros(8), + "Y": [np.zeros(8), [-5.21170255e+03, 5.12998948e+00, -1.33370453e+00, -3.09634144e-01, 6.18232793e-02, 7.50505681e-03, -1.35131011e-03, -1.12054405e-04], np.zeros(8)], - 'Z': [np.zeros(8), + "Z": [np.zeros(8), [-6.51293855e+02, 1.45830459e+02, 5.61379400e+01, -3.90970565e+00, -7.38137565e-01, 3.06131644e-02, 3.82892428e-03, @@ -211,7 +211,7 @@ def test_get_padding_area_int(): # 01-01: Small gap (12:00 - 13:00) # 01-02: Large gap (04:00 - 18:00) # 01-03: Overlap (10:00 - 13:00) - 'StartTime': np.array([ + "StartTime": np.array([ [ datetime(2005, 12, 31, 10), datetime(2005, 12, 31, 12), datetime(2006, 1, 1, 10), datetime(2006, 
1, 1, 13), @@ -219,7 +219,7 @@ def test_get_padding_area_int(): datetime(2006, 1, 3, 6), datetime(2006, 1, 3, 10), ] ]), - 'EndTime': np.array([ + "EndTime": np.array([ [ datetime(2005, 12, 31, 12), datetime(2005, 12, 31, 18), datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18), @@ -227,24 +227,24 @@ def test_get_padding_area_int(): datetime(2006, 1, 3, 13), datetime(2006, 1, 3, 18), ] ]), - 'X': [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0], - 'Y': [1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1, 8.1], - 'Z': [1.2, 2.2, 3.2, 4.2, 5.2, 6.2, 7.2, 8.2], + "X": [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0], + "Y": [1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1, 8.1], + "Z": [1.2, 2.2, 3.2, 4.2, 5.2, 6.2, 7.2, 8.2], } ORBIT_POLYNOMIALS_INVALID = { - 'StartTime': np.array([ + "StartTime": np.array([ [ datetime(1958, 1, 1), datetime(1958, 1, 1) ] ]), - 'EndTime': np.array([ + "EndTime": np.array([ [ datetime(1958, 1, 1), datetime(1958, 1, 1) ] ]), - 'X': [1, 2], - 'Y': [3, 4], - 'Z': [5, 6], + "X": [1, 2], + "Y": [3, 4], + "Z": [5, 6], } @@ -301,7 +301,7 @@ class TestOrbitPolynomialFinder: """Unit tests for orbit polynomial finder.""" @pytest.mark.parametrize( - ('orbit_polynomials', 'time', 'orbit_polynomial_exp'), + ("orbit_polynomials", "time", "orbit_polynomial_exp"), [ # Contiguous validity intervals (that's the norm) ( @@ -309,8 +309,8 @@ class TestOrbitPolynomialFinder: datetime(2005, 12, 31, 12, 15), OrbitPolynomial( coefs=(2.0, 2.1, 2.2), - start_time=np.datetime64('2005-12-31 12:00'), - end_time=np.datetime64('2005-12-31 18:00') + start_time=np.datetime64("2005-12-31 12:00"), + end_time=np.datetime64("2005-12-31 18:00") ) ), # No interval enclosing the given timestamp, but closest interval @@ -320,8 +320,8 @@ class TestOrbitPolynomialFinder: datetime(2006, 1, 1, 12, 15), OrbitPolynomial( coefs=(3.0, 3.1, 3.2), - start_time=np.datetime64('2006-01-01 10:00'), - end_time=np.datetime64('2006-01-01 12:00') + start_time=np.datetime64("2006-01-01 10:00"), + end_time=np.datetime64("2006-01-01 12:00") ) ), # Overlapping intervals @@ -330,8 +330,8 @@ class TestOrbitPolynomialFinder: datetime(2006, 1, 3, 12, 15), OrbitPolynomial( coefs=(8.0, 8.1, 8.2), - start_time=np.datetime64('2006-01-03 10:00'), - end_time=np.datetime64('2006-01-03 18:00') + start_time=np.datetime64("2006-01-03 10:00"), + end_time=np.datetime64("2006-01-03 18:00") ) ), ] @@ -344,7 +344,7 @@ def test_get_orbit_polynomial(self, orbit_polynomials, time, assert orbit_polynomial == orbit_polynomial_exp @pytest.mark.parametrize( - ('orbit_polynomials', 'time'), + ("orbit_polynomials", "time"), [ # No interval enclosing the given timestamp and closest interval # too far away diff --git a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py index fdcedea3f2..cc1107cc6c 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py @@ -53,7 +53,7 @@ CAL_TYPE1 = 1 CAL_TYPE2 = 2 CAL_TYPEBAD = -1 -CHANNEL_NAME = 'IR_108' +CHANNEL_NAME = "IR_108" PLATFORM_ID = 323 # Met-10 TBS_OUTPUT1 = xr.DataArray( @@ -152,28 +152,28 @@ def test_init(self): platform_id=None, channel_name=None, coefs=None, - calib_mode='invalid', + calib_mode="invalid", scan_time=None ) - def _get_calibration_handler(self, calib_mode='NOMINAL', ext_coefs=None): + def _get_calibration_handler(self, calib_mode="NOMINAL", ext_coefs=None): """Provide a calibration handler.""" return SEVIRICalibrationHandler( platform_id=324, - channel_name='IR_108', + channel_name="IR_108", coefs={ - 
'coefs': { - 'NOMINAL': { - 'gain': 10, - 'offset': -1 + "coefs": { + "NOMINAL": { + "gain": 10, + "offset": -1 }, - 'GSICS': { - 'gain': 20, - 'offset': -2 + "GSICS": { + "gain": 20, + "offset": -2 }, - 'EXTERNAL': ext_coefs or {} + "EXTERNAL": ext_coefs or {} }, - 'radiance_type': 1 + "radiance_type": 1 }, calib_mode=calib_mode, scan_time=None @@ -183,15 +183,15 @@ def test_calibrate_exceptions(self): """Test exceptions raised by the calibration handler.""" calib = self._get_calibration_handler() with pytest.raises(ValueError): - calib.calibrate(None, 'invalid') + calib.calibrate(None, "invalid") @pytest.mark.parametrize( - ('calib_mode', 'ext_coefs', 'expected'), + ("calib_mode", "ext_coefs", "expected"), [ - ('NOMINAL', {}, (10, -1)), - ('GSICS', {}, (20, -40)), - ('GSICS', {'gain': 30, 'offset': -3}, (30, -3)), - ('NOMINAL', {'gain': 30, 'offset': -3}, (30, -3)) + ("NOMINAL", {}, (10, -1)), + ("GSICS", {}, (20, -40)), + ("GSICS", {"gain": 30, "offset": -3}, (30, -3)), + ("NOMINAL", {"gain": 30, "offset": -3}, (30, -3)) ] ) def test_get_gain_offset(self, calib_mode, ext_coefs, expected): @@ -214,145 +214,145 @@ class TestFileHandlerCalibrationBase: radiance_types = 2 * np.ones(12) scan_time = datetime(2020, 1, 1) external_coefs = { - 'VIS006': {'gain': 10, 'offset': -10}, - 'IR_108': {'gain': 20, 'offset': -20}, - 'HRV': {'gain': 5, 'offset': -5} + "VIS006": {"gain": 10, "offset": -10}, + "IR_108": {"gain": 20, "offset": -20}, + "HRV": {"gain": 5, "offset": -5} } - spectral_channel_ids = {'VIS006': 1, 'IR_108': 9, 'HRV': 12} + spectral_channel_ids = {"VIS006": 1, "IR_108": 9, "HRV": 12} expected = { - 'VIS006': { - 'counts': { - 'NOMINAL': xr.DataArray( + "VIS006": { + "counts": { + "NOMINAL": xr.DataArray( [[0, 10], [100, 255]], - dims=('y', 'x') + dims=("y", "x") ) }, - 'radiance': { - 'NOMINAL': xr.DataArray( + "radiance": { + "NOMINAL": xr.DataArray( [[np.nan, 9], [99, 254]], - dims=('y', 'x') + dims=("y", "x") ), - 'GSICS': xr.DataArray( + "GSICS": xr.DataArray( [[np.nan, 9], [99, 254]], - dims=('y', 'x') + dims=("y", "x") ), - 'EXTERNAL': xr.DataArray( + "EXTERNAL": xr.DataArray( [[np.nan, 90], [990, 2540]], - dims=('y', 'x') + dims=("y", "x") ) }, - 'reflectance': { - 'NOMINAL': xr.DataArray( + "reflectance": { + "NOMINAL": xr.DataArray( [[np.nan, 41.88985], [460.7884, 1182.2247]], - dims=('y', 'x') + dims=("y", "x") ), - 'EXTERNAL': xr.DataArray( + "EXTERNAL": xr.DataArray( [[np.nan, 418.89853], [4607.8843, 11822.249]], - dims=('y', 'x') + dims=("y", "x") ) } }, - 'IR_108': { - 'counts': { - 'NOMINAL': xr.DataArray( + "IR_108": { + "counts": { + "NOMINAL": xr.DataArray( [[0, 10], [100, 255]], - dims=('y', 'x') + dims=("y", "x") ) }, - 'radiance': { - 'NOMINAL': xr.DataArray( + "radiance": { + "NOMINAL": xr.DataArray( [[np.nan, 81], [891, 2286]], - dims=('y', 'x') + dims=("y", "x") ), - 'GSICS': xr.DataArray( + "GSICS": xr.DataArray( [[np.nan, 8.19], [89.19, 228.69]], - dims=('y', 'x') + dims=("y", "x") ), - 'EXTERNAL': xr.DataArray( + "EXTERNAL": xr.DataArray( [[np.nan, 180], [1980, 5080]], - dims=('y', 'x') + dims=("y", "x") ) }, - 'brightness_temperature': { - 'NOMINAL': xr.DataArray( + "brightness_temperature": { + "NOMINAL": xr.DataArray( [[np.nan, 279.82318], [543.2585, 812.77167]], - dims=('y', 'x') + dims=("y", "x") ), - 'GSICS': xr.DataArray( + "GSICS": xr.DataArray( [[np.nan, 189.20985], [285.53293, 356.06668]], - dims=('y', 'x') + dims=("y", "x") ), - 'EXTERNAL': xr.DataArray( + "EXTERNAL": xr.DataArray( [[np.nan, 335.14236], [758.6249, 1262.7567]], - dims=('y', 'x') 
+ dims=("y", "x") ), } }, - 'HRV': { - 'counts': { - 'NOMINAL': xr.DataArray( + "HRV": { + "counts": { + "NOMINAL": xr.DataArray( [[0, 10], [100, 255]], - dims=('y', 'x') + dims=("y", "x") ) }, - 'radiance': { - 'NOMINAL': xr.DataArray( + "radiance": { + "NOMINAL": xr.DataArray( [[np.nan, 108], [1188, 3048]], - dims=('y', 'x') + dims=("y", "x") ), - 'GSICS': xr.DataArray( + "GSICS": xr.DataArray( [[np.nan, 108], [1188, 3048]], - dims=('y', 'x') + dims=("y", "x") ), - 'EXTERNAL': xr.DataArray( + "EXTERNAL": xr.DataArray( [[np.nan, 45], [495, 1270]], - dims=('y', 'x') + dims=("y", "x") ) }, - 'reflectance': { - 'NOMINAL': xr.DataArray( + "reflectance": { + "NOMINAL": xr.DataArray( [[np.nan, 415.26767], [4567.944, 11719.775]], - dims=('y', 'x') + dims=("y", "x") ), - 'EXTERNAL': xr.DataArray( + "EXTERNAL": xr.DataArray( [[np.nan, 173.02817], [1903.31, 4883.2397]], - dims=('y', 'x') + dims=("y", "x") ) } } } - @pytest.fixture(name='counts') + @pytest.fixture(name="counts") def counts(self): """Provide fake image counts.""" return xr.DataArray( [[0, 10], [100, 255]], - dims=('y', 'x') + dims=("y", "x") ) def _get_expected( self, channel, calibration, calib_mode, use_ext_coefs ): if use_ext_coefs: - return self.expected[channel][calibration]['EXTERNAL'] + return self.expected[channel][calibration]["EXTERNAL"] return self.expected[channel][calibration][calib_mode] diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py index 90785ffdbf..ae042999e3 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py @@ -54,89 +54,89 @@ def setUp(self): ncols=5568, ) self.reader.mda.update({ - 'segment_sequence_number': 18, - 'planned_start_segment_number': 1 + "segment_sequence_number": 18, + "planned_start_segment_number": 1 }) self.reader.fill_hrv = True - @mock.patch('satpy.readers.hrit_base.np.memmap') + @mock.patch("satpy.readers.hrit_base.np.memmap") def test_read_hrv_band(self, memmap): """Test reading the hrv band.""" - nbits = self.reader.mda['number_of_bits_per_pixel'] + nbits = self.reader.mda["number_of_bits_per_pixel"] memmap.return_value = np.random.randint(0, 256, size=int((464 * 5568 * nbits) / 8), dtype=np.uint8) - res = self.reader.read_band('HRV', None) + res = self.reader.read_band("HRV", None) self.assertEqual(res.shape, (464, 5568)) - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset(self, calibrate, parent_get_dataset): """Test getting the hrv dataset.""" - key = make_dataid(name='HRV', calibration='reflectance') + key = make_dataid(name="HRV", calibration="reflectance") info = setup.get_fake_dataset_info() parent_get_dataset.return_value = mock.MagicMock() - calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=('y', 'x')) + calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=("y", "x")) res = self.reader.get_dataset(key, info) self.assertEqual(res.shape, (464, 11136)) # Test method calls parent_get_dataset.assert_called_with(key, info) - calibrate.assert_called_with(parent_get_dataset(), key['calibration']) + calibrate.assert_called_with(parent_get_dataset(), key["calibration"]) self.assert_attrs_equal(res.attrs, setup.get_attrs_exp()) np.testing.assert_equal( - 
res['acq_time'], + res["acq_time"], setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset_non_fill(self, calibrate, parent_get_dataset): """Test getting a non-filled hrv dataset.""" - key = make_dataid(name='HRV', calibration='reflectance') - key.name = 'HRV' + key = make_dataid(name="HRV", calibration="reflectance") + key.name = "HRV" info = setup.get_fake_dataset_info() self.reader.fill_hrv = False parent_get_dataset.return_value = mock.MagicMock() - calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=('y', 'x')) + calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=("y", "x")) res = self.reader.get_dataset(key, info) self.assertEqual(res.shape, (464, 5568)) # Test method calls parent_get_dataset.assert_called_with(key, info) - calibrate.assert_called_with(parent_get_dataset(), key['calibration']) + calibrate.assert_called_with(parent_get_dataset(), key["calibration"]) self.assert_attrs_equal(res.attrs, setup.get_attrs_exp()) np.testing.assert_equal( - res['acq_time'], + res["acq_time"], setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) def test_get_area_def(self): """Test getting the area def.""" from pyresample.utils import proj4_radius_parameters - area = self.reader.get_area_def(make_dataid(name='HRV', resolution=1000)) + area = self.reader.get_area_def(make_dataid(name="HRV", resolution=1000)) self.assertEqual(area.area_extent, (-45561979844414.07, -3720765401003.719, 45602912357076.38, 77771774058.38356)) proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) self.assertEqual(a, 6378169.0) self.assertAlmostEqual(b, 6356583.8) - self.assertEqual(proj_dict['h'], 35785831.0) - self.assertEqual(proj_dict['lon_0'], 0.0) - self.assertEqual(proj_dict['proj'], 'geos') - self.assertEqual(proj_dict['units'], 'm') + self.assertEqual(proj_dict["h"], 35785831.0) + self.assertEqual(proj_dict["lon_0"], 0.0) + self.assertEqual(proj_dict["proj"], "geos") + self.assertEqual(proj_dict["units"], "m") self.reader.fill_hrv = False - area = self.reader.get_area_def(make_dataid(name='HRV', resolution=1000)) + area = self.reader.get_area_def(make_dataid(name="HRV", resolution=1000)) npt.assert_allclose(area.defs[0].area_extent, (-22017598561055.01, -2926674655354.9604, 23564847539690.22, 77771774058.38356)) npt.assert_allclose(area.defs[1].area_extent, (-30793529275853.656, -3720765401003.719, 14788916824891.568, -2926674655354.9604)) - self.assertEqual(area.defs[0].area_id, 'msg_seviri_fes_1km') - self.assertEqual(area.defs[1].area_id, 'msg_seviri_fes_1km') + self.assertEqual(area.defs[0].area_id, "msg_seviri_fes_1km") + self.assertEqual(area.defs[1].area_id, "msg_seviri_fes_1km") class TestHRITMSGFileHandler(TestHRITMSGBase): @@ -155,70 +155,70 @@ def setUp(self): projection_longitude=self.projection_longitude ) self.reader.mda.update({ - 'segment_sequence_number': 18, - 'planned_start_segment_number': 1 + "segment_sequence_number": 18, + "planned_start_segment_number": 1 }) def _get_fake_data(self): return xr.DataArray( data=np.zeros((self.nlines, self.ncols)), - dims=('y', 'x') + dims=("y", "x") ) def test_get_area_def(self): """Test getting the area def.""" from pyresample.utils import 
proj4_radius_parameters - area = self.reader.get_area_def(make_dataid(name='VIS006', resolution=3000)) + area = self.reader.get_area_def(make_dataid(name="VIS006", resolution=3000)) proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) self.assertEqual(a, 6378169.0) self.assertAlmostEqual(b, 6356583.8) - self.assertEqual(proj_dict['h'], 35785831.0) - self.assertEqual(proj_dict['lon_0'], self.projection_longitude) - self.assertEqual(proj_dict['proj'], 'geos') - self.assertEqual(proj_dict['units'], 'm') + self.assertEqual(proj_dict["h"], 35785831.0) + self.assertEqual(proj_dict["lon_0"], self.projection_longitude) + self.assertEqual(proj_dict["proj"], "geos") + self.assertEqual(proj_dict["units"], "m") self.assertEqual(area.area_extent, (-77771774058.38356, -3720765401003.719, 30310525626438.438, 77771774058.38356)) # Data shifted by 1.5km to N-W - self.reader.mda['offset_corrected'] = False - area = self.reader.get_area_def(make_dataid(name='VIS006', resolution=3000)) + self.reader.mda["offset_corrected"] = False + area = self.reader.get_area_def(make_dataid(name="VIS006", resolution=3000)) self.assertEqual(area.area_extent, (-77771772558.38356, -3720765402503.719, 30310525627938.438, 77771772558.38356)) - self.assertEqual(area.area_id, 'msg_seviri_rss_3km') + self.assertEqual(area.area_id, "msg_seviri_rss_3km") - @mock.patch('satpy.readers.hrit_base.np.memmap') + @mock.patch("satpy.readers.hrit_base.np.memmap") def test_read_band(self, memmap): """Test reading a band.""" - nbits = self.reader.mda['number_of_bits_per_pixel'] + nbits = self.reader.mda["number_of_bits_per_pixel"] memmap.return_value = np.random.randint(0, 256, size=int((464 * 3712 * nbits) / 8), dtype=np.uint8) - res = self.reader.read_band('VIS006', None) + res = self.reader.read_band("VIS006", None) self.assertEqual(res.shape, (464, 3712)) - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset(self, calibrate, parent_get_dataset): """Test getting the dataset.""" data = self._get_fake_data() parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = data - key = make_dataid(name='VIS006', calibration='reflectance') + key = make_dataid(name="VIS006", calibration="reflectance") info = setup.get_fake_dataset_info() res = self.reader.get_dataset(key, info) # Test method calls - new_data = np.zeros_like(data.data).astype('float32') + new_data = np.zeros_like(data.data).astype("float32") new_data[:, :] = np.nan expected = data.copy(data=new_data) - expected['acq_time'] = ( - 'y', + expected["acq_time"] = ( + "y", setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) xr.testing.assert_equal(res, expected) @@ -237,26 +237,26 @@ def test_get_dataset(self, calibrate, parent_get_dataset): # test repeat cycle duration self.assertEqual(15, self.reader._repeat_cycle_duration) # Change the reducescan scenario to test the repeat cycle duration handling - self.reader.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] = 1 + self.reader.epilogue["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] = 1 self.assertEqual(5, self.reader._repeat_cycle_duration) - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') 
+ @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset_without_masking_bad_scan_lines(self, calibrate, parent_get_dataset): """Test getting the dataset.""" data = self._get_fake_data() parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = data - key = make_dataid(name='VIS006', calibration='reflectance') + key = make_dataid(name="VIS006", calibration="reflectance") info = setup.get_fake_dataset_info() self.reader.mask_bad_quality_scan_lines = False res = self.reader.get_dataset(key, info) # Test method calls expected = data.copy() - expected['acq_time'] = ( - 'y', + expected["acq_time"] = ( + "y", setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) xr.testing.assert_equal(res, expected) @@ -265,27 +265,27 @@ def test_get_dataset_without_masking_bad_scan_lines(self, calibrate, parent_get_ setup.get_attrs_exp(self.projection_longitude) ) - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset_with_raw_metadata(self, calibrate, parent_get_dataset): """Test getting the dataset.""" calibrate.return_value = self._get_fake_data() - key = make_dataid(name='VIS006', calibration='reflectance') + key = make_dataid(name="VIS006", calibration="reflectance") info = setup.get_fake_dataset_info() self.reader.include_raw_metadata = True res = self.reader.get_dataset(key, info) - assert 'raw_metadata' in res.attrs + assert "raw_metadata" in res.attrs def test_get_raw_mda(self): """Test provision of raw metadata.""" - self.reader.mda = {'segment': 1, 'loff': 123} - self.reader.prologue_.reduce = lambda max_size: {'prologue': 1} - self.reader.epilogue_.reduce = lambda max_size: {'epilogue': 1} - expected = {'prologue': 1, 'epilogue': 1, 'segment': 1} + self.reader.mda = {"segment": 1, "loff": 123} + self.reader.prologue_.reduce = lambda max_size: {"prologue": 1} + self.reader.epilogue_.reduce = lambda max_size: {"epilogue": 1} + expected = {"prologue": 1, "epilogue": 1, "segment": 1} self.assertDictEqual(self.reader._get_raw_mda(), expected) # Make sure _get_raw_mda() doesn't modify the original dictionary - self.assertIn('loff', self.reader.mda) + self.assertIn("loff", self.reader.mda) def test_satpos_no_valid_orbit_polynomial(self): """Test satellite position if there is no valid orbit polynomial.""" @@ -297,8 +297,8 @@ def test_satpos_no_valid_orbit_polynomial(self): orbit_polynomials=ORBIT_POLYNOMIALS_INVALID ) self.assertNotIn( - 'satellite_actual_longitude', - reader.mda['orbital_parameters'] + "satellite_actual_longitude", + reader.mda["orbital_parameters"] ) @@ -314,8 +314,8 @@ def setUp(self, *mocks): ) self.reader = fh.prologue_ - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.read_prologue') - @mock.patch('satpy.readers.hrit_base.HRITFileHandler.__init__', autospec=True) + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.read_prologue") + @mock.patch("satpy.readers.hrit_base.HRITFileHandler.__init__", autospec=True) def test_extra_kwargs(self, init, *mocks): """Test whether the prologue file handler accepts extra keyword arguments.""" @@ -324,31 +324,31 @@ def init_patched(self, *args, **kwargs): init.side_effect = init_patched - 
HRITMSGPrologueFileHandler(filename='dummy_prologue_filename', - filename_info={'service': ''}, + HRITMSGPrologueFileHandler(filename="dummy_prologue_filename", + filename_info={"service": ""}, filetype_info=None, ext_calib_coefs={}, mda_max_array_size=123, - calib_mode='nominal') + calib_mode="nominal") - @mock.patch('satpy.readers.seviri_l1b_hrit.utils.reduce_mda') + @mock.patch("satpy.readers.seviri_l1b_hrit.utils.reduce_mda") def test_reduce(self, reduce_mda): """Test metadata reduction.""" - reduce_mda.return_value = 'reduced' + reduce_mda.return_value = "reduced" # Set buffer - self.assertEqual(self.reader.reduce(123), 'reduced') + self.assertEqual(self.reader.reduce(123), "reduced") # Read buffer - self.assertEqual(self.reader.reduce(123), 'reduced') + self.assertEqual(self.reader.reduce(123), "reduced") reduce_mda.assert_called_once() class TestHRITMSGEpilogueFileHandler(unittest.TestCase): """Test the HRIT epilogue file handler.""" - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue') - @mock.patch('satpy.readers.hrit_base.HRITFileHandler.__init__', autospec=True) + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue") + @mock.patch("satpy.readers.hrit_base.HRITFileHandler.__init__", autospec=True) def setUp(self, init, *mocks): """Set up the test case.""" @@ -357,13 +357,13 @@ def init_patched(self, *args, **kwargs): init.side_effect = init_patched - self.reader = HRITMSGEpilogueFileHandler(filename='dummy_epilogue_filename', - filename_info={'service': ''}, + self.reader = HRITMSGEpilogueFileHandler(filename="dummy_epilogue_filename", + filename_info={"service": ""}, filetype_info=None, - calib_mode='nominal') + calib_mode="nominal") - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue') - @mock.patch('satpy.readers.hrit_base.HRITFileHandler.__init__', autospec=True) + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue") + @mock.patch("satpy.readers.hrit_base.HRITFileHandler.__init__", autospec=True) def test_extra_kwargs(self, init, *mocks): """Test whether the epilogue file handler accepts extra keyword arguments.""" @@ -372,74 +372,74 @@ def init_patched(self, *args, **kwargs): init.side_effect = init_patched - HRITMSGEpilogueFileHandler(filename='dummy_epilogue_filename', - filename_info={'service': ''}, + HRITMSGEpilogueFileHandler(filename="dummy_epilogue_filename", + filename_info={"service": ""}, filetype_info=None, ext_calib_coefs={}, mda_max_array_size=123, - calib_mode='nominal') + calib_mode="nominal") - @mock.patch('satpy.readers.seviri_l1b_hrit.utils.reduce_mda') + @mock.patch("satpy.readers.seviri_l1b_hrit.utils.reduce_mda") def test_reduce(self, reduce_mda): """Test metadata reduction.""" - reduce_mda.return_value = 'reduced' + reduce_mda.return_value = "reduced" # Set buffer - self.assertEqual(self.reader.reduce(123), 'reduced') + self.assertEqual(self.reader.reduce(123), "reduced") reduce_mda.assert_called() # Read buffer reduce_mda.reset_mock() - self.reader._reduced = 'red' - self.assertEqual(self.reader.reduce(123), 'red') + self.reader._reduced = "red" + self.assertEqual(self.reader.reduce(123), "red") reduce_mda.assert_not_called() class TestHRITMSGCalibration(TestFileHandlerCalibrationBase): """Unit tests for calibration.""" - @pytest.fixture(name='file_handler') + @pytest.fixture(name="file_handler") def file_handler(self): """Create a mocked file handler.""" prolog = { - 'RadiometricProcessing': { - 
'Level15ImageCalibration': { - 'CalSlope': self.gains_nominal, - 'CalOffset': self.offsets_nominal, + "RadiometricProcessing": { + "Level15ImageCalibration": { + "CalSlope": self.gains_nominal, + "CalOffset": self.offsets_nominal, }, - 'MPEFCalFeedback': { - 'GSICSCalCoeff': self.gains_gsics, - 'GSICSOffsetCount': self.offsets_gsics, + "MPEFCalFeedback": { + "GSICSCalCoeff": self.gains_gsics, + "GSICSOffsetCount": self.offsets_gsics, } }, - 'ImageDescription': { - 'Level15ImageProduction': { - 'PlannedChanProcessing': self.radiance_types + "ImageDescription": { + "Level15ImageProduction": { + "PlannedChanProcessing": self.radiance_types } }, - 'ImageAcquisition': { - 'PlannedAcquisitionTime': { - 'TrueRepeatCycleStart': self.scan_time, + "ImageAcquisition": { + "PlannedAcquisitionTime": { + "TrueRepeatCycleStart": self.scan_time, } } } epilog = { - 'ImageProductionStats': { - 'ActualScanningSummary': { - 'ForwardScanStart': self.scan_time + "ImageProductionStats": { + "ActualScanningSummary": { + "ForwardScanStart": self.scan_time } } } mda = { - 'image_segment_line_quality': { - 'line_validity': np.array([3, 3]), - 'line_radiometric_quality': np.array([4, 4]), - 'line_geometric_quality': np.array([4, 4]) + "image_segment_line_quality": { + "line_validity": np.array([3, 3]), + "line_radiometric_quality": np.array([4, 4]), + "line_geometric_quality": np.array([4, 4]) }, } with mock.patch( - 'satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.__init__', + "satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.__init__", return_value=None ): fh = HRITMSGFileHandler() @@ -450,33 +450,33 @@ def file_handler(self): return fh @pytest.mark.parametrize( - ('channel', 'calibration', 'calib_mode', 'use_ext_coefs'), + ("channel", "calibration", "calib_mode", "use_ext_coefs"), [ # VIS channel, internal coefficients - ('VIS006', 'counts', 'NOMINAL', False), - ('VIS006', 'radiance', 'NOMINAL', False), - ('VIS006', 'radiance', 'GSICS', False), - ('VIS006', 'reflectance', 'NOMINAL', False), + ("VIS006", "counts", "NOMINAL", False), + ("VIS006", "radiance", "NOMINAL", False), + ("VIS006", "radiance", "GSICS", False), + ("VIS006", "reflectance", "NOMINAL", False), # VIS channel, external coefficients (mode should have no effect) - ('VIS006', 'radiance', 'GSICS', True), - ('VIS006', 'reflectance', 'NOMINAL', True), + ("VIS006", "radiance", "GSICS", True), + ("VIS006", "reflectance", "NOMINAL", True), # IR channel, internal coefficients - ('IR_108', 'counts', 'NOMINAL', False), - ('IR_108', 'radiance', 'NOMINAL', False), - ('IR_108', 'radiance', 'GSICS', False), - ('IR_108', 'brightness_temperature', 'NOMINAL', False), - ('IR_108', 'brightness_temperature', 'GSICS', False), + ("IR_108", "counts", "NOMINAL", False), + ("IR_108", "radiance", "NOMINAL", False), + ("IR_108", "radiance", "GSICS", False), + ("IR_108", "brightness_temperature", "NOMINAL", False), + ("IR_108", "brightness_temperature", "GSICS", False), # IR channel, external coefficients (mode should have no effect) - ('IR_108', 'radiance', 'NOMINAL', True), - ('IR_108', 'brightness_temperature', 'GSICS', True), + ("IR_108", "radiance", "NOMINAL", True), + ("IR_108", "brightness_temperature", "GSICS", True), # HRV channel, internal coefficiens - ('HRV', 'counts', 'NOMINAL', False), - ('HRV', 'radiance', 'NOMINAL', False), - ('HRV', 'radiance', 'GSICS', False), - ('HRV', 'reflectance', 'NOMINAL', False), + ("HRV", "counts", "NOMINAL", False), + ("HRV", "radiance", "NOMINAL", False), + ("HRV", "radiance", "GSICS", False), + ("HRV", "reflectance", "NOMINAL", 
False), # HRV channel, external coefficients (mode should have no effect) - ('HRV', 'radiance', 'GSICS', True), - ('HRV', 'reflectance', 'NOMINAL', True), + ("HRV", "radiance", "GSICS", True), + ("HRV", "reflectance", "NOMINAL", True), ] ) def test_calibrate( @@ -493,7 +493,7 @@ def test_calibrate( ) fh = file_handler - fh.mda['spectral_channel_id'] = self.spectral_channel_ids[channel] + fh.mda["spectral_channel_id"] = self.spectral_channel_ids[channel] fh.channel_name = channel fh.calib_mode = calib_mode fh.ext_calib_coefs = external_coefs @@ -502,18 +502,18 @@ def test_calibrate( def test_mask_bad_quality(self, file_handler): """Test the masking of bad quality scan lines.""" - channel = 'VIS006' + channel = "VIS006" expected = self._get_expected( channel=channel, - calibration='radiance', - calib_mode='NOMINAL', + calibration="radiance", + calib_mode="NOMINAL", use_ext_coefs=False ) fh = file_handler res = fh._mask_bad_quality(expected) - new_data = np.zeros_like(expected.data).astype('float32') + new_data = np.zeros_like(expected.data).astype("float32") new_data[:, :] = np.nan expected = expected.copy(data=new_data) xr.testing.assert_equal(res, expected) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py index 841d45b943..b9ff1f95ea 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py @@ -28,15 +28,15 @@ def new_get_hd(instance, hdr_info): """Generate some metadata.""" - instance.mda = {'spectral_channel_id': 1} - instance.mda.setdefault('number_of_bits_per_pixel', 10) + instance.mda = {"spectral_channel_id": 1} + instance.mda.setdefault("number_of_bits_per_pixel", 10) - instance.mda['projection_parameters'] = {'a': 6378169.00, - 'b': 6356583.80, - 'h': 35785831.00, - 'SSP_longitude': 0.0} - instance.mda['orbital_parameters'] = {} - instance.mda['total_header_length'] = 12 + instance.mda["projection_parameters"] = {"a": 6378169.00, + "b": 6356583.80, + "h": 35785831.00, + "SSP_longitude": 0.0} + instance.mda["orbital_parameters"] = {} + instance.mda["total_header_length"] = 12 def get_new_read_prologue(prologue): @@ -55,18 +55,18 @@ def get_fake_file_handler(observation_start_time, nlines, ncols, projection_long epilogue = get_fake_epilogue() m = mock.mock_open() - with mock.patch('satpy.readers.seviri_l1b_hrit.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.hrit_base.open', m, create=True) as newopen, \ - mock.patch('satpy.readers.utils.open', m, create=True) as utilopen, \ - mock.patch('satpy.readers.seviri_l1b_hrit.CHANNEL_NAMES'), \ - mock.patch.object(HRITMSGFileHandler, '_get_hd', new=new_get_hd), \ - mock.patch.object(HRITMSGPrologueFileHandler, 'read_prologue', + with mock.patch("satpy.readers.seviri_l1b_hrit.np.fromfile") as fromfile, \ + mock.patch("satpy.readers.hrit_base.open", m, create=True) as newopen, \ + mock.patch("satpy.readers.utils.open", m, create=True) as utilopen, \ + mock.patch("satpy.readers.seviri_l1b_hrit.CHANNEL_NAMES"), \ + mock.patch.object(HRITMSGFileHandler, "_get_hd", new=new_get_hd), \ + mock.patch.object(HRITMSGPrologueFileHandler, "read_prologue", new=get_new_read_prologue(prologue)): fromfile.return_value = np.array( [(1, 2)], - dtype=[('total_header_length', int), - ('hdr_id', int)] + dtype=[("total_header_length", int), + ("hdr_id", int)] ) newopen.return_value.__enter__.return_value.tell.return_value = 1 # The size of the return value hereafter was chosen arbitrarily with the expectation @@ 
-74,16 +74,16 @@ def get_fake_file_handler(observation_start_time, nlines, ncols, projection_long # files. utilopen.return_value.__enter__.return_value.read.return_value = bytes([0]*8192) prologue = HRITMSGPrologueFileHandler( - filename='dummy_prologue_filename', + filename="dummy_prologue_filename", filename_info=filename_info, filetype_info={} ) epilogue = mock.MagicMock(epilogue=epilogue) reader = HRITMSGFileHandler( - 'filename', + "filename", filename_info, - {'filetype': 'info'}, + {"filetype": "info"}, prologue, epilogue ) @@ -99,30 +99,30 @@ def get_fake_prologue(projection_longitude, orbit_polynomials): "SatelliteId": 324, "NominalLongitude": -3.5 }, - 'Orbit': { - 'OrbitPolynomial': orbit_polynomials, + "Orbit": { + "OrbitPolynomial": orbit_polynomials, } }, - 'GeometricProcessing': { - 'EarthModel': { - 'TypeOfEarthModel': 2, - 'EquatorialRadius': 6378.169, - 'NorthPolarRadius': 6356.5838, - 'SouthPolarRadius': 6356.5838 + "GeometricProcessing": { + "EarthModel": { + "TypeOfEarthModel": 2, + "EquatorialRadius": 6378.169, + "NorthPolarRadius": 6356.5838, + "SouthPolarRadius": 6356.5838 } }, - 'ImageDescription': { - 'ProjectionDescription': { - 'LongitudeOfSSP': projection_longitude + "ImageDescription": { + "ProjectionDescription": { + "LongitudeOfSSP": projection_longitude }, - 'Level15ImageProduction': { - 'ImageProcDirection': 1 + "Level15ImageProduction": { + "ImageProcDirection": 1 } }, - 'ImageAcquisition': { - 'PlannedAcquisitionTime': { - 'TrueRepeatCycleStart': datetime(2006, 1, 1, 12, 15, 9, 304888), - 'PlannedRepeatCycleEnd': datetime(2006, 1, 1, 12, 30, 0, 0) + "ImageAcquisition": { + "PlannedAcquisitionTime": { + "TrueRepeatCycleStart": datetime(2006, 1, 1, 12, 15, 9, 304888), + "PlannedRepeatCycleEnd": datetime(2006, 1, 1, 12, 30, 0, 0) } } } @@ -131,21 +131,21 @@ def get_fake_prologue(projection_longitude, orbit_polynomials): def get_fake_epilogue(): """Create a fake HRIT epilogue.""" return { - 'ImageProductionStats': { - 'ActualL15CoverageHRV': { - 'LowerSouthLineActual': 1, - 'LowerNorthLineActual': 8256, - 'LowerEastColumnActual': 2877, - 'LowerWestColumnActual': 8444, - 'UpperSouthLineActual': 8257, - 'UpperNorthLineActual': 11136, - 'UpperEastColumnActual': 1805, - 'UpperWestColumnActual': 7372 + "ImageProductionStats": { + "ActualL15CoverageHRV": { + "LowerSouthLineActual": 1, + "LowerNorthLineActual": 8256, + "LowerEastColumnActual": 2877, + "LowerWestColumnActual": 8444, + "UpperSouthLineActual": 8257, + "UpperNorthLineActual": 11136, + "UpperEastColumnActual": 1805, + "UpperWestColumnActual": 7372 }, - 'ActualScanningSummary': { - 'ReducedScan': 0, - 'ForwardScanStart': datetime(2006, 1, 1, 12, 15, 9, 304888), - 'ForwardScanEnd': datetime(2006, 1, 1, 12, 27, 39, 0) + "ActualScanningSummary": { + "ReducedScan": 0, + "ForwardScanStart": datetime(2006, 1, 1, 12, 15, 9, 304888), + "ForwardScanEnd": datetime(2006, 1, 1, 12, 27, 39, 0) } } } @@ -156,19 +156,19 @@ def get_fake_mda(nlines, ncols, start_time): nbits = 10 tline = get_acq_time_cds(start_time, nlines) return { - 'number_of_bits_per_pixel': nbits, - 'number_of_lines': nlines, - 'number_of_columns': ncols, - 'data_field_length': nlines * ncols * nbits, - 'cfac': 5, - 'lfac': 5, - 'coff': 10, - 'loff': 10, - 'image_segment_line_quality': { - 'line_mean_acquisition': tline, - 'line_validity': np.full(nlines, 3), - 'line_radiometric_quality': np.full(nlines, 4), - 'line_geometric_quality': np.full(nlines, 4) + "number_of_bits_per_pixel": nbits, + "number_of_lines": nlines, + "number_of_columns": ncols, 
+ "data_field_length": nlines * ncols * nbits, + "cfac": 5, + "lfac": 5, + "coff": 10, + "loff": 10, + "image_segment_line_quality": { + "line_mean_acquisition": tline, + "line_validity": np.full(nlines, 3), + "line_radiometric_quality": np.full(nlines, 4), + "line_geometric_quality": np.full(nlines, 4) } } @@ -176,18 +176,18 @@ def get_fake_mda(nlines, ncols, start_time): def get_fake_filename_info(start_time): """Create fake filename information.""" return { - 'platform_shortname': 'MSG3', - 'start_time': start_time, - 'service': 'MSG' + "platform_shortname": "MSG3", + "start_time": start_time, + "service": "MSG" } def get_fake_dataset_info(): """Create fake dataset info.""" return { - 'units': 'units', - 'wavelength': 'wavelength', - 'standard_name': 'standard_name' + "units": "units", + "wavelength": "wavelength", + "standard_name": "standard_name" } @@ -196,47 +196,47 @@ def get_acq_time_cds(start_time, nlines): days_since_1958 = (start_time - datetime(1958, 1, 1)).days tline = np.zeros( nlines, - dtype=[('days', '>u2'), ('milliseconds', '>u4')] + dtype=[("days", ">u2"), ("milliseconds", ">u4")] ) - tline['days'][1:-1] = days_since_1958 * np.ones(nlines - 2) + tline["days"][1:-1] = days_since_1958 * np.ones(nlines - 2) offset_second = (start_time - start_time.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds()*1000 - tline['milliseconds'][1:-1] = np.arange(nlines - 2)+offset_second + tline["milliseconds"][1:-1] = np.arange(nlines - 2)+offset_second return tline def get_acq_time_exp(start_time, nlines): """Get expected scanline acquisition times.""" - tline_exp = np.zeros(464, dtype='datetime64[ms]') - tline_exp[0] = np.datetime64('NaT') - tline_exp[-1] = np.datetime64('NaT') + tline_exp = np.zeros(464, dtype="datetime64[ms]") + tline_exp[0] = np.datetime64("NaT") + tline_exp[-1] = np.datetime64("NaT") tline_exp[1:-1] = np.datetime64(start_time) - tline_exp[1:-1] += np.arange(nlines - 2).astype('timedelta64[ms]') + tline_exp[1:-1] += np.arange(nlines - 2).astype("timedelta64[ms]") return tline_exp def get_attrs_exp(projection_longitude=0.0): """Get expected dataset attributes.""" return { - 'units': 'units', - 'wavelength': 'wavelength', - 'standard_name': 'standard_name', - 'platform_name': 'Meteosat-11', - 'sensor': 'seviri', - 'orbital_parameters': {'projection_longitude': projection_longitude, - 'projection_latitude': 0., - 'projection_altitude': 35785831.0, - 'satellite_nominal_longitude': -3.5, - 'satellite_nominal_latitude': 0.0, - 'satellite_actual_longitude': -3.55117540817073, - 'satellite_actual_latitude': -0.5711243456528018, - 'satellite_actual_altitude': 35783296.150123544}, - 'georef_offset_corrected': True, - 'nominal_start_time': (datetime(2006, 1, 1, 12, 15),), - 'nominal_end_time': (datetime(2006, 1, 1, 12, 30),), - 'time_parameters': { - 'nominal_start_time': datetime(2006, 1, 1, 12, 15), - 'nominal_end_time': datetime(2006, 1, 1, 12, 30), - 'observation_start_time': datetime(2006, 1, 1, 12, 15, 9, 304888), - 'observation_end_time': datetime(2006, 1, 1, 12, 27, 39, 0) + "units": "units", + "wavelength": "wavelength", + "standard_name": "standard_name", + "platform_name": "Meteosat-11", + "sensor": "seviri", + "orbital_parameters": {"projection_longitude": projection_longitude, + "projection_latitude": 0., + "projection_altitude": 35785831.0, + "satellite_nominal_longitude": -3.5, + "satellite_nominal_latitude": 0.0, + "satellite_actual_longitude": -3.55117540817073, + "satellite_actual_latitude": -0.5711243456528018, + "satellite_actual_altitude": 
35783296.150123544}, + "georef_offset_corrected": True, + "nominal_start_time": (datetime(2006, 1, 1, 12, 15),), + "nominal_end_time": (datetime(2006, 1, 1, 12, 30),), + "time_parameters": { + "nominal_start_time": datetime(2006, 1, 1, 12, 15), + "nominal_end_time": datetime(2006, 1, 1, 12, 30), + "observation_start_time": datetime(2006, 1, 1, 12, 15, 9, 304888), + "observation_end_time": datetime(2006, 1, 1, 12, 27, 39, 0) } } diff --git a/satpy/tests/reader_tests/test_seviri_l1b_icare.py b/satpy/tests/reader_tests/test_seviri_l1b_icare.py index 5ca8ac1a2e..81d385bc89 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_icare.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_icare.py @@ -38,56 +38,56 @@ class FakeHDF4FileHandler2(FakeHDF4FileHandler): def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} - file_content['/attr/Nadir_Pixel_Size'] = 3000. - file_content['/attr/Beginning_Acquisition_Date'] = "2004-12-29T12:15:00Z" - file_content['/attr/End_Acquisition_Date'] = "2004-12-29T12:27:44Z" - file_content['/attr/Geolocation'] = ('1.3642337E7', '1856.0', '1.3642337E7', '1856.0') - file_content['/attr/Altitude'] = '42164.0' - file_content['/attr/Geographic_Projection'] = 'geos' - file_content['/attr/Projection_Longitude'] = '0.0' - file_content['/attr/Sub_Satellite_Longitude'] = '3.4' - file_content['/attr/Sensors'] = 'MSG1/SEVIRI' - file_content['/attr/Zone'] = 'G' - file_content['/attr/_FillValue'] = 1 - file_content['/attr/scale_factor'] = 1. - file_content['/attr/add_offset'] = 0. + file_content["/attr/Nadir_Pixel_Size"] = 3000. + file_content["/attr/Beginning_Acquisition_Date"] = "2004-12-29T12:15:00Z" + file_content["/attr/End_Acquisition_Date"] = "2004-12-29T12:27:44Z" + file_content["/attr/Geolocation"] = ("1.3642337E7", "1856.0", "1.3642337E7", "1856.0") + file_content["/attr/Altitude"] = "42164.0" + file_content["/attr/Geographic_Projection"] = "geos" + file_content["/attr/Projection_Longitude"] = "0.0" + file_content["/attr/Sub_Satellite_Longitude"] = "3.4" + file_content["/attr/Sensors"] = "MSG1/SEVIRI" + file_content["/attr/Zone"] = "G" + file_content["/attr/_FillValue"] = 1 + file_content["/attr/scale_factor"] = 1. + file_content["/attr/add_offset"] = 0. # test one IR and one VIS channel - file_content['Normalized_Radiance'] = DEFAULT_FILE_DATA - file_content['Normalized_Radiance/attr/_FillValue'] = 1 - file_content['Normalized_Radiance/attr/scale_factor'] = 1. - file_content['Normalized_Radiance/attr/add_offset'] = 0. - file_content['Normalized_Radiance/shape'] = DEFAULT_FILE_SHAPE - - file_content['Brightness_Temperature'] = DEFAULT_FILE_DATA - file_content['Brightness_Temperature/attr/_FillValue'] = 1 - file_content['Brightness_Temperature/attr/scale_factor'] = 1. - file_content['Brightness_Temperature/attr/add_offset'] = 0. - file_content['Brightness_Temperature/shape'] = DEFAULT_FILE_SHAPE + file_content["Normalized_Radiance"] = DEFAULT_FILE_DATA + file_content["Normalized_Radiance/attr/_FillValue"] = 1 + file_content["Normalized_Radiance/attr/scale_factor"] = 1. + file_content["Normalized_Radiance/attr/add_offset"] = 0. + file_content["Normalized_Radiance/shape"] = DEFAULT_FILE_SHAPE + + file_content["Brightness_Temperature"] = DEFAULT_FILE_DATA + file_content["Brightness_Temperature/attr/_FillValue"] = 1 + file_content["Brightness_Temperature/attr/scale_factor"] = 1. + file_content["Brightness_Temperature/attr/add_offset"] = 0. 
+ file_content["Brightness_Temperature/shape"] = DEFAULT_FILE_SHAPE # convert to xarrays from xarray import DataArray for key, val in file_content.items(): if isinstance(val, np.ndarray): attrs = {} - for a in ['_FillValue', 'scale_factor', 'add_offset']: - if key + '/attr/' + a in file_content: - attrs[a] = file_content[key + '/attr/' + a] - file_content[key] = DataArray(da.from_array(val), dims=('x', 'y'), attrs=attrs) + for a in ["_FillValue", "scale_factor", "add_offset"]: + if key + "/attr/" + a in file_content: + attrs[a] = file_content[key + "/attr/" + a] + file_content[key] = DataArray(da.from_array(val), dims=("x", "y"), attrs=attrs) return file_content class TestSEVIRIICAREReader(unittest.TestCase): """Test SEVIRI L1b HDF4 from ICARE Reader.""" - yaml_file = 'seviri_l1b_icare.yaml' + yaml_file = "seviri_l1b_icare.yaml" def setUp(self): """Wrap HDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.seviri_l1b_icare import SEVIRI_ICARE - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - self.p = mock.patch.object(SEVIRI_ICARE, '__bases__', (FakeHDF4FileHandler2,)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + self.p = mock.patch.object(SEVIRI_ICARE, "__bases__", (FakeHDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -97,25 +97,25 @@ def tearDown(self): def compare_areas(self, v): """Compare produced AreaDefinition with expected.""" - test_area = {'area_id': 'geosmsg', - 'width': 10, - 'height': 300, - 'area_extent': (-5567248.2834071, + test_area = {"area_id": "geosmsg", + "width": 10, + "height": 300, + "area_extent": (-5567248.2834071, -5570248.6866857, -5537244.2506213, -4670127.7031114)} - self.assertEqual(v.attrs['area'].area_id, test_area['area_id']) - self.assertEqual(v.attrs['area'].width, test_area['width']) - self.assertEqual(v.attrs['area'].height, test_area['height']) - np.testing.assert_almost_equal(v.attrs['area'].area_extent, - test_area['area_extent']) + self.assertEqual(v.attrs["area"].area_id, test_area["area_id"]) + self.assertEqual(v.attrs["area"].width, test_area["width"]) + self.assertEqual(v.attrs["area"].height, test_area["height"]) + np.testing.assert_almost_equal(v.attrs["area"].area_extent, + test_area["area_extent"]) def test_init(self): """Test basic init with no extra parameters.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf', - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf' + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf", + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf" ]) self.assertEqual(len(loadables), 2) r.create_filehandlers(loadables) @@ -126,49 +126,49 @@ def test_load_dataset_vis(self): from datetime import datetime r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf' + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf" ]) r.create_filehandlers(loadables) - datasets = r.load(['VIS008']) + datasets = r.load(["VIS008"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): dt = datetime(2004, 12, 29, 12, 27, 44) - self.assertEqual(v.attrs['end_time'], dt) - self.assertEqual(v.attrs['calibration'], 'reflectance') + self.assertEqual(v.attrs["end_time"], dt) + self.assertEqual(v.attrs["calibration"], "reflectance") def test_load_dataset_ir(self): """Test loading all datasets from a 
full swath file.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf' + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf" ]) r.create_filehandlers(loadables) - datasets = r.load(['IR_108']) + datasets = r.load(["IR_108"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['calibration'], 'brightness_temperature') + self.assertEqual(v.attrs["calibration"], "brightness_temperature") def test_area_def_lores(self): """Test loading all datasets from an area of interest file.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf' + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf" ]) r.create_filehandlers(loadables) - ds = r.load(['VIS008']) - self.compare_areas(ds['VIS008']) - self.assertEqual(ds['VIS008'].attrs['area'].proj_id, 'msg_lowres') + ds = r.load(["VIS008"]) + self.compare_areas(ds["VIS008"]) + self.assertEqual(ds["VIS008"].attrs["area"].proj_id, "msg_lowres") def test_area_def_hires(self): """Test loading all datasets from an area of interest file.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_HRV_V1-04.hdf', + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_HRV_V1-04.hdf", ]) r.create_filehandlers(loadables) - ds = r.load(['HRV']) - self.compare_areas(ds['HRV']) - self.assertEqual(ds['HRV'].attrs['area'].proj_id, 'msg_hires') + ds = r.load(["HRV"]) + self.compare_areas(ds["HRV"]) + self.assertEqual(ds["HRV"].attrs["area"].proj_id, "msg_hires") def test_sensor_names(self): """Check satellite name conversion is correct, including error case.""" @@ -176,12 +176,12 @@ def test_sensor_names(self): mock.MagicMock(), mock.MagicMock(), mock.MagicMock()) - sensor_list = {'Meteosat-08': 'MSG1/SEVIRI', - 'Meteosat-09': 'MSG2/SEVIRI', - 'Meteosat-10': 'MSG3/SEVIRI', - 'Meteosat-11': 'MSG4/SEVIRI'} - with mock.patch('satpy.tests.reader_tests.test_seviri_l1b_icare.' - 'FakeHDF4FileHandler2.get_test_content') as patched_func: + sensor_list = {"Meteosat-08": "MSG1/SEVIRI", + "Meteosat-09": "MSG2/SEVIRI", + "Meteosat-10": "MSG3/SEVIRI", + "Meteosat-11": "MSG4/SEVIRI"} + with mock.patch("satpy.tests.reader_tests.test_seviri_l1b_icare." 
+ "FakeHDF4FileHandler2.get_test_content") as patched_func: def _run_target(): patched_func.return_value = file_data return self.p.target(mock.MagicMock(), @@ -189,12 +189,12 @@ def _run_target(): mock.MagicMock()).sensor_name for sat in sensor_list: - file_data['/attr/Sensors'] = sensor_list[sat] + file_data["/attr/Sensors"] = sensor_list[sat] plat, sens = _run_target() self.assertEqual(plat, sat) with self.assertRaises(NameError): - file_data['/attr/Sensors'] = 'BADSAT/NOSENSE' + file_data["/attr/Sensors"] = "BADSAT/NOSENSE" plat, sens = _run_target() def test_bad_bandname(self): @@ -202,7 +202,7 @@ def test_bad_bandname(self): with self.assertRaises(NameError): self.p.target(mock.MagicMock(), mock.MagicMock(), - mock.MagicMock())._get_dsname({'name': 'badband'}) + mock.MagicMock())._get_dsname({"name": "badband"}) def test_nocompute(self): """Test that dask does not compute anything in the reader itself.""" @@ -212,7 +212,7 @@ def test_nocompute(self): with dask.config.set(scheduler=CustomScheduler(max_computes=0)): r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf' + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf" ]) r.create_filehandlers(loadables) - r.load(['VIS008']) + r.load(["VIS008"]) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index e344d09ff9..0130740246 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -43,476 +43,476 @@ from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase from satpy.tests.utils import assert_attrs_equal, make_dataid -CHANNEL_INDEX_LIST = ['VIS006', 'VIS008', 'IR_016', 'IR_039', - 'WV_062', 'WV_073', 'IR_087', 'IR_097', - 'IR_108', 'IR_120', 'IR_134', 'HRV'] +CHANNEL_INDEX_LIST = ["VIS006", "VIS008", "IR_016", "IR_039", + "WV_062", "WV_073", "IR_087", "IR_097", + "IR_108", "IR_120", "IR_134", "HRV"] AVAILABLE_CHANNELS = {} for item in CHANNEL_INDEX_LIST: AVAILABLE_CHANNELS[item] = True -SEC15HDR = '15_SECONDARY_PRODUCT_HEADER' -IDS = 'SelectedBandIDs' +SEC15HDR = "15_SECONDARY_PRODUCT_HEADER" +IDS = "SelectedBandIDs" TEST1_HEADER_CHNLIST: dict[str, dict[str, dict]] = {SEC15HDR: {IDS: {}}} -TEST1_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XX--XX--XX--' +TEST1_HEADER_CHNLIST[SEC15HDR][IDS]["Value"] = "XX--XX--XX--" TEST2_HEADER_CHNLIST: dict[str, dict[str, dict]] = {SEC15HDR: {IDS: {}}} -TEST2_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XX-XXXX----X' +TEST2_HEADER_CHNLIST[SEC15HDR][IDS]["Value"] = "XX-XXXX----X" TEST3_HEADER_CHNLIST: dict[str, dict[str, dict]] = {SEC15HDR: {IDS: {}}} -TEST3_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XXXXXXXXXXXX' +TEST3_HEADER_CHNLIST[SEC15HDR][IDS]["Value"] = "XXXXXXXXXXXX" TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 3712, - 'Area extent': (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) + "earth_model": 1, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": True, + "is_rapid_scan": 0, + 
"fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 3712, + "Area extent": (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 1392, - 'Area extent': (5568748.275756836, 5568748.275756836, -5568748.275756836, 1392187.068939209) + "earth_model": 1, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_rss_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "9.5", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 1392, + "Area extent": (5568748.275756836, 5568748.275756836, -5568748.275756836, 1392187.068939209) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN_FILL = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 3712, - 'Area extent': (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) + "earth_model": 1, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_rss_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "9.5", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 3712, + "Area extent": (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 2516, - 'Number of rows': 1829, - 'Area extent': (5337717.232, 5154692.6389, -2211297.1332, -333044.7514) + "earth_model": 1, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", 
"x_0": "0", "y_0": "0"}, + "Number of columns": 2516, + "Number of rows": 1829, + "Area extent": (5337717.232, 5154692.6389, -2211297.1332, -333044.7514) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI_FILL = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 3712, - 'Area extent': (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) + "earth_model": 1, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_fes_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 3712, + "Area extent": (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 5568, - 'Number of rows': 11136, - 'Area extent 0': (5567747.920155525, 2625352.665781975, -1000.1343488693237, -5567747.920155525), - 'Area extent 1': (3602483.924627304, 5569748.188853264, -1966264.1298770905, 2625352.665781975) + "earth_model": 1, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": True, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 5568, + "Number of rows": 11136, + "Area extent 0": (5567747.920155525, 2625352.665781975, -1000.1343488693237, -5567747.920155525), + "Area extent 1": (3602483.924627304, 5569748.188853264, -1966264.1298770905, 2625352.665781975) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK_FILL = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 11136, - 'Number of rows': 11136, - 'Area extent': (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) + "earth_model": 1, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": True, + "is_rapid_scan": 0, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + 
"Number of columns": 11136, + "Number of rows": 11136, + "Area extent": (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 5568, - 'Number of rows': 8192, - 'Area extent': (5567747.920155525, 2625352.665781975, -1000.1343488693237, -5567747.920155525) + "earth_model": 1, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_rss_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "9.5", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 5568, + "Number of rows": 8192, + "Area extent": (5567747.920155525, 2625352.665781975, -1000.1343488693237, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN_FILL = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 11136, - 'Number of rows': 11136, - 'Area extent': (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) + "earth_model": 1, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_rss_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "9.5", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 11136, + "Number of rows": 11136, + "Area extent": (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 7548, - 'Number of rows': 5487, - 'Area extent': (5336716.885566711, 5155692.568421364, -2212297.179698944, -332044.6038246155) + "earth_model": 1, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 7548, + "Number of rows": 5487, + "Area extent": (5336716.885566711, 5155692.568421364, -2212297.179698944, -332044.6038246155) } } 
TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI_FILL = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 11136, - 'Number of rows': 11136, - 'Area extent': (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) + "earth_model": 1, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 11136, + "Number of rows": 11136, + "Area extent": (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_FULLDISK = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 3712, - 'Area extent': (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) + "earth_model": 2, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": True, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 3712, + "Area extent": (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 5568, - 'Number of rows': 11136, - 'Area extent 0': (5566247.718632221, 2626852.867305279, -2500.3358721733093, -5566247.718632221), - 'Area extent 1': (3600983.723104, 5571248.390376568, -1967764.3314003944, 2626852.867305279) + "earth_model": 2, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": True, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 5568, + "Number of rows": 11136, + "Area extent 0": (5566247.718632221, 2626852.867305279, -2500.3358721733093, -5566247.718632221), + "Area extent 1": (3600983.723104, 5571248.390376568, -1967764.3314003944, 2626852.867305279) } } 
TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK_FILL = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 11136, - 'Number of rows': 11136, - 'Area extent': (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) + "earth_model": 2, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": True, + "is_rapid_scan": 0, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 11136, + "Number of rows": 11136, + "Area extent": (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 1392, - 'Area extent': (5567248.074173927, 5570248.477339745, -5570248.477339745, 1393687.2705221176) + "earth_model": 2, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_rss_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "9.5", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 1392, + "Area extent": (5567248.074173927, 5570248.477339745, -5570248.477339745, 1393687.2705221176) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN_FILL = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 3712, - 'Area extent': (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) + "earth_model": 2, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_rss_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "9.5", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 3712, + "Area extent": (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': 
False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 5568, - 'Number of rows': 8192, - 'Area extent': (5566247.718632221, 2626852.867305279, -2500.3358721733093, -5566247.718632221) + "earth_model": 2, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_rss_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 5568, + "Number of rows": 8192, + "Area extent": (5566247.718632221, 2626852.867305279, -2500.3358721733093, -5566247.718632221) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN_FILL = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 11136, - 'Number of rows': 11136, - 'Area extent': (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) + "earth_model": 2, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_rss_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 11136, + "Number of rows": 11136, + "Area extent": (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 2516, - 'Number of rows': 1829, - 'Area extent': (5336217.0304, 5156192.8405, -2212797.3348, -331544.5498) + "earth_model": 2, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 2516, + "Number of rows": 1829, + "Area extent": (5336217.0304, 5156192.8405, -2212797.3348, -331544.5498) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI_FILL = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 
'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 3712, - 'Area extent': (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) + "earth_model": 2, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_fes_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 3712, + "Area extent": (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 7548, - 'Number of rows': 5487, - 'Area extent': (5335216.684043407, 5157192.769944668, -2213797.381222248, -330544.4023013115) + "earth_model": 2, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 7548, + "Number of rows": 5487, + "Area extent": (5335216.684043407, 5157192.769944668, -2213797.381222248, -330544.4023013115) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI_FILL = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 11136, - 'Number of rows': 11136, - 'Area extent': (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) + "earth_model": 2, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 11136, + "Number of rows": 11136, + "Area extent": (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) } } TEST_IS_ROI_FULLDISK = { - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'is_roi': False + "is_full_disk": True, + "is_rapid_scan": 0, + "is_roi": False } TEST_IS_ROI_RAPIDSCAN = { - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'is_roi': False + "is_full_disk": False, + "is_rapid_scan": 1, + "is_roi": False } TEST_IS_ROI_ROI = { - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'is_roi': True + "is_full_disk": False, + "is_rapid_scan": 0, + "is_roi": True } TEST_CALIBRATION_MODE = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='IR_108', calibration='radiance'), - 'is_full_disk': True, - 
'is_rapid_scan': 0, - 'calibration': 'radiance', - 'CalSlope': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], - 'CalOffset': [-1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0], - 'GSICSCalCoeff': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], - 'GSICSOffsetCount': [-51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0] + "earth_model": 1, + "dataset_id": make_dataid(name="IR_108", calibration="radiance"), + "is_full_disk": True, + "is_rapid_scan": 0, + "calibration": "radiance", + "CalSlope": [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], + "CalOffset": [-1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0], + "GSICSCalCoeff": [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], + "GSICSOffsetCount": [-51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0] } TEST_PADDER_RSS_ROI = { - 'img_bounds': {'south': [2], 'north': [4], 'east': [2], 'west': [3]}, - 'is_full_disk': False, - 'dataset_id': make_dataid(name='VIS006'), - 'dataset': xr.DataArray(np.ones((3, 2)), dims=['y', 'x']).astype(np.float32), - 'final_shape': (5, 5), - 'expected_padded_data': xr.DataArray(np.array([[np.nan, np.nan, np.nan, np.nan, np.nan], + "img_bounds": {"south": [2], "north": [4], "east": [2], "west": [3]}, + "is_full_disk": False, + "dataset_id": make_dataid(name="VIS006"), + "dataset": xr.DataArray(np.ones((3, 2)), dims=["y", "x"]).astype(np.float32), + "final_shape": (5, 5), + "expected_padded_data": xr.DataArray(np.array([[np.nan, np.nan, np.nan, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, np.nan, np.nan, np.nan, np.nan]]), - dims=['y', 'x']).astype(np.float32) + dims=["y", "x"]).astype(np.float32) } TEST_PADDER_FES_HRV = { - 'img_bounds': {'south': [1, 4], 'north': [3, 5], 'east': [2, 3], 'west': [3, 4]}, - 'is_full_disk': True, - 'dataset_id': make_dataid(name='HRV'), - 'dataset': xr.DataArray(np.ones((5, 2)), dims=['y', 'x']).astype(np.float32), - 'final_shape': (5, 5), - 'expected_padded_data': xr.DataArray(np.array([[np.nan, 1.0, 1.0, np.nan, np.nan], + "img_bounds": {"south": [1, 4], "north": [3, 5], "east": [2, 3], "west": [3, 4]}, + "is_full_disk": True, + "dataset_id": make_dataid(name="HRV"), + "dataset": xr.DataArray(np.ones((5, 2)), dims=["y", "x"]).astype(np.float32), + "final_shape": (5, 5), + "expected_padded_data": xr.DataArray(np.array([[np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, np.nan, 1.0, 1.0, np.nan], [np.nan, np.nan, 1.0, 1.0, np.nan]]), - dims=['y', 'x']).astype(np.float32) + dims=["y", "x"]).astype(np.float32) } -def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan, good_qual='OK'): +def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan, good_qual="OK"): """Create test header for SEVIRI L1.5 product. 
Header includes mandatory attributes for NativeMSGFileHandler.get_area_extent """ - if dataset_id['name'] == 'HRV': - reference_grid = 'ReferenceGridHRV' + if dataset_id["name"] == "HRV": + reference_grid = "ReferenceGridHRV" column_dir_grid_step = 1.0001343488693237 line_dir_grid_step = 1.0001343488693237 else: - reference_grid = 'ReferenceGridVIS_IR' + reference_grid = "ReferenceGridVIS_IR" column_dir_grid_step = 3.0004031658172607 line_dir_grid_step = 3.0004031658172607 @@ -547,45 +547,45 @@ def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan, goo n_hrv_lines = n_visir_lines * 3 ssp_lon = 0 header = { - '15_MAIN_PRODUCT_HEADER': { - 'QQOV': {'Name': 'QQOV', - 'Value': good_qual} + "15_MAIN_PRODUCT_HEADER": { + "QQOV": {"Name": "QQOV", + "Value": good_qual} }, - '15_DATA_HEADER': { - 'ImageDescription': { + "15_DATA_HEADER": { + "ImageDescription": { reference_grid: { - 'ColumnDirGridStep': column_dir_grid_step, - 'LineDirGridStep': line_dir_grid_step, - 'GridOrigin': 2, # south-east corner + "ColumnDirGridStep": column_dir_grid_step, + "LineDirGridStep": line_dir_grid_step, + "GridOrigin": 2, # south-east corner }, - 'ProjectionDescription': { - 'LongitudeOfSSP': ssp_lon + "ProjectionDescription": { + "LongitudeOfSSP": ssp_lon } }, - 'GeometricProcessing': { - 'EarthModel': { - 'TypeOfEarthModel': earth_model, - 'EquatorialRadius': 6378169.0, - 'NorthPolarRadius': 6356583.800000001, - 'SouthPolarRadius': 6356583.800000001, + "GeometricProcessing": { + "EarthModel": { + "TypeOfEarthModel": earth_model, + "EquatorialRadius": 6378169.0, + "NorthPolarRadius": 6356583.800000001, + "SouthPolarRadius": 6356583.800000001, } }, - 'SatelliteStatus': { - 'SatelliteDefinition': { - 'SatelliteId': 324 + "SatelliteStatus": { + "SatelliteDefinition": { + "SatelliteId": 324 } } }, - '15_SECONDARY_PRODUCT_HEADER': { - 'NorthLineSelectedRectangle': {'Value': north}, - 'EastColumnSelectedRectangle': {'Value': east}, - 'WestColumnSelectedRectangle': {'Value': west}, - 'SouthLineSelectedRectangle': {'Value': south}, - 'SelectedBandIDs': {'Value': 'xxxxxxxxxxxx'}, - 'NumberColumnsVISIR': {'Value': n_visir_cols}, - 'NumberLinesVISIR': {'Value': n_visir_lines}, - 'NumberColumnsHRV': {'Value': n_hrv_cols}, - 'NumberLinesHRV': {'Value': n_hrv_lines}, + "15_SECONDARY_PRODUCT_HEADER": { + "NorthLineSelectedRectangle": {"Value": north}, + "EastColumnSelectedRectangle": {"Value": east}, + "WestColumnSelectedRectangle": {"Value": west}, + "SouthLineSelectedRectangle": {"Value": south}, + "SelectedBandIDs": {"Value": "xxxxxxxxxxxx"}, + "NumberColumnsVISIR": {"Value": n_visir_cols}, + "NumberLinesVISIR": {"Value": n_visir_lines}, + "NumberColumnsHRV": {"Value": n_hrv_cols}, + "NumberLinesHRV": {"Value": n_hrv_lines}, } } @@ -599,20 +599,20 @@ def create_test_trailer(is_rapid_scan): Trailer includes mandatory attributes for NativeMSGFileHandler.get_area_extent """ trailer = { - '15TRAILER': { - 'ImageProductionStats': { - 'ActualL15CoverageHRV': { - 'UpperNorthLineActual': 11136, - 'UpperWestColumnActual': 7533, - 'UpperSouthLineActual': 8193, - 'UpperEastColumnActual': 1966, - 'LowerNorthLineActual': 8192, - 'LowerWestColumnActual': 5568, - 'LowerSouthLineActual': 1, - 'LowerEastColumnActual': 1 + "15TRAILER": { + "ImageProductionStats": { + "ActualL15CoverageHRV": { + "UpperNorthLineActual": 11136, + "UpperWestColumnActual": 7533, + "UpperSouthLineActual": 8193, + "UpperEastColumnActual": 1966, + "LowerNorthLineActual": 8192, + "LowerWestColumnActual": 5568, + "LowerSouthLineActual": 1, + 
"LowerEastColumnActual": 1 }, - 'ActualScanningSummary': { - 'ReducedScan': is_rapid_scan + "ActualScanningSummary": { + "ReducedScan": is_rapid_scan } } } @@ -623,21 +623,21 @@ def create_test_trailer(is_rapid_scan): def prepare_area_definitions(test_dict): """Prepare calculated and expected area definitions for equal checking.""" - earth_model = test_dict['earth_model'] - dataset_id = test_dict['dataset_id'] - is_full_disk = test_dict['is_full_disk'] - is_rapid_scan = test_dict['is_rapid_scan'] - fill_disk = test_dict['fill_disk'] + earth_model = test_dict["earth_model"] + dataset_id = test_dict["dataset_id"] + is_full_disk = test_dict["is_full_disk"] + is_rapid_scan = test_dict["is_rapid_scan"] + fill_disk = test_dict["fill_disk"] header = create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan) trailer = create_test_trailer(is_rapid_scan) - expected_area_def = test_dict['expected_area_def'] + expected_area_def = test_dict["expected_area_def"] - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ + with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ + mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap") as _get_memmap, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch( - 'satpy.readers.seviri_l1b_native.has_archive_header' + "satpy.readers.seviri_l1b_native.has_archive_header" ) as has_archive_header: has_archive_header.return_value = True fromfile.return_value = header @@ -681,10 +681,10 @@ def prepare_area_definitions(test_dict): def test_area_definitions(actual, expected): """Test area definitions with only one area.""" np.testing.assert_allclose(np.array(actual.area_extent), - np.array(expected['Area extent'])) - assert actual.width == expected['Number of columns'] - assert actual.height == expected['Number of rows'] - assert actual.area_id == expected['Area ID'] + np.array(expected["Area extent"])) + assert actual.width == expected["Number of columns"] + assert actual.height == expected["Number of rows"] + assert actual.area_id == expected["Area ID"] @pytest.mark.parametrize( @@ -697,31 +697,31 @@ def test_area_definitions(actual, expected): def test_stacked_area_definitions(actual, expected): """Test area definitions with stacked areas.""" np.testing.assert_allclose(np.array(actual.defs[0].area_extent), - np.array(expected['Area extent 0'])) + np.array(expected["Area extent 0"])) np.testing.assert_allclose(np.array(actual.defs[1].area_extent), - np.array(expected['Area extent 1'])) - assert actual.width == expected['Number of columns'] - assert actual.height == expected['Number of rows'] - assert actual.defs[0].area_id, expected['Area ID'] - assert actual.defs[1].area_id, expected['Area ID'] + np.array(expected["Area extent 1"])) + assert actual.width == expected["Number of columns"] + assert actual.height == expected["Number of rows"] + assert actual.defs[0].area_id, expected["Area ID"] + assert actual.defs[1].area_id, expected["Area ID"] def prepare_is_roi(test_dict): """Prepare calculated and expected check for region of interest data for equal checking.""" earth_model = 2 - dataset_id = 
make_dataid(name='VIS006') - is_full_disk = test_dict['is_full_disk'] - is_rapid_scan = test_dict['is_rapid_scan'] + dataset_id = make_dataid(name="VIS006") + is_full_disk = test_dict["is_full_disk"] + is_rapid_scan = test_dict["is_rapid_scan"] header = create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan) trailer = create_test_trailer(is_rapid_scan) - expected = test_dict['is_roi'] + expected = test_dict["is_roi"] - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ + with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ + mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap") as _get_memmap, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch( - 'satpy.readers.seviri_l1b_native.has_archive_header' + "satpy.readers.seviri_l1b_native.has_archive_header" ) as has_archive_header: has_archive_header.return_value = True fromfile.return_value = header @@ -754,7 +754,7 @@ class TestNativeMSGFileHandler(unittest.TestCase): def test_get_available_channels(self): """Test the derivation of the available channel list.""" available_chs = get_available_channels(TEST1_HEADER_CHNLIST) - trues = ('WV_062', 'WV_073', 'IR_108', 'VIS006', 'VIS008', 'IR_120') + trues = ("WV_062", "WV_073", "IR_108", "VIS006", "VIS008", "IR_120") for bandname in AVAILABLE_CHANNELS: if bandname in trues: self.assertTrue(available_chs[bandname]) @@ -762,7 +762,7 @@ def test_get_available_channels(self): self.assertFalse(available_chs[bandname]) available_chs = get_available_channels(TEST2_HEADER_CHNLIST) - trues = ('VIS006', 'VIS008', 'IR_039', 'WV_062', 'WV_073', 'IR_087', 'HRV') + trues = ("VIS006", "VIS008", "IR_039", "WV_062", "WV_073", "IR_087", "HRV") for bandname in AVAILABLE_CHANNELS: if bandname in trues: self.assertTrue(available_chs[bandname]) @@ -775,20 +775,20 @@ def test_get_available_channels(self): TEST_HEADER_CALIB = { - 'RadiometricProcessing': { - 'Level15ImageCalibration': { - 'CalSlope': TestFileHandlerCalibrationBase.gains_nominal, - 'CalOffset': TestFileHandlerCalibrationBase.offsets_nominal, + "RadiometricProcessing": { + "Level15ImageCalibration": { + "CalSlope": TestFileHandlerCalibrationBase.gains_nominal, + "CalOffset": TestFileHandlerCalibrationBase.offsets_nominal, }, - 'MPEFCalFeedback': { - 'GSICSCalCoeff': TestFileHandlerCalibrationBase.gains_gsics, - 'GSICSOffsetCount': TestFileHandlerCalibrationBase.offsets_gsics + "MPEFCalFeedback": { + "GSICSCalCoeff": TestFileHandlerCalibrationBase.gains_gsics, + "GSICSOffsetCount": TestFileHandlerCalibrationBase.offsets_gsics } }, - 'ImageDescription': { - 'Level15ImageProduction': { - 'PlannedChanProcessing': TestFileHandlerCalibrationBase.radiance_types + "ImageDescription": { + "Level15ImageProduction": { + "PlannedChanProcessing": TestFileHandlerCalibrationBase.radiance_types } }, } @@ -797,29 +797,29 @@ def test_get_available_channels(self): class TestNativeMSGCalibration(TestFileHandlerCalibrationBase): """Unit tests for calibration.""" - @pytest.fixture(name='file_handler') + @pytest.fixture(name="file_handler") def file_handler(self): """Create a mocked file 
handler.""" header = { - '15_DATA_HEADER': { - 'ImageAcquisition': { - 'PlannedAcquisitionTime': { - 'TrueRepeatCycleStart': self.scan_time + "15_DATA_HEADER": { + "ImageAcquisition": { + "PlannedAcquisitionTime": { + "TrueRepeatCycleStart": self.scan_time } } } } trailer = { - '15TRAILER': { - 'ImageProductionStats': { - 'ActualScanningSummary': { - 'ForwardScanStart': self.scan_time + "15TRAILER": { + "ImageProductionStats": { + "ActualScanningSummary": { + "ForwardScanStart": self.scan_time } } } } - header['15_DATA_HEADER'].update(TEST_HEADER_CALIB) - with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__', + header["15_DATA_HEADER"].update(TEST_HEADER_CALIB) + with mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__", return_value=None): fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fh.header = header @@ -828,33 +828,33 @@ def file_handler(self): return fh @pytest.mark.parametrize( - ('channel', 'calibration', 'calib_mode', 'use_ext_coefs'), + ("channel", "calibration", "calib_mode", "use_ext_coefs"), ( # VIS channel, internal coefficients - ('VIS006', 'counts', 'NOMINAL', False), - ('VIS006', 'radiance', 'NOMINAL', False), - ('VIS006', 'radiance', 'GSICS', False), - ('VIS006', 'reflectance', 'NOMINAL', False), + ("VIS006", "counts", "NOMINAL", False), + ("VIS006", "radiance", "NOMINAL", False), + ("VIS006", "radiance", "GSICS", False), + ("VIS006", "reflectance", "NOMINAL", False), # VIS channel, external coefficients (mode should have no effect) - ('VIS006', 'radiance', 'GSICS', True), - ('VIS006', 'reflectance', 'NOMINAL', True), + ("VIS006", "radiance", "GSICS", True), + ("VIS006", "reflectance", "NOMINAL", True), # IR channel, internal coefficients - ('IR_108', 'counts', 'NOMINAL', False), - ('IR_108', 'radiance', 'NOMINAL', False), - ('IR_108', 'radiance', 'GSICS', False), - ('IR_108', 'brightness_temperature', 'NOMINAL', False), - ('IR_108', 'brightness_temperature', 'GSICS', False), + ("IR_108", "counts", "NOMINAL", False), + ("IR_108", "radiance", "NOMINAL", False), + ("IR_108", "radiance", "GSICS", False), + ("IR_108", "brightness_temperature", "NOMINAL", False), + ("IR_108", "brightness_temperature", "GSICS", False), # IR channel, external coefficients (mode should have no effect) - ('IR_108', 'radiance', 'NOMINAL', True), - ('IR_108', 'brightness_temperature', 'GSICS', True), + ("IR_108", "radiance", "NOMINAL", True), + ("IR_108", "brightness_temperature", "GSICS", True), # HRV channel, internal coefficiens - ('HRV', 'counts', 'NOMINAL', False), - ('HRV', 'radiance', 'NOMINAL', False), - ('HRV', 'radiance', 'GSICS', False), - ('HRV', 'reflectance', 'NOMINAL', False), + ("HRV", "counts", "NOMINAL", False), + ("HRV", "radiance", "NOMINAL", False), + ("HRV", "radiance", "GSICS", False), + ("HRV", "reflectance", "NOMINAL", False), # HRV channel, external coefficients (mode should have no effect) - ('HRV', 'radiance', 'GSICS', True), - ('HRV', 'reflectance', 'NOMINAL', True), + ("HRV", "radiance", "GSICS", True), + ("HRV", "reflectance", "NOMINAL", True), ) ) def test_calibrate( @@ -886,33 +886,33 @@ class TestNativeMSGDataset: def file_handler(self): """Create a file handler for testing.""" trailer = { - '15TRAILER': { - 'ImageProductionStats': { - 'ActualScanningSummary': { - 'ForwardScanStart': datetime(2006, 1, 1, 12, 15, 9, 304888), - 'ForwardScanEnd': datetime(2006, 1, 1, 12, 27, 9, 304888), - 'ReducedScan': 0 + "15TRAILER": { + "ImageProductionStats": { + "ActualScanningSummary": { + 
"ForwardScanStart": datetime(2006, 1, 1, 12, 15, 9, 304888), + "ForwardScanEnd": datetime(2006, 1, 1, 12, 27, 9, 304888), + "ReducedScan": 0 } } } } mda = { - 'channel_list': ['VIS006', 'IR_108'], - 'number_of_lines': 4, - 'number_of_columns': 4, - 'is_full_disk': True, - 'platform_name': 'MSG-3', - 'offset_corrected': True, - 'projection_parameters': { - 'ssp_longitude': 0.0, - 'h': 35785831.0, - 'a': 6378169.0, - 'b': 6356583.8 + "channel_list": ["VIS006", "IR_108"], + "number_of_lines": 4, + "number_of_columns": 4, + "is_full_disk": True, + "platform_name": "MSG-3", + "offset_corrected": True, + "projection_parameters": { + "ssp_longitude": 0.0, + "h": 35785831.0, + "a": 6378169.0, + "b": 6356583.8 } } header = self._fake_header() data = self._fake_data() - with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__', + with mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__", return_value=None): fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fh.header = header @@ -921,7 +921,7 @@ def file_handler(self): fh.dask_array = da.from_array(data) fh.platform_id = 324 fh.fill_disk = False - fh.calib_mode = 'NOMINAL' + fh.calib_mode = "NOMINAL" fh.ext_calib_coefs = {} fh.include_raw_metadata = False fh.mda_max_array_size = 100 @@ -930,32 +930,32 @@ def file_handler(self): @staticmethod def _fake_header(): header = { - '15_DATA_HEADER': { - 'SatelliteStatus': { - 'SatelliteDefinition': { - 'NominalLongitude': 0.0 + "15_DATA_HEADER": { + "SatelliteStatus": { + "SatelliteDefinition": { + "NominalLongitude": 0.0 }, - 'Orbit': { - 'OrbitPolynomial': ORBIT_POLYNOMIALS + "Orbit": { + "OrbitPolynomial": ORBIT_POLYNOMIALS } }, - 'ImageAcquisition': { - 'PlannedAcquisitionTime': { - 'TrueRepeatCycleStart': datetime(2006, 1, 1, 12, 15, 0, 0), - 'PlannedRepeatCycleEnd': datetime(2006, 1, 1, 12, 30, 0, 0), + "ImageAcquisition": { + "PlannedAcquisitionTime": { + "TrueRepeatCycleStart": datetime(2006, 1, 1, 12, 15, 0, 0), + "PlannedRepeatCycleEnd": datetime(2006, 1, 1, 12, 30, 0, 0), } } }, } - header['15_DATA_HEADER'].update(TEST_HEADER_CALIB) + header["15_DATA_HEADER"].update(TEST_HEADER_CALIB) return header @staticmethod def _fake_data(): num_visir_cols = 5 # will be divided by 1.25 -> 4 columns visir_rec = [ - ('line_data', np.uint8, (num_visir_cols,)), - ('acq_time', time_cds_short) + ("line_data", np.uint8, (num_visir_cols,)), + ("acq_time", time_cds_short) ] vis006_line1 = ( [1, 2, 3, 4, 5], # line_data @@ -973,26 +973,26 @@ def _fake_data(): [(vis006_line2,), (ir108_line2,)], [(vis006_line3,), (ir108_line3,)], [(vis006_line4,), (ir108_line4,)]], - dtype=[('visir', visir_rec)] + dtype=[("visir", visir_rec)] ) return data def test_get_dataset(self, file_handler): """Test getting the dataset.""" dataset_id = make_dataid( - name='VIS006', + name="VIS006", resolution=3000, - calibration='counts' + calibration="counts" ) dataset_info = { - 'units': '1', - 'wavelength': (1, 2, 3), - 'standard_name': 'counts' + "units": "1", + "wavelength": (1, 2, 3), + "standard_name": "counts" } xarr = file_handler.get_dataset(dataset_id, dataset_info) expected = self._exp_data_array() xr.testing.assert_equal(xarr, expected) - assert 'raw_metadata' not in xarr.attrs + assert "raw_metadata" not in xarr.attrs assert file_handler.start_time == datetime(2006, 1, 1, 12, 15, 0) assert file_handler.end_time == datetime(2006, 1, 1, 12, 30, 0) assert_attrs_equal(xarr.attrs, expected.attrs, tolerance=1e-4) @@ -1011,7 +1011,7 @@ def 
test_repeat_cycle_duration(self, file_handler): """Test repeat cycle handling for FD or ReduscedScan.""" assert 15 == file_handler._repeat_cycle_duration # Change the reducescan scenario to test the repeat cycle duration handling - file_handler.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] = 1 + file_handler.trailer["15TRAILER"]["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] = 1 assert 5 == file_handler._repeat_cycle_duration @staticmethod @@ -1022,72 +1022,72 @@ def _exp_data_array(): [44., 192., 835., 527.], [64., 273., 132., 788.]], dtype=np.float32), - dims=['y', 'x'], + dims=["y", "x"], attrs={ - 'orbital_parameters': { - 'satellite_actual_longitude': -3.55117540817073, - 'satellite_actual_latitude': -0.5711243456528018, - 'satellite_actual_altitude': 35783296.150123544, - 'satellite_nominal_longitude': 0.0, - 'satellite_nominal_latitude': 0.0, - 'projection_longitude': 0.0, - 'projection_latitude': 0.0, - 'projection_altitude': 35785831.0 + "orbital_parameters": { + "satellite_actual_longitude": -3.55117540817073, + "satellite_actual_latitude": -0.5711243456528018, + "satellite_actual_altitude": 35783296.150123544, + "satellite_nominal_longitude": 0.0, + "satellite_nominal_latitude": 0.0, + "projection_longitude": 0.0, + "projection_latitude": 0.0, + "projection_altitude": 35785831.0 }, - 'time_parameters': { - 'nominal_start_time': datetime(2006, 1, 1, 12, 15, 0), - 'nominal_end_time': datetime(2006, 1, 1, 12, 30, 0), - 'observation_start_time': datetime(2006, 1, 1, 12, 15, 9, 304888), - 'observation_end_time': datetime(2006, 1, 1, 12, 27, 9, 304888), + "time_parameters": { + "nominal_start_time": datetime(2006, 1, 1, 12, 15, 0), + "nominal_end_time": datetime(2006, 1, 1, 12, 30, 0), + "observation_start_time": datetime(2006, 1, 1, 12, 15, 9, 304888), + "observation_end_time": datetime(2006, 1, 1, 12, 27, 9, 304888), }, - 'georef_offset_corrected': True, - 'platform_name': 'MSG-3', - 'sensor': 'seviri', - 'units': '1', - 'wavelength': (1, 2, 3), - 'standard_name': 'counts', + "georef_offset_corrected": True, + "platform_name": "MSG-3", + "sensor": "seviri", + "units": "1", + "wavelength": (1, 2, 3), + "standard_name": "counts", } ) - expected['acq_time'] = ('y', [np.datetime64('1958-01-02 00:00:01'), - np.datetime64('1958-01-02 00:00:02'), - np.datetime64('1958-01-02 00:00:03'), - np.datetime64('1958-01-02 00:00:04')]) + expected["acq_time"] = ("y", [np.datetime64("1958-01-02 00:00:01"), + np.datetime64("1958-01-02 00:00:02"), + np.datetime64("1958-01-02 00:00:03"), + np.datetime64("1958-01-02 00:00:04")]) return expected def test_get_dataset_with_raw_metadata(self, file_handler): """Test provision of raw metadata.""" file_handler.include_raw_metadata = True dataset_id = make_dataid( - name='VIS006', + name="VIS006", resolution=3000, - calibration='counts' + calibration="counts" ) dataset_info = { - 'units': '1', - 'wavelength': (1, 2, 3), - 'standard_name': 'counts' + "units": "1", + "wavelength": (1, 2, 3), + "standard_name": "counts" } xarr = file_handler.get_dataset(dataset_id, dataset_info) - assert 'raw_metadata' in xarr.attrs + assert "raw_metadata" in xarr.attrs def test_satpos_no_valid_orbit_polynomial(self, file_handler): """Test satellite position if there is no valid orbit polynomial.""" - file_handler.header['15_DATA_HEADER']['SatelliteStatus'][ - 'Orbit']['OrbitPolynomial'] = ORBIT_POLYNOMIALS_INVALID + file_handler.header["15_DATA_HEADER"]["SatelliteStatus"][ + "Orbit"]["OrbitPolynomial"] = 
ORBIT_POLYNOMIALS_INVALID dataset_id = make_dataid( - name='VIS006', + name="VIS006", resolution=3000, - calibration='counts' + calibration="counts" ) dataset_info = { - 'units': '1', - 'wavelength': (1, 2, 3), - 'standard_name': 'counts' + "units": "1", + "wavelength": (1, 2, 3), + "standard_name": "counts" } with pytest.warns(UserWarning, match="No orbit polynomial"): xarr = file_handler.get_dataset(dataset_id, dataset_info) - assert 'satellite_actual_longitude' not in xarr.attrs[ - 'orbital_parameters'] + assert "satellite_actual_longitude" not in xarr.attrs[ + "orbital_parameters"] class TestNativeMSGPadder(unittest.TestCase): @@ -1096,12 +1096,12 @@ class TestNativeMSGPadder(unittest.TestCase): @staticmethod def prepare_padder(test_dict): """Initialize Padder and pad test data.""" - dataset_id = test_dict['dataset_id'] - img_bounds = test_dict['img_bounds'] - is_full_disk = test_dict['is_full_disk'] - dataset = test_dict['dataset'] - final_shape = test_dict['final_shape'] - expected_padded_data = test_dict['expected_padded_data'] + dataset_id = test_dict["dataset_id"] + img_bounds = test_dict["img_bounds"] + is_full_disk = test_dict["is_full_disk"] + dataset = test_dict["dataset"] + final_shape = test_dict["final_shape"] + expected_padded_data = test_dict["expected_padded_data"] padder = Padder(dataset_id, img_bounds, is_full_disk) padder._final_shape = final_shape @@ -1150,56 +1150,56 @@ def test_file_pattern(self, reader): @pytest.mark.parametrize( - 'file_content,exp_header_size', + "file_content,exp_header_size", ( (ASCII_STARTSWITH, 450400), # with ascii header - (b'foobar', 445286), # without ascii header + (b"foobar", 445286), # without ascii header ) ) def test_header_type(file_content, exp_header_size): """Test identification of the file header type.""" header = create_test_header( - dataset_id=make_dataid(name='VIS006', resolution=3000), + dataset_id=make_dataid(name="VIS006", resolution=3000), earth_model=1, is_full_disk=True, is_rapid_scan=0 ) - if file_content == b'foobar': - header.pop('15_SECONDARY_PRODUCT_HEADER') - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ + if file_content == b"foobar": + header.pop("15_SECONDARY_PRODUCT_HEADER") + with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ + mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap") as _get_memmap, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch("builtins.open", mock.mock_open(read_data=file_content)): fromfile.return_value = header recarray2dict.side_effect = (lambda x: x) _get_memmap.return_value = np.arange(3) fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) assert fh.header_type.itemsize == exp_header_size - assert '15_SECONDARY_PRODUCT_HEADER' in fh.header + assert "15_SECONDARY_PRODUCT_HEADER" in fh.header def test_header_warning(): """Test warning is raised for NOK quality flag.""" header_good = create_test_header( - dataset_id=make_dataid(name='VIS006', resolution=3000), + dataset_id=make_dataid(name="VIS006", resolution=3000), earth_model=1, is_full_disk=True, is_rapid_scan=0, - good_qual='OK' + 
good_qual="OK" ) header_bad = create_test_header( - dataset_id=make_dataid(name='VIS006', resolution=3000), + dataset_id=make_dataid(name="VIS006", resolution=3000), earth_model=1, is_full_disk=True, is_rapid_scan=0, - good_qual='NOK' + good_qual="NOK" ) - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ + with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ + mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap") as _get_memmap, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch("builtins.open", mock.mock_open(read_data=ASCII_STARTSWITH)): recarray2dict.side_effect = (lambda x: x) _get_memmap.return_value = np.arange(3) @@ -1217,30 +1217,30 @@ def test_header_warning(): # check that without Main Header the code doesn't crash header_missing = header_good.copy() - header_missing.pop('15_MAIN_PRODUCT_HEADER') + header_missing.pop("15_MAIN_PRODUCT_HEADER") fromfile.return_value = header_missing with warnings.catch_warnings(): warnings.simplefilter("error") - NativeMSGFileHandler('myfile', {}, None) + NativeMSGFileHandler("myfile", {}, None) @pytest.mark.parametrize( "starts_with, expected", [ (ASCII_STARTSWITH, True), - (b'this_shall_fail', False) + (b"this_shall_fail", False) ] ) def test_has_archive_header(starts_with, expected): """Test if the file includes an ASCII archive header.""" with mock.patch("builtins.open", mock.mock_open(read_data=starts_with)): - actual = has_archive_header('filename') + actual = has_archive_header("filename") assert actual == expected def test_read_header(): """Test that reading header returns the header correctly converted to a dictionary.""" - keys = ('SatelliteId', 'NominalLongitude', 'SatelliteStatus') + keys = ("SatelliteId", "NominalLongitude", "SatelliteStatus") values = (324, 0.0, 1) expected = dict(zip(keys, values)) @@ -1248,7 +1248,7 @@ def test_read_header(): dtypes = np.dtype([(k, t) for k, t in zip(keys, types)]) hdr_data = np.array([values], dtype=dtypes) - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile: + with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile: fromfile.return_value = hdr_data actual = recarray2dict(hdr_data) assert actual == expected diff --git a/satpy/tests/reader_tests/test_seviri_l1b_nc.py b/satpy/tests/reader_tests/test_seviri_l1b_nc.py index f85e9f5aae..3f7b1a6296 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_nc.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_nc.py @@ -29,17 +29,17 @@ from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase from satpy.tests.utils import assert_attrs_equal, make_dataid -channel_keys_dict = {'VIS006': 'ch1', 'IR_108': 'ch9'} +channel_keys_dict = {"VIS006": "ch1", "IR_108": "ch9"} def to_cds_time(time): """Convert datetime to (days, msecs) since 1958-01-01.""" if isinstance(time, datetime): time = np.datetime64(time) - t0 = np.datetime64('1958-01-01 00:00') + t0 = np.datetime64("1958-01-01 00:00") delta = time - t0 - days = (delta / np.timedelta64(1, 'D')).astype(int) - msecs = delta / np.timedelta64(1, 'ms') - days * 24 * 3600 * 
1E3 + days = (delta / np.timedelta64(1, "D")).astype(int) + msecs = delta / np.timedelta64(1, "ms") - days * 24 * 3600 * 1E3 return days, msecs @@ -64,130 +64,130 @@ def _get_fake_dataset(self, counts, h5netcdf): orbit_poly_start_day, orbit_poly_start_msec = to_cds_time( np.array([datetime(2019, 12, 31, 18), datetime(2019, 12, 31, 22)], - dtype='datetime64') + dtype="datetime64") ) orbit_poly_end_day, orbit_poly_end_msec = to_cds_time( np.array([datetime(2019, 12, 31, 22), datetime(2020, 1, 1, 2)], - dtype='datetime64') + dtype="datetime64") ) counts = counts.rename({ - 'y': 'num_rows_vis_ir', - 'x': 'num_columns_vis_ir' + "y": "num_rows_vis_ir", + "x": "num_columns_vis_ir" }) scan_time_days, scan_time_msecs = to_cds_time(self.scan_time) ds = xr.Dataset( { - 'ch1': counts.copy(), - 'ch9': counts.copy(), - 'HRV': (('num_rows_hrv', 'num_columns_hrv'), [[1, 2, 3], + "ch1": counts.copy(), + "ch9": counts.copy(), + "HRV": (("num_rows_hrv", "num_columns_hrv"), [[1, 2, 3], [4, 5, 6], [7, 8, 9]]), - 'planned_chan_processing': self.radiance_types, - 'channel_data_visir_data_l10_line_mean_acquisition_time_day': ( - ('num_rows_vis_ir', 'channels_vis_ir_dim'), + "planned_chan_processing": self.radiance_types, + "channel_data_visir_data_l10_line_mean_acquisition_time_day": ( + ("num_rows_vis_ir", "channels_vis_ir_dim"), acq_time_day ), - 'channel_data_visir_data_l10_line_mean_acquisition_msec': ( - ('num_rows_vis_ir', 'channels_vis_ir_dim'), + "channel_data_visir_data_l10_line_mean_acquisition_msec": ( + ("num_rows_vis_ir", "channels_vis_ir_dim"), acq_time_msec ), - 'channel_data_visir_data_line_validity': ( - ('num_rows_vis_ir', 'channels_vis_ir_dim'), + "channel_data_visir_data_line_validity": ( + ("num_rows_vis_ir", "channels_vis_ir_dim"), line_validity ), - 'channel_data_visir_data_line_geometric_quality': ( - ('num_rows_vis_ir', 'channels_vis_ir_dim'), + "channel_data_visir_data_line_geometric_quality": ( + ("num_rows_vis_ir", "channels_vis_ir_dim"), line_geom_radio_quality ), - 'channel_data_visir_data_line_radiometric_quality': ( - ('num_rows_vis_ir', 'channels_vis_ir_dim'), + "channel_data_visir_data_line_radiometric_quality": ( + ("num_rows_vis_ir", "channels_vis_ir_dim"), line_geom_radio_quality ), - 'orbit_polynomial_x': ( - ('orbit_polynomial_dim_row', - 'orbit_polynomial_dim_col'), - ORBIT_POLYNOMIALS['X'][0:2] + "orbit_polynomial_x": ( + ("orbit_polynomial_dim_row", + "orbit_polynomial_dim_col"), + ORBIT_POLYNOMIALS["X"][0:2] ), - 'orbit_polynomial_y': ( - ('orbit_polynomial_dim_row', - 'orbit_polynomial_dim_col'), - ORBIT_POLYNOMIALS['Y'][0:2] + "orbit_polynomial_y": ( + ("orbit_polynomial_dim_row", + "orbit_polynomial_dim_col"), + ORBIT_POLYNOMIALS["Y"][0:2] ), - 'orbit_polynomial_z': ( - ('orbit_polynomial_dim_row', - 'orbit_polynomial_dim_col'), - ORBIT_POLYNOMIALS['Z'][0:2] + "orbit_polynomial_z": ( + ("orbit_polynomial_dim_row", + "orbit_polynomial_dim_col"), + ORBIT_POLYNOMIALS["Z"][0:2] ), - 'orbit_polynomial_start_time_day': ( - 'orbit_polynomial_dim_row', + "orbit_polynomial_start_time_day": ( + "orbit_polynomial_dim_row", orbit_poly_start_day ), - 'orbit_polynomial_start_time_msec': ( - 'orbit_polynomial_dim_row', + "orbit_polynomial_start_time_msec": ( + "orbit_polynomial_dim_row", orbit_poly_start_msec ), - 'orbit_polynomial_end_time_day': ( - 'orbit_polynomial_dim_row', + "orbit_polynomial_end_time_day": ( + "orbit_polynomial_dim_row", orbit_poly_end_day ), - 'orbit_polynomial_end_time_msec': ( - 'orbit_polynomial_dim_row', + "orbit_polynomial_end_time_msec": ( + 
"orbit_polynomial_dim_row", orbit_poly_end_msec ), }, attrs={ - 'equatorial_radius': 6378.169, - 'north_polar_radius': 6356.5838, - 'south_polar_radius': 6356.5838, - 'longitude_of_SSP': 0.0, - 'nominal_longitude': -3.5, - 'satellite_id': self.platform_id, - 'true_repeat_cycle_start_day': scan_time_days, - 'true_repeat_cycle_start_mi_sec': scan_time_msecs, - 'planned_repeat_cycle_end_day': scan_time_days, - 'planned_repeat_cycle_end_mi_sec': scan_time_msecs, - 'north_most_line': 3712, - 'east_most_pixel': 1, - 'west_most_pixel': 3712, - 'south_most_line': 1, - 'vis_ir_grid_origin': 0, - 'vis_ir_column_dir_grid_step': 3.0004032, - 'vis_ir_line_dir_grid_step': 3.0004032, - 'type_of_earth_model': '0x02', - 'nominal_image_scanning': 'T', + "equatorial_radius": 6378.169, + "north_polar_radius": 6356.5838, + "south_polar_radius": 6356.5838, + "longitude_of_SSP": 0.0, + "nominal_longitude": -3.5, + "satellite_id": self.platform_id, + "true_repeat_cycle_start_day": scan_time_days, + "true_repeat_cycle_start_mi_sec": scan_time_msecs, + "planned_repeat_cycle_end_day": scan_time_days, + "planned_repeat_cycle_end_mi_sec": scan_time_msecs, + "north_most_line": 3712, + "east_most_pixel": 1, + "west_most_pixel": 3712, + "south_most_line": 1, + "vis_ir_grid_origin": 0, + "vis_ir_column_dir_grid_step": 3.0004032, + "vis_ir_line_dir_grid_step": 3.0004032, + "type_of_earth_model": "0x02", + "nominal_image_scanning": "T", } ) if h5netcdf: - nattrs = {'equatorial_radius': np.array([6378.169]), - 'north_polar_radius': np.array([6356.5838]), - 'south_polar_radius': np.array([6356.5838]), - 'longitude_of_SSP': np.array([0.0]), - 'vis_ir_column_dir_grid_step': np.array([3.0004032]), - 'vis_ir_line_dir_grid_step': np.array([3.0004032]) + nattrs = {"equatorial_radius": np.array([6378.169]), + "north_polar_radius": np.array([6356.5838]), + "south_polar_radius": np.array([6356.5838]), + "longitude_of_SSP": np.array([0.0]), + "vis_ir_column_dir_grid_step": np.array([3.0004032]), + "vis_ir_line_dir_grid_step": np.array([3.0004032]) } ds.attrs.update(nattrs) - ds['ch1'].attrs.update({ - 'scale_factor': self.gains_nominal[0], - 'add_offset': self.offsets_nominal[0] + ds["ch1"].attrs.update({ + "scale_factor": self.gains_nominal[0], + "add_offset": self.offsets_nominal[0] }) # IR_108 is dataset with key ch9 - ds['ch9'].attrs.update({ - 'scale_factor': self.gains_nominal[8], - 'add_offset': self.offsets_nominal[8], + ds["ch9"].attrs.update({ + "scale_factor": self.gains_nominal[8], + "add_offset": self.offsets_nominal[8], }) # Add some attributes so that the reader can strip them strip_attrs = { - 'comment': None, - 'long_name': None, - 'valid_min': None, - 'valid_max': None + "comment": None, + "long_name": None, + "valid_min": None, + "valid_max": None } - for name in ['ch1', 'ch9']: + for name in ["ch1", "ch9"]: ds[name].attrs.update(strip_attrs) return ds @@ -197,38 +197,38 @@ def h5netcdf(self): """Fixture for xr backend choice.""" return False - @pytest.fixture(name='file_handler') + @pytest.fixture(name="file_handler") def file_handler(self, counts, h5netcdf): """Create a mocked file handler.""" with mock.patch( - 'satpy.readers.seviri_l1b_nc.open_dataset', + "satpy.readers.seviri_l1b_nc.open_dataset", return_value=self._get_fake_dataset(counts=counts, h5netcdf=h5netcdf) ): return NCSEVIRIFileHandler( - 'filename', - {'platform_shortname': 'MSG3', - 'start_time': self.scan_time, - 'service': 'MSG'}, - {'filetype': 'info'} + "filename", + {"platform_shortname": "MSG3", + "start_time": self.scan_time, + "service": 
"MSG"}, + {"filetype": "info"} ) @pytest.mark.parametrize( - ('channel', 'calibration', 'use_ext_coefs'), + ("channel", "calibration", "use_ext_coefs"), [ # VIS channel, internal coefficients - ('VIS006', 'counts', False), - ('VIS006', 'radiance', False), - ('VIS006', 'reflectance', False), + ("VIS006", "counts", False), + ("VIS006", "radiance", False), + ("VIS006", "reflectance", False), # VIS channel, external coefficients - ('VIS006', 'radiance', True), - ('VIS006', 'reflectance', True), + ("VIS006", "radiance", True), + ("VIS006", "reflectance", True), # IR channel, internal coefficients - ('IR_108', 'counts', False), - ('IR_108', 'radiance', False), - ('IR_108', 'brightness_temperature', False), + ("IR_108", "counts", False), + ("IR_108", "radiance", False), + ("IR_108", "brightness_temperature", False), # IR channel, external coefficients - ('IR_108', 'radiance', True), - ('IR_108', 'brightness_temperature', True), + ("IR_108", "radiance", True), + ("IR_108", "brightness_temperature", True), # FUTURE: Enable once HRV reading has been fixed. # # HRV channel, internal coefficiens # ('HRV', 'counts', False), @@ -247,7 +247,7 @@ def test_calibrate( expected = self._get_expected( channel=channel, calibration=calibration, - calib_mode='NOMINAL', + calib_mode="NOMINAL", use_ext_coefs=use_ext_coefs ) fh = file_handler @@ -261,35 +261,35 @@ def test_calibrate( def test_mask_bad_quality(self, file_handler): """Test masking of bad quality scan lines.""" - channel = 'VIS006' + channel = "VIS006" key = channel_keys_dict[channel] dataset_info = { - 'nc_key': key, - 'units': 'units', - 'wavelength': 'wavelength', - 'standard_name': 'standard_name' + "nc_key": key, + "units": "units", + "wavelength": "wavelength", + "standard_name": "standard_name" } expected = self._get_expected( channel=channel, - calibration='radiance', - calib_mode='NOMINAL', + calibration="radiance", + calib_mode="NOMINAL", use_ext_coefs=False ) fh = file_handler res = fh._mask_bad_quality(fh.nc[key], dataset_info) - new_data = np.zeros_like(expected.data).astype('float32') + new_data = np.zeros_like(expected.data).astype("float32") new_data[:, :] = np.nan expected = expected.copy(data=new_data) xr.testing.assert_allclose(res, expected) @pytest.mark.parametrize( - ('channel', 'calibration', 'mask_bad_quality_scan_lines'), + ("channel", "calibration", "mask_bad_quality_scan_lines"), [ - ('VIS006', 'reflectance', True), - ('VIS006', 'reflectance', False), - ('IR_108', 'brightness_temperature', True) + ("VIS006", "reflectance", True), + ("VIS006", "reflectance", False), + ("IR_108", "brightness_temperature", True) ] ) def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_scan_lines): @@ -297,10 +297,10 @@ def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_ dataset_id = make_dataid(name=channel, calibration=calibration) key = channel_keys_dict[channel] dataset_info = { - 'nc_key': key, - 'units': 'units', - 'wavelength': 'wavelength', - 'standard_name': 'standard_name' + "nc_key": key, + "units": "units", + "wavelength": "wavelength", + "standard_name": "standard_name" } file_handler.mask_bad_quality_scan_lines = mask_bad_quality_scan_lines @@ -310,43 +310,43 @@ def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_ expected = self._get_expected( channel=channel, calibration=calibration, - calib_mode='NOMINAL', + calib_mode="NOMINAL", use_ext_coefs=False ) expected.attrs = { - 'orbital_parameters': { - 'satellite_actual_longitude': -3.541742131915741, 
- 'satellite_actual_latitude': -0.5203765167594427, - 'satellite_actual_altitude': 35783419.16135868, - 'satellite_nominal_longitude': -3.5, - 'satellite_nominal_latitude': 0.0, - 'projection_longitude': 0.0, - 'projection_latitude': 0.0, - 'projection_altitude': 35785831.0 + "orbital_parameters": { + "satellite_actual_longitude": -3.541742131915741, + "satellite_actual_latitude": -0.5203765167594427, + "satellite_actual_altitude": 35783419.16135868, + "satellite_nominal_longitude": -3.5, + "satellite_nominal_latitude": 0.0, + "projection_longitude": 0.0, + "projection_latitude": 0.0, + "projection_altitude": 35785831.0 }, - 'time_parameters': { - 'nominal_start_time': datetime(2020, 1, 1, 0, 0), - 'nominal_end_time': datetime(2020, 1, 1, 0, 0), - 'observation_start_time': datetime(2020, 1, 1, 0, 0), - 'observation_end_time': datetime(2020, 1, 1, 0, 0), + "time_parameters": { + "nominal_start_time": datetime(2020, 1, 1, 0, 0), + "nominal_end_time": datetime(2020, 1, 1, 0, 0), + "observation_start_time": datetime(2020, 1, 1, 0, 0), + "observation_end_time": datetime(2020, 1, 1, 0, 0), }, - 'georef_offset_corrected': True, - 'platform_name': 'Meteosat-11', - 'sensor': 'seviri', - 'units': 'units', - 'wavelength': 'wavelength', - 'standard_name': 'standard_name' + "georef_offset_corrected": True, + "platform_name": "Meteosat-11", + "sensor": "seviri", + "units": "units", + "wavelength": "wavelength", + "standard_name": "standard_name" } - expected['acq_time'] = ('y', [np.datetime64('1958-01-02 00:00:01'), - np.datetime64('1958-01-02 00:00:02')]) + expected["acq_time"] = ("y", [np.datetime64("1958-01-02 00:00:01"), + np.datetime64("1958-01-02 00:00:02")]) expected = expected[::-1] # reader flips data upside down if mask_bad_quality_scan_lines: expected = file_handler._mask_bad_quality(expected, dataset_info) xr.testing.assert_allclose(res, expected) - for key in ['sun_earth_distance_correction_applied', - 'sun_earth_distance_correction_factor']: + for key in ["sun_earth_distance_correction_applied", + "sun_earth_distance_correction_factor"]: res.attrs.pop(key, None) assert_attrs_equal(res.attrs, expected.attrs, tolerance=1e-4) @@ -364,29 +364,29 @@ def test_repeat_cycle_duration(self, file_handler): """Test repeat cycle handling for FD or ReduscedScan.""" assert 15 == file_handler._repeat_cycle_duration # Change the reducescan scenario to test the repeat cycle duration handling - file_handler.nc.attrs['nominal_image_scanning'] = '' - file_handler.nc.attrs['reduced_scanning'] = 'T' + file_handler.nc.attrs["nominal_image_scanning"] = "" + file_handler.nc.attrs["reduced_scanning"] = "T" # file_handler.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] = 1 assert 5 == file_handler._repeat_cycle_duration def test_satpos_no_valid_orbit_polynomial(self, file_handler): """Test satellite position if there is no valid orbit polynomial.""" - dataset_id = make_dataid(name='VIS006', calibration='counts') + dataset_id = make_dataid(name="VIS006", calibration="counts") dataset_info = { - 'name': 'VIS006', - 'nc_key': 'ch1', - 'units': 'units', - 'wavelength': 'wavelength', - 'standard_name': 'standard_name' + "name": "VIS006", + "nc_key": "ch1", + "units": "units", + "wavelength": "wavelength", + "standard_name": "standard_name" } - file_handler.nc['orbit_polynomial_start_time_day'] = 0 - file_handler.nc['orbit_polynomial_end_time_day'] = 0 + file_handler.nc["orbit_polynomial_start_time_day"] = 0 + file_handler.nc["orbit_polynomial_end_time_day"] = 0 res = 
file_handler.get_dataset(dataset_id, dataset_info) - assert 'satellite_actual_longitude' not in res.attrs[ - 'orbital_parameters'] + assert "satellite_actual_longitude" not in res.attrs[ + "orbital_parameters"] - @pytest.mark.parametrize('h5netcdf', [True]) + @pytest.mark.parametrize("h5netcdf", [True]) def test_h5netcdf_pecularity(self, file_handler, h5netcdf): """Test conversion of attributes when xarray is used with h5netcdf backend.""" fh = file_handler - assert isinstance(fh.mda['projection_parameters']['a'], float) + assert isinstance(fh.mda["projection_parameters"]["a"], float) diff --git a/satpy/tests/reader_tests/test_seviri_l2_bufr.py b/satpy/tests/reader_tests/test_seviri_l2_bufr.py index 3578645e5b..2979084974 100644 --- a/satpy/tests/reader_tests/test_seviri_l2_bufr.py +++ b/satpy/tests/reader_tests/test_seviri_l2_bufr.py @@ -29,83 +29,83 @@ from satpy.tests.utils import make_dataid -FILETYPE_INFO = {'file_type': 'seviri_l2_bufr_asr'} +FILETYPE_INFO = {"file_type": "seviri_l2_bufr_asr"} -FILENAME_INFO = {'start_time': '20191112000000', - 'spacecraft': 'MSG2'} -FILENAME_INFO2 = {'start_time': '20191112000000', - 'spacecraft': 'MSG2', - 'server': 'TESTSERVER'} +FILENAME_INFO = {"start_time": "20191112000000", + "spacecraft": "MSG2"} +FILENAME_INFO2 = {"start_time": "20191112000000", + "spacecraft": "MSG2", + "server": "TESTSERVER"} MPEF_PRODUCT_HEADER = { - 'NominalTime': datetime(2019, 11, 6, 18, 0), - 'SpacecraftName': '09', - 'RectificationLongitude': 'E0455' + "NominalTime": datetime(2019, 11, 6, 18, 0), + "SpacecraftName": "09", + "RectificationLongitude": "E0455" } DATASET_INFO = { - 'name': 'testdata', - 'key': '#1#brightnessTemperature', - 'coordinates': ('longitude', 'latitude'), - 'fill_value': 0 + "name": "testdata", + "key": "#1#brightnessTemperature", + "coordinates": ("longitude", "latitude"), + "fill_value": 0 } DATASET_INFO_LAT = { - 'name': 'latitude', - 'key': '#1#latitude', - 'fill_value': -1.e+100 + "name": "latitude", + "key": "#1#latitude", + "fill_value": -1.e+100 } DATASET_INFO_LON = { - 'name': 'longitude', - 'key': '#1#longitude', - 'fill_value': -1.e+100 + "name": "longitude", + "key": "#1#longitude", + "fill_value": -1.e+100 } DATASET_ATTRS = { - 'platform_name': 'MET09', - 'ssp_lon': 45.5, - 'seg_size': 16 + "platform_name": "MET09", + "ssp_lon": 45.5, + "seg_size": 16 } AREA_DEF = geometry.AreaDefinition( - 'msg_seviri_iodc_48km', - 'MSG SEVIRI Indian Ocean Data Coverage service area definition with 48 km resolution', + "msg_seviri_iodc_48km", + "MSG SEVIRI Indian Ocean Data Coverage service area definition with 48 km resolution", "", - {'a': 6378169., 'b': 6356583.8, 'lon_0': DATASET_ATTRS['ssp_lon'], - 'h': 35785831., 'proj': 'geos', 'units': 'm'}, + {"a": 6378169., "b": 6356583.8, "lon_0": DATASET_ATTRS["ssp_lon"], + "h": 35785831., "proj": "geos", "units": "m"}, 232, 232, (-5570248.6867, -5567248.2834, 5567248.2834, 5570248.6867) ) AREA_DEF_FES = geometry.AreaDefinition( - 'msg_seviri_res_48km', - 'MSG SEVIRI Full Earth Scanning service area definition with 48 km resolution', + "msg_seviri_res_48km", + "MSG SEVIRI Full Earth Scanning service area definition with 48 km resolution", "", - {'a': 6378169., 'b': 6356583.8, 'lon_0': 0.0, - 'h': 35785831., 'proj': 'geos', 'units': 'm'}, + {"a": 6378169., "b": 6356583.8, "lon_0": 0.0, + "h": 35785831., "proj": "geos", "units": "m"}, 232, 232, (-5570248.6867, -5567248.2834, 5567248.2834, 5570248.6867) ) AREA_DEF_EXT = geometry.AreaDefinition( - 'msg_seviri_iodc_9km_ext', - 'MSG SEVIRI Indian Ocean Data 
Coverage service area definition with 9 km resolution ' - '(extended outside original 3km grid)', + "msg_seviri_iodc_9km_ext", + "MSG SEVIRI Indian Ocean Data Coverage service area definition with 9 km resolution " + "(extended outside original 3km grid)", "", - {'a': 6378169., 'b': 6356583.8, 'lon_0': DATASET_ATTRS['ssp_lon'], - 'h': 35785831., 'proj': 'geos', 'units': 'm'}, + {"a": 6378169., "b": 6356583.8, "lon_0": DATASET_ATTRS["ssp_lon"], + "h": 35785831., "proj": "geos", "units": "m"}, 1238, 1238, (-5571748.8883, -5571748.8882, 5571748.8882, 5571748.8883) ) TEST_FILES = [ - 'ASRBUFRProd_20191106130000Z_00_OMPEFS02_MET09_FES_E0000', - 'MSG2-SEVI-MSGASRE-0101-0101-20191106130000.000000000Z-20191106131702-1362128.bfr', - 'MSG2-SEVI-MSGASRE-0101-0101-20191106101500.000000000Z-20191106103218-1362148' + "ASRBUFRProd_20191106130000Z_00_OMPEFS02_MET09_FES_E0000", + "MSG2-SEVI-MSGASRE-0101-0101-20191106130000.000000000Z-20191106131702-1362128.bfr", + "MSG2-SEVI-MSGASRE-0101-0101-20191106101500.000000000Z-20191106103218-1362148" ] # Test data @@ -117,30 +117,30 @@ class SeviriL2BufrData: """Mock SEVIRI L2 BUFR data.""" - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") - def __init__(self, filename, with_adef=False, rect_lon='default'): + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") + def __init__(self, filename, with_adef=False, rect_lon="default"): """Initialize by mocking test data for testing the SEVIRI L2 BUFR reader.""" import eccodes as ec from satpy.readers.seviri_l2_bufr import SeviriL2BufrFileHandler - self.buf1 = ec.codes_bufr_new_from_samples('BUFR4_local_satellite') - ec.codes_set(self.buf1, 'unpack', 1) + self.buf1 = ec.codes_bufr_new_from_samples("BUFR4_local_satellite") + ec.codes_set(self.buf1, "unpack", 1) # write the bufr test data twice as we want to read in and then concatenate the data in the reader # 55 id corresponds to METEOSAT 8` - ec.codes_set(self.buf1, 'satelliteIdentifier', 56) - ec.codes_set_array(self.buf1, '#1#latitude', LAT) - ec.codes_set_array(self.buf1, '#1#latitude', LAT) - ec.codes_set_array(self.buf1, '#1#longitude', LON) - ec.codes_set_array(self.buf1, '#1#longitude', LON) - ec.codes_set_array(self.buf1, '#1#brightnessTemperature', DATA) - ec.codes_set_array(self.buf1, '#1#brightnessTemperature', DATA) + ec.codes_set(self.buf1, "satelliteIdentifier", 56) + ec.codes_set_array(self.buf1, "#1#latitude", LAT) + ec.codes_set_array(self.buf1, "#1#latitude", LAT) + ec.codes_set_array(self.buf1, "#1#longitude", LON) + ec.codes_set_array(self.buf1, "#1#longitude", LON) + ec.codes_set_array(self.buf1, "#1#brightnessTemperature", DATA) + ec.codes_set_array(self.buf1, "#1#brightnessTemperature", DATA) self.m = mock.mock_open() # only our offline product contain MPEF product headers so we get the metadata from there - if ('BUFRProd' in filename): - with mock.patch('satpy.readers.seviri_l2_bufr.np.fromfile') as fromfile: + if ("BUFRProd" in filename): + with mock.patch("satpy.readers.seviri_l2_bufr.np.fromfile") as fromfile: fromfile.return_value = MPEF_PRODUCT_HEADER - with mock.patch('satpy.readers.seviri_l2_bufr.recarray2dict') as recarray2dict: + with mock.patch("satpy.readers.seviri_l2_bufr.recarray2dict") as recarray2dict: recarray2dict.side_effect = (lambda x: x) self.fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO2, FILETYPE_INFO, with_area_definition=with_adef, rectification_longitude=rect_lon) @@ -148,13 +148,13 @@ def __init__(self, filename, with_adef=False, 
rect_lon='default'): else: # No Mpef Header so we get the metadata from the BUFR messages - with mock.patch('satpy.readers.seviri_l2_bufr.open', self.m, create=True): - with mock.patch('eccodes.codes_bufr_new_from_file', + with mock.patch("satpy.readers.seviri_l2_bufr.open", self.m, create=True): + with mock.patch("eccodes.codes_bufr_new_from_file", side_effect=[self.buf1, None, self.buf1, None, self.buf1, None]) as ec1: ec1.return_value = ec1.side_effect - with mock.patch('eccodes.codes_set') as ec2: + with mock.patch("eccodes.codes_set") as ec2: ec2.return_value = 1 - with mock.patch('eccodes.codes_release') as ec5: + with mock.patch("eccodes.codes_release") as ec5: ec5.return_value = 1 self.fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO, FILETYPE_INFO, with_area_definition=with_adef, @@ -162,15 +162,15 @@ def __init__(self, filename, with_adef=False, rect_lon='default'): def get_data(self, dataset_info): """Read data from mock file.""" - with mock.patch('satpy.readers.seviri_l2_bufr.open', self.m, create=True): - with mock.patch('eccodes.codes_bufr_new_from_file', + with mock.patch("satpy.readers.seviri_l2_bufr.open", self.m, create=True): + with mock.patch("eccodes.codes_bufr_new_from_file", side_effect=[self.buf1, self.buf1, None]) as ec1: ec1.return_value = ec1.side_effect - with mock.patch('eccodes.codes_set') as ec2: + with mock.patch("eccodes.codes_set") as ec2: ec2.return_value = 1 - with mock.patch('eccodes.codes_release') as ec5: + with mock.patch("eccodes.codes_release") as ec5: ec5.return_value = 1 - z = self.fh.get_dataset(make_dataid(name=dataset_info['name'], resolution=48000), dataset_info) + z = self.fh.get_dataset(make_dataid(name=dataset_info["name"], resolution=48000), dataset_info) return z @@ -193,9 +193,9 @@ def test_attributes_with_swath_definition(input_file): """Test correctness of dataset attributes with data loaded with a SwathDefinition (default behaviour).""" bufr_obj = SeviriL2BufrData(input_file) z = bufr_obj.get_data(DATASET_INFO) - assert z.attrs['platform_name'] == DATASET_ATTRS['platform_name'] - assert z.attrs['ssp_lon'] == DATASET_ATTRS['ssp_lon'] - assert z.attrs['seg_size'] == DATASET_ATTRS['seg_size'] + assert z.attrs["platform_name"] == DATASET_ATTRS["platform_name"] + assert z.attrs["ssp_lon"] == DATASET_ATTRS["ssp_lon"] + assert z.attrs["seg_size"] == DATASET_ATTRS["seg_size"] @staticmethod def test_attributes_with_area_definition(input_file): @@ -204,9 +204,9 @@ def test_attributes_with_area_definition(input_file): _ = bufr_obj.get_data(DATASET_INFO_LAT) # We need to load the lat/lon data in order to _ = bufr_obj.get_data(DATASET_INFO_LON) # populate the file handler with these data z = bufr_obj.get_data(DATASET_INFO) - assert z.attrs['platform_name'] == DATASET_ATTRS['platform_name'] - assert z.attrs['ssp_lon'] == DATASET_ATTRS['ssp_lon'] - assert z.attrs['seg_size'] == DATASET_ATTRS['seg_size'] + assert z.attrs["platform_name"] == DATASET_ATTRS["platform_name"] + assert z.attrs["ssp_lon"] == DATASET_ATTRS["ssp_lon"] + assert z.attrs["seg_size"] == DATASET_ATTRS["seg_size"] @staticmethod def test_data_with_swath_definition(input_file): @@ -242,7 +242,7 @@ def test_data_with_area_definition(self, input_file): # Test that the correct AreaDefinition is identified for products with 3 pixel segements bufr_obj.fh.seg_size = 3 - ad_ext = bufr_obj.fh._construct_area_def(make_dataid(name='dummmy', resolution=9000)) + ad_ext = bufr_obj.fh._construct_area_def(make_dataid(name="dummmy", resolution=9000)) assert ad_ext == AREA_DEF_EXT def 
test_data_with_rect_lon(self, input_file): @@ -260,14 +260,14 @@ def test_data_with_rect_lon(self, input_file): class SeviriL2AMVBufrData: """Mock SEVIRI L2 AMV BUFR data.""" - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def __init__(self, filename): """Initialize by mocking test data for testing the SEVIRI L2 BUFR reader.""" from satpy.readers.seviri_l2_bufr import SeviriL2BufrFileHandler - with mock.patch('satpy.readers.seviri_l2_bufr.np.fromfile'): + with mock.patch("satpy.readers.seviri_l2_bufr.np.fromfile"): self.fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO2, - filetype_info={'file_type': 'seviri_l2_bufr_amv'}, + filetype_info={"file_type": "seviri_l2_bufr_amv"}, with_area_definition=True) @@ -277,5 +277,5 @@ class TestSeviriL2AMVBufrReader: @staticmethod def test_amv_with_area_def(): """Test that AMV data can not be loaded with an area definition.""" - bufr_obj = SeviriL2AMVBufrData('AMVBUFRProd_20201110124500Z_00_OMPEFS04_MET11_FES_E0000') + bufr_obj = SeviriL2AMVBufrData("AMVBUFRProd_20201110124500Z_00_OMPEFS04_MET11_FES_E0000") assert bufr_obj.fh.with_adef is False diff --git a/satpy/tests/reader_tests/test_seviri_l2_grib.py b/satpy/tests/reader_tests/test_seviri_l2_grib.py index faee3f9bdb..a8b5310a78 100644 --- a/satpy/tests/reader_tests/test_seviri_l2_grib.py +++ b/satpy/tests/reader_tests/test_seviri_l2_grib.py @@ -29,17 +29,17 @@ # Dictionary to be used as fake GRIB message FAKE_MESSAGE = { - 'longitudeOfSubSatellitePointInDegrees': 9.5, - 'dataDate': 20191020, - 'dataTime': 1745, - 'Nx': 1000, - 'Ny': 1200, - 'earthMajorAxis': 6400., - 'earthMinorAxis': 6300., - 'NrInRadiusOfEarth': 6., - 'XpInGridLengths': 500, - 'parameterNumber': 30, - 'missingValue': 9999, + "longitudeOfSubSatellitePointInDegrees": 9.5, + "dataDate": 20191020, + "dataTime": 1745, + "Nx": 1000, + "Ny": 1200, + "earthMajorAxis": 6400., + "earthMinorAxis": 6300., + "NrInRadiusOfEarth": 6., + "XpInGridLengths": 500, + "parameterNumber": 30, + "missingValue": 9999, } # List to be used as fake GID source @@ -49,7 +49,7 @@ class Test_SeviriL2GribFileHandler(unittest.TestCase): """Test the SeviriL2GribFileHandler reader.""" - @mock.patch('satpy.readers.seviri_l2_grib.ec') + @mock.patch("satpy.readers.seviri_l2_grib.ec") def setUp(self, ec_): """Set up the test by creating a mocked eccodes library.""" fake_gid_generator = (i for i in FAKE_GID) @@ -58,9 +58,9 @@ def setUp(self, ec_): ec_.codes_get_values.return_value = np.ones(1000*1200) self.ec_ = ec_ - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") - @mock.patch('satpy.readers.seviri_l2_grib.xr') - @mock.patch('satpy.readers.seviri_l2_grib.da') + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") + @mock.patch("satpy.readers.seviri_l2_grib.xr") + @mock.patch("satpy.readers.seviri_l2_grib.da") def test_data_reading(self, da_, xr_): """Test the reading of data from the product.""" from satpy.readers.seviri_l2_grib import REPEAT_CYCLE_DURATION, SeviriL2GribFileHandler @@ -68,18 +68,18 @@ def test_data_reading(self, da_, xr_): CHUNK_SIZE = get_legacy_chunk_size() with mock.patch("builtins.open", mock.mock_open()) as mock_file: - with mock.patch('satpy.readers.seviri_l2_grib.ec', self.ec_): + with mock.patch("satpy.readers.seviri_l2_grib.ec", self.ec_): self.reader = SeviriL2GribFileHandler( - filename='test.grib', + filename="test.grib", filename_info={ 
- 'spacecraft': 'MET11', - 'start_time': datetime.datetime(year=2020, month=10, day=20, + "spacecraft": "MET11", + "start_time": datetime.datetime(year=2020, month=10, day=20, hour=19, minute=45, second=0) }, filetype_info={} ) - dataset_id = make_dataid(name='dummmy', resolution=3000) + dataset_id = make_dataid(name="dummmy", resolution=3000) # Checks that the codes_grib_multi_support_on function has been called self.ec_.codes_grib_multi_support_on.assert_called() @@ -91,11 +91,11 @@ def test_data_reading(self, da_, xr_): self.ec_.codes_release.reset_mock() # Checks the correct execution of the get_dataset function with a valid parameter_number - valid_dataset = self.reader.get_dataset(dataset_id, {'parameter_number': 30}) + valid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 30}) # Checks the correct file open call - mock_file.assert_called_with('test.grib', 'rb') + mock_file.assert_called_with("test.grib", "rb") # Checks that the dataset has been created as a DataArray object - self.assertEqual(valid_dataset._extract_mock_name(), 'xr.DataArray()') + self.assertEqual(valid_dataset._extract_mock_name(), "xr.DataArray()") # Checks that codes_release has been called after each codes_grib_new_from_file call # (except after the last one which has returned a None) self.assertEqual(self.ec_.codes_grib_new_from_file.call_count, @@ -108,7 +108,7 @@ def test_data_reading(self, da_, xr_): self.ec_.codes_release.reset_mock() # Checks the correct execution of the get_dataset function with an invalid parameter_number - invalid_dataset = self.reader.get_dataset(dataset_id, {'parameter_number': 50}) + invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) # Checks that the function returns None self.assertEqual(invalid_dataset, None) # Checks that codes_release has been called after each codes_grib_new_from_file call @@ -122,11 +122,11 @@ def test_data_reading(self, da_, xr_): # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = self.reader._get_attributes() expected_attributes = { - 'orbital_parameters': { - 'projection_longitude': 9.5 + "orbital_parameters": { + "projection_longitude": 9.5 }, - 'sensor': 'seviri', - 'platform_name': 'Meteosat-11' + "sensor": "seviri", + "platform_name": "Meteosat-11" } self.assertEqual(attributes, expected_attributes) @@ -140,45 +140,45 @@ def test_data_reading(self, da_, xr_): # Checks that xarray.DataArray has been called with the correct arguments name, args, kwargs = xr_.mock_calls[0] - self.assertEqual(kwargs['dims'], ('y', 'x')) + self.assertEqual(kwargs["dims"], ("y", "x")) # Checks the correct execution of the _get_proj_area function pdict, area_dict = self.reader._get_proj_area(0) expected_pdict = { - 'a': 6400000., - 'b': 6300000., - 'h': 32000000., - 'ssp_lon': 9.5, - 'nlines': 1000, - 'ncols': 1200, - 'a_name': 'msg_seviri_rss_3km', - 'a_desc': 'MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution', - 'p_id': '', + "a": 6400000., + "b": 6300000., + "h": 32000000., + "ssp_lon": 9.5, + "nlines": 1000, + "ncols": 1200, + "a_name": "msg_seviri_rss_3km", + "a_desc": "MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution", + "p_id": "", } self.assertEqual(pdict, expected_pdict) expected_area_dict = { - 'center_point': 500, - 'north': 1200, - 'east': 1, - 'west': 1000, - 'south': 1, + "center_point": 500, + "north": 1200, + "east": 1, + "west": 1000, + "south": 1, } self.assertEqual(area_dict, expected_area_dict) # Checks the 
correct execution of the get_area_def function - with mock.patch('satpy.readers.seviri_l2_grib.calculate_area_extent', - mock.Mock(name='calculate_area_extent')) as cae: - with mock.patch('satpy.readers.seviri_l2_grib.get_area_definition', mock.Mock()) as gad: - dataset_id = make_dataid(name='dummmy', resolution=400.) + with mock.patch("satpy.readers.seviri_l2_grib.calculate_area_extent", + mock.Mock(name="calculate_area_extent")) as cae: + with mock.patch("satpy.readers.seviri_l2_grib.get_area_definition", mock.Mock()) as gad: + dataset_id = make_dataid(name="dummmy", resolution=400.) self.reader.get_area_def(dataset_id) # Asserts that calculate_area_extent has been called with the correct arguments - expected_args = ({'center_point': 500, 'east': 1, 'west': 1000, 'south': 1, 'north': 1200, - 'column_step': 400., 'line_step': 400.},) + expected_args = ({"center_point": 500, "east": 1, "west": 1000, "south": 1, "north": 1200, + "column_step": 400., "line_step": 400.},) name, args, kwargs = cae.mock_calls[0] self.assertEqual(args, expected_args) # Asserts that get_area_definition has been called with the correct arguments name, args, kwargs = gad.mock_calls[0] self.assertEqual(args[0], expected_pdict) # The second argument must be the return result of calculate_area_extent - self.assertEqual(args[1]._extract_mock_name(), 'calculate_area_extent()') + self.assertEqual(args[1]._extract_mock_name(), "calculate_area_extent()") diff --git a/satpy/tests/reader_tests/test_slstr_l1b.py b/satpy/tests/reader_tests/test_slstr_l1b.py index 9f516b4cde..cc0764685f 100644 --- a/satpy/tests/reader_tests/test_slstr_l1b.py +++ b/satpy/tests/reader_tests/test_slstr_l1b.py @@ -27,40 +27,40 @@ from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange from satpy.readers.slstr_l1b import NCSLSTR1B, NCSLSTRAngles, NCSLSTRFlag, NCSLSTRGeo -local_id_keys_config = {'name': { - 'required': True, +local_id_keys_config = {"name": { + "required": True, }, - 'wavelength': { - 'type': WavelengthRange, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'counts' + "resolution": None, + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "counts" ] }, - 'stripe': { - 'enum': [ - 'a', - 'b', - 'c', - 'i', - 'f', + "stripe": { + "enum": [ + "a", + "b", + "c", + "i", + "f", ] }, - 'view': { - 'enum': [ - 'nadir', - 'oblique', + "view": { + "enum": [ + "nadir", + "oblique", ] }, - 'modifiers': { - 'required': True, - 'default': ModifierTuple(), - 'type': ModifierTuple, + "modifiers": { + "required": True, + "default": ModifierTuple(), + "type": ModifierTuple, }, } @@ -68,7 +68,7 @@ class TestSLSTRL1B(unittest.TestCase): """Common setup for SLSTR_L1B tests.""" - @mock.patch('satpy.readers.slstr_l1b.xr') + @mock.patch("satpy.readers.slstr_l1b.xr") def setUp(self, xr_): """Create a fake dataset using the given radiance data.""" self.base_data = np.array(([1., 2., 3.], [4., 5., 6.])) @@ -77,34 +77,34 @@ def setUp(self, xr_): self.end_time = "2020-05-10T12:06:18.012Z" self.rad = xr.DataArray( self.base_data, - dims=('columns', 'rows'), - attrs={'scale_factor': 1.0, 'add_offset': 0.0, - '_FillValue': -32768, 'units': 'mW.m-2.sr-1.nm-1', + dims=("columns", "rows"), + attrs={"scale_factor": 1.0, "add_offset": 0.0, + "_FillValue": -32768, "units": "mW.m-2.sr-1.nm-1", } ) det = xr.DataArray( self.base_data, - dims=('columns', 'rows'), - attrs={'scale_factor': 1.0, 'add_offset': 
0.0, - '_FillValue': 255, + dims=("columns", "rows"), + attrs={"scale_factor": 1.0, "add_offset": 0.0, + "_FillValue": 255, } ) self.fake_dataset = xr.Dataset( data_vars={ - 'S5_radiance_an': self.rad, - 'S9_BT_ao': self.rad, - 'foo_radiance_an': self.rad, - 'S5_solar_irradiances': self.rad, - 'geometry_tn': self.rad, - 'latitude_an': self.rad, - 'x_tx': self.rad, - 'y_tx': self.rad, - 'x_in': self.rad, - 'y_in': self.rad, - 'x_an': self.rad, - 'y_an': self.rad, - 'flags_an': self.rad, - 'detector_an': det, + "S5_radiance_an": self.rad, + "S9_BT_ao": self.rad, + "foo_radiance_an": self.rad, + "S5_solar_irradiances": self.rad, + "geometry_tn": self.rad, + "latitude_an": self.rad, + "x_tx": self.rad, + "y_tx": self.rad, + "x_in": self.rad, + "y_in": self.rad, + "x_an": self.rad, + "y_an": self.rad, + "flags_an": self.rad, + "detector_an": det, }, attrs={ "start_time": self.start_time, @@ -129,122 +129,122 @@ def ev(foo_x, foo_y): """Fake function to return interpolated data.""" return np.zeros((3, 2)) - @mock.patch('satpy.readers.slstr_l1b.xr') - @mock.patch('scipy.interpolate.RectBivariateSpline') + @mock.patch("satpy.readers.slstr_l1b.xr") + @mock.patch("scipy.interpolate.RectBivariateSpline") def test_instantiate(self, bvs_, xr_): """Test initialization of file handlers.""" bvs_.return_value = self.FakeSpl xr_.open_dataset.return_value = self.fake_dataset good_start = datetime.strptime(self.start_time, - '%Y-%m-%dT%H:%M:%S.%fZ') + "%Y-%m-%dT%H:%M:%S.%fZ") good_end = datetime.strptime(self.end_time, - '%Y-%m-%dT%H:%M:%S.%fZ') + "%Y-%m-%dT%H:%M:%S.%fZ") - ds_id = make_dataid(name='foo', calibration='radiance', - stripe='a', view='nadir') - ds_id_500 = make_dataid(name='foo', calibration='radiance', - stripe='a', view='nadir', resolution=500) - filename_info = {'mission_id': 'S3A', 'dataset_name': 'foo', - 'start_time': 0, 'end_time': 0, - 'stripe': 'a', 'view': 'n'} - test = NCSLSTR1B('somedir/S1_radiance_an.nc', filename_info, 'c') - assert test.view == 'nadir' - assert test.stripe == 'a' - test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'foo'})) + ds_id = make_dataid(name="foo", calibration="radiance", + stripe="a", view="nadir") + ds_id_500 = make_dataid(name="foo", calibration="radiance", + stripe="a", view="nadir", resolution=500) + filename_info = {"mission_id": "S3A", "dataset_name": "foo", + "start_time": 0, "end_time": 0, + "stripe": "a", "view": "n"} + test = NCSLSTR1B("somedir/S1_radiance_an.nc", filename_info, "c") + assert test.view == "nadir" + assert test.stripe == "a" + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) self.assertEqual(test.start_time, good_start) self.assertEqual(test.end_time, good_end) xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() - filename_info = {'mission_id': 'S3A', 'dataset_name': 'foo', - 'start_time': 0, 'end_time': 0, - 'stripe': 'c', 'view': 'o'} - test = NCSLSTR1B('somedir/S1_radiance_co.nc', filename_info, 'c') - assert test.view == 'oblique' - assert test.stripe == 'c' - test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'foo'})) + filename_info = {"mission_id": "S3A", "dataset_name": "foo", + "start_time": 0, "end_time": 0, + "stripe": "c", "view": "o"} + test = NCSLSTR1B("somedir/S1_radiance_co.nc", filename_info, "c") + assert test.view == "oblique" + assert test.stripe == "c" + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) self.assertEqual(test.start_time, good_start) self.assertEqual(test.end_time, good_end) xr_.open_dataset.assert_called() 
xr_.open_dataset.reset_mock() - filename_info = {'mission_id': 'S3A', 'dataset_name': 'foo', - 'start_time': 0, 'end_time': 0, - 'stripe': 'a', 'view': 'n'} - test = NCSLSTRGeo('somedir/geometry_an.nc', filename_info, 'c') - test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'latitude_{stripe:1s}{view:1s}'})) + filename_info = {"mission_id": "S3A", "dataset_name": "foo", + "start_time": 0, "end_time": 0, + "stripe": "a", "view": "n"} + test = NCSLSTRGeo("somedir/geometry_an.nc", filename_info, "c") + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "latitude_{stripe:1s}{view:1s}"})) self.assertEqual(test.start_time, good_start) self.assertEqual(test.end_time, good_end) xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() - test = NCSLSTRFlag('somedir/S1_radiance_an.nc', filename_info, 'c') - test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'flags_{stripe:1s}{view:1s}'})) - assert test.view == 'nadir' - assert test.stripe == 'a' + test = NCSLSTRFlag("somedir/S1_radiance_an.nc", filename_info, "c") + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "flags_{stripe:1s}{view:1s}"})) + assert test.view == "nadir" + assert test.stripe == "a" self.assertEqual(test.start_time, good_start) self.assertEqual(test.end_time, good_end) xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() - test = NCSLSTRAngles('somedir/S1_radiance_an.nc', filename_info, 'c') - test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'geometry_t{view:1s}'})) + test = NCSLSTRAngles("somedir/S1_radiance_an.nc", filename_info, "c") + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "geometry_t{view:1s}"})) self.assertEqual(test.start_time, good_start) self.assertEqual(test.end_time, good_end) xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() - test.get_dataset(ds_id_500, dict(filename_info, **{'file_key': 'geometry_t{view:1s}'})) + test.get_dataset(ds_id_500, dict(filename_info, **{"file_key": "geometry_t{view:1s}"})) class TestSLSTRCalibration(TestSLSTRL1B): """Test the implementation of the calibration factors.""" - @mock.patch('satpy.readers.slstr_l1b.xr') + @mock.patch("satpy.readers.slstr_l1b.xr") def test_radiance_calibration(self, xr_): """Test radiance calibration steps.""" from satpy.readers.slstr_l1b import CHANCALIB_FACTORS xr_.open_dataset.return_value = self.fake_dataset - ds_id = make_dataid(name='foo', calibration='radiance', - stripe='a', view='nadir') - filename_info = {'mission_id': 'S3A', 'dataset_name': 'foo', - 'start_time': 0, 'end_time': 0, - 'stripe': 'a', 'view': 'n'} + ds_id = make_dataid(name="foo", calibration="radiance", + stripe="a", view="nadir") + filename_info = {"mission_id": "S3A", "dataset_name": "foo", + "start_time": 0, "end_time": 0, + "stripe": "a", "view": "n"} - test = NCSLSTR1B('somedir/S1_radiance_co.nc', filename_info, 'c') + test = NCSLSTR1B("somedir/S1_radiance_co.nc", filename_info, "c") # Check warning is raised if we don't have calibration with warnings.catch_warnings(record=True) as w: - test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'foo'})) + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) assert issubclass(w[-1].category, UserWarning) # Check user calibration is used correctly - test = NCSLSTR1B('somedir/S1_radiance_co.nc', filename_info, 'c', - user_calibration={'foo_nadir': 0.4}) - data = test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'foo'})) + test = NCSLSTR1B("somedir/S1_radiance_co.nc", filename_info, "c", + user_calibration={"foo_nadir": 
0.4}) + data = test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) np.testing.assert_allclose(data.values, self.base_data * 0.4) # Check internal calibration is used correctly - ds_id = make_dataid(name='S5', calibration='radiance', stripe='a', view='nadir') - filename_info['dataset_name'] = 'S5' - test = NCSLSTR1B('somedir/S1_radiance_an.nc', filename_info, 'c') - data = test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'S5'})) + ds_id = make_dataid(name="S5", calibration="radiance", stripe="a", view="nadir") + filename_info["dataset_name"] = "S5" + test = NCSLSTR1B("somedir/S1_radiance_an.nc", filename_info, "c") + data = test.get_dataset(ds_id, dict(filename_info, **{"file_key": "S5"})) np.testing.assert_allclose(data.values, - self.base_data * CHANCALIB_FACTORS['S5_nadir']) + self.base_data * CHANCALIB_FACTORS["S5_nadir"]) - @mock.patch('satpy.readers.slstr_l1b.xr') - @mock.patch('satpy.readers.slstr_l1b.da') + @mock.patch("satpy.readers.slstr_l1b.xr") + @mock.patch("satpy.readers.slstr_l1b.da") def test_reflectance_calibration(self, da_, xr_): """Test reflectance calibration.""" xr_.open_dataset.return_value = self.fake_dataset da_.map_blocks.return_value = self.rad / 100. - filename_info = {'mission_id': 'S3A', 'dataset_name': 'S5', - 'start_time': 0, 'end_time': 0, - 'stripe': 'a', 'view': 'n'} - ds_id = make_dataid(name='S5', calibration='reflectance', stripe='a', view='nadir') - test = NCSLSTR1B('somedir/S1_radiance_an.nc', filename_info, 'c') - data = test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'S5'})) - self.assertEqual(data.units, '%') + filename_info = {"mission_id": "S3A", "dataset_name": "S5", + "start_time": 0, "end_time": 0, + "stripe": "a", "view": "n"} + ds_id = make_dataid(name="S5", calibration="reflectance", stripe="a", view="nadir") + test = NCSLSTR1B("somedir/S1_radiance_an.nc", filename_info, "c") + data = test.get_dataset(ds_id, dict(filename_info, **{"file_key": "S5"})) + self.assertEqual(data.units, "%") np.testing.assert_allclose(data.values, self.rad * np.pi) def test_cal_rad(self): diff --git a/satpy/tests/reader_tests/test_smos_l2_wind.py b/satpy/tests/reader_tests/test_smos_l2_wind.py index 731cd64181..3303abff17 100644 --- a/satpy/tests/reader_tests/test_smos_l2_wind.py +++ b/satpy/tests/reader_tests/test_smos_l2_wind.py @@ -35,39 +35,39 @@ class FakeNetCDF4FileHandlerSMOSL2WIND(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" from xarray import DataArray - dt_s = filename_info.get('start_time', datetime(2020, 4, 22, 12, 0, 0)) - dt_e = filename_info.get('end_time', datetime(2020, 4, 22, 12, 0, 0)) + dt_s = filename_info.get("start_time", datetime(2020, 4, 22, 12, 0, 0)) + dt_e = filename_info.get("end_time", datetime(2020, 4, 22, 12, 0, 0)) - if filetype_info['file_type'] == 'smos_l2_wind': + if filetype_info["file_type"] == "smos_l2_wind": file_content = { - '/attr/time_coverage_start': dt_s.strftime('%Y-%m-%dT%H:%M:%S Z'), - '/attr/time_coverage_end': dt_e.strftime('%Y-%m-%dT%H:%M:%S Z'), - '/attr/platform_shortname': 'SM', - '/attr/platform': 'SMOS', - '/attr/instrument': 'MIRAS', - '/attr/processing_level': 'L2', - '/attr/geospatial_bounds_vertical_crs': 'EPSG:4623', + "/attr/time_coverage_start": dt_s.strftime("%Y-%m-%dT%H:%M:%S Z"), + "/attr/time_coverage_end": dt_e.strftime("%Y-%m-%dT%H:%M:%S Z"), + "/attr/platform_shortname": "SM", + "/attr/platform": "SMOS", + "/attr/instrument": "MIRAS", + "/attr/processing_level": "L2", + 
"/attr/geospatial_bounds_vertical_crs": "EPSG:4623", } - file_content['lat'] = np.arange(-90., 90.25, 0.25) - file_content['lat/shape'] = (len(file_content['lat']),) - file_content['lat'] = DataArray(file_content['lat'], dims=('lat')) - file_content['lat'].attrs['_FillValue'] = -999.0 - - file_content['lon'] = np.arange(0., 360., 0.25) - file_content['lon/shape'] = (len(file_content['lon']),) - file_content['lon'] = DataArray(file_content['lon'], dims=('lon')) - file_content['lon'].attrs['_FillValue'] = -999.0 - - file_content['wind_speed'] = np.ndarray(shape=(1, # Time dimension - len(file_content['lat']), - len(file_content['lon']))) - file_content['wind_speed/shape'] = (1, - len(file_content['lat']), - len(file_content['lon'])) - file_content['wind_speed'] = DataArray(file_content['wind_speed'], dims=('time', 'lat', 'lon'), - coords=[[1], file_content['lat'], file_content['lon']]) - file_content['wind_speed'].attrs['_FillValue'] = -999.0 + file_content["lat"] = np.arange(-90., 90.25, 0.25) + file_content["lat/shape"] = (len(file_content["lat"]),) + file_content["lat"] = DataArray(file_content["lat"], dims=("lat")) + file_content["lat"].attrs["_FillValue"] = -999.0 + + file_content["lon"] = np.arange(0., 360., 0.25) + file_content["lon/shape"] = (len(file_content["lon"]),) + file_content["lon"] = DataArray(file_content["lon"], dims=("lon")) + file_content["lon"].attrs["_FillValue"] = -999.0 + + file_content["wind_speed"] = np.ndarray(shape=(1, # Time dimension + len(file_content["lat"]), + len(file_content["lon"]))) + file_content["wind_speed/shape"] = (1, + len(file_content["lat"]), + len(file_content["lon"])) + file_content["wind_speed"] = DataArray(file_content["wind_speed"], dims=("time", "lat", "lon"), + coords=[[1], file_content["lat"], file_content["lon"]]) + file_content["wind_speed"].attrs["_FillValue"] = -999.0 else: raise AssertionError() @@ -84,9 +84,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.smos_l2_wind import SMOSL2WINDFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(SMOSL2WINDFileHandler, '__bases__', (FakeNetCDF4FileHandlerSMOSL2WIND,)) + self.p = mock.patch.object(SMOSL2WINDFileHandler, "__bases__", (FakeNetCDF4FileHandlerSMOSL2WIND,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -99,7 +99,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', + "SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -110,20 +110,20 @@ def test_load_wind_speed(self): """Load wind_speed dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.smos_l2_wind.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.smos_l2_wind.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', + "SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", ]) r.create_filehandlers(loadables) - ds = r.load(['wind_speed']) + ds 
= r.load(["wind_speed"]) self.assertEqual(len(ds), 1) for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'SM') - self.assertEqual(d.attrs['sensor'], 'MIRAS') - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertIn('y', d.dims) - self.assertIn('x', d.dims) + self.assertEqual(d.attrs["platform_shortname"], "SM") + self.assertEqual(d.attrs["sensor"], "MIRAS") + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) + self.assertIn("y", d.dims) + self.assertIn("x", d.dims) self.assertEqual(d.shape, (719, 1440)) self.assertEqual(d.y[0].data, -89.75) self.assertEqual(d.y[d.shape[0] - 1].data, 89.75) @@ -132,15 +132,15 @@ def test_load_lat(self): """Load lat dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.smos_l2_wind.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.smos_l2_wind.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', + "SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", ]) r.create_filehandlers(loadables) - ds = r.load(['lat']) + ds = r.load(["lat"]) self.assertEqual(len(ds), 1) for d in ds.values(): - self.assertIn('y', d.dims) + self.assertIn("y", d.dims) self.assertEqual(d.shape, (719,)) self.assertEqual(d.data[0], -89.75) self.assertEqual(d.data[d.shape[0] - 1], 89.75) @@ -149,15 +149,15 @@ def test_load_lon(self): """Load lon dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.smos_l2_wind.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.smos_l2_wind.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', + "SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", ]) r.create_filehandlers(loadables) - ds = r.load(['lon']) + ds = r.load(["lon"]) self.assertEqual(len(ds), 1) for d in ds.values(): - self.assertIn('x', d.dims) + self.assertIn("x", d.dims) self.assertEqual(d.shape, (1440,)) self.assertEqual(d.data[0], -180.0) self.assertEqual(d.data[d.shape[0] - 1], 179.75) @@ -167,13 +167,13 @@ def test_adjust_lon(self): from xarray import DataArray from satpy.readers.smos_l2_wind import SMOSL2WINDFileHandler - smos_l2_wind_fh = SMOSL2WINDFileHandler('SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', - {}, filetype_info={'file_type': 'smos_l2_wind'}) - data = DataArray(np.arange(0., 360., 0.25), dims=('lon')) + smos_l2_wind_fh = SMOSL2WINDFileHandler("SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", + {}, filetype_info={"file_type": "smos_l2_wind"}) + data = DataArray(np.arange(0., 360., 0.25), dims=("lon")) adjusted = smos_l2_wind_fh._adjust_lon_coord(data) expected = DataArray(np.concatenate((np.arange(0, 180., 0.25), np.arange(-180.0, 0, 0.25))), - dims=('lon')) + dims=("lon")) self.assertEqual(adjusted.data.tolist(), expected.data.tolist()) def test_roll_dataset(self): @@ -181,9 +181,9 @@ def test_roll_dataset(self): from xarray import DataArray from satpy.readers.smos_l2_wind import SMOSL2WINDFileHandler - smos_l2_wind_fh = SMOSL2WINDFileHandler('SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', - {}, filetype_info={'file_type': 'smos_l2_wind'}) - data = DataArray(np.arange(0., 360., 0.25), dims=('lon')) + smos_l2_wind_fh = 
SMOSL2WINDFileHandler("SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", + {}, filetype_info={"file_type": "smos_l2_wind"}) + data = DataArray(np.arange(0., 360., 0.25), dims=("lon")) data = smos_l2_wind_fh._adjust_lon_coord(data) adjusted = smos_l2_wind_fh._roll_dataset_lon_coord(data) expected = np.arange(-180., 180., 0.25) diff --git a/satpy/tests/reader_tests/test_tropomi_l2.py b/satpy/tests/reader_tests/test_tropomi_l2.py index 4b6e3a8652..f2b3660089 100644 --- a/satpy/tests/reader_tests/test_tropomi_l2.py +++ b/satpy/tests/reader_tests/test_tropomi_l2.py @@ -41,41 +41,41 @@ class FakeNetCDF4FileHandlerTL2(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" - dt_s = filename_info.get('start_time', datetime(2016, 1, 1, 12, 0, 0)) - dt_e = filename_info.get('end_time', datetime(2016, 1, 1, 12, 0, 0)) + dt_s = filename_info.get("start_time", datetime(2016, 1, 1, 12, 0, 0)) + dt_e = filename_info.get("end_time", datetime(2016, 1, 1, 12, 0, 0)) - if filetype_info['file_type'] == 'tropomi_l2': + if filetype_info["file_type"] == "tropomi_l2": file_content = { - '/attr/time_coverage_start': (dt_s+timedelta(minutes=22)).strftime('%Y-%m-%dT%H:%M:%SZ'), - '/attr/time_coverage_end': (dt_e-timedelta(minutes=22)).strftime('%Y-%m-%dT%H:%M:%SZ'), - '/attr/platform_shortname': 'S5P', - '/attr/sensor': 'TROPOMI', + "/attr/time_coverage_start": (dt_s+timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"), + "/attr/time_coverage_end": (dt_e-timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"), + "/attr/platform_shortname": "S5P", + "/attr/sensor": "TROPOMI", } - file_content['PRODUCT/latitude'] = DEFAULT_FILE_DATA - file_content['PRODUCT/longitude'] = DEFAULT_FILE_DATA - file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds'] = DEFAULT_BOUND_DATA - file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds'] = DEFAULT_BOUND_DATA + file_content["PRODUCT/latitude"] = DEFAULT_FILE_DATA + file_content["PRODUCT/longitude"] = DEFAULT_FILE_DATA + file_content["PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds"] = DEFAULT_BOUND_DATA + file_content["PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds"] = DEFAULT_BOUND_DATA - if 'NO2' in filename: - file_content['PRODUCT/nitrogen_dioxide_total_column'] = DEFAULT_FILE_DATA - if 'SO2' in filename: - file_content['PRODUCT/sulfurdioxide_total_vertical_column'] = DEFAULT_FILE_DATA + if "NO2" in filename: + file_content["PRODUCT/nitrogen_dioxide_total_column"] = DEFAULT_FILE_DATA + if "SO2" in filename: + file_content["PRODUCT/sulfurdioxide_total_vertical_column"] = DEFAULT_FILE_DATA for k in list(file_content.keys()): - if not k.startswith('PRODUCT'): + if not k.startswith("PRODUCT"): continue - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE self._convert_data_content_to_dataarrays(file_content) - file_content['PRODUCT/latitude'].attrs['_FillValue'] = -999.0 - file_content['PRODUCT/longitude'].attrs['_FillValue'] = -999.0 - file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds'].attrs['_FillValue'] = -999.0 - file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds'].attrs['_FillValue'] = -999.0 - if 'NO2' in filename: - file_content['PRODUCT/nitrogen_dioxide_total_column'].attrs['_FillValue'] = -999.0 - if 'SO2' in filename: - file_content['PRODUCT/sulfurdioxide_total_vertical_column'].attrs['_FillValue'] = -999.0 + file_content["PRODUCT/latitude"].attrs["_FillValue"] = -999.0 + 
file_content["PRODUCT/longitude"].attrs["_FillValue"] = -999.0 + file_content["PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds"].attrs["_FillValue"] = -999.0 + file_content["PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds"].attrs["_FillValue"] = -999.0 + if "NO2" in filename: + file_content["PRODUCT/nitrogen_dioxide_total_column"].attrs["_FillValue"] = -999.0 + if "SO2" in filename: + file_content["PRODUCT/sulfurdioxide_total_vertical_column"].attrs["_FillValue"] = -999.0 else: raise NotImplementedError("Test data for file types other than " @@ -89,9 +89,9 @@ def _convert_data_content_to_dataarrays(self, file_content): for key, val in file_content.items(): if isinstance(val, np.ndarray): if 1 < val.ndim <= 2: - file_content[key] = DataArray(val, dims=('scanline', 'ground_pixel')) + file_content[key] = DataArray(val, dims=("scanline", "ground_pixel")) elif val.ndim > 2: - file_content[key] = DataArray(val, dims=('scanline', 'ground_pixel', 'corner')) + file_content[key] = DataArray(val, dims=("scanline", "ground_pixel", "corner")) else: file_content[key] = DataArray(val) @@ -105,9 +105,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.tropomi_l2 import TROPOMIL2FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(TROPOMIL2FileHandler, '__bases__', (FakeNetCDF4FileHandlerTL2,)) + self.p = mock.patch.object(TROPOMIL2FileHandler, "__bases__", (FakeNetCDF4FileHandlerTL2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -120,7 +120,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc', + "S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -131,69 +131,69 @@ def test_load_no2(self): """Load NO2 dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.tropomi_l2.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.tropomi_l2.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc', + "S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc", ]) r.create_filehandlers(loadables) - ds = r.load(['nitrogen_dioxide_total_column']) + ds = r.load(["nitrogen_dioxide_total_column"]) self.assertEqual(len(ds), 1) for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'S5P') - self.assertEqual(d.attrs['sensor'], 'tropomi') - self.assertEqual(d.attrs['time_coverage_start'], datetime(2018, 7, 9, 17, 25, 34)) - self.assertEqual(d.attrs['time_coverage_end'], datetime(2018, 7, 9, 18, 23, 4)) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertIn('y', d.dims) - self.assertIn('x', d.dims) + self.assertEqual(d.attrs["platform_shortname"], "S5P") + self.assertEqual(d.attrs["sensor"], "tropomi") + self.assertEqual(d.attrs["time_coverage_start"], datetime(2018, 7, 9, 17, 25, 34)) + 
self.assertEqual(d.attrs["time_coverage_end"], datetime(2018, 7, 9, 18, 23, 4)) + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) + self.assertIn("y", d.dims) + self.assertIn("x", d.dims) def test_load_so2(self): """Load SO2 dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.tropomi_l2.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.tropomi_l2.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'S5P_OFFL_L2__SO2____20181224T055107_20181224T073237_06198_01_010105_20181230T150634.nc', + "S5P_OFFL_L2__SO2____20181224T055107_20181224T073237_06198_01_010105_20181230T150634.nc", ]) r.create_filehandlers(loadables) - ds = r.load(['sulfurdioxide_total_vertical_column']) + ds = r.load(["sulfurdioxide_total_vertical_column"]) self.assertEqual(len(ds), 1) for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'S5P') - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertIn('y', d.dims) - self.assertIn('x', d.dims) + self.assertEqual(d.attrs["platform_shortname"], "S5P") + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) + self.assertIn("y", d.dims) + self.assertIn("x", d.dims) def test_load_bounds(self): """Load bounds dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.tropomi_l2.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.tropomi_l2.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc', + "S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc", ]) r.create_filehandlers(loadables) - keys = ['latitude_bounds', 'longitude_bounds'] + keys = ["latitude_bounds", "longitude_bounds"] ds = r.load(keys) self.assertEqual(len(ds), 2) for key in keys: - self.assertEqual(ds[key].attrs['platform_shortname'], 'S5P') - self.assertIn('y', ds[key].dims) - self.assertIn('x', ds[key].dims) - self.assertIn('corner', ds[key].dims) + self.assertEqual(ds[key].attrs["platform_shortname"], "S5P") + self.assertIn("y", ds[key].dims) + self.assertIn("x", ds[key].dims) + self.assertIn("corner", ds[key].dims) # check assembled bounds left = np.vstack([ds[key][:, :, 0], ds[key][-1:, :, 3]]) right = np.vstack([ds[key][:, -1:, 1], ds[key][-1:, -1:, 2]]) dest = np.hstack([left, right]) dest = xr.DataArray(dest, - dims=('y', 'x') + dims=("y", "x") ) dest.attrs = ds[key].attrs - self.assertEqual(dest.attrs['platform_shortname'], 'S5P') - self.assertIn('y', dest.dims) - self.assertIn('x', dest.dims) + self.assertEqual(dest.attrs["platform_shortname"], "S5P") + self.assertIn("y", dest.dims) + self.assertIn("x", dest.dims) self.assertEqual(DEFAULT_FILE_SHAPE[0] + 1, dest.shape[0]) self.assertEqual(DEFAULT_FILE_SHAPE[1] + 1, dest.shape[1]) self.assertIsNone(np.testing.assert_array_equal(dest[:-1, :-1], ds[key][:, :, 0])) diff --git a/satpy/tests/reader_tests/test_utils.py b/satpy/tests/reader_tests/test_utils.py index 54b156e4c5..12af80ca2a 100644 --- a/satpy/tests/reader_tests/test_utils.py +++ b/satpy/tests/reader_tests/test_utils.py @@ -45,11 +45,11 @@ def test_lonlat_from_geos(self): lon_0 = 0 h = 35785831.00 geos_area.crs = CRS({ - 'a': 6378169.00, - 'b': 6356583.80, - 'h': h, - 'lon_0': lon_0, - 'proj': 'geos'}) + "a": 6378169.00, + "b": 6356583.80, + "h": h, + "lon_0": lon_0, + "proj": 
"geos"}) proj = pyproj.Proj(geos_area.crs) expected = proj(0, 0, inverse=True) @@ -77,12 +77,12 @@ def test_get_geostationary_bbox(self): geos_area = mock.MagicMock() lon_0 = 0 geos_area.crs = CRS({ - 'proj': 'geos', - 'lon_0': lon_0, - 'a': 6378169.00, - 'b': 6356583.80, - 'h': 35785831.00, - 'units': 'm'}) + "proj": "geos", + "lon_0": lon_0, + "a": 6378169.00, + "b": 6356583.80, + "h": 35785831.00, + "units": "m"}) geos_area.area_extent = [-5500000., -5500000., 5500000., 5500000.] lon, lat = hf.get_geostationary_bounding_box(geos_area, 20) @@ -107,21 +107,21 @@ def test_get_geostationary_angle_extent(self): """Get max geostationary angles.""" geos_area = mock.MagicMock() proj_dict = { - 'proj': 'geos', - 'sweep': 'x', - 'lon_0': -89.5, - 'a': 6378169.00, - 'b': 6356583.80, - 'h': 35785831.00, - 'units': 'm'} + "proj": "geos", + "sweep": "x", + "lon_0": -89.5, + "a": 6378169.00, + "b": 6356583.80, + "h": 35785831.00, + "units": "m"} geos_area.crs = CRS(proj_dict) expected = (0.15185342867090912, 0.15133555510297725) np.testing.assert_allclose(expected, hf.get_geostationary_angle_extent(geos_area)) - proj_dict['a'] = 1000.0 - proj_dict['b'] = 1000.0 - proj_dict['h'] = np.sqrt(2) * 1000.0 - 1000.0 + proj_dict["a"] = 1000.0 + proj_dict["b"] = 1000.0 + proj_dict["h"] = np.sqrt(2) * 1000.0 - 1000.0 geos_area.reset_mock() geos_area.crs = CRS(proj_dict) expected = (np.deg2rad(45), np.deg2rad(45)) @@ -129,12 +129,12 @@ def test_get_geostationary_angle_extent(self): hf.get_geostationary_angle_extent(geos_area)) proj_dict = { - 'proj': 'geos', - 'sweep': 'x', - 'lon_0': -89.5, - 'ellps': 'GRS80', - 'h': 35785831.00, - 'units': 'm'} + "proj": "geos", + "sweep": "x", + "lon_0": -89.5, + "ellps": "GRS80", + "h": 35785831.00, + "units": "m"} geos_area.crs = CRS(proj_dict) expected = (0.15185277703584374, 0.15133971368991794) np.testing.assert_allclose(expected, @@ -144,15 +144,15 @@ def test_geostationary_mask(self): """Test geostationary mask.""" # Compute mask of a very elliptical earth area = pyresample.geometry.AreaDefinition( - 'FLDK', - 'Full Disk', - 'geos', - {'a': '6378169.0', - 'b': '3000000.0', - 'h': '35785831.0', - 'lon_0': '145.0', - 'proj': 'geos', - 'units': 'm'}, + "FLDK", + "Full Disk", + "geos", + {"a": "6378169.0", + "b": "3000000.0", + "h": "35785831.0", + "lon_0": "145.0", + "proj": "geos", + "units": "m"}, 101, 101, (-6498000.088960204, -6498000.088960204, @@ -181,37 +181,37 @@ def test_geostationary_mask(self): self.assertTrue(np.all(mask[range(68-1, 33-1, -1), range(33, 68)] == 1)) self.assertTrue(np.all(mask[range(33-1, -1, -1), range(68, 101)] == 0)) - @mock.patch('satpy.readers.utils.AreaDefinition') + @mock.patch("satpy.readers.utils.AreaDefinition") def test_sub_area(self, adef): """Sub area slicing.""" area = mock.MagicMock() area.pixel_size_x = 1.5 area.pixel_size_y = 1.5 area.pixel_upper_left = (0, 0) - area.area_id = 'fakeid' - area.name = 'fake name' - area.proj_id = 'fakeproj' - area.crs = 'some_crs' + area.area_id = "fakeid" + area.name = "fake name" + area.proj_id = "fakeproj" + area.crs = "some_crs" hf.get_sub_area(area, slice(1, 4), slice(0, 3)) - adef.assert_called_once_with('fakeid', 'fake name', 'fakeproj', - 'some_crs', + adef.assert_called_once_with("fakeid", "fake name", "fakeproj", + "some_crs", 3, 3, (0.75, -3.75, 5.25, 0.75)) def test_np2str(self): """Test the np2str function.""" # byte object - npstring = np.string_('hej') - self.assertEqual(hf.np2str(npstring), 'hej') + npstring = np.string_("hej") + self.assertEqual(hf.np2str(npstring), "hej") # 
single element numpy array np_arr = np.array([npstring]) - self.assertEqual(hf.np2str(np_arr), 'hej') + self.assertEqual(hf.np2str(np_arr), "hej") # scalar numpy array np_arr = np.array(npstring) - self.assertEqual(hf.np2str(np_arr), 'hej') + self.assertEqual(hf.np2str(np_arr), "hej") # multi-element array npstring = np.array([npstring, npstring]) @@ -243,44 +243,44 @@ def re(lat): def test_reduce_mda(self): """Test metadata size reduction.""" - mda = {'a': 1, - 'b': np.array([1, 2, 3]), - 'c': np.array([1, 2, 3, 4]), - 'd': {'a': 1, - 'b': np.array([1, 2, 3]), - 'c': np.array([1, 2, 3, 4]), - 'd': {'a': 1, - 'b': np.array([1, 2, 3]), - 'c': np.array([1, 2, 3, 4])}}} - exp = {'a': 1, - 'b': np.array([1, 2, 3]), - 'd': {'a': 1, - 'b': np.array([1, 2, 3]), - 'd': {'a': 1, - 'b': np.array([1, 2, 3])}}} + mda = {"a": 1, + "b": np.array([1, 2, 3]), + "c": np.array([1, 2, 3, 4]), + "d": {"a": 1, + "b": np.array([1, 2, 3]), + "c": np.array([1, 2, 3, 4]), + "d": {"a": 1, + "b": np.array([1, 2, 3]), + "c": np.array([1, 2, 3, 4])}}} + exp = {"a": 1, + "b": np.array([1, 2, 3]), + "d": {"a": 1, + "b": np.array([1, 2, 3]), + "d": {"a": 1, + "b": np.array([1, 2, 3])}}} numpy.testing.assert_equal(hf.reduce_mda(mda, max_size=3), exp) # Make sure, reduce_mda() doesn't modify the original dictionary - self.assertIn('c', mda) - self.assertIn('c', mda['d']) - self.assertIn('c', mda['d']['d']) + self.assertIn("c", mda) + self.assertIn("c", mda["d"]) + self.assertIn("c", mda["d"]["d"]) - @mock.patch('satpy.readers.utils.bz2.BZ2File') - @mock.patch('satpy.readers.utils.Popen') + @mock.patch("satpy.readers.utils.bz2.BZ2File") + @mock.patch("satpy.readers.utils.Popen") def test_unzip_file(self, mock_popen, mock_bz2): """Test the bz2 file unzipping techniques.""" process_mock = mock.Mock() - attrs = {'communicate.return_value': (b'output', b'error'), - 'returncode': 0} + attrs = {"communicate.return_value": (b"output", b"error"), + "returncode": 0} process_mock.configure_mock(**attrs) mock_popen.return_value = process_mock bz2_mock = mock.MagicMock() - bz2_mock.__enter__.return_value.read.return_value = b'TEST' + bz2_mock.__enter__.return_value.read.return_value = b"TEST" mock_bz2.return_value = bz2_mock - filename = 'tester.DAT.bz2' - whichstr = 'satpy.readers.utils.which' + filename = "tester.DAT.bz2" + whichstr = "satpy.readers.utils.which" segment = 3 segmentstr = str(segment).zfill(2) # no pbzip2 installed with prefix @@ -294,7 +294,7 @@ def test_unzip_file(self, mock_popen, mock_bz2): os.remove(new_fname) # pbzip2 installed without prefix with mock.patch(whichstr) as whichmock: - whichmock.return_value = '/usr/bin/pbzip2' + whichmock.return_value = "/usr/bin/pbzip2" new_fname = hf.unzip_file(filename) assert mock_popen.called assert os.path.exists(new_fname) @@ -302,21 +302,21 @@ def test_unzip_file(self, mock_popen, mock_bz2): if os.path.exists(new_fname): os.remove(new_fname) - filename = 'tester.DAT' + filename = "tester.DAT" new_fname = hf.unzip_file(filename) assert new_fname is None - @mock.patch('bz2.BZ2File') + @mock.patch("bz2.BZ2File") def test_generic_open_BZ2File(self, bz2_mock): """Test the generic_open method with bz2 filename input.""" mock_bz2_open = mock.MagicMock() - mock_bz2_open.read.return_value = b'TEST' + mock_bz2_open.read.return_value = b"TEST" bz2_mock.return_value = mock_bz2_open - filename = 'tester.DAT.bz2' + filename = "tester.DAT.bz2" with hf.generic_open(filename) as file_object: data = file_object.read() - assert data == b'TEST' + assert data == b"TEST" assert 
mock_bz2_open.read.called @@ -328,27 +328,27 @@ def test_generic_open_FSFile_MemoryFileSystem(self): fsf = FSFile(mem_file) with hf.generic_open(fsf) as file_object: data = file_object.read() - assert data == b'TEST' + assert data == b"TEST" - @mock.patch('satpy.readers.utils.open') + @mock.patch("satpy.readers.utils.open") def test_generic_open_filename(self, open_mock): """Test the generic_open method with filename (str).""" mock_fn_open = mock.MagicMock() - mock_fn_open.read.return_value = b'TEST' + mock_fn_open.read.return_value = b"TEST" open_mock.return_value = mock_fn_open filename = "test.DAT" with hf.generic_open(filename) as file_object: data = file_object.read() - assert data == b'TEST' + assert data == b"TEST" assert mock_fn_open.read.called - @mock.patch('bz2.decompress', return_value=b'TEST_DECOMPRESSED') + @mock.patch("bz2.decompress", return_value=b"TEST_DECOMPRESSED") def test_unzip_FSFile(self, bz2_mock): """Test the FSFile bz2 file unzipping techniques.""" mock_bz2_decompress = mock.MagicMock() - mock_bz2_decompress.return_value = b'TEST_DECOMPRESSED' + mock_bz2_decompress.return_value = b"TEST_DECOMPRESSED" segment = 3 segmentstr = str(segment).zfill(2) @@ -382,10 +382,10 @@ def test_unzip_FSFile(self, bz2_mock): os.remove(new_fname) @mock.patch("os.remove") - @mock.patch("satpy.readers.utils.unzip_file", return_value='dummy.txt') + @mock.patch("satpy.readers.utils.unzip_file", return_value="dummy.txt") def test_pro_reading_gets_unzipped_file(self, fake_unzip_file, fake_remove): """Test the bz2 file unzipping context manager.""" - filename = 'dummy.txt.bz2' + filename = "dummy.txt.bz2" expected_filename = filename[:-4] with hf.unzip_context(filename) as new_filename: @@ -403,24 +403,24 @@ def test_apply_rad_correction(self): def test_get_user_calibration_factors(self): """Test the retrieval of user-supplied calibration factors.""" - radcor_dict = {'WV063': {'slope': 1.015, - 'offset': -0.0556}, - 'IR108': {'slo': 1.015, - 'off': -0.0556}} + radcor_dict = {"WV063": {"slope": 1.015, + "offset": -0.0556}, + "IR108": {"slo": 1.015, + "off": -0.0556}} # Test that correct values are returned from the dict - slope, offset = hf.get_user_calibration_factors('WV063', radcor_dict) + slope, offset = hf.get_user_calibration_factors("WV063", radcor_dict) self.assertEqual(slope, 1.015) self.assertEqual(offset, -0.0556) # Test that channels not present in dict return 1.0, 0.0 with self.assertWarns(UserWarning): - slope, offset = hf.get_user_calibration_factors('IR097', radcor_dict) + slope, offset = hf.get_user_calibration_factors("IR097", radcor_dict) self.assertEqual(slope, 1.) self.assertEqual(offset, 0.) 
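[Editor's aside, not part of the patch] The two checks above pin down the contract exercised for hf.get_user_calibration_factors: a channel present in the dict returns its "slope"/"offset" pair, while a channel missing from the dict falls back to (1.0, 0.0) and emits a UserWarning; the next check asserts that misnamed keys raise KeyError. A minimal, hypothetical stand-alone sketch of that contract follows — the helper name is invented here and this is not satpy's implementation:

import warnings

def lookup_user_calibration(channel, radcor_dict):
    """Return (slope, offset) for *channel*, defaulting to (1.0, 0.0) with a warning."""
    if channel not in radcor_dict:
        warnings.warn("No user calibration for %s; using slope=1.0, offset=0.0" % channel,
                      UserWarning)
        return 1.0, 0.0
    entry = radcor_dict[channel]
    # Misspelled keys such as "slo"/"off" raise KeyError, matching the check below.
    return entry["slope"], entry["offset"]

# lookup_user_calibration("WV063", radcor_dict)  -> (1.015, -0.0556)
# lookup_user_calibration("IR097", radcor_dict)  -> (1.0, 0.0) plus a UserWarning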
# Check that incorrect dict keys throw an error with self.assertRaises(KeyError): - hf.get_user_calibration_factors('IR108', radcor_dict) + hf.get_user_calibration_factors("IR108", radcor_dict) class TestSunEarthDistanceCorrection: @@ -431,15 +431,15 @@ def setup_method(self): self.test_date = datetime(2020, 8, 15, 13, 0, 40) raw_refl = xr.DataArray(da.from_array([10., 20., 40., 1., 98., 50.]), - attrs={'start_time': self.test_date, - 'scheduled_time': self.test_date}) + attrs={"start_time": self.test_date, + "scheduled_time": self.test_date}) corr_refl = xr.DataArray(da.from_array([ 10.25484833, 20.50969667, 41.01939333, 1.02548483, 100.49751367, 51.27424167]), - attrs={'start_time': self.test_date, - 'scheduled_time': self.test_date}, + attrs={"start_time": self.test_date, + "scheduled_time": self.test_date}, ) self.raw_refl = raw_refl self.corr_refl = corr_refl @@ -448,13 +448,13 @@ def test_get_utc_time(self): """Test the retrieval of scene time from a dataset.""" # First check correct time is returned with 'start_time' tmp_array = self.raw_refl.copy() - del tmp_array.attrs['scheduled_time'] + del tmp_array.attrs["scheduled_time"] utc_time = hf.get_array_date(tmp_array, None) assert utc_time == self.test_date # Now check correct time is returned with 'scheduled_time' tmp_array = self.raw_refl.copy() - del tmp_array.attrs['start_time'] + del tmp_array.attrs["start_time"] utc_time = hf.get_array_date(tmp_array, None) assert utc_time == self.test_date @@ -466,8 +466,8 @@ def test_get_utc_time(self): # Finally, ensure error is raised if no datetime is available tmp_array = self.raw_refl.copy() - del tmp_array.attrs['scheduled_time'] - del tmp_array.attrs['start_time'] + del tmp_array.attrs["scheduled_time"] + del tmp_array.attrs["start_time"] with pytest.raises(KeyError): hf.get_array_date(tmp_array, None) @@ -475,14 +475,14 @@ def test_apply_sunearth_corr(self): """Test the correction of reflectances with sun-earth distance.""" out_refl = hf.apply_earthsun_distance_correction(self.raw_refl) np.testing.assert_allclose(out_refl, self.corr_refl) - assert out_refl.attrs['sun_earth_distance_correction_applied'] + assert out_refl.attrs["sun_earth_distance_correction_applied"] assert isinstance(out_refl.data, da.Array) def test_remove_sunearth_corr(self): """Test the removal of the sun-earth distance correction.""" out_refl = hf.remove_earthsun_distance_correction(self.corr_refl) np.testing.assert_allclose(out_refl, self.raw_refl) - assert not out_refl.attrs['sun_earth_distance_correction_applied'] + assert not out_refl.attrs["sun_earth_distance_correction_applied"] assert isinstance(out_refl.data, da.Array) @@ -493,19 +493,19 @@ def test_generic_open_binary(tmp_path, data, filename, mode): """Test the bz2 file unzipping context manager using dummy binary data.""" dummy_data = data dummy_filename = os.fspath(tmp_path / filename) - with open(dummy_filename, 'w' + mode) as f: + with open(dummy_filename, "w" + mode) as f: f.write(dummy_data) - with hf.generic_open(dummy_filename, 'r' + mode) as f: + with hf.generic_open(dummy_filename, "r" + mode) as f: read_binary_data = f.read() assert read_binary_data == dummy_data - dummy_filename = os.fspath(tmp_path / (filename + '.bz2')) - with hf.bz2.open(dummy_filename, 'w' + mode) as f: + dummy_filename = os.fspath(tmp_path / (filename + ".bz2")) + with hf.bz2.open(dummy_filename, "w" + mode) as f: f.write(dummy_data) - with hf.generic_open(dummy_filename, 'r' + mode) as f: + with hf.generic_open(dummy_filename, "r" + mode) as f: read_binary_data = 
f.read() assert read_binary_data == dummy_data diff --git a/satpy/tests/reader_tests/test_vaisala_gld360.py b/satpy/tests/reader_tests/test_vaisala_gld360.py index 9add08b1d2..c08450613a 100644 --- a/satpy/tests/reader_tests/test_vaisala_gld360.py +++ b/satpy/tests/reader_tests/test_vaisala_gld360.py @@ -34,13 +34,13 @@ def test_vaisala_gld360(self): expected_power = np.array([12.3, 13.2, -31.]) expected_lat = np.array([30.5342, -0.5727, 12.1529]) expected_lon = np.array([-90.1152, 104.0688, -10.8756]) - expected_time = np.array(['2017-06-20T00:00:00.007178000', '2017-06-20T00:00:00.020162000', - '2017-06-20T00:00:00.023183000'], dtype='datetime64[ns]') + expected_time = np.array(["2017-06-20T00:00:00.007178000", "2017-06-20T00:00:00.020162000", + "2017-06-20T00:00:00.023183000"], dtype="datetime64[ns]") filename = StringIO( - u'2017-06-20 00:00:00.007178 30.5342 -90.1152 12.3 kA\n' - '2017-06-20 00:00:00.020162 -0.5727 104.0688 13.2 kA\n' - '2017-06-20 00:00:00.023183 12.1529 -10.8756 -31.0 kA' + u"2017-06-20 00:00:00.007178 30.5342 -90.1152 12.3 kA\n" + "2017-06-20 00:00:00.020162 -0.5727 104.0688 13.2 kA\n" + "2017-06-20 00:00:00.023183 12.1529 -10.8756 -31.0 kA" ) filename_info = {} filetype_info = {} @@ -52,25 +52,25 @@ def test_vaisala_gld360(self): filename.close() # test power - dataset_id = make_dataid(name='power') - dataset_info = {'units': 'kA'} + dataset_id = make_dataid(name="power") + dataset_info = {"units": "kA"} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_power, rtol=1e-05) # test lat - dataset_id = make_dataid(name='latitude') + dataset_id = make_dataid(name="latitude") dataset_info = {} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_lat, rtol=1e-05) # test lon - dataset_id = make_dataid(name='longitude') + dataset_id = make_dataid(name="longitude") dataset_info = {} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_lon, rtol=1e-05) # test time - dataset_id = make_dataid(name='time') + dataset_id = make_dataid(name="time") dataset_info = {} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_time) diff --git a/satpy/tests/reader_tests/test_vii_base_nc.py b/satpy/tests/reader_tests/test_vii_base_nc.py index c2afc04356..4f5dbcd141 100644 --- a/satpy/tests/reader_tests/test_vii_base_nc.py +++ b/satpy/tests/reader_tests/test_vii_base_nc.py @@ -30,20 +30,20 @@ from satpy.readers.vii_base_nc import SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR, ViiNCBaseFileHandler -TEST_FILE = 'test_file_vii_base_nc.nc' +TEST_FILE = "test_file_vii_base_nc.nc" class TestViiNCBaseFileHandler(unittest.TestCase): """Test the ViiNCBaseFileHandler reader.""" - @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_geo_interpolation') + @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_geo_interpolation") def setUp(self, pgi_): """Set up the test.""" # Easiest way to test the reader is to create a test netCDF file on the fly # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" - with Dataset(self.test_file_name, 'w') as nc: + with Dataset(self.test_file_name, "w") as nc: # Add global attributes nc.sensing_start_time_utc = "20170920173040.888" nc.sensing_end_time_utc = "20170920174117.555" @@ -51,50 +51,50 @@ def setUp(self, pgi_): 
nc.instrument = "test_instrument" # Create data group - g1 = nc.createGroup('data') + g1 = nc.createGroup("data") # Add dimensions to data group - g1.createDimension('num_pixels', 10) - g1.createDimension('num_lines', 100) + g1.createDimension("num_pixels", 10) + g1.createDimension("num_lines", 100) # Create data/measurement_data group - g1_1 = g1.createGroup('measurement_data') + g1_1 = g1.createGroup("measurement_data") # Add dimensions to data/measurement_data group - g1_1.createDimension('num_tie_points_act', 10) - g1_1.createDimension('num_tie_points_alt', 100) + g1_1.createDimension("num_tie_points_act", 10) + g1_1.createDimension("num_tie_points_alt", 100) # Add variables to data/measurement_data group - tpw = g1_1.createVariable('tpw', np.float32, dimensions=('num_pixels', 'num_lines')) + tpw = g1_1.createVariable("tpw", np.float32, dimensions=("num_pixels", "num_lines")) tpw[:] = 1. - tpw.test_attr = 'attr' - lon = g1_1.createVariable('longitude', + tpw.test_attr = "attr" + lon = g1_1.createVariable("longitude", np.float32, - dimensions=('num_tie_points_act', 'num_tie_points_alt')) + dimensions=("num_tie_points_act", "num_tie_points_alt")) lon[:] = 100. - lat = g1_1.createVariable('latitude', + lat = g1_1.createVariable("latitude", np.float32, - dimensions=('num_tie_points_act', 'num_tie_points_alt')) + dimensions=("num_tie_points_act", "num_tie_points_alt")) lat[:] = 10. # Create quality group - g2 = nc.createGroup('quality') + g2 = nc.createGroup("quality") # Add dimensions to quality group - g2.createDimension('gap_items', 2) + g2.createDimension("gap_items", 2) # Add variables to quality group - var = g2.createVariable('duration_of_product', np.double, dimensions=()) + var = g2.createVariable("duration_of_product", np.double, dimensions=()) var[:] = 1.0 - var = g2.createVariable('duration_of_data_present', np.double, dimensions=()) + var = g2.createVariable("duration_of_data_present", np.double, dimensions=()) var[:] = 2.0 - var = g2.createVariable('duration_of_data_missing', np.double, dimensions=()) + var = g2.createVariable("duration_of_data_missing", np.double, dimensions=()) var[:] = 3.0 - var = g2.createVariable('duration_of_data_degraded', np.double, dimensions=()) + var = g2.createVariable("duration_of_data_degraded", np.double, dimensions=()) var[:] = 4.0 - var = g2.createVariable('gap_start_time_utc', np.double, dimensions=('gap_items',)) + var = g2.createVariable("gap_start_time_utc", np.double, dimensions=("gap_items",)) var[:] = [5.0, 6.0] - var = g2.createVariable('gap_end_time_utc', np.double, dimensions=('gap_items',)) + var = g2.createVariable("gap_end_time_utc", np.double, dimensions=("gap_items",)) var[:] = [7.0, 8.0] # Create longitude and latitude "interpolated" arrays @@ -104,11 +104,11 @@ def setUp(self, pgi_): # Filename info valid for all readers filename_info = { - 'creation_time': datetime.datetime(year=2017, month=9, day=22, + "creation_time": datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), - 'sensing_start_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), - 'sensing_end_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) } @@ -117,8 +117,8 @@ def setUp(self, pgi_): filename=self.test_file_name, filename_info=filename_info, filetype_info={ - 'cached_longitude': 'data/measurement_data/longitude', - 'cached_latitude': 
'data/measurement_data/latitude' + "cached_longitude": "data/measurement_data/longitude", + "cached_latitude": "data/measurement_data/latitude" } ) @@ -128,10 +128,10 @@ def setUp(self, pgi_): filename=self.test_file_name, filename_info=filename_info, filetype_info={ - 'cached_longitude': 'data/measurement_data/longitude', - 'cached_latitude': 'data/measurement_data/latitude', - 'interpolate': False, - 'orthorect': False + "cached_longitude": "data/measurement_data/longitude", + "cached_latitude": "data/measurement_data/latitude", + "interpolate": False, + "orthorect": False }, orthorect=True ) @@ -170,24 +170,24 @@ def test_file_reading(self): # Checks that the global attributes are correctly read expected_global_attributes = { - 'filename': self.test_file_name, - 'start_time': expected_start_time, - 'end_time': expected_end_time, - 'spacecraft_name': "test_spacecraft", - 'ssp_lon': None, - 'sensor': "test_instrument", - 'filename_start_time': datetime.datetime(year=2017, month=9, day=20, + "filename": self.test_file_name, + "start_time": expected_start_time, + "end_time": expected_end_time, + "spacecraft_name": "test_spacecraft", + "ssp_lon": None, + "sensor": "test_instrument", + "filename_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), - 'filename_end_time': datetime.datetime(year=2017, month=9, day=20, + "filename_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50), - 'platform_name': "test_spacecraft", - 'quality_group': { - 'duration_of_product': 1., - 'duration_of_data_present': 2., - 'duration_of_data_missing': 3., - 'duration_of_data_degraded': 4., - 'gap_start_time_utc': (5., 6.), - 'gap_end_time_utc': (7., 8.) + "platform_name": "test_spacecraft", + "quality_group": { + "duration_of_product": 1., + "duration_of_data_present": 2., + "duration_of_data_missing": 3., + "duration_of_data_degraded": 4., + "gap_start_time_utc": (5., 6.), + "gap_end_time_utc": (7., 8.) 
} } @@ -197,7 +197,7 @@ def test_file_reading(self): # Must iterate on all keys to confirm that the dictionaries are equal self.assertEqual(global_attributes.keys(), expected_global_attributes.keys()) for key in expected_global_attributes: - if key not in ['quality_group']: + if key not in ["quality_group"]: # Quality check must be valid for both iterable and not iterable elements try: equal = all(global_attributes[key] == expected_global_attributes[key]) @@ -214,8 +214,8 @@ def test_file_reading(self): equal = global_attributes[key][inner_key] == expected_global_attributes[key][inner_key] self.assertTrue(equal) - @mock.patch('satpy.readers.vii_base_nc.tie_points_interpolation') - @mock.patch('satpy.readers.vii_base_nc.tie_points_geo_interpolation') + @mock.patch("satpy.readers.vii_base_nc.tie_points_interpolation") + @mock.patch("satpy.readers.vii_base_nc.tie_points_geo_interpolation") def test_functions(self, tpgi_, tpi_): """Test the functions.""" with self.assertRaises(NotImplementedError): @@ -226,16 +226,16 @@ def test_functions(self, tpgi_, tpi_): # Checks that the _perform_interpolation function is correctly executed variable = xr.DataArray( - dims=('y', 'x'), - name='test_name', + dims=("y", "x"), + name="test_name", attrs={ - 'key_1': 'value_1', - 'key_2': 'value_2' + "key_1": "value_1", + "key_2": "value_2" }, data=np.zeros((10, 100)), ) tpi_.return_value = [xr.DataArray( - dims=('num_tie_points_act', 'num_tie_points_alt'), + dims=("num_tie_points_act", "num_tie_points_alt"), data=np.ones((10, 100)) )] @@ -243,37 +243,37 @@ def test_functions(self, tpgi_, tpi_): tpi_.assert_called_with([variable], SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR) self.assertTrue(np.allclose(return_value, np.ones((10, 100)))) - self.assertEqual(return_value.attrs, {'key_1': 'value_1', 'key_2': 'value_2'}) - self.assertEqual(return_value.name, 'test_name') - self.assertEqual(return_value.dims, ('num_pixels', 'num_lines')) + self.assertEqual(return_value.attrs, {"key_1": "value_1", "key_2": "value_2"}) + self.assertEqual(return_value.name, "test_name") + self.assertEqual(return_value.dims, ("num_pixels", "num_lines")) # Checks that the _perform_geo_interpolation function is correctly executed variable_lon = xr.DataArray( - dims=('y', 'x'), - name='test_lon', + dims=("y", "x"), + name="test_lon", attrs={ - 'key_1': 'value_lon_1', - 'key_2': 'value_lon_2' + "key_1": "value_lon_1", + "key_2": "value_lon_2" }, data=np.zeros((10, 100)) ) variable_lat = xr.DataArray( - dims=('y', 'x'), - name='test_lat', + dims=("y", "x"), + name="test_lat", attrs={ - 'key_1': 'value_lat_1', - 'key_2': 'value_lat_2' + "key_1": "value_lat_1", + "key_2": "value_lat_2" }, data=np.ones((10, 100)) * 2. 
) tpgi_.return_value = ( xr.DataArray( - dims=('num_tie_points_act', 'num_tie_points_alt'), + dims=("num_tie_points_act", "num_tie_points_alt"), data=np.ones((10, 100)) ), xr.DataArray( - dims=('num_tie_points_act', 'num_tie_points_alt'), + dims=("num_tie_points_act", "num_tie_points_alt"), data=6 * np.ones((10, 100)) ) ) @@ -283,54 +283,54 @@ def test_functions(self, tpgi_, tpi_): tpgi_.assert_called_with(variable_lon, variable_lat, SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR) self.assertTrue(np.allclose(return_lon, np.ones((10, 100)))) - self.assertEqual(return_lon.attrs, {'key_1': 'value_lon_1', 'key_2': 'value_lon_2'}) - self.assertEqual(return_lon.name, 'test_lon') - self.assertEqual(return_lon.dims, ('num_pixels', 'num_lines')) + self.assertEqual(return_lon.attrs, {"key_1": "value_lon_1", "key_2": "value_lon_2"}) + self.assertEqual(return_lon.name, "test_lon") + self.assertEqual(return_lon.dims, ("num_pixels", "num_lines")) self.assertTrue(np.allclose(return_lat, 6 * np.ones((10, 100)))) - self.assertEqual(return_lat.attrs, {'key_1': 'value_lat_1', 'key_2': 'value_lat_2'}) - self.assertEqual(return_lat.name, 'test_lat') - self.assertEqual(return_lat.dims, ('num_pixels', 'num_lines')) + self.assertEqual(return_lat.attrs, {"key_1": "value_lat_1", "key_2": "value_lat_2"}) + self.assertEqual(return_lat.name, "test_lat") + self.assertEqual(return_lat.dims, ("num_pixels", "num_lines")) def test_standardize_dims(self): """Test the standardize dims function.""" test_variable = xr.DataArray( - dims=('num_pixels', 'num_lines'), - name='test_data', + dims=("num_pixels", "num_lines"), + name="test_data", attrs={ - 'key_1': 'value_lat_1', - 'key_2': 'value_lat_2' + "key_1": "value_lat_1", + "key_2": "value_lat_2" }, data=np.ones((10, 100)) * 1. ) out_variable = self.reader._standardize_dims(test_variable) self.assertTrue(np.allclose(out_variable.values, np.ones((100, 10)))) - self.assertEqual(out_variable.dims, ('y', 'x')) - self.assertEqual(out_variable.attrs['key_1'], 'value_lat_1') + self.assertEqual(out_variable.dims, ("y", "x")) + self.assertEqual(out_variable.attrs["key_1"], "value_lat_1") - @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_calibration') - @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_interpolation') - @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_orthorectification') + @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_calibration") + @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_interpolation") + @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_orthorectification") def test_dataset(self, po_, pi_, pc_): """Test the execution of the get_dataset function.""" # Checks the correct execution of the get_dataset function with a valid file_key - variable = self.reader.get_dataset(None, {'file_key': 'data/measurement_data/tpw', - 'calibration': None}) + variable = self.reader.get_dataset(None, {"file_key": "data/measurement_data/tpw", + "calibration": None}) pc_.assert_not_called() pi_.assert_not_called() po_.assert_not_called() self.assertTrue(np.allclose(variable.values, np.ones((100, 10)))) - self.assertEqual(variable.dims, ('y', 'x')) - self.assertEqual(variable.attrs['test_attr'], 'attr') - self.assertEqual(variable.attrs['units'], None) + self.assertEqual(variable.dims, ("y", "x")) + self.assertEqual(variable.attrs["test_attr"], "attr") + self.assertEqual(variable.attrs["units"], None) # Checks the correct execution of the get_dataset function with a valid 
file_key # and required calibration and interpolation - self.reader.get_dataset(None, {'file_key': 'data/measurement_data/tpw', - 'calibration': 'reflectance', - 'interpolate': True, - 'standard_name': 'longitude'}) + self.reader.get_dataset(None, {"file_key": "data/measurement_data/tpw", + "calibration": "reflectance", + "interpolate": True, + "standard_name": "longitude"}) pc_.assert_called() pi_.assert_called() po_.assert_not_called() @@ -338,13 +338,13 @@ def test_dataset(self, po_, pi_, pc_): # Checks the correct execution of the get_dataset function with a valid file_key # and required orthorectification self.reader.orthorect = True - self.reader.get_dataset(None, {'file_key': 'data/measurement_data/tpw', - 'calibration': None, - 'orthorect_data': 'test_orthorect_data'}) + self.reader.get_dataset(None, {"file_key": "data/measurement_data/tpw", + "calibration": None, + "orthorect_data": "test_orthorect_data"}) po_.assert_called() # Checks the correct execution of the get_dataset function with an invalid file_key - invalid_dataset = self.reader.get_dataset(None, {'file_key': 'test_invalid', 'calibration': None}) + invalid_dataset = self.reader.get_dataset(None, {"file_key": "test_invalid", "calibration": None}) # Checks that the function returns None self.assertEqual(invalid_dataset, None) @@ -353,16 +353,16 @@ def test_dataset(self, po_, pi_, pc_): po_.reset_mock() # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key - longitude = self.reader.get_dataset(None, {'file_key': 'cached_longitude', - 'calibration': 'reflectance', - 'interpolate': True}) + longitude = self.reader.get_dataset(None, {"file_key": "cached_longitude", + "calibration": "reflectance", + "interpolate": True}) pc_.assert_not_called() pi_.assert_not_called() self.assertEqual(longitude[0, 0], 1.) # Checks the correct execution of the get_dataset function with a 'cached_latitude' file_key - latitude = self.reader.get_dataset(None, {'file_key': 'cached_latitude', - 'calibration': None}) + latitude = self.reader.get_dataset(None, {"file_key": "cached_latitude", + "calibration": None}) self.assertEqual(latitude[0, 0], 2.) 
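[Editor's aside, not part of the patch] Taken together, the checks in this test constrain how ViiNCBaseFileHandler.get_dataset dispatches on the dataset info: the cached geolocation keys bypass calibration and interpolation, the calibration/interpolate/orthorect switches gate the optional processing steps, and an unknown file_key yields None. A heavily simplified, hypothetical skeleton mirroring only what these assertions require (read_variable and the attribute names are invented; this is not the real reader code):

def get_dataset_sketch(handler, ds_info):
    """Dispatch sketch mirroring the behaviour asserted in this test."""
    key = ds_info["file_key"]
    if key in ("cached_longitude", "cached_latitude"):
        # Pre-computed geolocation is returned as-is, with no calibration step.
        return getattr(handler, key.replace("cached_", ""), None)
    variable = handler.read_variable(key)      # hypothetical accessor
    if variable is None:                       # unknown file_key
        return None
    if ds_info.get("calibration") is not None:
        variable = handler._perform_calibration(variable, ds_info)
    if ds_info.get("interpolate") and handler.interpolate:
        variable = handler._perform_interpolation(variable)
    if "orthorect_data" in ds_info and handler.orthorect:
        variable = handler._perform_orthorectification(variable, ds_info["orthorect_data"])
    return handler._standardize_dims(variable)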
# Repeats some check with the reader where orthorectification and interpolation are inhibited @@ -374,30 +374,30 @@ def test_dataset(self, po_, pi_, pc_): # Checks the correct execution of the get_dataset function with a valid file_key # and required calibration and interpolation - self.reader_2.get_dataset(None, {'file_key': 'data/measurement_data/tpw', - 'calibration': 'reflectance', - 'interpolate': True, - 'standard_name': 'longitude'}) + self.reader_2.get_dataset(None, {"file_key": "data/measurement_data/tpw", + "calibration": "reflectance", + "interpolate": True, + "standard_name": "longitude"}) pc_.assert_called() pi_.assert_not_called() po_.assert_not_called() # Checks the correct execution of the get_dataset function with a valid file_key # and required orthorectification - self.reader_2.get_dataset(None, {'file_key': 'data/measurement_data/tpw', - 'calibration': None, - 'orthorect_data': 'test_orthorect_data'}) + self.reader_2.get_dataset(None, {"file_key": "data/measurement_data/tpw", + "calibration": None, + "orthorect_data": "test_orthorect_data"}) po_.assert_not_called() # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key - longitude = self.reader_2.get_dataset(None, {'file_key': 'cached_longitude', - 'calibration': None}) + longitude = self.reader_2.get_dataset(None, {"file_key": "cached_longitude", + "calibration": None}) self.assertEqual(longitude[0, 0], 100.) # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key # in a reader without defined longitude - longitude = self.reader_3.get_dataset(None, {'file_key': 'cached_longitude', - 'calibration': 'reflectance', - 'interpolate': True}) + longitude = self.reader_3.get_dataset(None, {"file_key": "cached_longitude", + "calibration": "reflectance", + "interpolate": True}) # Checks that the function returns None self.assertEqual(longitude, None) diff --git a/satpy/tests/reader_tests/test_vii_l1b_nc.py b/satpy/tests/reader_tests/test_vii_l1b_nc.py index cf33e7872e..d62673d9f7 100644 --- a/satpy/tests/reader_tests/test_vii_l1b_nc.py +++ b/satpy/tests/reader_tests/test_vii_l1b_nc.py @@ -35,7 +35,7 @@ from satpy.readers.vii_l1b_nc import ViiL1bNCFileHandler from satpy.readers.vii_utils import MEAN_EARTH_RADIUS -TEST_FILE = 'test_file_vii_l1b_nc.nc' +TEST_FILE = "test_file_vii_l1b_nc.nc" class TestViiL1bNCFileHandler(unittest.TestCase): @@ -47,51 +47,51 @@ def setUp(self): # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" - with Dataset(self.test_file_name, 'w') as nc: + with Dataset(self.test_file_name, "w") as nc: # Create data group - g1 = nc.createGroup('data') + g1 = nc.createGroup("data") # Add dimensions to data group - g1.createDimension('num_chan_solar', 11) - g1.createDimension('num_chan_thermal', 9) - g1.createDimension('num_pixels', 72) - g1.createDimension('num_lines', 600) + g1.createDimension("num_chan_solar", 11) + g1.createDimension("num_chan_thermal", 9) + g1.createDimension("num_pixels", 72) + g1.createDimension("num_lines", 600) # Create calibration_data group - g1_1 = g1.createGroup('calibration_data') + g1_1 = g1.createGroup("calibration_data") # Add variables to data/calibration_data group - bt_a = g1_1.createVariable('bt_conversion_a', np.float32, dimensions=('num_chan_thermal',)) + bt_a = g1_1.createVariable("bt_conversion_a", np.float32, dimensions=("num_chan_thermal",)) bt_a[:] = np.arange(9) - bt_b = 
g1_1.createVariable('bt_conversion_b', np.float32, dimensions=('num_chan_thermal',)) + bt_b = g1_1.createVariable("bt_conversion_b", np.float32, dimensions=("num_chan_thermal",)) bt_b[:] = np.arange(9) - cw = g1_1.createVariable('channel_cw_thermal', np.float32, dimensions=('num_chan_thermal',)) + cw = g1_1.createVariable("channel_cw_thermal", np.float32, dimensions=("num_chan_thermal",)) cw[:] = np.arange(9) - isi = g1_1.createVariable('Band_averaged_solar_irradiance', np.float32, dimensions=('num_chan_solar',)) + isi = g1_1.createVariable("Band_averaged_solar_irradiance", np.float32, dimensions=("num_chan_solar",)) isi[:] = np.arange(11) # Create measurement_data group - g1_2 = g1.createGroup('measurement_data') + g1_2 = g1.createGroup("measurement_data") # Add dimensions to data/measurement_data group - g1_2.createDimension('num_tie_points_act', 10) - g1_2.createDimension('num_tie_points_alt', 100) + g1_2.createDimension("num_tie_points_act", 10) + g1_2.createDimension("num_tie_points_alt", 100) # Add variables to data/measurement_data group - sza = g1_2.createVariable('solar_zenith', np.float32, - dimensions=('num_tie_points_alt', 'num_tie_points_act')) + sza = g1_2.createVariable("solar_zenith", np.float32, + dimensions=("num_tie_points_alt", "num_tie_points_act")) sza[:] = 25.0 - delta_lat = g1_2.createVariable('delta_lat', np.float32, dimensions=('num_lines', 'num_pixels')) + delta_lat = g1_2.createVariable("delta_lat", np.float32, dimensions=("num_lines", "num_pixels")) delta_lat[:] = 1.0 self.reader = ViiL1bNCFileHandler( filename=self.test_file_name, filename_info={ - 'creation_time': datetime.datetime(year=2017, month=9, day=22, + "creation_time": datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), - 'sensing_start_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), - 'sensing_end_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) }, filetype_info={} @@ -128,40 +128,40 @@ def test_functions(self): """Test the functions.""" # Checks that the _perform_orthorectification function is correctly executed variable = xr.DataArray( - dims=('num_lines', 'num_pixels'), - name='test_name', + dims=("num_lines", "num_pixels"), + name="test_name", attrs={ - 'key_1': 'value_1', - 'key_2': 'value_2' + "key_1": "value_1", + "key_2": "value_2" }, data=da.from_array(np.ones((600, 72))) ) - orthorect_variable = self.reader._perform_orthorectification(variable, 'data/measurement_data/delta_lat') + orthorect_variable = self.reader._perform_orthorectification(variable, "data/measurement_data/delta_lat") expected_values = np.degrees(np.ones((600, 72)) / MEAN_EARTH_RADIUS) + np.ones((600, 72)) self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) # Checks that the _perform_calibration function is correctly executed in all cases # radiance calibration: return value is simply a copy of the variable - return_variable = self.reader._perform_calibration(variable, {'calibration': 'radiance'}) + return_variable = self.reader._perform_calibration(variable, {"calibration": "radiance"}) self.assertTrue(np.all(return_variable == variable)) # invalid calibration: raises a ValueError with self.assertRaises(ValueError): self.reader._perform_calibration(variable, - {'calibration': 'invalid', 'name': 'test'}) + {"calibration": "invalid", "name": "test"}) # 
brightness_temperature calibration: checks that the return value is correct calibrated_variable = self.reader._perform_calibration(variable, - {'calibration': 'brightness_temperature', - 'chan_thermal_index': 3}) + {"calibration": "brightness_temperature", + "chan_thermal_index": 3}) expected_values = np.full((600, 72), 1101.10413712) self.assertTrue(np.allclose(calibrated_variable.values, expected_values)) # reflectance calibration: checks that the return value is correct calibrated_variable = self.reader._perform_calibration(variable, - {'calibration': 'reflectance', - 'wavelength': [0.658, 0.668, 0.678], - 'chan_solar_index': 2}) + {"calibration": "reflectance", + "wavelength": [0.658, 0.668, 0.678], + "chan_solar_index": 2}) expected_values = np.full((600, 72), 173.3181982) self.assertTrue(np.allclose(calibrated_variable.values, expected_values)) diff --git a/satpy/tests/reader_tests/test_vii_l2_nc.py b/satpy/tests/reader_tests/test_vii_l2_nc.py index e431d16e73..772f783684 100644 --- a/satpy/tests/reader_tests/test_vii_l2_nc.py +++ b/satpy/tests/reader_tests/test_vii_l2_nc.py @@ -30,7 +30,7 @@ from satpy.readers.vii_l2_nc import ViiL2NCFileHandler -TEST_FILE = 'test_file_vii_l2_nc.nc' +TEST_FILE = "test_file_vii_l2_nc.nc" class TestViiL2NCFileHandler(unittest.TestCase): @@ -42,29 +42,29 @@ def setUp(self): # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" - with Dataset(self.test_file_name, 'w') as nc: + with Dataset(self.test_file_name, "w") as nc: # Create data group - g1 = nc.createGroup('data') + g1 = nc.createGroup("data") # Add dimensions to data group - g1.createDimension('num_pixels', 100) - g1.createDimension('num_lines', 10) + g1.createDimension("num_pixels", 100) + g1.createDimension("num_lines", 10) # Create measurement_data group - g1_2 = g1.createGroup('measurement_data') + g1_2 = g1.createGroup("measurement_data") # Add variables to data/measurement_data group - delta_lat = g1_2.createVariable('delta_lat', np.float32, dimensions=('num_lines', 'num_pixels')) + delta_lat = g1_2.createVariable("delta_lat", np.float32, dimensions=("num_lines", "num_pixels")) delta_lat[:] = 0.1 self.reader = ViiL2NCFileHandler( filename=self.test_file_name, filename_info={ - 'creation_time': datetime.datetime(year=2017, month=9, day=22, + "creation_time": datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), - 'sensing_start_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), - 'sensing_end_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) }, filetype_info={} @@ -82,16 +82,16 @@ def test_functions(self): """Test the functions.""" # Checks that the _perform_orthorectification function is correctly executed variable = xr.DataArray( - dims=('num_lines', 'num_pixels'), - name='test_name', + dims=("num_lines", "num_pixels"), + name="test_name", attrs={ - 'key_1': 'value_1', - 'key_2': 'value_2' + "key_1": "value_1", + "key_2": "value_2" }, data=da.from_array(np.ones((10, 100))) ) - orthorect_variable = self.reader._perform_orthorectification(variable, 'data/measurement_data/delta_lat') + orthorect_variable = self.reader._perform_orthorectification(variable, "data/measurement_data/delta_lat") expected_values = 1.1 * np.ones((10, 100)) 
self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) - self.assertEqual(orthorect_variable.attrs['key_1'], 'value_1') + self.assertEqual(orthorect_variable.attrs["key_1"], "value_1") diff --git a/satpy/tests/reader_tests/test_vii_wv_nc.py b/satpy/tests/reader_tests/test_vii_wv_nc.py index 991bbecec4..9d43f1ded1 100644 --- a/satpy/tests/reader_tests/test_vii_wv_nc.py +++ b/satpy/tests/reader_tests/test_vii_wv_nc.py @@ -30,7 +30,7 @@ from satpy.readers.vii_l2_nc import ViiL2NCFileHandler -TEST_FILE = 'test_file_vii_wv_nc.nc' +TEST_FILE = "test_file_vii_wv_nc.nc" class TestViiL2NCFileHandler(unittest.TestCase): @@ -42,29 +42,29 @@ def setUp(self): # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" - with Dataset(self.test_file_name, 'w') as nc: + with Dataset(self.test_file_name, "w") as nc: # Create data group - g1 = nc.createGroup('data') + g1 = nc.createGroup("data") # Add dimensions to data group - g1.createDimension('num_points_act', 100) - g1.createDimension('num_points_alt', 10) + g1.createDimension("num_points_act", 100) + g1.createDimension("num_points_alt", 10) # Create measurement_data group - g1_2 = g1.createGroup('measurement_data') + g1_2 = g1.createGroup("measurement_data") # Add variables to data/measurement_data group - delta_lat = g1_2.createVariable('delta_lat', np.float32, dimensions=('num_points_alt', 'num_points_act')) + delta_lat = g1_2.createVariable("delta_lat", np.float32, dimensions=("num_points_alt", "num_points_act")) delta_lat[:] = 0.1 self.reader = ViiL2NCFileHandler( filename=self.test_file_name, filename_info={ - 'creation_time': datetime.datetime(year=2017, month=9, day=22, + "creation_time": datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), - 'sensing_start_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), - 'sensing_end_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) }, filetype_info={} @@ -82,16 +82,16 @@ def test_functions(self): """Test the functions.""" # Checks that the _perform_orthorectification function is correctly executed variable = xr.DataArray( - dims=('num_points_alt', 'num_points_act'), - name='test_name', + dims=("num_points_alt", "num_points_act"), + name="test_name", attrs={ - 'key_1': 'value_1', - 'key_2': 'value_2' + "key_1": "value_1", + "key_2": "value_2" }, data=da.from_array(np.ones((10, 100))) ) - orthorect_variable = self.reader._perform_orthorectification(variable, 'data/measurement_data/delta_lat') + orthorect_variable = self.reader._perform_orthorectification(variable, "data/measurement_data/delta_lat") expected_values = 1.1 * np.ones((10, 100)) self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) - self.assertEqual(orthorect_variable.attrs['key_1'], 'value_1') + self.assertEqual(orthorect_variable.attrs["key_1"], "value_1") diff --git a/satpy/tests/reader_tests/test_viirs_atms_utils.py b/satpy/tests/reader_tests/test_viirs_atms_utils.py index 931cf6469e..cdcc0b9361 100644 --- a/satpy/tests/reader_tests/test_viirs_atms_utils.py +++ b/satpy/tests/reader_tests/test_viirs_atms_utils.py @@ -34,8 +34,8 @@ def test_get_file_units(caplog): """Test get the file-units from the dataset info.""" - did = make_dataid(name='some_variable', modifiers=()) - ds_info = {'file_units': 
None} + did = make_dataid(name="some_variable", modifiers=()) + ds_info = {"file_units": None} with caplog.at_level(logging.DEBUG): file_units = _get_file_units(did, ds_info) @@ -47,8 +47,8 @@ def test_get_file_units(caplog): def test_get_scale_factors_for_units_unsupported_units(): """Test get scale factors for units, when units are not supported.""" factors = xr.DataArray(da.from_array(DEFAULT_FILE_FACTORS, chunks=1)) - file_units = 'unknown unit' - output_units = '%' + file_units = "unknown unit" + output_units = "%" with pytest.raises(ValueError) as exec_info: _ = _get_scale_factors_for_units(factors, file_units, output_units) @@ -59,8 +59,8 @@ def test_get_scale_factors_for_units_unsupported_units(): def test_get_scale_factors_for_units_reflectances(caplog): """Test get scale factors for units, when variable is supposed to be a reflectance.""" factors = xr.DataArray(da.from_array(DEFAULT_FILE_FACTORS, chunks=1)) - file_units = '1' - output_units = '%' + file_units = "1" + output_units = "%" with caplog.at_level(logging.DEBUG): retv = _get_scale_factors_for_units(factors, file_units, output_units) @@ -72,8 +72,8 @@ def test_get_scale_factors_for_units_reflectances(caplog): def test_get_scale_factors_for_units_tbs(caplog): """Test get scale factors for units, when variable is supposed to be a brightness temperature.""" factors = xr.DataArray(da.from_array(DEFAULT_FILE_FACTORS, chunks=1)) - file_units = 'W cm-2 sr-1' - output_units = 'W m-2 sr-1' + file_units = "W cm-2 sr-1" + output_units = "W m-2 sr-1" with caplog.at_level(logging.DEBUG): retv = _get_scale_factors_for_units(factors, file_units, output_units) diff --git a/satpy/tests/reader_tests/test_viirs_compact.py b/satpy/tests/reader_tests/test_viirs_compact.py index 03ef09124c..006cdfe968 100644 --- a/satpy/tests/reader_tests/test_viirs_compact.py +++ b/satpy/tests/reader_tests/test_viirs_compact.py @@ -2445,17 +2445,17 @@ def _dataset_iterator(self): from satpy.tests.utils import make_dataid filename_info = {} - filetype_info = {'file_type': 'compact_dnb'} + filetype_info = {"file_type": "compact_dnb"} test = VIIRSCompactFileHandler(self.filename, filename_info, filetype_info) - dsid = make_dataid(name='DNB', calibration='radiance') + dsid = make_dataid(name="DNB", calibration="radiance") ds1 = test.get_dataset(dsid, {}) - dsid = make_dataid(name='longitude_dnb') - ds2 = test.get_dataset(dsid, {'standard_name': 'longitude'}) - dsid = make_dataid(name='latitude_dnb') - ds3 = test.get_dataset(dsid, {'standard_name': 'latitude'}) - dsid = make_dataid(name='solar_zenith_angle') - ds4 = test.get_dataset(dsid, {'standard_name': 'solar_zenith_angle'}) + dsid = make_dataid(name="longitude_dnb") + ds2 = test.get_dataset(dsid, {"standard_name": "longitude"}) + dsid = make_dataid(name="latitude_dnb") + ds3 = test.get_dataset(dsid, {"standard_name": "latitude"}) + dsid = make_dataid(name="solar_zenith_angle") + ds4 = test.get_dataset(dsid, {"standard_name": "solar_zenith_angle"}) for ds in [ds1, ds2, ds3, ds4]: yield ds @@ -2466,7 +2466,7 @@ def test_get_dataset(self): assert ds.shape == (752, 4064) assert ds.dtype == np.float32 assert ds.compute().shape == (752, 4064) - assert ds.attrs['rows_per_scan'] == 16 + assert ds.attrs["rows_per_scan"] == 16 def test_distributed(self): """Check that distributed computations work.""" diff --git a/satpy/tests/reader_tests/test_viirs_edr_active_fires.py b/satpy/tests/reader_tests/test_viirs_edr_active_fires.py index df94283fba..de55a9c20c 100644 --- 
a/satpy/tests/reader_tests/test_viirs_edr_active_fires.py +++ b/satpy/tests/reader_tests/test_viirs_edr_active_fires.py @@ -60,23 +60,23 @@ class FakeModFiresNetCDF4FileHandler(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} - file_content['/attr/data_id'] = "AFMOD" - file_content['/attr/satellite_name'] = "NPP" - file_content['/attr/instrument_name'] = 'VIIRS' - - file_content['Fire Pixels/FP_latitude'] = DEFAULT_LATLON_FILE_DATA - file_content['Fire Pixels/FP_longitude'] = DEFAULT_LATLON_FILE_DATA - file_content['Fire Pixels/FP_power'] = DEFAULT_POWER_FILE_DATA - file_content['Fire Pixels/FP_T13'] = DEFAULT_M13_FILE_DATA - file_content['Fire Pixels/FP_T13/attr/units'] = 'kelvins' - file_content['Fire Pixels/FP_confidence'] = DEFAULT_DETECTION_FILE_DATA - file_content['Fire Pixels/attr/units'] = 'none' - file_content['Fire Pixels/shape'] = DEFAULT_FILE_SHAPE - - attrs = ('FP_latitude', 'FP_longitude', 'FP_T13', 'FP_confidence') + file_content["/attr/data_id"] = "AFMOD" + file_content["/attr/satellite_name"] = "NPP" + file_content["/attr/instrument_name"] = "VIIRS" + + file_content["Fire Pixels/FP_latitude"] = DEFAULT_LATLON_FILE_DATA + file_content["Fire Pixels/FP_longitude"] = DEFAULT_LATLON_FILE_DATA + file_content["Fire Pixels/FP_power"] = DEFAULT_POWER_FILE_DATA + file_content["Fire Pixels/FP_T13"] = DEFAULT_M13_FILE_DATA + file_content["Fire Pixels/FP_T13/attr/units"] = "kelvins" + file_content["Fire Pixels/FP_confidence"] = DEFAULT_DETECTION_FILE_DATA + file_content["Fire Pixels/attr/units"] = "none" + file_content["Fire Pixels/shape"] = DEFAULT_FILE_SHAPE + + attrs = ("FP_latitude", "FP_longitude", "FP_T13", "FP_confidence") convert_file_content_to_data_array( file_content, attrs=attrs, - dims=('z', 'fakeDim0', 'fakeDim1')) + dims=("z", "fakeDim0", "fakeDim1")) return file_content @@ -86,21 +86,21 @@ class FakeImgFiresNetCDF4FileHandler(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} - file_content['/attr/data_id'] = "AFIMG" - file_content['/attr/satellite_name'] = "NPP" - file_content['/attr/instrument_name'] = 'VIIRS' - - file_content['Fire Pixels/FP_latitude'] = DEFAULT_LATLON_FILE_DATA - file_content['Fire Pixels/FP_longitude'] = DEFAULT_LATLON_FILE_DATA - file_content['Fire Pixels/FP_power'] = DEFAULT_POWER_FILE_DATA - file_content['Fire Pixels/FP_T4'] = DEFAULT_M13_FILE_DATA - file_content['Fire Pixels/FP_T4/attr/units'] = 'kelvins' - file_content['Fire Pixels/FP_confidence'] = DEFAULT_DETECTION_FILE_DATA - - attrs = ('FP_latitude', 'FP_longitude', 'FP_T13', 'FP_confidence') + file_content["/attr/data_id"] = "AFIMG" + file_content["/attr/satellite_name"] = "NPP" + file_content["/attr/instrument_name"] = "VIIRS" + + file_content["Fire Pixels/FP_latitude"] = DEFAULT_LATLON_FILE_DATA + file_content["Fire Pixels/FP_longitude"] = DEFAULT_LATLON_FILE_DATA + file_content["Fire Pixels/FP_power"] = DEFAULT_POWER_FILE_DATA + file_content["Fire Pixels/FP_T4"] = DEFAULT_M13_FILE_DATA + file_content["Fire Pixels/FP_T4/attr/units"] = "kelvins" + file_content["Fire Pixels/FP_confidence"] = DEFAULT_DETECTION_FILE_DATA + + attrs = ("FP_latitude", "FP_longitude", "FP_T13", "FP_confidence") convert_file_content_to_data_array( file_content, attrs=attrs, - dims=('z', 'fakeDim0', 'fakeDim1')) + dims=("z", "fakeDim0", "fakeDim1")) return file_content @@ -114,13 +114,13 @@ def __init__(self, 
filename, filename_info, filetype_info, **kwargs): platform_key = {"NPP": "Suomi-NPP", "J01": "NOAA-20", "J02": "NOAA-21"} - self.platform_name = platform_key.get(self.filename_info['satellite_name'].upper(), "unknown") + self.platform_name = platform_key.get(self.filename_info["satellite_name"].upper(), "unknown") def get_test_content(self): """Create fake test file content.""" - fake_file = io.StringIO(u'''\n\n\n\n\n\n\n\n\n\n\n\n\n\n + fake_file = io.StringIO(u"""\n\n\n\n\n\n\n\n\n\n\n\n\n\n 24.64015007, -107.57017517, 317.38290405, 0.75, 0.75, 40, 4.28618050 - 25.90660477, -100.06127167, 331.17962646, 0.75, 0.75, 81, 20.61096764''') + 25.90660477, -100.06127167, 331.17962646, 0.75, 0.75, 81, 20.61096764""") return dd.from_pandas(pd.read_csv(fake_file, skiprows=15, header=None, names=["latitude", "longitude", @@ -139,13 +139,13 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): def get_test_content(self): """Create fake test file content.""" - fake_file = io.StringIO(u'''\n\n\n\n\n\n\n\n\n\n\n\n\n\n + fake_file = io.StringIO(u"""\n\n\n\n\n\n\n\n\n\n\n\n\n\n 24.64015007, -107.57017517, 317.38290405, 0.75, 0.75, 40, 4.28618050 - 25.90660477, -100.06127167, 331.17962646, 0.75, 0.75, 81, 20.61096764''') + 25.90660477, -100.06127167, 331.17962646, 0.75, 0.75, 81, 20.61096764""") platform_key = {"NPP": "Suomi-NPP", "J01": "NOAA-20", "J02": "NOAA-21"} - self.platform_name = platform_key.get(self.filename_info['satellite_name'].upper(), "unknown") + self.platform_name = platform_key.get(self.filename_info["satellite_name"].upper(), "unknown") return dd.from_pandas(pd.read_csv(fake_file, skiprows=15, header=None, names=["latitude", "longitude", @@ -157,14 +157,14 @@ def get_test_content(self): class TestModVIIRSActiveFiresNetCDF4(unittest.TestCase): """Test VIIRS Fires Reader.""" - yaml_file = 'viirs_edr_active_fires.yaml' + yaml_file = "viirs_edr_active_fires.yaml" def setUp(self): """Wrap CDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - self.p = mock.patch.object(VIIRSActiveFiresFileHandler, '__bases__', (FakeModFiresNetCDF4FileHandler,)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + self.p = mock.patch.object(VIIRSActiveFiresFileHandler, "__bases__", (FakeModFiresNetCDF4FileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -177,7 +177,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' + "AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -188,40 +188,40 @@ def test_load_dataset(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' + "AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) r.create_filehandlers(loadables) - datasets = r.load(['confidence_pct']) + datasets = r.load(["confidence_pct"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], '%') - self.assertEqual(v.attrs['_FillValue'], 255) 
+ self.assertEqual(v.attrs["units"], "%") + self.assertEqual(v.attrs["_FillValue"], 255) self.assertTrue(np.issubdtype(v.dtype, DEFAULT_DETECTION_FILE_DTYPE)) - datasets = r.load(['T13']) + datasets = r.load(["T13"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'K') + self.assertEqual(v.attrs["units"], "K") - datasets = r.load(['power']) + datasets = r.load(["power"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'MW') - self.assertEqual(v.attrs['platform_name'], 'NOAA-21') - self.assertEqual(v.attrs['sensor'], 'viirs') + self.assertEqual(v.attrs["units"], "MW") + self.assertEqual(v.attrs["platform_name"], "NOAA-21") + self.assertEqual(v.attrs["sensor"], "viirs") class TestImgVIIRSActiveFiresNetCDF4(unittest.TestCase): """Test VIIRS Fires Reader.""" - yaml_file = 'viirs_edr_active_fires.yaml' + yaml_file = "viirs_edr_active_fires.yaml" def setUp(self): """Wrap CDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - self.p = mock.patch.object(VIIRSActiveFiresFileHandler, '__bases__', (FakeImgFiresNetCDF4FileHandler,)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + self.p = mock.patch.object(VIIRSActiveFiresFileHandler, "__bases__", (FakeImgFiresNetCDF4FileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -234,7 +234,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' + "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -245,41 +245,41 @@ def test_load_dataset(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' + "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) r.create_filehandlers(loadables) - datasets = r.load(['confidence_cat']) + datasets = r.load(["confidence_cat"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], '1') - self.assertEqual(v.attrs['flag_meanings'], ['low', 'medium', 'high']) - self.assertEqual(v.attrs['flag_values'], [7, 8, 9]) + self.assertEqual(v.attrs["units"], "1") + self.assertEqual(v.attrs["flag_meanings"], ["low", "medium", "high"]) + self.assertEqual(v.attrs["flag_values"], [7, 8, 9]) - datasets = r.load(['T4']) + datasets = r.load(["T4"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'K') + self.assertEqual(v.attrs["units"], "K") - datasets = r.load(['power']) + datasets = r.load(["power"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'MW') - self.assertEqual(v.attrs['platform_name'], 'Suomi-NPP') - self.assertEqual(v.attrs['sensor'], 'viirs') + self.assertEqual(v.attrs["units"], "MW") + self.assertEqual(v.attrs["platform_name"], "Suomi-NPP") + self.assertEqual(v.attrs["sensor"], "viirs") -@mock.patch('satpy.readers.viirs_edr_active_fires.dd.read_csv') 
+@mock.patch("satpy.readers.viirs_edr_active_fires.dd.read_csv") class TestModVIIRSActiveFiresText(unittest.TestCase): """Test VIIRS Fires Reader.""" - yaml_file = 'viirs_edr_active_fires.yaml' + yaml_file = "viirs_edr_active_fires.yaml" def setUp(self): """Wrap file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresTextFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - self.p = mock.patch.object(VIIRSActiveFiresTextFileHandler, '__bases__', (FakeModFiresTextFileHandler,)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + self.p = mock.patch.object(VIIRSActiveFiresTextFileHandler, "__bases__", (FakeModFiresTextFileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -292,7 +292,7 @@ def test_init(self, mock_obj): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' + "AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -303,39 +303,39 @@ def test_load_dataset(self, csv_mock): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' + "AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) r.create_filehandlers(loadables) - datasets = r.load(['confidence_pct']) + datasets = r.load(["confidence_pct"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], '%') + self.assertEqual(v.attrs["units"], "%") - datasets = r.load(['T13']) + datasets = r.load(["T13"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'K') + self.assertEqual(v.attrs["units"], "K") - datasets = r.load(['power']) + datasets = r.load(["power"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'MW') - self.assertEqual(v.attrs['platform_name'], 'NOAA-20') - self.assertEqual(v.attrs['sensor'], 'VIIRS') + self.assertEqual(v.attrs["units"], "MW") + self.assertEqual(v.attrs["platform_name"], "NOAA-20") + self.assertEqual(v.attrs["sensor"], "VIIRS") -@mock.patch('satpy.readers.viirs_edr_active_fires.dd.read_csv') +@mock.patch("satpy.readers.viirs_edr_active_fires.dd.read_csv") class TestImgVIIRSActiveFiresText(unittest.TestCase): """Test VIIRS Fires Reader.""" - yaml_file = 'viirs_edr_active_fires.yaml' + yaml_file = "viirs_edr_active_fires.yaml" def setUp(self): """Wrap file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresTextFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - self.p = mock.patch.object(VIIRSActiveFiresTextFileHandler, '__bases__', (FakeImgFiresTextFileHandler,)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + self.p = mock.patch.object(VIIRSActiveFiresTextFileHandler, "__bases__", (FakeImgFiresTextFileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -348,7 +348,7 @@ def test_init(self, mock_obj): from satpy.readers import load_reader r = 
load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' + "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -359,24 +359,24 @@ def test_load_dataset(self, mock_obj): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' + "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) r.create_filehandlers(loadables) - datasets = r.load(['confidence_cat']) + datasets = r.load(["confidence_cat"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], '1') - self.assertEqual(v.attrs['flag_meanings'], ['low', 'medium', 'high']) - self.assertEqual(v.attrs['flag_values'], [7, 8, 9]) + self.assertEqual(v.attrs["units"], "1") + self.assertEqual(v.attrs["flag_meanings"], ["low", "medium", "high"]) + self.assertEqual(v.attrs["flag_values"], [7, 8, 9]) - datasets = r.load(['T4']) + datasets = r.load(["T4"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'K') + self.assertEqual(v.attrs["units"], "K") - datasets = r.load(['power']) + datasets = r.load(["power"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'MW') - self.assertEqual(v.attrs['platform_name'], 'Suomi-NPP') - self.assertEqual(v.attrs['sensor'], 'VIIRS') + self.assertEqual(v.attrs["units"], "MW") + self.assertEqual(v.attrs["platform_name"], "Suomi-NPP") + self.assertEqual(v.attrs["sensor"], "VIIRS") diff --git a/satpy/tests/reader_tests/test_viirs_edr_flood.py b/satpy/tests/reader_tests/test_viirs_edr_flood.py index 9b544dc9f1..0141259784 100644 --- a/satpy/tests/reader_tests/test_viirs_edr_flood.py +++ b/satpy/tests/reader_tests/test_viirs_edr_flood.py @@ -36,51 +36,51 @@ class FakeHDF4FileHandler2(FakeHDF4FileHandler): def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} - file_content['/attr/Satellitename'] = filename_info['platform_shortname'] - file_content['/attr/SensorIdentifyCode'] = 'VIIRS' + file_content["/attr/Satellitename"] = filename_info["platform_shortname"] + file_content["/attr/SensorIdentifyCode"] = "VIIRS" # only one dataset for the flood reader - file_content['WaterDetection'] = DEFAULT_FILE_DATA - file_content['WaterDetection/attr/_Fillvalue'] = 1 - file_content['WaterDetection/attr/scale_factor'] = 1. - file_content['WaterDetection/attr/add_offset'] = 0. - file_content['WaterDetection/attr/units'] = 'none' - file_content['WaterDetection/shape'] = DEFAULT_FILE_SHAPE - file_content['WaterDetection/attr/ProjectionMinLatitude'] = 15. - file_content['WaterDetection/attr/ProjectionMaxLatitude'] = 68. - file_content['WaterDetection/attr/ProjectionMinLongitude'] = -124. - file_content['WaterDetection/attr/ProjectionMaxLongitude'] = -61. + file_content["WaterDetection"] = DEFAULT_FILE_DATA + file_content["WaterDetection/attr/_Fillvalue"] = 1 + file_content["WaterDetection/attr/scale_factor"] = 1. + file_content["WaterDetection/attr/add_offset"] = 0. + file_content["WaterDetection/attr/units"] = "none" + file_content["WaterDetection/shape"] = DEFAULT_FILE_SHAPE + file_content["WaterDetection/attr/ProjectionMinLatitude"] = 15. 
+ file_content["WaterDetection/attr/ProjectionMaxLatitude"] = 68. + file_content["WaterDetection/attr/ProjectionMinLongitude"] = -124. + file_content["WaterDetection/attr/ProjectionMaxLongitude"] = -61. # convert tp xarrays from xarray import DataArray for key, val in file_content.items(): if isinstance(val, np.ndarray): attrs = {} - for a in ['_Fillvalue', 'units', 'ProjectionMinLatitude', 'ProjectionMaxLongitude', - 'ProjectionMinLongitude', 'ProjectionMaxLatitude']: - if key + '/attr/' + a in file_content: - attrs[a] = file_content[key + '/attr/' + a] + for a in ["_Fillvalue", "units", "ProjectionMinLatitude", "ProjectionMaxLongitude", + "ProjectionMinLongitude", "ProjectionMaxLatitude"]: + if key + "/attr/" + a in file_content: + attrs[a] = file_content[key + "/attr/" + a] if val.ndim > 1: - file_content[key] = DataArray(val, dims=('fakeDim0', 'fakeDim1'), attrs=attrs) + file_content[key] = DataArray(val, dims=("fakeDim0", "fakeDim1"), attrs=attrs) else: file_content[key] = DataArray(val, attrs=attrs) - if 'y' not in file_content['WaterDetection'].dims: - file_content['WaterDetection'] = file_content['WaterDetection'].rename({'fakeDim0': 'x', 'fakeDim1': 'y'}) + if "y" not in file_content["WaterDetection"].dims: + file_content["WaterDetection"] = file_content["WaterDetection"].rename({"fakeDim0": "x", "fakeDim1": "y"}) return file_content class TestVIIRSEDRFloodReader(unittest.TestCase): """Test VIIRS EDR Flood Reader.""" - yaml_file = 'viirs_edr_flood.yaml' + yaml_file = "viirs_edr_flood.yaml" def setUp(self): """Wrap HDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_flood import VIIRSEDRFlood - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - self.p = mock.patch.object(VIIRSEDRFlood, '__bases__', (FakeHDF4FileHandler2,)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + self.p = mock.patch.object(VIIRSEDRFlood, "__bases__", (FakeHDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -93,7 +93,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf' + "WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf" ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -104,23 +104,23 @@ def test_load_dataset(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf' + "WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf" ]) r.create_filehandlers(loadables) - datasets = r.load(['WaterDetection']) + datasets = r.load(["WaterDetection"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'none') + self.assertEqual(v.attrs["units"], "none") def test_load_dataset_aoi(self): """Test loading all datasets from an area of interest file.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_001_10_300_01.hdf' + "WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_001_10_300_01.hdf" ]) r.create_filehandlers(loadables) - datasets = 
r.load(['WaterDetection']) + datasets = r.load(["WaterDetection"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'none') + self.assertEqual(v.attrs["units"], "none") diff --git a/satpy/tests/reader_tests/test_viirs_l1b.py b/satpy/tests/reader_tests/test_viirs_l1b.py index 0d3b2ad1b9..e60f83cfd0 100644 --- a/satpy/tests/reader_tests/test_viirs_l1b.py +++ b/satpy/tests/reader_tests/test_viirs_l1b.py @@ -49,22 +49,22 @@ class FakeNetCDF4FileHandlerDay(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" - dt = filename_info.get('start_time', datetime(2016, 1, 1, 12, 0, 0)) + dt = filename_info.get("start_time", datetime(2016, 1, 1, 12, 0, 0)) file_type = filename[:5].lower() num_lines = DEFAULT_FILE_SHAPE[0] num_pixels = DEFAULT_FILE_SHAPE[1] num_scans = 5 num_luts = DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1] file_content = { - '/dimension/number_of_scans': num_scans, - '/dimension/number_of_lines': num_lines, - '/dimension/number_of_pixels': num_pixels, - '/dimension/number_of_LUT_values': num_luts, - '/attr/time_coverage_start': dt.strftime('%Y-%m-%dT%H:%M:%S.000Z'), - '/attr/time_coverage_end': (dt + timedelta(minutes=6)).strftime('%Y-%m-%dT%H:%M:%S.000Z'), - '/attr/orbit_number': 26384, - '/attr/instrument': 'VIIRS', - '/attr/platform': 'Suomi-NPP', + "/dimension/number_of_scans": num_scans, + "/dimension/number_of_lines": num_lines, + "/dimension/number_of_pixels": num_pixels, + "/dimension/number_of_LUT_values": num_luts, + "/attr/time_coverage_start": dt.strftime("%Y-%m-%dT%H:%M:%S.000Z"), + "/attr/time_coverage_end": (dt + timedelta(minutes=6)).strftime("%Y-%m-%dT%H:%M:%S.000Z"), + "/attr/orbit_number": 26384, + "/attr/instrument": "VIIRS", + "/attr/platform": "Suomi-NPP", } self._fill_contents_with_default_data(file_content, file_type) self._set_dataset_specific_metadata(file_content) @@ -73,57 +73,57 @@ def get_test_content(self, filename, filename_info, filetype_info): def _fill_contents_with_default_data(self, file_content, file_type): """Fill file contents with default data.""" - if file_type.startswith('vgeo'): - file_content['/attr/OrbitNumber'] = file_content.pop('/attr/orbit_number') - file_content['geolocation_data/latitude'] = DEFAULT_LAT_DATA - file_content['geolocation_data/longitude'] = DEFAULT_LON_DATA - file_content['geolocation_data/solar_zenith'] = DEFAULT_LON_DATA - file_content['geolocation_data/solar_azimuth'] = DEFAULT_LON_DATA - file_content['geolocation_data/sensor_zenith'] = DEFAULT_LON_DATA - file_content['geolocation_data/sensor_azimuth'] = DEFAULT_LON_DATA - if file_type.endswith('d'): - file_content['geolocation_data/lunar_zenith'] = DEFAULT_LON_DATA - file_content['geolocation_data/lunar_azimuth'] = DEFAULT_LON_DATA - elif file_type == 'vl1bm': + if file_type.startswith("vgeo"): + file_content["/attr/OrbitNumber"] = file_content.pop("/attr/orbit_number") + file_content["geolocation_data/latitude"] = DEFAULT_LAT_DATA + file_content["geolocation_data/longitude"] = DEFAULT_LON_DATA + file_content["geolocation_data/solar_zenith"] = DEFAULT_LON_DATA + file_content["geolocation_data/solar_azimuth"] = DEFAULT_LON_DATA + file_content["geolocation_data/sensor_zenith"] = DEFAULT_LON_DATA + file_content["geolocation_data/sensor_azimuth"] = DEFAULT_LON_DATA + if file_type.endswith("d"): + file_content["geolocation_data/lunar_zenith"] = DEFAULT_LON_DATA + file_content["geolocation_data/lunar_azimuth"] = DEFAULT_LON_DATA + elif 
file_type == "vl1bm": for m_band in self.M_BANDS: - file_content[f'observation_data/{m_band}'] = DEFAULT_FILE_DATA - elif file_type == 'vl1bi': + file_content[f"observation_data/{m_band}"] = DEFAULT_FILE_DATA + elif file_type == "vl1bi": for i_band in self.I_BANDS: - file_content[f'observation_data/{i_band}'] = DEFAULT_FILE_DATA - elif file_type == 'vl1bd': - file_content['observation_data/DNB_observations'] = DEFAULT_FILE_DATA - file_content['observation_data/DNB_observations/attr/units'] = 'Watts/cm^2/steradian' + file_content[f"observation_data/{i_band}"] = DEFAULT_FILE_DATA + elif file_type == "vl1bd": + file_content["observation_data/DNB_observations"] = DEFAULT_FILE_DATA + file_content["observation_data/DNB_observations/attr/units"] = "Watts/cm^2/steradian" @staticmethod def _set_dataset_specific_metadata(file_content): """Set dataset-specific metadata.""" for k in list(file_content.keys()): - if not k.startswith('observation_data') and not k.startswith('geolocation_data'): + if not k.startswith("observation_data") and not k.startswith("geolocation_data"): continue - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - if k[-3:] in ['M12', 'M13', 'M14', 'M15', 'M16', 'I04', 'I05']: - file_content[k + '_brightness_temperature_lut'] = DEFAULT_FILE_DATA.ravel() - file_content[k + '_brightness_temperature_lut/attr/units'] = 'Kelvin' - file_content[k + '_brightness_temperature_lut/attr/valid_min'] = 0 - file_content[k + '_brightness_temperature_lut/attr/valid_max'] = 65534 - file_content[k + '_brightness_temperature_lut/attr/_FillValue'] = 65535 - file_content[k + '/attr/units'] = 'Watts/meter^2/steradian/micrometer' - elif k[-3:] in ['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', - 'M09', 'M10', 'M11', 'I01', 'I02', 'I03']: - file_content[k + '/attr/radiance_units'] = 'Watts/meter^2/steradian/micrometer' - file_content[k + '/attr/radiance_scale_factor'] = 1.1 - file_content[k + '/attr/radiance_add_offset'] = 0.1 - elif k.endswith('longitude'): - file_content[k + '/attr/units'] = 'degrees_east' - elif k.endswith('latitude'): - file_content[k + '/attr/units'] = 'degrees_north' - elif k.endswith('zenith') or k.endswith('azimuth'): - file_content[k + '/attr/units'] = 'degrees' - file_content[k + '/attr/valid_min'] = 0 - file_content[k + '/attr/valid_max'] = 65534 - file_content[k + '/attr/_FillValue'] = 65535 - file_content[k + '/attr/scale_factor'] = 1.1 - file_content[k + '/attr/add_offset'] = 0.1 + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + if k[-3:] in ["M12", "M13", "M14", "M15", "M16", "I04", "I05"]: + file_content[k + "_brightness_temperature_lut"] = DEFAULT_FILE_DATA.ravel() + file_content[k + "_brightness_temperature_lut/attr/units"] = "Kelvin" + file_content[k + "_brightness_temperature_lut/attr/valid_min"] = 0 + file_content[k + "_brightness_temperature_lut/attr/valid_max"] = 65534 + file_content[k + "_brightness_temperature_lut/attr/_FillValue"] = 65535 + file_content[k + "/attr/units"] = "Watts/meter^2/steradian/micrometer" + elif k[-3:] in ["M01", "M02", "M03", "M04", "M05", "M06", "M07", "M08", + "M09", "M10", "M11", "I01", "I02", "I03"]: + file_content[k + "/attr/radiance_units"] = "Watts/meter^2/steradian/micrometer" + file_content[k + "/attr/radiance_scale_factor"] = 1.1 + file_content[k + "/attr/radiance_add_offset"] = 0.1 + elif k.endswith("longitude"): + file_content[k + "/attr/units"] = "degrees_east" + elif k.endswith("latitude"): + file_content[k + "/attr/units"] = "degrees_north" + elif k.endswith("zenith") or k.endswith("azimuth"): + file_content[k + 
"/attr/units"] = "degrees" + file_content[k + "/attr/valid_min"] = 0 + file_content[k + "/attr/valid_max"] = 65534 + file_content[k + "/attr/_FillValue"] = 65535 + file_content[k + "/attr/scale_factor"] = 1.1 + file_content[k + "/attr/add_offset"] = 0.1 class FakeNetCDF4FileHandlerNight(FakeNetCDF4FileHandlerDay): @@ -149,9 +149,9 @@ def setup_method(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_l1b import VIIRSL1BFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(VIIRSL1BFileHandler, '__bases__', (self.fake_cls,)) + self.p = mock.patch.object(VIIRSL1BFileHandler, "__bases__", (self.fake_cls,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -164,7 +164,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) @@ -176,8 +176,8 @@ def test_available_datasets_m_bands(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) avail_names = r.available_dataset_names @@ -190,52 +190,52 @@ def test_load_every_m_band_bt(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['M12', - 'M13', - 'M14', - 'M15', - 'M16']) + datasets = r.load(["M12", + "M13", + "M14", + "M15", + "M16"]) assert len(datasets) == 5 for v in datasets.values(): - assert v.attrs['calibration'] == 'brightness_temperature' - assert v.attrs['units'] == 'K' - assert v.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2 - assert v.attrs['sensor'] == "viirs" + assert v.attrs["calibration"] == "brightness_temperature" + assert v.attrs["units"] == "K" + assert v.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lats.attrs["rows_per_scan"] == 2 + assert v.attrs["sensor"] == "viirs" def test_load_every_m_band_refl(self): """Test loading all M band reflectances.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['M01', - 'M02', - 'M03', - 'M04', - 'M05', - 'M06', - 'M07', - 'M08', - 'M09', - 'M10', - 
'M11']) + datasets = r.load(["M01", + "M02", + "M03", + "M04", + "M05", + "M06", + "M07", + "M08", + "M09", + "M10", + "M11"]) assert len(datasets) == (11 if self.has_reflectance_bands else 0) for v in datasets.values(): - assert v.attrs['calibration'] == 'reflectance' - assert v.attrs['units'] == '%' - assert v.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2 - assert v.attrs['sensor'] == "viirs" + assert v.attrs["calibration"] == "reflectance" + assert v.attrs["units"] == "%" + assert v.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lats.attrs["rows_per_scan"] == 2 + assert v.attrs["sensor"] == "viirs" def test_load_every_m_band_rad(self): """Test loading all M bands as radiances.""" @@ -243,34 +243,34 @@ def test_load_every_m_band_rad(self): from satpy.tests.utils import make_dataid r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) - datasets = r.load([make_dataid(name='M01', calibration='radiance'), - make_dataid(name='M02', calibration='radiance'), - make_dataid(name='M03', calibration='radiance'), - make_dataid(name='M04', calibration='radiance'), - make_dataid(name='M05', calibration='radiance'), - make_dataid(name='M06', calibration='radiance'), - make_dataid(name='M07', calibration='radiance'), - make_dataid(name='M08', calibration='radiance'), - make_dataid(name='M09', calibration='radiance'), - make_dataid(name='M10', calibration='radiance'), - make_dataid(name='M11', calibration='radiance'), - make_dataid(name='M12', calibration='radiance'), - make_dataid(name='M13', calibration='radiance'), - make_dataid(name='M14', calibration='radiance'), - make_dataid(name='M15', calibration='radiance'), - make_dataid(name='M16', calibration='radiance')]) + datasets = r.load([make_dataid(name="M01", calibration="radiance"), + make_dataid(name="M02", calibration="radiance"), + make_dataid(name="M03", calibration="radiance"), + make_dataid(name="M04", calibration="radiance"), + make_dataid(name="M05", calibration="radiance"), + make_dataid(name="M06", calibration="radiance"), + make_dataid(name="M07", calibration="radiance"), + make_dataid(name="M08", calibration="radiance"), + make_dataid(name="M09", calibration="radiance"), + make_dataid(name="M10", calibration="radiance"), + make_dataid(name="M11", calibration="radiance"), + make_dataid(name="M12", calibration="radiance"), + make_dataid(name="M13", calibration="radiance"), + make_dataid(name="M14", calibration="radiance"), + make_dataid(name="M15", calibration="radiance"), + make_dataid(name="M16", calibration="radiance")]) assert len(datasets) == (16 if self.has_reflectance_bands else 5) for v in datasets.values(): - assert v.attrs['calibration'] == 'radiance' - assert v.attrs['units'] == 'W m-2 um-1 sr-1' - assert v.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2 - assert v.attrs['sensor'] == "viirs" + assert v.attrs["calibration"] == "radiance" + assert v.attrs["units"] == "W m-2 um-1 sr-1" + assert v.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 + assert 
v.attrs["area"].lats.attrs["rows_per_scan"] == 2 + assert v.attrs["sensor"] == "viirs" def test_load_i_band_angles(self): """Test loading all M bands as radiances.""" @@ -278,65 +278,65 @@ def test_load_i_band_angles(self): from satpy.tests.utils import make_dataid r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BI_snpp_d20161130_t012400_c20161130054822.nc', - 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOI_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BI_snpp_d20161130_t012400_c20161130054822.nc", + "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOI_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) datasets = r.load([ - make_dataid(name='satellite_zenith_angle'), - make_dataid(name='satellite_azimuth_angle'), - make_dataid(name='solar_azimuth_angle'), - make_dataid(name='solar_zenith_angle'), + make_dataid(name="satellite_zenith_angle"), + make_dataid(name="satellite_azimuth_angle"), + make_dataid(name="solar_azimuth_angle"), + make_dataid(name="solar_zenith_angle"), ]) assert len(datasets) == 4 for v in datasets.values(): - assert v.attrs['resolution'] == 371 - assert v.attrs['sensor'] == "viirs" + assert v.attrs["resolution"] == 371 + assert v.attrs["sensor"] == "viirs" def test_load_dnb_radiance(self): """Test loading the main DNB dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BD_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOD_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BD_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOD_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['DNB']) + datasets = r.load(["DNB"]) assert len(datasets) == 1 for v in datasets.values(): - assert v.attrs['calibration'] == 'radiance' - assert v.attrs['units'] == 'W m-2 sr-1' - assert v.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2 - assert v.attrs['sensor'] == "viirs" + assert v.attrs["calibration"] == "radiance" + assert v.attrs["units"] == "W m-2 sr-1" + assert v.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lats.attrs["rows_per_scan"] == 2 + assert v.attrs["sensor"] == "viirs" def test_load_dnb_angles(self): """Test loading all DNB angle datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BD_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOD_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BD_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOD_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['dnb_solar_zenith_angle', - 'dnb_solar_azimuth_angle', - 'dnb_satellite_zenith_angle', - 'dnb_satellite_azimuth_angle', - 'dnb_lunar_zenith_angle', - 'dnb_lunar_azimuth_angle', + datasets = r.load(["dnb_solar_zenith_angle", + "dnb_solar_azimuth_angle", + "dnb_satellite_zenith_angle", + "dnb_satellite_azimuth_angle", + "dnb_lunar_zenith_angle", + "dnb_lunar_azimuth_angle", ]) assert len(datasets) == 6 for v in datasets.values(): - assert v.attrs['units'] == 'degrees' - assert v.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 - assert 
v.attrs['area'].lats.attrs['rows_per_scan'] == 2 - assert v.attrs['sensor'] == "viirs" + assert v.attrs["units"] == "degrees" + assert v.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lats.attrs["rows_per_scan"] == 2 + assert v.attrs["sensor"] == "viirs" class TestVIIRSL1BReaderDayNight(TestVIIRSL1BReaderDay): diff --git a/satpy/tests/reader_tests/test_viirs_sdr.py b/satpy/tests/reader_tests/test_viirs_sdr.py index ed50214c15..fecd9a0b0f 100644 --- a/satpy/tests/reader_tests/test_viirs_sdr.py +++ b/satpy/tests/reader_tests/test_viirs_sdr.py @@ -49,18 +49,18 @@ def __init__(self, filename, filename_info, filetype_info, include_factors=True) @staticmethod def _add_basic_metadata_to_file_content(file_content, filename_info, num_grans): - start_time = filename_info['start_time'] - end_time = filename_info['end_time'].replace(year=start_time.year, + start_time = filename_info["start_time"] + end_time = filename_info["end_time"].replace(year=start_time.year, month=start_time.month, day=start_time.day) - begin_date = start_time.strftime('%Y%m%d') + begin_date = start_time.strftime("%Y%m%d") begin_date = np.array(begin_date) - begin_time = start_time.strftime('%H%M%S.%fZ') + begin_time = start_time.strftime("%H%M%S.%fZ") begin_time = np.array(begin_time) - ending_date = end_time.strftime('%Y%m%d') - ending_time = end_time.strftime('%H%M%S.%fZ') + ending_date = end_time.strftime("%Y%m%d") + ending_time = end_time.strftime("%H%M%S.%fZ") new_file_content = { "{prefix2}/attr/AggregateNumberGranules": num_grans, "{prefix2}/attr/AggregateBeginningDate": begin_date, @@ -69,8 +69,8 @@ def _add_basic_metadata_to_file_content(file_content, filename_info, num_grans): "{prefix2}/attr/AggregateEndingTime": ending_time, "{prefix2}/attr/G-Ring_Longitude": np.array([0.0, 0.1, 0.2, 0.3]), "{prefix2}/attr/G-Ring_Latitude": np.array([0.0, 0.1, 0.2, 0.3]), - "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info['orbit']), - "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info['orbit']), + "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info["orbit"]), + "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info["orbit"]), "{prefix1}/attr/Instrument_Short_Name": "VIIRS", "/attr/Platform_Short_Name": "NPP", } @@ -84,13 +84,13 @@ def _add_granule_specific_info_to_file_content( lats_lists = self._get_per_granule_lats() file_content["{prefix3}/NumberOfScans"] = np.array([48] * num_granules) for granule_idx in range(num_granules): - prefix_gran = '{prefix}/{dataset_group}_Gran_{idx}'.format(prefix=gran_group_prefix, + prefix_gran = "{prefix}/{dataset_group}_Gran_{idx}".format(prefix=gran_group_prefix, dataset_group=dataset_group, idx=granule_idx) num_scans = num_scans_per_granule[granule_idx] - file_content[prefix_gran + '/attr/N_Number_Of_Scans'] = num_scans - file_content[prefix_gran + '/attr/G-Ring_Longitude'] = lons_lists[granule_idx] - file_content[prefix_gran + '/attr/G-Ring_Latitude'] = lats_lists[granule_idx] + file_content[prefix_gran + "/attr/N_Number_Of_Scans"] = num_scans + file_content[prefix_gran + "/attr/G-Ring_Longitude"] = lons_lists[granule_idx] + file_content[prefix_gran + "/attr/G-Ring_Latitude"] = lats_lists[granule_idx] @staticmethod def _get_per_granule_lons(): @@ -154,13 +154,13 @@ def _add_data_info_to_file_content(self, file_content, filename, data_var_prefix # SDR files always produce data with 48 scans per granule even if there are less total_rows = 
DEFAULT_FILE_SHAPE[0] * 48 * num_grans new_shape = (total_rows, DEFAULT_FILE_SHAPE[1]) - if filename[2:5] in ['M{:02d}'.format(x) for x in range(12)] + ['I01', 'I02', 'I03']: - keys = ['Radiance', 'Reflectance'] - elif filename[2:5] in ['M{:02d}'.format(x) for x in range(12, 17)] + ['I04', 'I05']: - keys = ['Radiance', 'BrightnessTemperature'] + if filename[2:5] in ["M{:02d}".format(x) for x in range(12)] + ["I01", "I02", "I03"]: + keys = ["Radiance", "Reflectance"] + elif filename[2:5] in ["M{:02d}".format(x) for x in range(12, 17)] + ["I04", "I05"]: + keys = ["Radiance", "BrightnessTemperature"] else: # DNB - keys = ['Radiance'] + keys = ["Radiance"] for k in keys: k = data_var_prefix + "/" + k @@ -175,7 +175,7 @@ def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefi # SDR files always produce data with 48 scans per granule even if there are less total_rows = DEFAULT_FILE_SHAPE[0] * 48 * num_grans new_shape = (total_rows, DEFAULT_FILE_SHAPE[1]) - is_dnb = filename[:5] not in ['GMODO', 'GIMGO'] + is_dnb = filename[:5] not in ["GMODO", "GIMGO"] if not is_dnb: lon_data = np.linspace(15, 55, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) lat_data = np.linspace(55, 75, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) @@ -194,12 +194,12 @@ def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefi file_content[k] = np.repeat([file_content[k]], total_rows, axis=0) file_content[k + "/shape"] = new_shape - angles = ['SolarZenithAngle', - 'SolarAzimuthAngle', - 'SatelliteZenithAngle', - 'SatelliteAzimuthAngle'] + angles = ["SolarZenithAngle", + "SolarAzimuthAngle", + "SatelliteZenithAngle", + "SatelliteAzimuthAngle"] if is_dnb: - angles += ['LunarZenithAngle', 'LunarAzimuthAngle'] + angles += ["LunarZenithAngle", "LunarAzimuthAngle"] for k in angles: k = data_var_prefix + "/" + k file_content[k] = lon_data # close enough to SZA @@ -208,14 +208,14 @@ def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefi @staticmethod def _add_geo_ref(file_content, filename): - if filename[:3] == 'SVI': - geo_prefix = 'GIMGO' - elif filename[:3] == 'SVM': - geo_prefix = 'GMODO' + if filename[:3] == "SVI": + geo_prefix = "GIMGO" + elif filename[:3] == "SVM": + geo_prefix = "GMODO" else: geo_prefix = None if geo_prefix: - file_content['/attr/N_GEO_Ref'] = geo_prefix + filename[5:] + file_content["/attr/N_GEO_Ref"] = geo_prefix + filename[5:] @staticmethod def _convert_numpy_content_to_dataarray(final_content): @@ -225,7 +225,7 @@ def _convert_numpy_content_to_dataarray(final_content): if isinstance(val, np.ndarray): val = da.from_array(val, chunks=val.shape) if val.ndim > 1: - final_content[key] = DataArray(val, dims=('y', 'x')) + final_content[key] = DataArray(val, dims=("y", "x")) else: final_content[key] = DataArray(val) @@ -234,9 +234,9 @@ def get_test_content(self, filename, filename_info, filetype_info): final_content = {} for dataset in self.datasets: dataset_group = DATASET_KEYS[dataset] - prefix1 = 'Data_Products/{dataset_group}'.format(dataset_group=dataset_group) - prefix2 = '{prefix}/{dataset_group}_Aggr'.format(prefix=prefix1, dataset_group=dataset_group) - prefix3 = 'All_Data/{dataset_group}_All'.format(dataset_group=dataset_group) + prefix1 = "Data_Products/{dataset_group}".format(dataset_group=dataset_group) + prefix2 = "{prefix}/{dataset_group}_Aggr".format(prefix=prefix1, dataset_group=dataset_group) + prefix3 = "All_Data/{dataset_group}_All".format(dataset_group=dataset_group) file_content = {} 
self._add_basic_metadata_to_file_content(file_content, filename_info, self._num_test_granules) @@ -248,10 +248,10 @@ def get_test_content(self, filename, filename_info, filetype_info): for k, v in list(file_content.items()): file_content[k.format(prefix1=prefix1, prefix2=prefix2, prefix3=prefix3)] = v - if filename[:3] in ['SVM', 'SVI', 'SVD']: + if filename[:3] in ["SVM", "SVI", "SVD"]: self._add_data_info_to_file_content(file_content, filename, prefix3, self._num_test_granules) - elif filename[0] == 'G': + elif filename[0] == "G": self._add_geolocation_info_to_file_content(file_content, filename, prefix3, self._num_test_granules) final_content.update(file_content) @@ -271,8 +271,8 @@ def touch_geo_files(*prefixes): def _touch_geo_file(prefix): - geo_fn = prefix + '_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' - open(geo_fn, 'w') + geo_fn = prefix + "_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5" + open(geo_fn, "w") return geo_fn @@ -283,47 +283,47 @@ class TestVIIRSSDRReader(unittest.TestCase): def _assert_reflectance_properties(self, data_arr, num_scans=16, with_area=True): self.assertTrue(np.issubdtype(data_arr.dtype, np.float32)) - self.assertEqual(data_arr.attrs['calibration'], 'reflectance') - self.assertEqual(data_arr.attrs['units'], '%') - self.assertEqual(data_arr.attrs['rows_per_scan'], num_scans) + self.assertEqual(data_arr.attrs["calibration"], "reflectance") + self.assertEqual(data_arr.attrs["units"], "%") + self.assertEqual(data_arr.attrs["rows_per_scan"], num_scans) if with_area: - self.assertIn('area', data_arr.attrs) - self.assertIsNotNone(data_arr.attrs['area']) - self.assertEqual(data_arr.attrs['area'].shape, data_arr.shape) + self.assertIn("area", data_arr.attrs) + self.assertIsNotNone(data_arr.attrs["area"]) + self.assertEqual(data_arr.attrs["area"].shape, data_arr.shape) else: - self.assertNotIn('area', data_arr.attrs) + self.assertNotIn("area", data_arr.attrs) def _assert_bt_properties(self, data_arr, num_scans=16, with_area=True): self.assertTrue(np.issubdtype(data_arr.dtype, np.float32)) - self.assertEqual(data_arr.attrs['calibration'], 'brightness_temperature') - self.assertEqual(data_arr.attrs['units'], 'K') - self.assertEqual(data_arr.attrs['rows_per_scan'], num_scans) + self.assertEqual(data_arr.attrs["calibration"], "brightness_temperature") + self.assertEqual(data_arr.attrs["units"], "K") + self.assertEqual(data_arr.attrs["rows_per_scan"], num_scans) if with_area: - self.assertIn('area', data_arr.attrs) - self.assertIsNotNone(data_arr.attrs['area']) - self.assertEqual(data_arr.attrs['area'].shape, data_arr.shape) + self.assertIn("area", data_arr.attrs) + self.assertIsNotNone(data_arr.attrs["area"]) + self.assertEqual(data_arr.attrs["area"].shape, data_arr.shape) else: - self.assertNotIn('area', data_arr.attrs) + self.assertNotIn("area", data_arr.attrs) def _assert_dnb_radiance_properties(self, data_arr, with_area=True): self.assertTrue(np.issubdtype(data_arr.dtype, np.float32)) - self.assertEqual(data_arr.attrs['calibration'], 'radiance') - self.assertEqual(data_arr.attrs['units'], 'W m-2 sr-1') - self.assertEqual(data_arr.attrs['rows_per_scan'], 16) + self.assertEqual(data_arr.attrs["calibration"], "radiance") + self.assertEqual(data_arr.attrs["units"], "W m-2 sr-1") + self.assertEqual(data_arr.attrs["rows_per_scan"], 16) if with_area: - self.assertIn('area', data_arr.attrs) - self.assertIsNotNone(data_arr.attrs['area']) - self.assertEqual(data_arr.attrs['area'].shape, data_arr.shape) + 
self.assertIn("area", data_arr.attrs) + self.assertIsNotNone(data_arr.attrs["area"]) + self.assertEqual(data_arr.attrs["area"].shape, data_arr.shape) else: - self.assertNotIn('area', data_arr.attrs) + self.assertNotIn("area", data_arr.attrs) def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_atms_sdr_base import JPSS_SDR_FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(JPSS_SDR_FileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(JPSS_SDR_FileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -336,7 +336,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -349,9 +349,9 @@ def test_init_start_time_is_nodate(self): r = load_reader(self.reader_configs) with pytest.raises(ValueError) as exec_info: _ = r.create_filehandlers([ - 'SVI01_npp_d19580101_t0000000_e0001261_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d19580101_t0000000_e0001261_b01708_c20120226002130255476_noaa_ops.h5", ]) - expected = 'Datetime invalid 1958-01-01 00:00:00' + expected = "Datetime invalid 1958-01-01 00:00:00" assert str(exec_info.value) == expected def test_init_start_time_beyond(self): @@ -361,10 +361,10 @@ def test_init_start_time_beyond(self): from satpy.readers import load_reader r = load_reader(self.reader_configs, filter_parameters={ - 'start_time': datetime(2012, 2, 26) + "start_time": datetime(2012, 2, 26) }) fhs = r.create_filehandlers([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) self.assertEqual(len(fhs), 0) @@ -375,10 +375,10 @@ def test_init_end_time_beyond(self): from satpy.readers import load_reader r = load_reader(self.reader_configs, filter_parameters={ - 'end_time': datetime(2012, 2, 24) + "end_time": datetime(2012, 2, 24) }) fhs = r.create_filehandlers([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) self.assertEqual(len(fhs), 0) @@ -390,11 +390,11 @@ def test_init_start_end_time(self): r = load_reader(self.reader_configs, filter_parameters={ - 'start_time': datetime(2012, 2, 24), - 'end_time': datetime(2012, 2, 26) + "start_time": datetime(2012, 2, 24), + "end_time": datetime(2012, 2, 26) }) loadables = r.select_files_from_pathnames([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -406,30 +406,30 @@ def test_load_all_m_reflectances_no_geo(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 
'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - ds = r.load(['M01', - 'M02', - 'M03', - 'M04', - 'M05', - 'M06', - 'M07', - 'M08', - 'M09', - 'M10', - 'M11', + ds = r.load(["M01", + "M02", + "M03", + "M04", + "M05", + "M06", + "M07", + "M08", + "M09", + "M10", + "M11", ]) self.assertEqual(len(ds), 11) for d in ds.values(): @@ -440,31 +440,31 @@ def test_load_all_m_reflectances_find_geo(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + 
"SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) with touch_geo_files("GMTCO", "GMODO") as (geo_fn1, geo_fn2): r.create_filehandlers(loadables) - ds = r.load(['M01', - 'M02', - 'M03', - 'M04', - 'M05', - 'M06', - 'M07', - 'M08', - 'M09', - 'M10', - 'M11', + ds = r.load(["M01", + "M02", + "M03", + "M04", + "M05", + "M06", + "M07", + "M08", + "M09", + "M10", + "M11", ]) self.assertEqual(len(ds), 11) @@ -476,140 +476,140 @@ def test_load_all_m_reflectances_provided_geo(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) with touch_geo_files("GMTCO", "GMODO") as (geo_fn1, geo_fn2): r.create_filehandlers(loadables) - ds = r.load(['M01', - 'M02', - 'M03', - 'M04', - 'M05', - 'M06', - 'M07', - 'M08', - 'M09', - 'M10', - 'M11', + ds = r.load(["M01", + "M02", + "M03", + "M04", + "M05", + "M06", + "M07", + "M08", + "M09", + "M10", + 
"M11", ]) self.assertEqual(len(ds), 11) for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) - self.assertEqual(d.attrs['area'].lons.min(), 5) - self.assertEqual(d.attrs['area'].lats.min(), 45) - self.assertEqual(d.attrs['area'].lons.attrs['rows_per_scan'], 16) - self.assertEqual(d.attrs['area'].lats.attrs['rows_per_scan'], 16) + self.assertEqual(d.attrs["area"].lons.min(), 5) + self.assertEqual(d.attrs["area"].lats.min(), 45) + self.assertEqual(d.attrs["area"].lons.attrs["rows_per_scan"], 16) + self.assertEqual(d.attrs["area"].lats.attrs["rows_per_scan"], 16) def test_load_all_m_reflectances_use_nontc(self): """Load all M band reflectances but use non-TC geolocation.""" from satpy.readers import load_reader r = load_reader(self.reader_configs, use_tc=False) loadables = r.select_files_from_pathnames([ - 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) with touch_geo_files("GMTCO", "GMODO") as (geo_fn1, geo_fn2): - r.create_filehandlers(loadables, {'use_tc': False}) - ds = r.load(['M01', - 'M02', - 'M03', - 'M04', - 'M05', - 'M06', - 'M07', - 'M08', - 'M09', - 'M10', - 'M11', + r.create_filehandlers(loadables, {"use_tc": False}) + ds = r.load(["M01", + "M02", + "M03", + "M04", + "M05", + "M06", + "M07", + "M08", + "M09", + "M10", + "M11", ]) self.assertEqual(len(ds), 11) for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) - 
self.assertEqual(d.attrs['area'].lons.min(), 15) - self.assertEqual(d.attrs['area'].lats.min(), 55) - self.assertEqual(d.attrs['area'].lons.attrs['rows_per_scan'], 16) - self.assertEqual(d.attrs['area'].lats.attrs['rows_per_scan'], 16) + self.assertEqual(d.attrs["area"].lons.min(), 15) + self.assertEqual(d.attrs["area"].lats.min(), 55) + self.assertEqual(d.attrs["area"].lons.attrs["rows_per_scan"], 16) + self.assertEqual(d.attrs["area"].lats.attrs["rows_per_scan"], 16) def test_load_all_m_reflectances_use_nontc2(self): """Load all M band reflectances but use non-TC geolocation because TC isn't available.""" from satpy.readers import load_reader r = load_reader(self.reader_configs, use_tc=None) loadables = r.select_files_from_pathnames([ - 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) with touch_geo_files("GMODO") as (geo_fn2,): - r.create_filehandlers(loadables, {'use_tc': None}) - ds = r.load(['M01', - 'M02', - 'M03', - 'M04', - 'M05', - 'M06', - 'M07', - 'M08', - 'M09', - 'M10', - 'M11', + r.create_filehandlers(loadables, {"use_tc": None}) + ds = r.load(["M01", + "M02", + "M03", + "M04", + "M05", + "M06", + "M07", + "M08", + "M09", + "M10", + "M11", ]) self.assertEqual(len(ds), 11) for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) - self.assertEqual(d.attrs['area'].lons.min(), 15) - self.assertEqual(d.attrs['area'].lats.min(), 55) - self.assertEqual(d.attrs['area'].lons.attrs['rows_per_scan'], 16) - self.assertEqual(d.attrs['area'].lats.attrs['rows_per_scan'], 16) + self.assertEqual(d.attrs["area"].lons.min(), 15) + 
self.assertEqual(d.attrs["area"].lats.min(), 55) + self.assertEqual(d.attrs["area"].lons.attrs["rows_per_scan"], 16) + self.assertEqual(d.attrs["area"].lats.attrs["rows_per_scan"], 16) def test_load_all_m_bts(self): """Load all M band brightness temperatures.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVM12_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM13_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM14_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM15_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM16_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM12_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM13_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM14_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM15_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM16_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - ds = r.load(['M12', - 'M13', - 'M14', - 'M15', - 'M16', + ds = r.load(["M12", + "M13", + "M14", + "M15", + "M16", ]) self.assertEqual(len(ds), 5) for d in ds.values(): @@ -625,22 +625,22 @@ def test_load_dnb_sza_no_factors(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) - r.create_filehandlers(loadables, {'include_factors': False}) - ds = r.load(['dnb_solar_zenith_angle', - 'dnb_solar_azimuth_angle', - 'dnb_satellite_zenith_angle', - 'dnb_satellite_azimuth_angle', - 'dnb_lunar_zenith_angle', - 'dnb_lunar_azimuth_angle']) + r.create_filehandlers(loadables, {"include_factors": False}) + ds = r.load(["dnb_solar_zenith_angle", + "dnb_solar_azimuth_angle", + "dnb_satellite_zenith_angle", + "dnb_satellite_azimuth_angle", + "dnb_lunar_zenith_angle", + "dnb_lunar_azimuth_angle"]) self.assertEqual(len(ds), 6) for d in ds.values(): self.assertTrue(np.issubdtype(d.dtype, np.float32)) - self.assertEqual(d.attrs['units'], 'degrees') - self.assertEqual(d.attrs['rows_per_scan'], 16) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + self.assertEqual(d.attrs["units"], "degrees") + self.assertEqual(d.attrs["rows_per_scan"], 16) + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) def test_load_all_m_radiances(self): """Load all M band radiances.""" @@ -648,62 +648,62 @@ def test_load_all_m_radiances(self): from satpy.tests.utils import make_dsq r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 
'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM12_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM13_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM14_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM15_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM16_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM12_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM13_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM14_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM15_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM16_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) ds = r.load([ - make_dsq(name='M01', calibration='radiance'), - make_dsq(name='M02', calibration='radiance'), - make_dsq(name='M03', calibration='radiance'), - make_dsq(name='M04', calibration='radiance'), - make_dsq(name='M05', calibration='radiance'), - make_dsq(name='M06', calibration='radiance'), - make_dsq(name='M07', calibration='radiance'), - make_dsq(name='M08', calibration='radiance'), - make_dsq(name='M09', calibration='radiance'), - make_dsq(name='M10', calibration='radiance'), - make_dsq(name='M11', calibration='radiance'), - make_dsq(name='M12', calibration='radiance'), - make_dsq(name='M13', calibration='radiance'), - make_dsq(name='M14', calibration='radiance'), - make_dsq(name='M15', calibration='radiance'), - make_dsq(name='M16', calibration='radiance'), + make_dsq(name="M01", calibration="radiance"), + make_dsq(name="M02", calibration="radiance"), + make_dsq(name="M03", calibration="radiance"), + make_dsq(name="M04", calibration="radiance"), + make_dsq(name="M05", 
calibration="radiance"), + make_dsq(name="M06", calibration="radiance"), + make_dsq(name="M07", calibration="radiance"), + make_dsq(name="M08", calibration="radiance"), + make_dsq(name="M09", calibration="radiance"), + make_dsq(name="M10", calibration="radiance"), + make_dsq(name="M11", calibration="radiance"), + make_dsq(name="M12", calibration="radiance"), + make_dsq(name="M13", calibration="radiance"), + make_dsq(name="M14", calibration="radiance"), + make_dsq(name="M15", calibration="radiance"), + make_dsq(name="M16", calibration="radiance"), ]) self.assertEqual(len(ds), 16) for d in ds.values(): self.assertTrue(np.issubdtype(d.dtype, np.float32)) - self.assertEqual(d.attrs['calibration'], 'radiance') - self.assertEqual(d.attrs['units'], 'W m-2 um-1 sr-1') - self.assertEqual(d.attrs['rows_per_scan'], 16) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + self.assertEqual(d.attrs["calibration"], "radiance") + self.assertEqual(d.attrs["units"], "W m-2 um-1 sr-1") + self.assertEqual(d.attrs["rows_per_scan"], 16) + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) def test_load_dnb(self): """Load DNB dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - ds = r.load(['DNB']) + ds = r.load(["DNB"]) self.assertEqual(len(ds), 1) for d in ds.values(): data = d.values @@ -722,11 +722,11 @@ def test_load_dnb_no_factors(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) - r.create_filehandlers(loadables, {'include_factors': False}) - ds = r.load(['DNB']) + r.create_filehandlers(loadables, {"include_factors": False}) + ds = r.load(["DNB"]) self.assertEqual(len(ds), 1) for d in ds.values(): data = d.values @@ -745,12 +745,12 @@ def test_load_i_no_files(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - self.assertNotIn('I01', [x['name'] for x in r.available_dataset_ids]) - ds = r.load(['I01']) + self.assertNotIn("I01", [x["name"] for x in r.available_dataset_ids]) + ds = r.load(["I01"]) self.assertEqual(len(ds), 0) def test_load_all_i_reflectances_provided_geo(self): @@ -758,36 +758,36 @@ def test_load_all_i_reflectances_provided_geo(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 
'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - ds = r.load(['I01', - 'I02', - 'I03', + ds = r.load(["I01", + "I02", + "I03", ]) self.assertEqual(len(ds), 3) for d in ds.values(): self._assert_reflectance_properties(d, num_scans=32) - self.assertEqual(d.attrs['area'].lons.min(), 5) - self.assertEqual(d.attrs['area'].lats.min(), 45) - self.assertEqual(d.attrs['area'].lons.attrs['rows_per_scan'], 32) - self.assertEqual(d.attrs['area'].lats.attrs['rows_per_scan'], 32) + self.assertEqual(d.attrs["area"].lons.min(), 5) + self.assertEqual(d.attrs["area"].lats.min(), 45) + self.assertEqual(d.attrs["area"].lons.attrs["rows_per_scan"], 32) + self.assertEqual(d.attrs["area"].lats.attrs["rows_per_scan"], 32) def test_load_all_i_bts(self): """Load all I band brightness temperatures.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVI04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - ds = r.load(['I04', - 'I05', + ds = r.load(["I04", + "I05", ]) self.assertEqual(len(ds), 2) for d in ds.values(): @@ -799,29 +799,29 @@ def test_load_all_i_radiances(self): from satpy.tests.utils import make_dsq r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) ds = r.load([ - make_dsq(name='I01', calibration='radiance'), - make_dsq(name='I02', calibration='radiance'), - 
make_dsq(name='I03', calibration='radiance'), - make_dsq(name='I04', calibration='radiance'), - make_dsq(name='I05', calibration='radiance'), + make_dsq(name="I01", calibration="radiance"), + make_dsq(name="I02", calibration="radiance"), + make_dsq(name="I03", calibration="radiance"), + make_dsq(name="I04", calibration="radiance"), + make_dsq(name="I05", calibration="radiance"), ]) self.assertEqual(len(ds), 5) for d in ds.values(): self.assertTrue(np.issubdtype(d.dtype, np.float32)) - self.assertEqual(d.attrs['calibration'], 'radiance') - self.assertEqual(d.attrs['units'], 'W m-2 um-1 sr-1') - self.assertEqual(d.attrs['rows_per_scan'], 32) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + self.assertEqual(d.attrs["calibration"], "radiance") + self.assertEqual(d.attrs["units"], "W m-2 um-1 sr-1") + self.assertEqual(d.attrs["rows_per_scan"], 32) + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) class FakeHDF5FileHandlerAggr(FakeHDF5FileHandler2): @@ -840,9 +840,9 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_sdr import VIIRSSDRFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandlerAggr,)) + self.p = mock.patch.object(VIIRSSDRFileHandler, "__bases__", (FakeHDF5FileHandlerAggr,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -855,7 +855,7 @@ def test_bounding_box(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) # make sure we have some files @@ -869,7 +869,7 @@ def test_bounding_box(self): 65.545685, 63.103046, 61.853905, 55.169273, 57.062447, 58.77254, 60.465942, 62.036346, 63.465122, 64.72178, 65.78417, 66.66166, 79.00025 ] - lons, lats = r.file_handlers['generic_file'][0].get_bounding_box() + lons, lats = r.file_handlers["generic_file"][0].get_bounding_box() np.testing.assert_allclose(lons, expected_lons) np.testing.assert_allclose(lats, expected_lats) @@ -890,9 +890,9 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_atms_sdr_base import JPSS_SDR_FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(JPSS_SDR_FileHandler, '__bases__', (FakeShortHDF5FileHandlerAggr,)) + self.p = mock.patch.object(JPSS_SDR_FileHandler, "__bases__", (FakeShortHDF5FileHandlerAggr,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -905,7 +905,7 @@ def test_load_truncated_band(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + 
"SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) ds = r.load(["I01"]) diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index 2f926a0e47..b14ff771d6 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -32,33 +32,33 @@ @pytest.fixture def _nc_filename(tmp_path): now = datetime.datetime.utcnow() - filename = f'VGAC_VJ10XMOD_A{now:%Y%j_%H%M}_n004946_K005.nc' + filename = f"VGAC_VJ10XMOD_A{now:%Y%j_%H%M}_n004946_K005.nc" filename_str = str(tmp_path / filename) # Create test data - with Dataset(filename_str, 'w') as nc: + with Dataset(filename_str, "w") as nc: nscn = 7 npix = 800 n_lut = 12000 - nc.createDimension('npix', npix) - nc.createDimension('nscn', nscn) - nc.createDimension('n_lut', n_lut) + nc.createDimension("npix", npix) + nc.createDimension("nscn", nscn) + nc.createDimension("n_lut", n_lut) nc.StartTime = "2023-03-28T09:08:07" nc.EndTime = "2023-03-28T10:11:12" for ind in range(1, 11, 1): ch_name = "M{:02d}".format(ind) - r_a = nc.createVariable(ch_name, np.int16, dimensions=('nscn', 'npix')) + r_a = nc.createVariable(ch_name, np.int16, dimensions=("nscn", "npix")) r_a[:] = np.ones((nscn, npix)) * 10 - attrs = {'scale_factor': 0.1, 'units': 'percent'} + attrs = {"scale_factor": 0.1, "units": "percent"} for attr in attrs: setattr(r_a, attr, attrs[attr]) for ind in range(12, 17, 1): ch_name = "M{:02d}".format(ind) - tb_b = nc.createVariable(ch_name, np.int16, dimensions=('nscn', 'npix')) + tb_b = nc.createVariable(ch_name, np.int16, dimensions=("nscn", "npix")) tb_b[:] = np.ones((nscn, npix)) * 800 - attrs = {'units': 'radiances', 'scale_factor': 0.002} + attrs = {"units": "radiances", "scale_factor": 0.002} for attr in attrs: setattr(tb_b, attr, attrs[attr]) - tb_lut = nc.createVariable(ch_name + "_LUT", np.float32, dimensions=('n_lut')) + tb_lut = nc.createVariable(ch_name + "_LUT", np.float32, dimensions=("n_lut")) tb_lut[:] = np.array(range(0, n_lut)) * 0.5 return filename_str @@ -72,7 +72,7 @@ def test_read_vgac(self, _nc_filename): # Read data scn_ = Scene( - reader='viirs_vgac_l1c_nc', + reader="viirs_vgac_l1c_nc", filenames=[_nc_filename]) scn_.load(["M05", "M15"]) assert (scn_["M05"][0, 0] == 100) diff --git a/satpy/tests/reader_tests/test_virr_l1b.py b/satpy/tests/reader_tests/test_virr_l1b.py index a7a76cafb3..ff0f780190 100644 --- a/satpy/tests/reader_tests/test_virr_l1b.py +++ b/satpy/tests/reader_tests/test_virr_l1b.py @@ -39,48 +39,48 @@ def _make_file(self, platform_id, geolocation_prefix, l1b_prefix, ECWN, Emissive dim_1 = 20 test_file = { # Satellite data. - '/attr/Day Or Night Flag': 'D', '/attr/Observing Beginning Date': '2018-12-25', - '/attr/Observing Beginning Time': '21:41:47.090', '/attr/Observing Ending Date': '2018-12-25', - '/attr/Observing Ending Time': '21:47:28.254', '/attr/Satellite Name': platform_id, - '/attr/Sensor Identification Code': 'VIRR', + "/attr/Day Or Night Flag": "D", "/attr/Observing Beginning Date": "2018-12-25", + "/attr/Observing Beginning Time": "21:41:47.090", "/attr/Observing Ending Date": "2018-12-25", + "/attr/Observing Ending Time": "21:47:28.254", "/attr/Satellite Name": platform_id, + "/attr/Sensor Identification Code": "VIRR", # Emissive data. 
- l1b_prefix + 'EV_Emissive': self.make_test_data([3, dim_0, dim_1]), - l1b_prefix + 'EV_Emissive/attr/valid_range': [0, 50000], - l1b_prefix + 'Emissive_Radiance_Scales': self.make_test_data([dim_0, dim_1]), - l1b_prefix + 'EV_Emissive/attr/units': Emissive_units, - l1b_prefix + 'Emissive_Radiance_Offsets': self.make_test_data([dim_0, dim_1]), - '/attr/' + ECWN: [2610.31, 917.6268, 836.2546], + l1b_prefix + "EV_Emissive": self.make_test_data([3, dim_0, dim_1]), + l1b_prefix + "EV_Emissive/attr/valid_range": [0, 50000], + l1b_prefix + "Emissive_Radiance_Scales": self.make_test_data([dim_0, dim_1]), + l1b_prefix + "EV_Emissive/attr/units": Emissive_units, + l1b_prefix + "Emissive_Radiance_Offsets": self.make_test_data([dim_0, dim_1]), + "/attr/" + ECWN: [2610.31, 917.6268, 836.2546], # Reflectance data. - l1b_prefix + 'EV_RefSB': self.make_test_data([7, dim_0, dim_1]), - l1b_prefix + 'EV_RefSB/attr/valid_range': [0, 32767], l1b_prefix + 'EV_RefSB/attr/units': 'none', - '/attr/RefSB_Cal_Coefficients': np.ones(14, dtype=np.float32) * 2 + l1b_prefix + "EV_RefSB": self.make_test_data([7, dim_0, dim_1]), + l1b_prefix + "EV_RefSB/attr/valid_range": [0, 32767], l1b_prefix + "EV_RefSB/attr/units": "none", + "/attr/RefSB_Cal_Coefficients": np.ones(14, dtype=np.float32) * 2 } - for attribute in ['Latitude', 'Longitude', geolocation_prefix + 'SolarZenith', - geolocation_prefix + 'SensorZenith', geolocation_prefix + 'SolarAzimuth', - geolocation_prefix + 'SensorAzimuth']: + for attribute in ["Latitude", "Longitude", geolocation_prefix + "SolarZenith", + geolocation_prefix + "SensorZenith", geolocation_prefix + "SolarAzimuth", + geolocation_prefix + "SensorAzimuth"]: test_file[attribute] = self.make_test_data([dim_0, dim_1]) - test_file[attribute + '/attr/Intercept'] = 0. - test_file[attribute + '/attr/units'] = 'degrees' - if 'Solar' in attribute or 'Sensor' in attribute: - test_file[attribute + '/attr/Slope'] = .01 - if 'Azimuth' in attribute: - test_file[attribute + '/attr/valid_range'] = [0, 18000] + test_file[attribute + "/attr/Intercept"] = 0. + test_file[attribute + "/attr/units"] = "degrees" + if "Solar" in attribute or "Sensor" in attribute: + test_file[attribute + "/attr/Slope"] = .01 + if "Azimuth" in attribute: + test_file[attribute + "/attr/valid_range"] = [0, 18000] else: - test_file[attribute + '/attr/valid_range'] = [-18000, 18000] + test_file[attribute + "/attr/valid_range"] = [-18000, 18000] else: - test_file[attribute + '/attr/Slope'] = 1. - if 'Longitude' == attribute: - test_file[attribute + '/attr/valid_range'] = [-180., 180.] + test_file[attribute + "/attr/Slope"] = 1. + if "Longitude" == attribute: + test_file[attribute + "/attr/valid_range"] = [-180., 180.] else: - test_file[attribute + '/attr/valid_range'] = [-90., 90.] + test_file[attribute + "/attr/valid_range"] = [-90., 90.] 
return test_file def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" - if filename_info['platform_id'] == 'FY3B': - return self._make_file('FY3B', '', '', 'Emmisive_Centroid_Wave_Number', 'milliWstts/m^2/cm^(-1)/steradian') - return self._make_file(filename_info['platform_id'], 'Geolocation/', 'Data/', - 'Emissive_Centroid_Wave_Number', 'none') + if filename_info["platform_id"] == "FY3B": + return self._make_file("FY3B", "", "", "Emmisive_Centroid_Wave_Number", "milliWstts/m^2/cm^(-1)/steradian") + return self._make_file(filename_info["platform_id"], "Geolocation/", "Data/", + "Emissive_Centroid_Wave_Number", "none") class TestVIRRL1BReader(unittest.TestCase): @@ -92,9 +92,9 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.virr_l1b import VIRR_L1B - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(VIRR_L1B, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(VIRR_L1B, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -104,62 +104,62 @@ def tearDown(self): def _band_helper(self, attributes, units, calibration, standard_name, file_type, band_index_size, resolution): - self.assertEqual(units, attributes['units']) - self.assertEqual(calibration, attributes['calibration']) - self.assertEqual(standard_name, attributes['standard_name']) - self.assertEqual(file_type, attributes['file_type']) - self.assertTrue(attributes['band_index'] in range(band_index_size)) - self.assertEqual(resolution, attributes['resolution']) - self.assertEqual(('longitude', 'latitude'), attributes['coordinates']) + self.assertEqual(units, attributes["units"]) + self.assertEqual(calibration, attributes["calibration"]) + self.assertEqual(standard_name, attributes["standard_name"]) + self.assertEqual(file_type, attributes["file_type"]) + self.assertTrue(attributes["band_index"] in range(band_index_size)) + self.assertEqual(resolution, attributes["resolution"]) + self.assertEqual(("longitude", "latitude"), attributes["coordinates"]) def _fy3_helper(self, platform_name, reader, Emissive_units): """Load channels and test accurate metadata.""" import datetime - band_values = {'1': 22.0, '2': 22.0, '6': 22.0, '7': 22.0, '8': 22.0, '9': 22.0, '10': 22.0, - '3': 496.542155, '4': 297.444511, '5': 288.956557, 'solar_zenith_angle': .1, - 'satellite_zenith_angle': .1, 'solar_azimuth_angle': .1, 'satellite_azimuth_angle': .1, - 'longitude': 10} - if platform_name == 'FY3B': + band_values = {"1": 22.0, "2": 22.0, "6": 22.0, "7": 22.0, "8": 22.0, "9": 22.0, "10": 22.0, + "3": 496.542155, "4": 297.444511, "5": 288.956557, "solar_zenith_angle": .1, + "satellite_zenith_angle": .1, "solar_azimuth_angle": .1, "satellite_azimuth_angle": .1, + "longitude": 10} + if platform_name == "FY3B": # updated 2015 coefficients - band_values['1'] = -0.168 - band_values['2'] = -0.2706 - band_values['6'] = -1.5631 - band_values['7'] = -0.2114 - band_values['8'] = -0.171 - band_values['9'] = -0.1606 - band_values['10'] = -0.1328 + band_values["1"] = -0.168 + band_values["2"] = -0.2706 + band_values["6"] = -1.5631 + band_values["7"] = -0.2114 + band_values["8"] = -0.171 + band_values["9"] = -0.1606 + 
band_values["10"] = -0.1328 datasets = reader.load([band for band in band_values]) for dataset in datasets: # Object returned by get_dataset. - ds = datasets[dataset['name']] + ds = datasets[dataset["name"]] attributes = ds.attrs self.assertTrue(isinstance(ds.data, da.Array)) - self.assertEqual('virr', attributes['sensor']) - self.assertEqual(platform_name, attributes['platform_name']) - self.assertEqual(datetime.datetime(2018, 12, 25, 21, 41, 47, 90000), attributes['start_time']) - self.assertEqual(datetime.datetime(2018, 12, 25, 21, 47, 28, 254000), attributes['end_time']) - self.assertEqual((19, 20), datasets[dataset['name']].shape) - self.assertEqual(('y', 'x'), datasets[dataset['name']].dims) - if dataset['name'] in ['1', '2', '6', '7', '8', '9', '10']: - self._band_helper(attributes, '%', 'reflectance', - 'toa_bidirectional_reflectance', 'virr_l1b', + self.assertEqual("virr", attributes["sensor"]) + self.assertEqual(platform_name, attributes["platform_name"]) + self.assertEqual(datetime.datetime(2018, 12, 25, 21, 41, 47, 90000), attributes["start_time"]) + self.assertEqual(datetime.datetime(2018, 12, 25, 21, 47, 28, 254000), attributes["end_time"]) + self.assertEqual((19, 20), datasets[dataset["name"]].shape) + self.assertEqual(("y", "x"), datasets[dataset["name"]].dims) + if dataset["name"] in ["1", "2", "6", "7", "8", "9", "10"]: + self._band_helper(attributes, "%", "reflectance", + "toa_bidirectional_reflectance", "virr_l1b", 7, 1000) - elif dataset['name'] in ['3', '4', '5']: - self._band_helper(attributes, Emissive_units, 'brightness_temperature', - 'toa_brightness_temperature', 'virr_l1b', 3, 1000) - elif dataset['name'] in ['longitude', 'latitude']: - self.assertEqual('degrees', attributes['units']) - self.assertTrue(attributes['standard_name'] in ['longitude', 'latitude']) - self.assertEqual(['virr_l1b', 'virr_geoxx'], attributes['file_type']) - self.assertEqual(1000, attributes['resolution']) + elif dataset["name"] in ["3", "4", "5"]: + self._band_helper(attributes, Emissive_units, "brightness_temperature", + "toa_brightness_temperature", "virr_l1b", 3, 1000) + elif dataset["name"] in ["longitude", "latitude"]: + self.assertEqual("degrees", attributes["units"]) + self.assertTrue(attributes["standard_name"] in ["longitude", "latitude"]) + self.assertEqual(["virr_l1b", "virr_geoxx"], attributes["file_type"]) + self.assertEqual(1000, attributes["resolution"]) else: - self.assertEqual('degrees', attributes['units']) + self.assertEqual("degrees", attributes["units"]) self.assertTrue( - attributes['standard_name'] in ['solar_zenith_angle', 'sensor_zenith_angle', 'solar_azimuth_angle', - 'sensor_azimuth_angle']) - self.assertEqual(['virr_geoxx', 'virr_l1b'], attributes['file_type']) - self.assertEqual(('longitude', 'latitude'), attributes['coordinates']) - self.assertEqual(band_values[dataset['name']], + attributes["standard_name"] in ["solar_zenith_angle", "sensor_zenith_angle", "solar_azimuth_angle", + "sensor_azimuth_angle"]) + self.assertEqual(["virr_geoxx", "virr_l1b"], attributes["file_type"]) + self.assertEqual(("longitude", "latitude"), attributes["coordinates"]) + self.assertEqual(band_values[dataset["name"]], round(float(np.array(ds[ds.shape[0] // 2][ds.shape[1] // 2])), 6)) assert "valid_range" not in ds.attrs @@ -167,21 +167,21 @@ def test_fy3b_file(self): """Test that FY3B files are recognized.""" from satpy.readers import load_reader FY3B_reader = load_reader(self.reader_configs) - FY3B_file = 
FY3B_reader.select_files_from_pathnames(['tf2018359214943.FY3B-L_VIRRX_L1B.HDF']) + FY3B_file = FY3B_reader.select_files_from_pathnames(["tf2018359214943.FY3B-L_VIRRX_L1B.HDF"]) self.assertEqual(1, len(FY3B_file)) FY3B_reader.create_filehandlers(FY3B_file) # Make sure we have some files self.assertTrue(FY3B_reader.file_handlers) - self._fy3_helper('FY3B', FY3B_reader, 'milliWstts/m^2/cm^(-1)/steradian') + self._fy3_helper("FY3B", FY3B_reader, "milliWstts/m^2/cm^(-1)/steradian") def test_fy3c_file(self): """Test that FY3C files are recognized.""" from satpy.readers import load_reader FY3C_reader = load_reader(self.reader_configs) - FY3C_files = FY3C_reader.select_files_from_pathnames(['tf2018359143912.FY3C-L_VIRRX_GEOXX.HDF', - 'tf2018359143912.FY3C-L_VIRRX_L1B.HDF']) + FY3C_files = FY3C_reader.select_files_from_pathnames(["tf2018359143912.FY3C-L_VIRRX_GEOXX.HDF", + "tf2018359143912.FY3C-L_VIRRX_L1B.HDF"]) self.assertEqual(2, len(FY3C_files)) FY3C_reader.create_filehandlers(FY3C_files) # Make sure we have some files self.assertTrue(FY3C_reader.file_handlers) - self._fy3_helper('FY3C', FY3C_reader, '1') + self._fy3_helper("FY3C", FY3C_reader, "1") diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py index c62ffcea1d..3760249d95 100644 --- a/satpy/tests/scene_tests/test_conversions.py +++ b/satpy/tests/scene_tests/test_conversions.py @@ -35,8 +35,8 @@ def test_serialization_with_readers_and_data_arr(self): """Test that dask can serialize a Scene with readers.""" from distributed.protocol import deserialize, serialize - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"]) cloned_scene = deserialize(*serialize(scene)) assert scene._readers.keys() == cloned_scene._readers.keys() assert scene.all_dataset_ids == scene.all_dataset_ids @@ -57,12 +57,12 @@ def test_geoviews_basic_with_area(self): """Test converting a Scene to geoviews with an AreaDefinition.""" from pyresample.geometry import AreaDefinition scn = Scene() - area = AreaDefinition('test', 'test', 'test', - {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, + area = AreaDefinition("test", "test", "test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) - scn['ds1'] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1), - 'area': area}) + scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), + "area": area}) gv_obj = scn.to_geoviews() # we assume that if we got something back, geoviews can use it assert gv_obj is not None @@ -74,9 +74,9 @@ def test_geoviews_basic_with_swath(self): lons = xr.DataArray(da.zeros((2, 2))) lats = xr.DataArray(da.zeros((2, 2))) area = SwathDefinition(lons, lats) - scn['ds1'] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1), - 'area': area}) + scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), + "area": area}) gv_obj = scn.to_geoviews() # we assume that if we got something back, geoviews can use it assert gv_obj is not None @@ -98,14 +98,14 @@ def single_area_scn(self): """Define Scene with single area.""" from pyresample.geometry import AreaDefinition - area = AreaDefinition('test', 'test', 'test', - {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, + area = AreaDefinition("test", "test", 
"test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) data_array = xr.DataArray(da.zeros((2, 2), chunks=-1), - dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1), 'area': area}) + dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), "area": area}) scn = Scene() - scn['var1'] = data_array + scn["var1"] = data_array return scn @pytest.fixture @@ -113,22 +113,22 @@ def multi_area_scn(self): """Define Scene with multiple area.""" from pyresample.geometry import AreaDefinition - area1 = AreaDefinition('test', 'test', 'test', - {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, + area1 = AreaDefinition("test", "test", "test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) - area2 = AreaDefinition('test', 'test', 'test', - {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, + area2 = AreaDefinition("test", "test", "test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 4, 4, [-200, -200, 200, 200]) data_array1 = xr.DataArray(da.zeros((2, 2), chunks=-1), - dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1), 'area': area1}) + dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), "area": area1}) data_array2 = xr.DataArray(da.zeros((4, 4), chunks=-1), - dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1), 'area': area2}) + dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), "area": area2}) scn = Scene() - scn['var1'] = data_array1 - scn['var2'] = data_array2 + scn["var1"] = data_array1 + scn["var2"] = data_array2 return scn def test_with_single_area_scene_type(self, single_area_scn): diff --git a/satpy/tests/scene_tests/test_data_access.py b/satpy/tests/scene_tests/test_data_access.py index f345679e03..e446af9c46 100644 --- a/satpy/tests/scene_tests/test_data_access.py +++ b/satpy/tests/scene_tests/test_data_access.py @@ -83,21 +83,21 @@ def test_iter_by_area_swath(self): from pyresample.geometry import SwathDefinition scene = Scene() sd = SwathDefinition(lons=np.arange(5), lats=np.arange(5)) - scene["1"] = xr.DataArray(np.arange(5), attrs={'area': sd}) - scene["2"] = xr.DataArray(np.arange(5), attrs={'area': sd}) + scene["1"] = xr.DataArray(np.arange(5), attrs={"area": sd}) + scene["2"] = xr.DataArray(np.arange(5), attrs={"area": sd}) scene["3"] = xr.DataArray(np.arange(5)) for area_obj, ds_list in scene.iter_by_area(): - ds_list_names = set(ds['name'] for ds in ds_list) + ds_list_names = set(ds["name"] for ds in ds_list) if area_obj is sd: - assert ds_list_names == {'1', '2'} + assert ds_list_names == {"1", "2"} else: assert area_obj is None - assert ds_list_names == {'3'} + assert ds_list_names == {"3"} def test_bad_setitem(self): """Test setting an item wrongly.""" scene = Scene() - pytest.raises(ValueError, scene.__setitem__, '1', np.arange(5)) + pytest.raises(ValueError, scene.__setitem__, "1", np.arange(5)) def test_setitem(self): """Test setting an item.""" @@ -108,16 +108,16 @@ def test_setitem(self): assert set(scene._datasets.keys()) == {expected_id} assert set(scene._wishlist) == {expected_id} - did = make_dataid(name='oranges') + did = make_dataid(name="oranges") scene[did] = ds1 - assert 'oranges' in scene + assert "oranges" in scene nparray = np.arange(5*5).reshape(5, 5) with pytest.raises(ValueError): - scene['apples'] = nparray - assert 'apples' not in scene - did = make_dataid(name='apples') + scene["apples"] = nparray + assert "apples" not in scene + did = make_dataid(name="apples") scene[did] = nparray - assert 'apples' in scene + assert "apples" in 
scene def test_getitem(self): """Test __getitem__ with names only.""" @@ -125,41 +125,41 @@ def test_getitem(self): scene["1"] = ds1 = xr.DataArray(np.arange(5)) scene["2"] = ds2 = xr.DataArray(np.arange(5)) scene["3"] = ds3 = xr.DataArray(np.arange(5)) - assert scene['1'] is ds1 - assert scene['2'] is ds2 - assert scene['3'] is ds3 - pytest.raises(KeyError, scene.__getitem__, '4') - assert scene.get('3') is ds3 - assert scene.get('4') is None + assert scene["1"] is ds1 + assert scene["2"] is ds2 + assert scene["3"] is ds3 + pytest.raises(KeyError, scene.__getitem__, "4") + assert scene.get("3") is ds3 + assert scene.get("4") is None def test_getitem_modifiers(self): """Test __getitem__ with names and modifiers.""" # Return least modified item scene = Scene() - scene['1'] = ds1_m0 = xr.DataArray(np.arange(5)) - scene[make_dataid(name='1', modifiers=('mod1',)) + scene["1"] = ds1_m0 = xr.DataArray(np.arange(5)) + scene[make_dataid(name="1", modifiers=("mod1",)) ] = xr.DataArray(np.arange(5)) - assert scene['1'] is ds1_m0 + assert scene["1"] is ds1_m0 assert len(list(scene.keys())) == 2 scene = Scene() - scene['1'] = ds1_m0 = xr.DataArray(np.arange(5)) - scene[make_dataid(name='1', modifiers=('mod1',)) + scene["1"] = ds1_m0 = xr.DataArray(np.arange(5)) + scene[make_dataid(name="1", modifiers=("mod1",)) ] = xr.DataArray(np.arange(5)) - scene[make_dataid(name='1', modifiers=('mod1', 'mod2')) + scene[make_dataid(name="1", modifiers=("mod1", "mod2")) ] = xr.DataArray(np.arange(5)) - assert scene['1'] is ds1_m0 + assert scene["1"] is ds1_m0 assert len(list(scene.keys())) == 3 scene = Scene() - scene[make_dataid(name='1', modifiers=('mod1', 'mod2')) + scene[make_dataid(name="1", modifiers=("mod1", "mod2")) ] = ds1_m2 = xr.DataArray(np.arange(5)) - scene[make_dataid(name='1', modifiers=('mod1',)) + scene[make_dataid(name="1", modifiers=("mod1",)) ] = ds1_m1 = xr.DataArray(np.arange(5)) - assert scene['1'] is ds1_m1 - assert scene[make_dataid(name='1', modifiers=('mod1', 'mod2'))] is ds1_m2 + assert scene["1"] is ds1_m1 + assert scene[make_dataid(name="1", modifiers=("mod1", "mod2"))] is ds1_m2 pytest.raises(KeyError, scene.__getitem__, - make_dataid(name='1', modifiers=tuple())) + make_dataid(name="1", modifiers=tuple())) assert len(list(scene.keys())) == 2 def test_getitem_slices(self): @@ -168,13 +168,13 @@ def test_getitem_slices(self): from pyresample.utils import proj4_str_to_dict scene1 = Scene() scene2 = Scene() - proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 ' - '+units=m +no_defs') + proj_dict = proj4_str_to_dict("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. 
+lat_0=25 +lat_1=25 " + "+units=m +no_defs") area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, 200, 400, @@ -184,80 +184,80 @@ def test_getitem_slices(self): lats=np.zeros((5, 10))) scene1["1"] = scene2["1"] = xr.DataArray(np.zeros((5, 10))) scene1["2"] = scene2["2"] = xr.DataArray(np.zeros((5, 10)), - dims=('y', 'x')) - scene1["3"] = xr.DataArray(np.zeros((5, 10)), dims=('y', 'x'), - attrs={'area': area_def}) + dims=("y", "x")) + scene1["3"] = xr.DataArray(np.zeros((5, 10)), dims=("y", "x"), + attrs={"area": area_def}) anc_vars = [xr.DataArray(np.ones((5, 10)), - attrs={'name': 'anc_var', 'area': area_def})] - attrs = {'ancillary_variables': anc_vars, 'area': area_def} + attrs={"name": "anc_var", "area": area_def})] + attrs = {"ancillary_variables": anc_vars, "area": area_def} scene1["3a"] = xr.DataArray(np.zeros((5, 10)), - dims=('y', 'x'), + dims=("y", "x"), attrs=attrs) - scene2["4"] = xr.DataArray(np.zeros((5, 10)), dims=('y', 'x'), - attrs={'area': swath_def}) + scene2["4"] = xr.DataArray(np.zeros((5, 10)), dims=("y", "x"), + attrs={"area": swath_def}) anc_vars = [xr.DataArray(np.ones((5, 10)), - attrs={'name': 'anc_var', 'area': swath_def})] - attrs = {'ancillary_variables': anc_vars, 'area': swath_def} + attrs={"name": "anc_var", "area": swath_def})] + attrs = {"ancillary_variables": anc_vars, "area": swath_def} scene2["4a"] = xr.DataArray(np.zeros((5, 10)), - dims=('y', 'x'), + dims=("y", "x"), attrs=attrs) new_scn1 = scene1[2:5, 2:8] new_scn2 = scene2[2:5, 2:8] for new_scn in [new_scn1, new_scn2]: # datasets without an area don't get sliced - assert new_scn['1'].shape == (5, 10) - assert new_scn['2'].shape == (5, 10) - - assert new_scn1['3'].shape == (3, 6) - assert 'area' in new_scn1['3'].attrs - assert new_scn1['3'].attrs['area'].shape == (3, 6) - assert new_scn1['3a'].shape == (3, 6) - a_var = new_scn1['3a'].attrs['ancillary_variables'][0] + assert new_scn["1"].shape == (5, 10) + assert new_scn["2"].shape == (5, 10) + + assert new_scn1["3"].shape == (3, 6) + assert "area" in new_scn1["3"].attrs + assert new_scn1["3"].attrs["area"].shape == (3, 6) + assert new_scn1["3a"].shape == (3, 6) + a_var = new_scn1["3a"].attrs["ancillary_variables"][0] assert a_var.shape == (3, 6) - assert new_scn2['4'].shape == (3, 6) - assert 'area' in new_scn2['4'].attrs - assert new_scn2['4'].attrs['area'].shape == (3, 6) - assert new_scn2['4a'].shape == (3, 6) - a_var = new_scn2['4a'].attrs['ancillary_variables'][0] + assert new_scn2["4"].shape == (3, 6) + assert "area" in new_scn2["4"].attrs + assert new_scn2["4"].attrs["area"].shape == (3, 6) + assert new_scn2["4a"].shape == (3, 6) + a_var = new_scn2["4a"].attrs["ancillary_variables"][0] assert a_var.shape == (3, 6) def test_contains(self): """Test contains.""" scene = Scene() scene["1"] = xr.DataArray(np.arange(5), - attrs={'wavelength': (0.1, 0.2, 0.3), - '_satpy_id_keys': default_id_keys_config}) - assert '1' in scene + attrs={"wavelength": (0.1, 0.2, 0.3), + "_satpy_id_keys": default_id_keys_config}) + assert "1" in scene assert 0.15 in scene - assert '2' not in scene + assert "2" not in scene assert 0.31 not in scene scene = Scene() - scene['blueberry'] = xr.DataArray(np.arange(5)) - scene['blackberry'] = xr.DataArray(np.arange(5)) - scene['strawberry'] = xr.DataArray(np.arange(5)) - scene['raspberry'] = xr.DataArray(np.arange(5)) + scene["blueberry"] = xr.DataArray(np.arange(5)) + scene["blackberry"] = xr.DataArray(np.arange(5)) + scene["strawberry"] = xr.DataArray(np.arange(5)) + 
scene["raspberry"] = xr.DataArray(np.arange(5)) # deepcode ignore replace~keys~list~compare: This is on purpose - assert make_cid(name='blueberry') in scene.keys() - assert make_cid(name='blueberry') in scene - assert 'blueberry' in scene - assert 'blueberry' not in scene.keys() + assert make_cid(name="blueberry") in scene.keys() + assert make_cid(name="blueberry") in scene + assert "blueberry" in scene + assert "blueberry" not in scene.keys() def test_delitem(self): """Test deleting an item.""" scene = Scene() scene["1"] = xr.DataArray(np.arange(5), - attrs={'wavelength': (0.1, 0.2, 0.3), - '_satpy_id_keys': default_id_keys_config}) + attrs={"wavelength": (0.1, 0.2, 0.3), + "_satpy_id_keys": default_id_keys_config}) scene["2"] = xr.DataArray(np.arange(5), - attrs={'wavelength': (0.4, 0.5, 0.6), - '_satpy_id_keys': default_id_keys_config}) + attrs={"wavelength": (0.4, 0.5, 0.6), + "_satpy_id_keys": default_id_keys_config}) scene["3"] = xr.DataArray(np.arange(5), - attrs={'wavelength': (0.7, 0.8, 0.9), - '_satpy_id_keys': default_id_keys_config}) - del scene['1'] - del scene['3'] + attrs={"wavelength": (0.7, 0.8, 0.9), + "_satpy_id_keys": default_id_keys_config}) + del scene["1"] + del scene["3"] del scene[0.45] assert not scene._wishlist assert not list(scene._datasets.keys()) @@ -268,7 +268,7 @@ def _create_coarest_finest_data_array(shape, area_def, attrs=None): data_arr = xr.DataArray( da.arange(math.prod(shape)).reshape(shape), attrs={ - 'area': area_def, + "area": area_def, }) if attrs: data_arr.attrs.update(attrs) @@ -277,11 +277,11 @@ def _create_coarest_finest_data_array(shape, area_def, attrs=None): def _create_coarsest_finest_area_def(shape, extents): from pyresample import AreaDefinition - proj_str = '+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs' + proj_str = "+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. 
+lat_0=25 +lat_1=25 +units=m +no_defs" area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_str, shape[1], shape[0], @@ -331,7 +331,7 @@ def test_coarsest_finest_area_different_shape(self, coarse_area, fine_area): assert scn.coarsest_area() is coarse_area assert scn.finest_area() is fine_area - assert scn.coarsest_area(['2', '3']) is fine_area + assert scn.coarsest_area(["2", "3"]) is fine_area @pytest.mark.parametrize( ("area_def", "shifted_area"), @@ -375,24 +375,24 @@ class TestComputePersist: def test_compute_pass_through(self): """Test pass through of xarray compute.""" import numpy as np - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"]) scene = scene.compute() - assert isinstance(scene['ds1'].data, np.ndarray) + assert isinstance(scene["ds1"].data, np.ndarray) def test_persist_pass_through(self): """Test pass through of xarray persist.""" from dask.array.utils import assert_eq - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"]) scenep = scene.persist() - assert_eq(scene['ds1'].data, scenep['ds1'].data) - assert set(scenep['ds1'].data.dask).issubset(scene['ds1'].data.dask) - assert len(scenep["ds1"].data.dask) == scenep['ds1'].data.npartitions + assert_eq(scene["ds1"].data, scenep["ds1"].data) + assert set(scenep["ds1"].data.dask).issubset(scene["ds1"].data.dask) + assert len(scenep["ds1"].data.dask) == scenep["ds1"].data.npartitions def test_chunk_pass_through(self): """Test pass through of xarray chunk.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"]) scene = scene.chunk(chunks=2) - assert scene['ds1'].data.chunksize == (2, 2) + assert scene["ds1"].data.chunksize == (2, 2) diff --git a/satpy/tests/scene_tests/test_init.py b/satpy/tests/scene_tests/test_init.py index 4caf804366..a9b4622769 100644 --- a/satpy/tests/scene_tests/test_init.py +++ b/satpy/tests/scene_tests/test_init.py @@ -38,49 +38,49 @@ class TestScene: def test_init(self): """Test scene initialization.""" - with mock.patch('satpy.scene.Scene._create_reader_instances') as cri: + with mock.patch("satpy.scene.Scene._create_reader_instances") as cri: cri.return_value = {} - Scene(filenames=['bla'], reader='blo') - cri.assert_called_once_with(filenames=['bla'], reader='blo', + Scene(filenames=["bla"], reader="blo") + cri.assert_called_once_with(filenames=["bla"], reader="blo", reader_kwargs=None) def test_init_str_filename(self): """Test initializing with a single string as filenames.""" - pytest.raises(ValueError, Scene, reader='blo', filenames='test.nc') + pytest.raises(ValueError, Scene, reader="blo", filenames="test.nc") def test_start_end_times(self): """Test start and end times for a scene.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") assert scene.start_time == FAKE_FILEHANDLER_START assert scene.end_time == FAKE_FILEHANDLER_END def test_init_preserve_reader_kwargs(self): """Test that the initialization preserves the kwargs.""" cri = spy_decorator(Scene._create_reader_instances) - with mock.patch('satpy.scene.Scene._create_reader_instances', cri): - reader_kwargs = {'calibration_type': 'gsics'} - scene = Scene(filenames=['fake1_1.txt'], - reader='fake1', - 
filter_parameters={'area': 'euron1'}, + with mock.patch("satpy.scene.Scene._create_reader_instances", cri): + reader_kwargs = {"calibration_type": "gsics"} + scene = Scene(filenames=["fake1_1.txt"], + reader="fake1", + filter_parameters={"area": "euron1"}, reader_kwargs=reader_kwargs) - assert reader_kwargs is not cri.mock.call_args[1]['reader_kwargs'] + assert reader_kwargs is not cri.mock.call_args[1]["reader_kwargs"] assert scene.start_time == FAKE_FILEHANDLER_START assert scene.end_time == FAKE_FILEHANDLER_END def test_init_alone(self): """Test simple initialization.""" scn = Scene() - assert not scn._readers, 'Empty scene should not load any readers' + assert not scn._readers, "Empty scene should not load any readers" def test_init_no_files(self): """Test that providing an empty list of filenames fails.""" - pytest.raises(ValueError, Scene, reader='viirs_sdr', filenames=[]) + pytest.raises(ValueError, Scene, reader="viirs_sdr", filenames=[]) def test_create_reader_instances_with_filenames(self): """Test creating a reader providing filenames.""" filenames = ["bla", "foo", "bar"] reader_name = None - with mock.patch('satpy.scene.load_readers') as findermock: + with mock.patch("satpy.scene.load_readers") as findermock: Scene(filenames=filenames) findermock.assert_called_once_with( filenames=filenames, @@ -112,7 +112,7 @@ def test_create_reader_instances_with_reader(self): """Test createring a reader instance providing the reader name.""" reader = "foo" filenames = ["1", "2", "3"] - with mock.patch('satpy.scene.load_readers') as findermock: + with mock.patch("satpy.scene.load_readers") as findermock: findermock.return_value = {} Scene(reader=reader, filenames=filenames) findermock.assert_called_once_with(reader=reader, @@ -123,29 +123,29 @@ def test_create_reader_instances_with_reader(self): def test_create_reader_instances_with_reader_kwargs(self): """Test creating a reader instance with reader kwargs.""" from satpy.readers.yaml_reader import FileYAMLReader - reader_kwargs = {'calibration_type': 'gsics'} - filter_parameters = {'area': 'euron1'} - reader_kwargs2 = {'calibration_type': 'gsics', 'filter_parameters': filter_parameters} + reader_kwargs = {"calibration_type": "gsics"} + filter_parameters = {"area": "euron1"} + reader_kwargs2 = {"calibration_type": "gsics", "filter_parameters": filter_parameters} rinit = spy_decorator(FileYAMLReader.create_filehandlers) - with mock.patch('satpy.readers.yaml_reader.FileYAMLReader.create_filehandlers', rinit): - scene = Scene(filenames=['fake1_1.txt'], - reader='fake1', - filter_parameters={'area': 'euron1'}, + with mock.patch("satpy.readers.yaml_reader.FileYAMLReader.create_filehandlers", rinit): + scene = Scene(filenames=["fake1_1.txt"], + reader="fake1", + filter_parameters={"area": "euron1"}, reader_kwargs=reader_kwargs) del scene - assert reader_kwargs == rinit.mock.call_args[1]['fh_kwargs'] + assert reader_kwargs == rinit.mock.call_args[1]["fh_kwargs"] rinit.mock.reset_mock() - scene = Scene(filenames=['fake1_1.txt'], - reader='fake1', + scene = Scene(filenames=["fake1_1.txt"], + reader="fake1", reader_kwargs=reader_kwargs2) - assert reader_kwargs == rinit.mock.call_args[1]['fh_kwargs'] + assert reader_kwargs == rinit.mock.call_args[1]["fh_kwargs"] del scene def test_create_multiple_reader_different_kwargs(self, include_test_etc): """Test passing different kwargs to different readers.""" from satpy.readers import load_reader - with mock.patch.object(satpy.readers, 'load_reader', wraps=load_reader) as lr: + with 
mock.patch.object(satpy.readers, "load_reader", wraps=load_reader) as lr: Scene(filenames={"fake1_1ds": ["fake1_1ds_1.txt"], "fake2_1ds": ["fake2_1ds_1.txt"]}, reader_kwargs={ @@ -153,8 +153,8 @@ def test_create_multiple_reader_different_kwargs(self, include_test_etc): "fake2_1ds": {"mouth": "varallo"} }) lr.assert_has_calls([ - mock.call([os.path.join(include_test_etc, 'readers', 'fake1_1ds.yaml')], mouth="omegna"), - mock.call([os.path.join(include_test_etc, 'readers', 'fake2_1ds.yaml')], mouth="varallo")]) + mock.call([os.path.join(include_test_etc, "readers", "fake1_1ds.yaml")], mouth="omegna"), + mock.call([os.path.join(include_test_etc, "readers", "fake2_1ds.yaml")], mouth="varallo")]) def test_storage_options_from_reader_kwargs_no_options(self): """Test getting storage options from reader kwargs. @@ -162,8 +162,8 @@ def test_storage_options_from_reader_kwargs_no_options(self): Case where there are no options given. """ filenames = ["s3://data-bucket/file1", "s3://data-bucket/file2", "s3://data-bucket/file3"] - with mock.patch('satpy.scene.load_readers'): - with mock.patch('fsspec.open_files') as open_files: + with mock.patch("satpy.scene.load_readers"): + with mock.patch("fsspec.open_files") as open_files: Scene(filenames=filenames) open_files.assert_called_once_with(filenames) @@ -173,13 +173,13 @@ def test_storage_options_from_reader_kwargs_single_dict_no_options(self): Case where a single dict is given for all readers without storage options. """ filenames = ["s3://data-bucket/file1", "s3://data-bucket/file2", "s3://data-bucket/file3"] - reader_kwargs = {'reader_opt': 'foo'} - with mock.patch('satpy.scene.load_readers'): - with mock.patch('fsspec.open_files') as open_files: + reader_kwargs = {"reader_opt": "foo"} + with mock.patch("satpy.scene.load_readers"): + with mock.patch("fsspec.open_files") as open_files: Scene(filenames=filenames, reader_kwargs=reader_kwargs) open_files.assert_called_once_with(filenames) - @pytest.mark.parametrize("reader_kwargs", [{}, {'reader_opt': 'foo'}]) + @pytest.mark.parametrize("reader_kwargs", [{}, {"reader_opt": "foo"}]) def test_storage_options_from_reader_kwargs_single_dict(self, reader_kwargs): """Test getting storage options from reader kwargs. 
@@ -187,14 +187,14 @@ def test_storage_options_from_reader_kwargs_single_dict(self, reader_kwargs): """ filenames = ["s3://data-bucket/file1", "s3://data-bucket/file2", "s3://data-bucket/file3"] expected_reader_kwargs = reader_kwargs.copy() - storage_options = {'option1': '1'} - reader_kwargs['storage_options'] = storage_options + storage_options = {"option1": "1"} + reader_kwargs["storage_options"] = storage_options orig_reader_kwargs = deepcopy(reader_kwargs) - with mock.patch('satpy.scene.load_readers') as load_readers: - with mock.patch('fsspec.open_files') as open_files: + with mock.patch("satpy.scene.load_readers") as load_readers: + with mock.patch("fsspec.open_files") as open_files: Scene(filenames=filenames, reader_kwargs=reader_kwargs) call_ = load_readers.mock_calls[0] - assert call_.kwargs['reader_kwargs'] == expected_reader_kwargs + assert call_.kwargs["reader_kwargs"] == expected_reader_kwargs open_files.assert_called_once_with(filenames, **storage_options) assert reader_kwargs == orig_reader_kwargs @@ -208,25 +208,25 @@ def test_storage_options_from_reader_kwargs_per_reader(self): "reader2": ["s3://data-bucket/file2"], "reader3": ["s3://data-bucket/file3"], } - storage_options_1 = {'option1': '1'} - storage_options_2 = {'option2': '2'} - storage_options_3 = {'option3': '3'} + storage_options_1 = {"option1": "1"} + storage_options_2 = {"option2": "2"} + storage_options_3 = {"option3": "3"} reader_kwargs = { - "reader1": {'reader_opt_1': 'foo'}, - "reader2": {'reader_opt_2': 'bar'}, - "reader3": {'reader_opt_3': 'baz'}, + "reader1": {"reader_opt_1": "foo"}, + "reader2": {"reader_opt_2": "bar"}, + "reader3": {"reader_opt_3": "baz"}, } expected_reader_kwargs = deepcopy(reader_kwargs) - reader_kwargs['reader1']['storage_options'] = storage_options_1 - reader_kwargs['reader2']['storage_options'] = storage_options_2 - reader_kwargs['reader3']['storage_options'] = storage_options_3 + reader_kwargs["reader1"]["storage_options"] = storage_options_1 + reader_kwargs["reader2"]["storage_options"] = storage_options_2 + reader_kwargs["reader3"]["storage_options"] = storage_options_3 orig_reader_kwargs = deepcopy(reader_kwargs) - with mock.patch('satpy.scene.load_readers') as load_readers: - with mock.patch('fsspec.open_files') as open_files: + with mock.patch("satpy.scene.load_readers") as load_readers: + with mock.patch("fsspec.open_files") as open_files: Scene(filenames=filenames, reader_kwargs=reader_kwargs) call_ = load_readers.mock_calls[0] - assert call_.kwargs['reader_kwargs'] == expected_reader_kwargs + assert call_.kwargs["reader_kwargs"] == expected_reader_kwargs assert mock.call(filenames["reader1"], **storage_options_1) in open_files.mock_calls assert mock.call(filenames["reader2"], **storage_options_2) in open_files.mock_calls assert mock.call(filenames["reader3"], **storage_options_3) in open_files.mock_calls @@ -244,15 +244,15 @@ def test_storage_options_from_reader_kwargs_per_reader_and_global(self): "reader3": ["s3://data-bucket/file3"], } reader_kwargs = { - "reader1": {'reader_opt_1': 'foo', 'storage_options': {'option1': '1'}}, - "reader2": {'reader_opt_2': 'bar', 'storage_options': {'option2': '2'}}, + "reader1": {"reader_opt_1": "foo", "storage_options": {"option1": "1"}}, + "reader2": {"reader_opt_2": "bar", "storage_options": {"option2": "2"}}, "storage_options": {"endpoint_url": "url"}, } orig_reader_kwargs = deepcopy(reader_kwargs) - with mock.patch('satpy.scene.load_readers'): - with mock.patch('fsspec.open_files') as open_files: + with 
mock.patch("satpy.scene.load_readers"): + with mock.patch("fsspec.open_files") as open_files: Scene(filenames=filenames, reader_kwargs=reader_kwargs) - assert mock.call(filenames["reader1"], option1='1', endpoint_url='url') in open_files.mock_calls - assert mock.call(filenames["reader2"], option2='2', endpoint_url='url') in open_files.mock_calls + assert mock.call(filenames["reader1"], option1="1", endpoint_url="url") in open_files.mock_calls + assert mock.call(filenames["reader2"], option2="2", endpoint_url="url") in open_files.mock_calls assert reader_kwargs == orig_reader_kwargs diff --git a/satpy/tests/scene_tests/test_load.py b/satpy/tests/scene_tests/test_load.py index 6eefbc0080..889d9e2cbe 100644 --- a/satpy/tests/scene_tests/test_load.py +++ b/satpy/tests/scene_tests/test_load.py @@ -36,7 +36,7 @@ class TestSceneAllAvailableDatasets: def test_all_datasets_no_readers(self): """Test all datasets with no reader.""" scene = Scene() - pytest.raises(KeyError, scene.all_dataset_ids, reader_name='fake') + pytest.raises(KeyError, scene.all_dataset_ids, reader_name="fake") id_list = scene.all_dataset_ids() assert id_list == [] # no sensors are loaded so we shouldn't get any comps either @@ -46,7 +46,7 @@ def test_all_datasets_no_readers(self): def test_all_dataset_names_no_readers(self): """Test all dataset names with no reader.""" scene = Scene() - pytest.raises(KeyError, scene.all_dataset_names, reader_name='fake') + pytest.raises(KeyError, scene.all_dataset_names, reader_name="fake") name_list = scene.all_dataset_names() assert name_list == [] # no sensors are loaded so we shouldn't get any comps either @@ -57,7 +57,7 @@ def test_available_dataset_no_readers(self): """Test the available datasets without a reader.""" scene = Scene() pytest.raises( - KeyError, scene.available_dataset_ids, reader_name='fake') + KeyError, scene.available_dataset_ids, reader_name="fake") name_list = scene.available_dataset_ids() assert name_list == [] # no sensors are loaded so we shouldn't get any comps either @@ -68,7 +68,7 @@ def test_available_dataset_names_no_readers(self): """Test the available dataset names without a reader.""" scene = Scene() pytest.raises( - KeyError, scene.available_dataset_names, reader_name='fake') + KeyError, scene.available_dataset_names, reader_name="fake") name_list = scene.available_dataset_names() assert name_list == [] # no sensors are loaded so we shouldn't get any comps either @@ -77,8 +77,8 @@ def test_available_dataset_names_no_readers(self): def test_all_datasets_one_reader(self): """Test all datasets for one reader.""" - scene = Scene(filenames=['fake1_1.txt'], - reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], + reader="fake1") id_list = scene.all_dataset_ids() # 20 data products + 6 lon/lat products num_reader_ds = 21 + 6 @@ -88,8 +88,8 @@ def test_all_datasets_one_reader(self): def test_all_datasets_multiple_reader(self): """Test all datasets for multiple readers.""" - scene = Scene(filenames={'fake1_1ds': ['fake1_1ds_1.txt'], - 'fake2_1ds': ['fake2_1ds_1.txt']}) + scene = Scene(filenames={"fake1_1ds": ["fake1_1ds_1.txt"], + "fake2_1ds": ["fake2_1ds_1.txt"]}) id_list = scene.all_dataset_ids() assert len(id_list) == 2 id_list = scene.all_dataset_ids(composites=True) @@ -99,8 +99,8 @@ def test_all_datasets_multiple_reader(self): def test_available_datasets_one_reader(self): """Test the available datasets for one reader.""" - scene = Scene(filenames=['fake1_1ds_1.txt'], - reader='fake1_1ds') + scene = Scene(filenames=["fake1_1ds_1.txt"], + 
reader="fake1_1ds") id_list = scene.available_dataset_ids() assert len(id_list) == 1 id_list = scene.available_dataset_ids(composites=True) @@ -109,13 +109,13 @@ def test_available_datasets_one_reader(self): def test_available_composite_ids_missing_available(self): """Test available_composite_ids when a composites dep is missing.""" - scene = Scene(filenames=['fake1_1ds_1.txt'], - reader='fake1_1ds') - assert 'comp2' not in scene.available_composite_names() + scene = Scene(filenames=["fake1_1ds_1.txt"], + reader="fake1_1ds") + assert "comp2" not in scene.available_composite_names() def test_available_composites_known_versus_all(self): """Test available_composite_ids when some datasets aren't available.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1', + scene = Scene(filenames=["fake1_1.txt"], reader="fake1", reader_kwargs={"not_available": ["ds2", "ds3"]}) all_comps = scene.all_composite_names() avail_comps = scene.available_composite_names() @@ -127,11 +127,11 @@ def test_available_composites_known_versus_all(self): def test_available_comps_no_deps(self): """Test Scene available composites when composites don't have a dependency.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") all_comp_ids = scene.available_composite_ids() - assert make_cid(name='static_image') in all_comp_ids + assert make_cid(name="static_image") in all_comp_ids available_comp_ids = scene.available_composite_ids() - assert make_cid(name='static_image') in available_comp_ids + assert make_cid(name="static_image") in available_comp_ids def test_available_when_sensor_none_in_preloaded_dataarrays(self): """Test Scene available composites when existing loaded arrays have sensor set to None. @@ -143,7 +143,7 @@ def test_available_when_sensor_none_in_preloaded_dataarrays(self): """ scene = _scene_with_data_array_none_sensor() available_comp_ids = scene.available_composite_ids() - assert make_cid(name='static_image') in available_comp_ids + assert make_cid(name="static_image") in available_comp_ids @pytest.mark.usefixtures("include_test_etc") @@ -152,13 +152,13 @@ class TestBadLoading: def test_load_str(self): """Test passing a string to Scene.load.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - pytest.raises(TypeError, scene.load, 'ds1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + pytest.raises(TypeError, scene.load, "ds1") def test_load_no_exist(self): """Test loading a dataset that doesn't exist.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - pytest.raises(KeyError, scene.load, ['im_a_dataset_that_doesnt_exist']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + pytest.raises(KeyError, scene.load, ["im_a_dataset_that_doesnt_exist"]) @pytest.mark.usefixtures("include_test_etc") @@ -169,50 +169,50 @@ def test_load_no_exist2(self): """Test loading a dataset that doesn't exist then another load.""" from satpy.readers.yaml_reader import FileYAMLReader load_mock = spy_decorator(FileYAMLReader.load) - with mock.patch.object(FileYAMLReader, 'load', load_mock): + with mock.patch.object(FileYAMLReader, "load", load_mock): lmock = load_mock.mock - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds9_fail_load']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds9_fail_load"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 0 lmock.assert_called_once_with( - {make_dataid(name='ds9_fail_load', wavelength=(1.0, 1.1, 1.2))}) + 
{make_dataid(name="ds9_fail_load", wavelength=(1.0, 1.1, 1.2))}) - scene.load(['ds1']) + scene.load(["ds1"]) loaded_ids = list(scene._datasets.keys()) assert lmock.call_count == 2 # most recent call should have only been ds1 lmock.assert_called_with({ - make_dataid(name='ds1', resolution=250, calibration='reflectance', modifiers=tuple()), + make_dataid(name="ds1", resolution=250, calibration="reflectance", modifiers=tuple()), }) assert len(loaded_ids) == 1 def test_load_ds1_no_comps(self): """Test loading one dataset with no loaded compositors.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0] == make_dataid(name='ds1', resolution=250, calibration='reflectance', modifiers=tuple()) + assert loaded_ids[0] == make_dataid(name="ds1", resolution=250, calibration="reflectance", modifiers=tuple()) def test_load_ds1_load_twice(self): """Test loading one dataset with no loaded compositors.""" from satpy.readers.yaml_reader import FileYAMLReader - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0] == make_dataid(name='ds1', resolution=250, calibration='reflectance', modifiers=tuple()) + assert loaded_ids[0] == make_dataid(name="ds1", resolution=250, calibration="reflectance", modifiers=tuple()) load_mock = spy_decorator(FileYAMLReader.load) - with mock.patch.object(FileYAMLReader, 'load', load_mock): + with mock.patch.object(FileYAMLReader, "load", load_mock): lmock = load_mock.mock - scene.load(['ds1']) + scene.load(["ds1"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0] == make_dataid(name='ds1', + assert loaded_ids[0] == make_dataid(name="ds1", resolution=250, - calibration='reflectance', + calibration="reflectance", modifiers=tuple()) assert not lmock.called, ("Reader.load was called again when " "loading something that's already " @@ -220,17 +220,17 @@ def test_load_ds1_load_twice(self): def test_load_ds1_unknown_modifier(self): """Test loading one dataset with no loaded compositors.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") pytest.raises(KeyError, scene.load, - [make_dataid(name='ds1', modifiers=('_fake_bad_mod_',))]) + [make_dataid(name="ds1", modifiers=("_fake_bad_mod_",))]) def test_load_ds4_cal(self): """Test loading a dataset that has two calibration variations.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds4']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds4"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0]['calibration'] == 'reflectance' + assert loaded_ids[0]["calibration"] == "reflectance" @pytest.mark.parametrize( ("input_filenames", "load_kwargs", "exp_resolution"), @@ -243,37 +243,37 @@ def test_load_ds4_cal(self): ) def test_load_ds5_variations(self, input_filenames, load_kwargs, exp_resolution): """Test loading a dataset has multiple resolutions available.""" - scene = Scene(filenames=input_filenames, reader='fake1') - scene.load(['ds5'], **load_kwargs) + scene = Scene(filenames=input_filenames, reader="fake1") + scene.load(["ds5"], **load_kwargs) 
loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0]['name'] == 'ds5' - assert loaded_ids[0]['resolution'] == exp_resolution + assert loaded_ids[0]["name"] == "ds5" + assert loaded_ids[0]["resolution"] == exp_resolution def test_load_ds5_multiple_resolution_loads(self): """Test loading a dataset with multiple resolutions available as separate loads.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds5'], resolution=1000) - scene.load(['ds5'], resolution=500) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds5"], resolution=1000) + scene.load(["ds5"], resolution=500) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 - assert loaded_ids[0]['name'] == 'ds5' - assert loaded_ids[0]['resolution'] == 500 - assert loaded_ids[1]['name'] == 'ds5' - assert loaded_ids[1]['resolution'] == 1000 + assert loaded_ids[0]["name"] == "ds5" + assert loaded_ids[0]["resolution"] == 500 + assert loaded_ids[1]["name"] == "ds5" + assert loaded_ids[1]["resolution"] == 1000 def test_load_ds6_wl(self): """Test loading a dataset by wavelength.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load([0.22]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0]['name'] == 'ds6' + assert loaded_ids[0]["name"] == "ds6" def test_load_ds9_fail_load(self): """Test loading a dataset that will fail during load.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds9_fail_load']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds9_fail_load"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 0 @@ -308,7 +308,7 @@ class TestLoadingComposites: ) def test_single_composite_loading(self, comp_name, exp_id_or_name): """Test that certain composites can be loaded individually.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load([comp_name]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 @@ -319,33 +319,33 @@ def test_single_composite_loading(self, comp_name, exp_id_or_name): def test_load_multiple_resolutions(self): """Test loading a dataset has multiple resolutions available with different resolutions.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - comp25 = make_cid(name='comp25', resolution=1000) - scene[comp25] = xr.DataArray([], attrs={'name': 'comp25', 'resolution': 1000}) - scene.load(['comp25'], resolution=500) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + comp25 = make_cid(name="comp25", resolution=1000) + scene[comp25] = xr.DataArray([], attrs={"name": "comp25", "resolution": 1000}) + scene.load(["comp25"], resolution=500) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 - assert loaded_ids[0]['name'] == 'comp25' - assert loaded_ids[0]['resolution'] == 500 - assert loaded_ids[1]['name'] == 'comp25' - assert loaded_ids[1]['resolution'] == 1000 + assert loaded_ids[0]["name"] == "comp25" + assert loaded_ids[0]["resolution"] == 500 + assert loaded_ids[1]["name"] == "comp25" + assert loaded_ids[1]["resolution"] == 1000 def test_load_same_subcomposite(self): """Test loading a composite and one of it's subcomposites at the same time.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp24', 'comp25'], resolution=500) + scene = Scene(filenames=["fake1_1.txt"], 
reader="fake1") + scene.load(["comp24", "comp25"], resolution=500) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 - assert loaded_ids[0]['name'] == 'comp24' - assert loaded_ids[0]['resolution'] == 500 - assert loaded_ids[1]['name'] == 'comp25' - assert loaded_ids[1]['resolution'] == 500 + assert loaded_ids[0]["name"] == "comp24" + assert loaded_ids[0]["resolution"] == 500 + assert loaded_ids[1]["name"] == "comp25" + assert loaded_ids[1]["resolution"] == 500 def test_load_comp8(self): """Test loading a composite that has a non-existent prereq.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - pytest.raises(KeyError, scene.load, ['comp8']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + pytest.raises(KeyError, scene.load, ["comp8"]) def test_load_comp15(self): """Test loading a composite whose prerequisites can't be loaded. @@ -354,23 +354,23 @@ def test_load_comp15(self): """ # it is fine that an optional prereq doesn't exist - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp15']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp15"]) loaded_ids = list(scene._datasets.keys()) assert not loaded_ids def test_load_comp17(self): """Test loading a composite that depends on a composite that won't load.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp17']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp17"]) loaded_ids = list(scene._datasets.keys()) assert not loaded_ids def test_load_comp18(self): """Test loading a composite that depends on an incompatible area modified dataset.""" # it is fine that an optional prereq doesn't exist - scene = Scene(filenames=['fake1_1.txt', 'fake1_highres_1.txt'], reader='fake1') - scene.load(['comp18']) + scene = Scene(filenames=["fake1_1.txt", "fake1_highres_1.txt"], reader="fake1") + scene.load(["comp18"]) loaded_ids = list(scene._datasets.keys()) # depends on: # ds3 @@ -379,11 +379,11 @@ def test_load_comp18(self): # We should end up with ds3, ds4 (mod1, mod3), ds5 (mod1), and ds1 # for the incomp_areas modifier assert len(loaded_ids) == 4 # the 1 dependencies - assert 'ds3' in scene._datasets - assert make_dataid(name='ds4', calibration='reflectance', - modifiers=('mod1', 'mod3')) in scene._datasets - assert make_dataid(name='ds5', resolution=250, - modifiers=('mod1',)) in scene._datasets + assert "ds3" in scene._datasets + assert make_dataid(name="ds4", calibration="reflectance", + modifiers=("mod1", "mod3")) in scene._datasets + assert make_dataid(name="ds5", resolution=250, + modifiers=("mod1",)) in scene._datasets def test_load_comp18_2(self): """Test loading a composite that depends on an incompatible area modified dataset. 
@@ -393,8 +393,8 @@ def test_load_comp18_2(self): """ # it is fine that an optional prereq doesn't exist - scene = Scene(filenames=['fake1_1.txt', 'fake1_highres_1.txt'], reader='fake1') - scene.load(['comp18_2']) + scene = Scene(filenames=["fake1_1.txt", "fake1_highres_1.txt"], reader="fake1") + scene.load(["comp18_2"]) loaded_ids = list(scene._datasets.keys()) # depends on: # ds3 @@ -403,12 +403,12 @@ def test_load_comp18_2(self): # We should end up with ds3, ds4 (mod1, mod3), ds5 (mod1), and ds1 # and ds2 for the incomp_areas_opt modifier assert len(loaded_ids) == 5 # the 1 dependencies - assert 'ds3' in scene._datasets - assert 'ds2' in scene._datasets - assert make_dataid(name='ds4', calibration='reflectance', - modifiers=('mod1', 'mod3')) in scene._datasets - assert make_dataid(name='ds5', resolution=250, - modifiers=('mod1',)) in scene._datasets + assert "ds3" in scene._datasets + assert "ds2" in scene._datasets + assert make_dataid(name="ds4", calibration="reflectance", + modifiers=("mod1", "mod3")) in scene._datasets + assert make_dataid(name="ds5", resolution=250, + modifiers=("mod1",)) in scene._datasets def test_load_comp19(self): """Test loading a composite that shares a dep with a dependency. @@ -421,79 +421,79 @@ def test_load_comp19(self): """ # Check dependency tree nodes # initialize the dep tree without loading the data - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene._update_dependency_tree({'comp19'}, None) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene._update_dependency_tree({"comp19"}, None) - this_node = scene._dependency_tree['comp19'] - shared_dep_id = make_dataid(name='ds5', modifiers=('res_change',)) + this_node = scene._dependency_tree["comp19"] + shared_dep_id = make_dataid(name="ds5", modifiers=("res_change",)) shared_dep_expected_node = scene._dependency_tree[shared_dep_id] # get the node for the first dep in the prereqs list of the # comp13 node - shared_dep_node = scene._dependency_tree['comp13'].data[1][0] + shared_dep_node = scene._dependency_tree["comp13"].data[1][0] shared_dep_node2 = this_node.data[1][0] assert shared_dep_expected_node is shared_dep_node assert shared_dep_expected_node is shared_dep_node2 - scene.load(['comp19']) + scene.load(["comp19"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0] == make_cid(name='comp19') + assert loaded_ids[0] == make_cid(name="comp19") def test_load_multiple_comps(self): """Test loading multiple composites.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp1', 'comp2', 'comp3', 'comp4', 'comp5', 'comp6', - 'comp7', 'comp9', 'comp10']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp1", "comp2", "comp3", "comp4", "comp5", "comp6", + "comp7", "comp9", "comp10"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 9 def test_load_multiple_comps_separate(self): """Test loading multiple composites, one at a time.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp10']) - scene.load(['comp9']) - scene.load(['comp7']) - scene.load(['comp6']) - scene.load(['comp5']) - scene.load(['comp4']) - scene.load(['comp3']) - scene.load(['comp2']) - scene.load(['comp1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp10"]) + scene.load(["comp9"]) + scene.load(["comp7"]) + scene.load(["comp6"]) + scene.load(["comp5"]) + scene.load(["comp4"]) + scene.load(["comp3"]) + scene.load(["comp2"]) + scene.load(["comp1"]) 
loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 9 def test_load_modified(self): """Test loading a modified dataset.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load([make_dsq(name='ds1', modifiers=('mod1', 'mod2'))]) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load([make_dsq(name="ds1", modifiers=("mod1", "mod2"))]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0]['modifiers'] == ('mod1', 'mod2') + assert loaded_ids[0]["modifiers"] == ("mod1", "mod2") def test_load_modified_with_load_kwarg(self): """Test loading a modified dataset using the ``Scene.load`` keyword argument.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1'], modifiers=('mod1', 'mod2')) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"], modifiers=("mod1", "mod2")) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0]['modifiers'] == ('mod1', 'mod2') + assert loaded_ids[0]["modifiers"] == ("mod1", "mod2") def test_load_multiple_modified(self): """Test loading multiple modified datasets.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load([ - make_dataid(name='ds1', modifiers=('mod1', 'mod2')), - make_dataid(name='ds2', modifiers=('mod2', 'mod1')), + make_dataid(name="ds1", modifiers=("mod1", "mod2")), + make_dataid(name="ds2", modifiers=("mod2", "mod1")), ]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 for i in loaded_ids: - if i['name'] == 'ds1': - assert i['modifiers'] == ('mod1', 'mod2') + if i["name"] == "ds1": + assert i["modifiers"] == ("mod1", "mod2") else: - assert i['name'] == 'ds2' - assert i['modifiers'] == ('mod2', 'mod1') + assert i["name"] == "ds2" + assert i["modifiers"] == ("mod2", "mod1") def test_load_dataset_after_composite(self): """Test load composite followed by other datasets.""" @@ -501,15 +501,15 @@ def test_load_dataset_after_composite(self): from satpy.tests.utils import FakeCompositor load_mock = spy_decorator(FileYAMLReader.load) comp_mock = spy_decorator(FakeCompositor.__call__) - with mock.patch.object(FileYAMLReader, 'load', load_mock), \ - mock.patch.object(FakeCompositor, '__call__', comp_mock): + with mock.patch.object(FileYAMLReader, "load", load_mock), \ + mock.patch.object(FakeCompositor, "__call__", comp_mock): lmock = load_mock.mock - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp3']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp3"]) assert lmock.call_count == 1 - scene.load(['ds1']) + scene.load(["ds1"]) assert lmock.call_count == 2 - scene.load(['ds1']) + scene.load(["ds1"]) # we should only load from the file twice assert lmock.call_count == 2 # we should only generate the composite once @@ -524,36 +524,36 @@ def test_load_dataset_after_composite2(self): load_mock = spy_decorator(FileYAMLReader.load) comp_mock = spy_decorator(FakeCompositor.__call__) mod_mock = spy_decorator(FakeModifier.__call__) - with mock.patch.object(FileYAMLReader, 'load', load_mock), \ - mock.patch.object(FakeCompositor, '__call__', comp_mock), \ - mock.patch.object(FakeModifier, '__call__', mod_mock): + with mock.patch.object(FileYAMLReader, "load", load_mock), \ + mock.patch.object(FakeCompositor, "__call__", comp_mock), \ + mock.patch.object(FakeModifier, "__call__", mod_mock): lmock = load_mock.mock - scene = 
Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp10']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp10"]) assert lmock.call_count == 1 loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - with mock.patch.object(scene, '_generate_composites_nodes_from_loaded_datasets', + with mock.patch.object(scene, "_generate_composites_nodes_from_loaded_datasets", wraps=scene._generate_composites_nodes_from_loaded_datasets) as m: - scene.load(['ds1']) + scene.load(["ds1"]) assert lmock.call_count == 2 loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 # this is the unmodified ds1 assert make_dataid( - name='ds1', resolution=250, calibration='reflectance', modifiers=tuple() + name="ds1", resolution=250, calibration="reflectance", modifiers=tuple() ) in loaded_ids # m.assert_called_once_with(set([scene._dependency_tree['ds1']])) m.assert_called_once_with(set()) - with mock.patch.object(scene, '_generate_composites_nodes_from_loaded_datasets', + with mock.patch.object(scene, "_generate_composites_nodes_from_loaded_datasets", wraps=scene._generate_composites_nodes_from_loaded_datasets) as m: - scene.load(['ds1']) + scene.load(["ds1"]) assert lmock.call_count == 2 loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 # this is the unmodified ds1 assert make_dataid( - name='ds1', resolution=250, calibration='reflectance', modifiers=tuple() + name="ds1", resolution=250, calibration="reflectance", modifiers=tuple() ) in loaded_ids m.assert_called_once_with(set()) # we should only generate the comp10 composite once but comp2 was also generated @@ -567,17 +567,17 @@ def test_load_dataset_after_composite2(self): def test_no_generate_comp10(self): """Test generating a composite after loading.""" # it is fine that an optional prereq doesn't exist - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp10'], generate=False) - assert any(ds_id['name'] == 'comp10' for ds_id in scene._wishlist) - assert 'comp10' not in scene._datasets + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp10"], generate=False) + assert any(ds_id["name"] == "comp10" for ds_id in scene._wishlist) + assert "comp10" not in scene._datasets # two dependencies should have been loaded assert len(scene._datasets) == 2 assert len(scene.missing_datasets) == 1 scene._generate_composites_from_loaded_datasets() - assert any(ds_id['name'] == 'comp10' for ds_id in scene._wishlist) - assert 'comp10' in scene._datasets + assert any(ds_id["name"] == "comp10" for ds_id in scene._wishlist) + assert "comp10" in scene._datasets assert not scene.missing_datasets def test_modified_with_wl_dep(self): @@ -592,10 +592,10 @@ def test_modified_with_wl_dep(self): # Check dependency tree nodes # initialize the dep tree without loading the data - ds1_mod_id = make_dsq(name='ds1', modifiers=('mod_wl',)) - ds3_mod_id = make_dsq(name='ds3', modifiers=('mod_wl',)) + ds1_mod_id = make_dsq(name="ds1", modifiers=("mod_wl",)) + ds3_mod_id = make_dsq(name="ds3", modifiers=("mod_wl",)) - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene._update_dependency_tree({ds1_mod_id, ds3_mod_id}, None) ds1_mod_node = scene._dependency_tree[ds1_mod_id] @@ -603,10 +603,10 @@ def test_modified_with_wl_dep(self): ds1_mod_dep_node = ds1_mod_node.data[1][1] ds3_mod_dep_node = ds3_mod_node.data[1][1] # mod_wl depends on the this node: - ds6_modded_node = 
scene._dependency_tree[make_dataid(name='ds6', modifiers=('mod1',))] + ds6_modded_node = scene._dependency_tree[make_dataid(name="ds6", modifiers=("mod1",))] # this dep should be full qualified with name and wavelength - assert ds6_modded_node.name['name'] is not None - assert isinstance(ds6_modded_node.name['wavelength'], WavelengthRange) + assert ds6_modded_node.name["name"] is not None + assert isinstance(ds6_modded_node.name["wavelength"], WavelengthRange) # the node should be shared between everything that uses it assert ds1_mod_dep_node is ds3_mod_dep_node assert ds1_mod_dep_node is ds6_modded_node @@ -621,25 +621,25 @@ def test_modified_with_wl_dep(self): def test_load_comp11_and_23(self): """Test loading two composites that depend on similar wavelengths.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") # mock the available comps/mods in the compositor loader avail_comps = scene.available_composite_ids() - assert make_cid(name='comp11') in avail_comps - assert make_cid(name='comp23') in avail_comps + assert make_cid(name="comp11") in avail_comps + assert make_cid(name="comp23") in avail_comps # it is fine that an optional prereq doesn't exist - scene.load(['comp11', 'comp23']) - comp11_node = scene._dependency_tree['comp11'] - comp23_node = scene._dependency_tree['comp23'] - assert comp11_node.data[1][-1].name['name'] == 'ds10' - assert comp23_node.data[1][0].name['name'] == 'ds8' + scene.load(["comp11", "comp23"]) + comp11_node = scene._dependency_tree["comp11"] + comp23_node = scene._dependency_tree["comp23"] + assert comp11_node.data[1][-1].name["name"] == "ds10" + assert comp23_node.data[1][0].name["name"] == "ds8" loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 - assert 'comp11' in scene - assert 'comp23' in scene + assert "comp11" in scene + assert "comp23" in scene def test_load_too_many(self): """Test dependency tree if too many reader keys match.""" - scene = Scene(filenames=['fake3_1.txt'], reader='fake3') + scene = Scene(filenames=["fake3_1.txt"], reader="fake3") avail_comps = scene.available_composite_ids() # static image => 1 assert len(avail_comps) == 1 @@ -660,8 +660,8 @@ def test_load_when_sensor_none_in_preloaded_dataarrays(self): def _scene_with_data_array_none_sensor(): - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene['my_data'] = _data_array_none_sensor("my_data") + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene["my_data"] = _data_array_none_sensor("my_data") return scene diff --git a/satpy/tests/scene_tests/test_resampling.py b/satpy/tests/scene_tests/test_resampling.py index 39f9a50092..286735c093 100644 --- a/satpy/tests/scene_tests/test_resampling.py +++ b/satpy/tests/scene_tests/test_resampling.py @@ -39,40 +39,40 @@ def test_crop(self): scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) - proj_dict = {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, - 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} + proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, + "lon_0": 0.0, "proj": "geos", "units": "m"} x_size = 3712 y_size = 3712 area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, x_size, y_size, area_extent, ) area_def2 = AreaDefinition( - 'test2', - 'test2', - 'test2', + "test2", + "test2", + "test2", proj_dict, x_size // 2, y_size // 2, area_extent, ) scene1["1"] = xr.DataArray(np.zeros((y_size, x_size))) - scene1["2"] = 
xr.DataArray(np.zeros((y_size, x_size)), dims=('y', 'x')) - scene1["3"] = xr.DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'), - attrs={'area': area_def}) - scene1["4"] = xr.DataArray(np.zeros((y_size // 2, x_size // 2)), dims=('y', 'x'), - attrs={'area': area_def2}) + scene1["2"] = xr.DataArray(np.zeros((y_size, x_size)), dims=("y", "x")) + scene1["3"] = xr.DataArray(np.zeros((y_size, x_size)), dims=("y", "x"), + attrs={"area": area_def}) + scene1["4"] = xr.DataArray(np.zeros((y_size // 2, x_size // 2)), dims=("y", "x"), + attrs={"area": area_def2}) # by area crop_area = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, x_size, y_size, @@ -80,33 +80,33 @@ def test_crop(self): area_extent[2] - 10000., area_extent[3] - 500000.) ) new_scn1 = scene1.crop(crop_area) - assert '1' in new_scn1 - assert '2' in new_scn1 - assert '3' in new_scn1 - assert new_scn1['1'].shape == (y_size, x_size) - assert new_scn1['2'].shape == (y_size, x_size) - assert new_scn1['3'].shape == (3380, 3708) - assert new_scn1['4'].shape == (1690, 1854) + assert "1" in new_scn1 + assert "2" in new_scn1 + assert "3" in new_scn1 + assert new_scn1["1"].shape == (y_size, x_size) + assert new_scn1["2"].shape == (y_size, x_size) + assert new_scn1["3"].shape == (3380, 3708) + assert new_scn1["4"].shape == (1690, 1854) # by lon/lat bbox new_scn1 = scene1.crop(ll_bbox=(-20., -5., 0, 0)) - assert '1' in new_scn1 - assert '2' in new_scn1 - assert '3' in new_scn1 - assert new_scn1['1'].shape == (y_size, x_size) - assert new_scn1['2'].shape == (y_size, x_size) - assert new_scn1['3'].shape == (184, 714) - assert new_scn1['4'].shape == (92, 357) + assert "1" in new_scn1 + assert "2" in new_scn1 + assert "3" in new_scn1 + assert new_scn1["1"].shape == (y_size, x_size) + assert new_scn1["2"].shape == (y_size, x_size) + assert new_scn1["3"].shape == (184, 714) + assert new_scn1["4"].shape == (92, 357) # by x/y bbox new_scn1 = scene1.crop(xy_bbox=(-200000., -100000., 0, 0)) - assert '1' in new_scn1 - assert '2' in new_scn1 - assert '3' in new_scn1 - assert new_scn1['1'].shape == (y_size, x_size) - assert new_scn1['2'].shape == (y_size, x_size) - assert new_scn1['3'].shape == (36, 70) - assert new_scn1['4'].shape == (18, 35) + assert "1" in new_scn1 + assert "2" in new_scn1 + assert "3" in new_scn1 + assert new_scn1["1"].shape == (y_size, x_size) + assert new_scn1["2"].shape == (y_size, x_size) + assert new_scn1["3"].shape == (36, 70) + assert new_scn1["4"].shape == (18, 35) def test_crop_epsg_crs(self): """Test the crop method when source area uses an EPSG code.""" @@ -117,18 +117,18 @@ def test_crop_epsg_crs(self): x_size = 3712 y_size = 3712 area_def = AreaDefinition( - 'test', 'test', 'test', + "test", "test", "test", "EPSG:32630", x_size, y_size, area_extent, ) - scene1["1"] = xr.DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'), - attrs={'area': area_def}) + scene1["1"] = xr.DataArray(np.zeros((y_size, x_size)), dims=("y", "x"), + attrs={"area": area_def}) # by x/y bbox new_scn1 = scene1.crop(xy_bbox=(719695.7781587119, 5427887.407618969, 725068.1609052602, 5433708.364368956)) - assert '1' in new_scn1 - assert new_scn1['1'].shape == (198, 182) + assert "1" in new_scn1 + assert new_scn1["1"].shape == (198, 182) def test_crop_rgb(self): """Test the crop method on multi-dimensional data.""" @@ -136,43 +136,43 @@ def test_crop_rgb(self): scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) - proj_dict = {'a': 6378169.0, 'b': 6356583.8, 
'h': 35785831.0, - 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} + proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, + "lon_0": 0.0, "proj": "geos", "units": "m"} x_size = 3712 y_size = 3712 area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, x_size, y_size, area_extent, ) area_def2 = AreaDefinition( - 'test2', - 'test2', - 'test2', + "test2", + "test2", + "test2", proj_dict, x_size // 2, y_size // 2, area_extent, ) scene1["1"] = xr.DataArray(np.zeros((3, y_size, x_size)), - dims=('bands', 'y', 'x'), - attrs={'area': area_def}) + dims=("bands", "y", "x"), + attrs={"area": area_def}) scene1["2"] = xr.DataArray(np.zeros((y_size // 2, 3, x_size // 2)), - dims=('y', 'bands', 'x'), - attrs={'area': area_def2}) + dims=("y", "bands", "x"), + attrs={"area": area_def2}) # by lon/lat bbox new_scn1 = scene1.crop(ll_bbox=(-20., -5., 0, 0)) - assert '1' in new_scn1 - assert '2' in new_scn1 - assert 'bands' in new_scn1['1'].dims - assert 'bands' in new_scn1['2'].dims - assert new_scn1['1'].shape == (3, 184, 714) - assert new_scn1['2'].shape == (92, 3, 357) + assert "1" in new_scn1 + assert "2" in new_scn1 + assert "bands" in new_scn1["1"].dims + assert "bands" in new_scn1["2"].dims + assert new_scn1["1"].shape == (3, 184, 714) + assert new_scn1["2"].shape == (92, 3, 357) @pytest.mark.usefixtures("include_test_etc") @@ -187,17 +187,17 @@ def _fake_resample_dataset_force_20x20(self, dataset, dest_area, **kwargs): """Return copy of dataset pretending it was resampled to (20, 20) shape.""" data = np.zeros((20, 20)) attrs = dataset.attrs.copy() - attrs['area'] = dest_area + attrs["area"] = dest_area return xr.DataArray( data, - dims=('y', 'x'), + dims=("y", "x"), attrs=attrs, ) - @mock.patch('satpy.scene.resample_dataset') - @pytest.mark.parametrize('datasets', [ + @mock.patch("satpy.scene.resample_dataset") + @pytest.mark.parametrize("datasets", [ None, - ('comp13', 'ds5', 'ds2'), + ("comp13", "ds5", "ds2"), ]) def test_resample_scene_copy(self, rs, datasets): """Test that the Scene is properly copied during resampling. @@ -209,26 +209,26 @@ def test_resample_scene_copy(self, rs, datasets): from pyresample.geometry import AreaDefinition rs.side_effect = self._fake_resample_dataset_force_20x20 - proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') - area_def = AreaDefinition('test', 'test', 'test', proj_str, 5, 5, (-1000., -1500., 1000., 1500.)) + proj_str = ("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. 
+lat_0=25 +lat_1=25 +units=m +no_defs") + area_def = AreaDefinition("test", "test", "test", proj_str, 5, 5, (-1000., -1500., 1000., 1500.)) area_def.get_area_slices = mock.MagicMock() - scene = Scene(filenames=['fake1_1.txt', 'fake1_highres_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt", "fake1_highres_1.txt"], reader="fake1") - scene.load(['comp19']) + scene.load(["comp19"]) new_scene = scene.resample(area_def, datasets=datasets) - new_scene['new_ds'] = new_scene['comp19'].copy() + new_scene["new_ds"] = new_scene["comp19"].copy() - scene.load(['ds1']) + scene.load(["ds1"]) - comp19_node = scene._dependency_tree['comp19'] - ds5_mod_id = make_dataid(name='ds5', modifiers=('res_change',)) + comp19_node = scene._dependency_tree["comp19"] + ds5_mod_id = make_dataid(name="ds5", modifiers=("res_change",)) ds5_node = scene._dependency_tree[ds5_mod_id] - comp13_node = scene._dependency_tree['comp13'] + comp13_node = scene._dependency_tree["comp13"] assert comp13_node.data[1][0] is comp19_node.data[1][0] assert comp13_node.data[1][0] is ds5_node - pytest.raises(KeyError, scene._dependency_tree.__getitem__, 'new_ds') + pytest.raises(KeyError, scene._dependency_tree.__getitem__, "new_ds") # comp19 required resampling to produce so we should have its 3 deps # 1. comp13 @@ -238,15 +238,15 @@ def test_resample_scene_copy(self, rs, datasets): # 4. ds1 loaded_ids = list(scene.keys()) assert len(loaded_ids) == 4 - for name in ('comp13', 'ds5', 'ds2', 'ds1'): - assert any(x['name'] == name for x in loaded_ids) + for name in ("comp13", "ds5", "ds2", "ds1"): + assert any(x["name"] == name for x in loaded_ids) loaded_ids = list(new_scene.keys()) assert len(loaded_ids) == 2 - assert loaded_ids[0] == make_cid(name='comp19') - assert loaded_ids[1] == make_cid(name='new_ds') + assert loaded_ids[0] == make_cid(name="comp19") + assert loaded_ids[1] == make_cid(name="new_ds") - @mock.patch('satpy.scene.resample_dataset') + @mock.patch("satpy.scene.resample_dataset") def test_resample_scene_preserves_requested_dependencies(self, rs): """Test that the Scene is properly copied during resampling. @@ -258,61 +258,61 @@ def test_resample_scene_preserves_requested_dependencies(self, rs): from pyresample.utils import proj4_str_to_dict rs.side_effect = self._fake_resample_dataset - proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 ' - '+units=m +no_defs') - area_def = AreaDefinition('test', 'test', 'test', proj_dict, 5, 5, (-1000., -1500., 1000., 1500.)) + proj_dict = proj4_str_to_dict("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. 
+lat_0=25 +lat_1=25 " + "+units=m +no_defs") + area_def = AreaDefinition("test", "test", "test", proj_dict, 5, 5, (-1000., -1500., 1000., 1500.)) area_def.get_area_slices = mock.MagicMock() - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") # Set PYTHONHASHSEED to 0 in the interpreter to test as intended (comp26 comes before comp14) - scene.load(['comp26', 'comp14'], generate=False) + scene.load(["comp26", "comp14"], generate=False) scene.resample(area_def, unload=True) new_scene_2 = scene.resample(area_def, unload=True) - assert 'comp14' not in scene - assert 'comp26' not in scene - assert 'comp14' in new_scene_2 - assert 'comp26' in new_scene_2 - assert 'ds1' not in new_scene_2 # unloaded + assert "comp14" not in scene + assert "comp26" not in scene + assert "comp14" in new_scene_2 + assert "comp26" in new_scene_2 + assert "ds1" not in new_scene_2 # unloaded - @mock.patch('satpy.scene.resample_dataset') + @mock.patch("satpy.scene.resample_dataset") def test_resample_reduce_data_toggle(self, rs): """Test that the Scene can be reduced or not reduced during resampling.""" from pyresample.geometry import AreaDefinition rs.side_effect = self._fake_resample_dataset_force_20x20 - proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') - target_area = AreaDefinition('test', 'test', 'test', proj_str, 4, 4, (-1000., -1500., 1000., 1500.)) - area_def = AreaDefinition('test', 'test', 'test', proj_str, 5, 5, (-1000., -1500., 1000., 1500.)) + proj_str = ("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs") + target_area = AreaDefinition("test", "test", "test", proj_str, 4, 4, (-1000., -1500., 1000., 1500.)) + area_def = AreaDefinition("test", "test", "test", proj_str, 5, 5, (-1000., -1500., 1000., 1500.)) area_def.get_area_slices = mock.MagicMock() get_area_slices = area_def.get_area_slices get_area_slices.return_value = (slice(0, 3, None), slice(0, 3, None)) - area_def_big = AreaDefinition('test', 'test', 'test', proj_str, 10, 10, (-1000., -1500., 1000., 1500.)) + area_def_big = AreaDefinition("test", "test", "test", proj_str, 10, 10, (-1000., -1500., 1000., 1500.)) area_def_big.get_area_slices = mock.MagicMock() get_area_slices_big = area_def_big.get_area_slices get_area_slices_big.return_value = (slice(0, 6, None), slice(0, 6, None)) # Test that data reduction can be disabled - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp19']) - scene['comp19'].attrs['area'] = area_def - scene['comp19_big'] = xr.DataArray( - da.zeros((10, 10)), dims=('y', 'x'), - attrs=scene['comp19'].attrs.copy()) - scene['comp19_big'].attrs['area'] = area_def_big - scene['comp19_copy'] = scene['comp19'].copy() + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp19"]) + scene["comp19"].attrs["area"] = area_def + scene["comp19_big"] = xr.DataArray( + da.zeros((10, 10)), dims=("y", "x"), + attrs=scene["comp19"].attrs.copy()) + scene["comp19_big"].attrs["area"] = area_def_big + scene["comp19_copy"] = scene["comp19"].copy() orig_slice_data = scene._slice_data # we force the below order of processing to test that success isn't # based on data of the same resolution being processed together test_order = [ - make_cid(**scene['comp19'].attrs), - make_cid(**scene['comp19_big'].attrs), - make_cid(**scene['comp19_copy'].attrs), + make_cid(**scene["comp19"].attrs), + make_cid(**scene["comp19_big"].attrs), + 
make_cid(**scene["comp19_copy"].attrs), ] - with mock.patch('satpy.scene.Scene._slice_data') as slice_data, \ - mock.patch('satpy.dataset.dataset_walker') as ds_walker: + with mock.patch("satpy.scene.Scene._slice_data") as slice_data, \ + mock.patch("satpy.dataset.dataset_walker") as ds_walker: ds_walker.return_value = test_order slice_data.side_effect = orig_slice_data scene.resample(target_area, reduce_data=False) @@ -332,24 +332,24 @@ def test_resample_ancillary(self): """Test that the Scene reducing data does not affect final output.""" from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict - proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 ' - '+units=m +no_defs') - area_def = AreaDefinition('test', 'test', 'test', proj_dict, 5, 5, (-1000., -1500., 1000., 1500.)) - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - - scene.load(['comp19', 'comp20']) - scene['comp19'].attrs['area'] = area_def - scene['comp19'].attrs['ancillary_variables'] = [scene['comp20']] - scene['comp20'].attrs['area'] = area_def - - dst_area = AreaDefinition('dst', 'dst', 'dst', + proj_dict = proj4_str_to_dict("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. +lat_0=25 +lat_1=25 " + "+units=m +no_defs") + area_def = AreaDefinition("test", "test", "test", proj_dict, 5, 5, (-1000., -1500., 1000., 1500.)) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + + scene.load(["comp19", "comp20"]) + scene["comp19"].attrs["area"] = area_def + scene["comp19"].attrs["ancillary_variables"] = [scene["comp20"]] + scene["comp20"].attrs["area"] = area_def + + dst_area = AreaDefinition("dst", "dst", "dst", proj_dict, 2, 2, (-1000., -1500., 0., 0.), ) new_scene = scene.resample(dst_area) - assert new_scene['comp20'] is new_scene['comp19'].attrs['ancillary_variables'][0] + assert new_scene["comp20"] is new_scene["comp19"].attrs["ancillary_variables"][0] def test_resample_multi_ancillary(self): """Test that multiple ancillary variables are retained after resampling. @@ -380,14 +380,14 @@ def test_resample_multi_ancillary(self): def test_resample_reduce_data(self): """Test that the Scene reducing data does not affect final output.""" from pyresample.geometry import AreaDefinition - proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') - area_def = AreaDefinition('test', 'test', 'test', proj_str, 20, 20, (-1000., -1500., 1000., 1500.)) - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - - scene.load(['comp19']) - scene['comp19'].attrs['area'] = area_def - dst_area = AreaDefinition('dst', 'dst', 'dst', + proj_str = ("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. 
+lat_0=25 +lat_1=25 +units=m +no_defs") + area_def = AreaDefinition("test", "test", "test", proj_str, 20, 20, (-1000., -1500., 1000., 1500.)) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + + scene.load(["comp19"]) + scene["comp19"].attrs["area"] = area_def + dst_area = AreaDefinition("dst", "dst", "dst", proj_str, 20, 20, (-1000., -1500., 0., 0.), @@ -395,24 +395,24 @@ def test_resample_reduce_data(self): new_scene1 = scene.resample(dst_area, reduce_data=False) new_scene2 = scene.resample(dst_area) new_scene3 = scene.resample(dst_area, reduce_data=True) - assert new_scene1['comp19'].shape == (20, 20, 3) - assert new_scene2['comp19'].shape == (20, 20, 3) - assert new_scene3['comp19'].shape == (20, 20, 3) + assert new_scene1["comp19"].shape == (20, 20, 3) + assert new_scene2["comp19"].shape == (20, 20, 3) + assert new_scene3["comp19"].shape == (20, 20, 3) - @mock.patch('satpy.scene.resample_dataset') + @mock.patch("satpy.scene.resample_dataset") def test_no_generate_comp10(self, rs): """Test generating a composite after loading.""" from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict rs.side_effect = self._fake_resample_dataset - proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 ' - '+units=m +no_defs') + proj_dict = proj4_str_to_dict("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. +lat_0=25 +lat_1=25 " + "+units=m +no_defs") area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, 200, 400, @@ -420,36 +420,36 @@ def test_no_generate_comp10(self, rs): ) # it is fine that an optional prereq doesn't exist - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp10'], generate=False) - assert any(ds_id['name'] == 'comp10' for ds_id in scene._wishlist) - assert 'comp10' not in scene + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp10"], generate=False) + assert any(ds_id["name"] == "comp10" for ds_id in scene._wishlist) + assert "comp10" not in scene # two dependencies should have been loaded assert len(scene._datasets) == 2 assert len(scene.missing_datasets) == 1 new_scn = scene.resample(area_def, generate=False) - assert 'comp10' not in scene + assert "comp10" not in scene # two dependencies should have been loaded assert len(scene._datasets) == 2 assert len(scene.missing_datasets) == 1 new_scn._generate_composites_from_loaded_datasets() - assert any(ds_id['name'] == 'comp10' for ds_id in new_scn._wishlist) - assert 'comp10' in new_scn + assert any(ds_id["name"] == "comp10" for ds_id in new_scn._wishlist) + assert "comp10" in new_scn assert not new_scn.missing_datasets # try generating them right away new_scn = scene.resample(area_def) - assert any(ds_id['name'] == 'comp10' for ds_id in new_scn._wishlist) - assert 'comp10' in new_scn + assert any(ds_id["name"] == "comp10" for ds_id in new_scn._wishlist) + assert "comp10" in new_scn assert not new_scn.missing_datasets def test_comp_loading_after_resampling_existing_sensor(self): """Test requesting a composite after resampling.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["ds1", "ds2"]) - new_scn = scene.resample(resampler='native') + new_scn = scene.resample(resampler="native") # Can't load from readers after resampling with pytest.raises(KeyError): @@ -463,9 +463,9 @@ def test_comp_loading_after_resampling_existing_sensor(self): def 
test_comp_loading_after_resampling_new_sensor(self): """Test requesting a composite after resampling when the sensor composites weren't loaded before.""" # this is our base Scene with sensor "fake_sensor2" - scene1 = Scene(filenames=['fake2_3ds_1.txt'], reader='fake2_3ds') + scene1 = Scene(filenames=["fake2_3ds_1.txt"], reader="fake2_3ds") scene1.load(["ds2"]) - new_scn = scene1.resample(resampler='native') + new_scn = scene1.resample(resampler="native") # Can't load from readers after resampling with pytest.raises(KeyError): @@ -534,16 +534,16 @@ def test_comps_need_resampling_optional_mod_deps(self): dependencies that aren't needed which fail. """ - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") # should require resampling - scene.load(['comp27', 'ds13']) - assert 'comp27' not in scene - assert 'ds13' in scene + scene.load(["comp27", "ds13"]) + assert "comp27" not in scene + assert "ds13" in scene - new_scene = scene.resample(resampler='native') + new_scene = scene.resample(resampler="native") assert len(list(new_scene.keys())) == 2 - assert 'comp27' in new_scene - assert 'ds13' in new_scene + assert "comp27" in new_scene + assert "ds13" in new_scene class TestSceneAggregation: @@ -556,7 +556,7 @@ def test_aggregate(self): scene1 = self._create_test_data(x_size, y_size) - scene2 = scene1.aggregate(func='sum', x=2, y=2) + scene2 = scene1.aggregate(func="sum", x=2, y=2) expected_aggregated_shape = (y_size / 2, x_size / 2) self._check_aggregation_results(expected_aggregated_shape, scene1, scene2, x_size, y_size) @@ -577,40 +577,40 @@ def _create_test_data(x_size, y_size): scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) - proj_dict = {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, - 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} + proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, + "lon_0": 0.0, "proj": "geos", "units": "m"} area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, x_size, y_size, area_extent, ) scene1["1"] = xr.DataArray(np.ones((y_size, x_size)), - attrs={'_satpy_id_keys': default_id_keys_config}) + attrs={"_satpy_id_keys": default_id_keys_config}) scene1["2"] = xr.DataArray(np.ones((y_size, x_size)), - dims=('y', 'x'), - attrs={'_satpy_id_keys': default_id_keys_config}) + dims=("y", "x"), + attrs={"_satpy_id_keys": default_id_keys_config}) scene1["3"] = xr.DataArray(np.ones((y_size, x_size)), - dims=('y', 'x'), - attrs={'area': area_def, '_satpy_id_keys': default_id_keys_config}) + dims=("y", "x"), + attrs={"area": area_def, "_satpy_id_keys": default_id_keys_config}) scene1["4"] = xr.DataArray(np.ones((y_size, x_size)), - dims=('y', 'x'), - attrs={'area': area_def, 'standard_name': 'backscatter', - '_satpy_id_keys': default_id_keys_config}) + dims=("y", "x"), + attrs={"area": area_def, "standard_name": "backscatter", + "_satpy_id_keys": default_id_keys_config}) return scene1 def _check_aggregation_results(self, expected_aggregated_shape, scene1, scene2, x_size, y_size): - assert scene1['1'] is scene2['1'] - assert scene1['2'] is scene2['2'] - np.testing.assert_allclose(scene2['3'].data, 4) - assert scene2['1'].shape == (y_size, x_size) - assert scene2['2'].shape == (y_size, x_size) - assert scene2['3'].shape == expected_aggregated_shape - assert 'standard_name' in scene2['4'].attrs - assert scene2['4'].attrs['standard_name'] == 'backscatter' + assert scene1["1"] is scene2["1"] + assert 
scene1["2"] is scene2["2"] + np.testing.assert_allclose(scene2["3"].data, 4) + assert scene2["1"].shape == (y_size, x_size) + assert scene2["2"].shape == (y_size, x_size) + assert scene2["3"].shape == expected_aggregated_shape + assert "standard_name" in scene2["4"].attrs + assert scene2["4"].attrs["standard_name"] == "backscatter" def test_aggregate_with_boundary(self): """Test aggregation with boundary argument.""" @@ -620,8 +620,8 @@ def test_aggregate_with_boundary(self): scene1 = self._create_test_data(x_size, y_size) with pytest.raises(ValueError): - scene1.aggregate(func='sum', x=2, y=2, boundary='exact') + scene1.aggregate(func="sum", x=2, y=2, boundary="exact") - scene2 = scene1.aggregate(func='sum', x=2, y=2, boundary='trim') + scene2 = scene1.aggregate(func="sum", x=2, y=2, boundary="trim") expected_aggregated_shape = (y_size // 2, x_size // 2) self._check_aggregation_results(expected_aggregated_shape, scene1, scene2, x_size, y_size) diff --git a/satpy/tests/scene_tests/test_saving.py b/satpy/tests/scene_tests/test_saving.py index 188d9c1e75..0781ae8796 100644 --- a/satpy/tests/scene_tests/test_saving.py +++ b/satpy/tests/scene_tests/test_saving.py @@ -37,69 +37,69 @@ def test_save_datasets_default(self, tmp_path): """Save a dataset using 'save_datasets'.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime(2018, 1, 1, 0, 0, 0)} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() - scn['test'] = ds1 + scn["test"] = ds1 scn.save_datasets(base_dir=tmp_path) - assert os.path.isfile(os.path.join(tmp_path, 'test_20180101_000000.tif')) + assert os.path.isfile(os.path.join(tmp_path, "test_20180101_000000.tif")) def test_save_datasets_by_ext(self, tmp_path): """Save a dataset using 'save_datasets' with 'filename'.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime(2018, 1, 1, 0, 0, 0)} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() - scn['test'] = ds1 + scn["test"] = ds1 from satpy.writers.simple_image import PillowWriter save_image_mock = spy_decorator(PillowWriter.save_image) - with mock.patch.object(PillowWriter, 'save_image', save_image_mock): - scn.save_datasets(base_dir=tmp_path, filename='{name}.png') + with mock.patch.object(PillowWriter, "save_image", save_image_mock): + scn.save_datasets(base_dir=tmp_path, filename="{name}.png") save_image_mock.mock.assert_called_once() - assert os.path.isfile(os.path.join(tmp_path, 'test.png')) + assert os.path.isfile(os.path.join(tmp_path, "test.png")) def test_save_datasets_bad_writer(self, tmp_path): """Save a dataset using 'save_datasets' and a bad writer.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow()} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow()} ) scn = Scene() - scn['test'] = ds1 + scn["test"] = ds1 pytest.raises(ValueError, scn.save_datasets, - writer='_bad_writer_', + writer="_bad_writer_", base_dir=tmp_path) def test_save_datasets_missing_wishlist(self, tmp_path): """Calling 'save_datasets' with no valid datasets.""" scn = Scene() - scn._wishlist.add(make_cid(name='true_color')) + scn._wishlist.add(make_cid(name="true_color")) pytest.raises(RuntimeError, scn.save_datasets, - writer='geotiff', + writer="geotiff", base_dir=tmp_path) pytest.raises(KeyError, 
scn.save_datasets, - datasets=['no_exist']) + datasets=["no_exist"]) def test_save_dataset_default(self, tmp_path): """Save a dataset using 'save_dataset'.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime(2018, 1, 1, 0, 0, 0)} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() - scn['test'] = ds1 - scn.save_dataset('test', base_dir=tmp_path) - assert os.path.isfile(os.path.join(tmp_path, 'test_20180101_000000.tif')) + scn["test"] = ds1 + scn.save_dataset("test", base_dir=tmp_path) + assert os.path.isfile(os.path.join(tmp_path, "test_20180101_000000.tif")) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index f056d2fa93..5c3ededd40 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -39,24 +39,24 @@ class TestMatchDataArrays(unittest.TestCase): """Test the utility method 'match_data_arrays'.""" - def _get_test_ds(self, shape=(50, 100), dims=('y', 'x')): + def _get_test_ds(self, shape=(50, 100), dims=("y", "x")): """Get a fake DataArray.""" from pyresample.geometry import AreaDefinition data = da.random.random(shape, chunks=25) area = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, - shape[dims.index('x')], shape[dims.index('y')], + "test", "test", "test", + {"proj": "eqc", "lon_0": 0.0, + "lat_0": 0.0}, + shape[dims.index("x")], shape[dims.index("y")], (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) - attrs = {'area': area} + attrs = {"area": area} return xr.DataArray(data, dims=dims, attrs=attrs) def test_single_ds(self): """Test a single dataset is returned unharmed.""" from satpy.composites import CompositeBase ds1 = self._get_test_ds() - comp = CompositeBase('test_comp') + comp = CompositeBase("test_comp") ret_datasets = comp.match_data_arrays((ds1,)) assert ret_datasets[0].identical(ds1) @@ -65,7 +65,7 @@ def test_mult_ds_area(self): from satpy.composites import CompositeBase ds1 = self._get_test_ds() ds2 = self._get_test_ds() - comp = CompositeBase('test_comp') + comp = CompositeBase("test_comp") ret_datasets = comp.match_data_arrays((ds1, ds2)) assert ret_datasets[0].identical(ds1) assert ret_datasets[1].identical(ds2) @@ -75,8 +75,8 @@ def test_mult_ds_no_area(self): from satpy.composites import CompositeBase ds1 = self._get_test_ds() ds2 = self._get_test_ds() - del ds2.attrs['area'] - comp = CompositeBase('test_comp') + del ds2.attrs["area"] + comp = CompositeBase("test_comp") self.assertRaises(ValueError, comp.match_data_arrays, (ds1, ds2)) def test_mult_ds_diff_area(self): @@ -86,13 +86,13 @@ def test_mult_ds_diff_area(self): from satpy.composites import CompositeBase, IncompatibleAreas ds1 = self._get_test_ds() ds2 = self._get_test_ds() - ds2.attrs['area'] = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, + ds2.attrs["area"] = AreaDefinition( + "test", "test", "test", + {"proj": "eqc", "lon_0": 0.0, + "lat_0": 0.0}, 100, 50, (-30037508.34, -20018754.17, 10037508.34, 18754.17)) - comp = CompositeBase('test_comp') + comp = CompositeBase("test_comp") self.assertRaises(IncompatibleAreas, comp.match_data_arrays, (ds1, ds2)) def test_mult_ds_diff_dims(self): @@ -101,9 +101,9 @@ def test_mult_ds_diff_dims(self): # x is still 50, y is still 100, even though they are in # different order - ds1 = self._get_test_ds(shape=(50, 100), dims=('y', 'x')) - ds2 = self._get_test_ds(shape=(3, 100, 50), dims=('bands', 
'x', 'y')) - comp = CompositeBase('test_comp') + ds1 = self._get_test_ds(shape=(50, 100), dims=("y", "x")) + ds2 = self._get_test_ds(shape=(3, 100, 50), dims=("bands", "x", "y")) + comp = CompositeBase("test_comp") ret_datasets = comp.match_data_arrays((ds1, ds2)) assert ret_datasets[0].identical(ds1) assert ret_datasets[1].identical(ds2) @@ -114,19 +114,19 @@ def test_mult_ds_diff_size(self): # x is 50 in this one, 100 in ds2 # y is 100 in this one, 50 in ds2 - ds1 = self._get_test_ds(shape=(50, 100), dims=('x', 'y')) - ds2 = self._get_test_ds(shape=(3, 50, 100), dims=('bands', 'y', 'x')) - comp = CompositeBase('test_comp') + ds1 = self._get_test_ds(shape=(50, 100), dims=("x", "y")) + ds2 = self._get_test_ds(shape=(3, 50, 100), dims=("bands", "y", "x")) + comp = CompositeBase("test_comp") self.assertRaises(IncompatibleAreas, comp.match_data_arrays, (ds1, ds2)) def test_nondimensional_coords(self): """Test the removal of non-dimensional coordinates when compositing.""" from satpy.composites import CompositeBase ds = self._get_test_ds(shape=(2, 2)) - ds['acq_time'] = ('y', [0, 1]) - comp = CompositeBase('test_comp') + ds["acq_time"] = ("y", [0, 1]) + comp = CompositeBase("test_comp") ret_datasets = comp.match_data_arrays([ds, ds]) - self.assertNotIn('acq_time', ret_datasets[0].coords) + self.assertNotIn("acq_time", ret_datasets[0].coords) class TestRatioSharpenedCompositors: @@ -135,88 +135,88 @@ class TestRatioSharpenedCompositors: def setup_method(self): """Create test data.""" from pyresample.geometry import AreaDefinition - area = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 2, 2, + area = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 2, 2, (-2000, -2000, 2000, 2000)) - attrs = {'area': area, - 'start_time': datetime(2018, 1, 1, 18), - 'modifiers': tuple(), - 'resolution': 1000, - 'calibration': 'reflectance', - 'units': '%', - 'name': 'test_vis'} + attrs = {"area": area, + "start_time": datetime(2018, 1, 1, 18), + "modifiers": tuple(), + "resolution": 1000, + "calibration": "reflectance", + "units": "%", + "name": "test_vis"} low_res_data = np.ones((2, 2), dtype=np.float64) + 4 low_res_data[1, 1] = 0.0 # produces infinite ratio ds1 = xr.DataArray(da.from_array(low_res_data, chunks=2), - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) self.ds1 = ds1 ds2 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 2, - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) - ds2.attrs['name'] += '2' + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) + ds2.attrs["name"] += "2" self.ds2 = ds2 ds3 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 3, - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) - ds3.attrs['name'] += '3' + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) + ds3.attrs["name"] += "3" self.ds3 = ds3 # high resolution version high_res_data = np.ones((2, 2), dtype=np.float64) high_res_data[1, 0] = np.nan # invalid value in one band ds4 = xr.DataArray(da.from_array(high_res_data, chunks=2), - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) - ds4.attrs['name'] += '4' - ds4.attrs['resolution'] = 500 + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) + ds4.attrs["name"] += "4" + ds4.attrs["resolution"] = 500 self.ds4 = ds4 # high resolution version - but too big ds4_big = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64), - attrs=attrs.copy(), 
dims=('y', 'x'), - coords={'y': [0, 1, 2, 3], 'x': [0, 1, 2, 3]}) - ds4_big.attrs['name'] += '4' - ds4_big.attrs['resolution'] = 500 - ds4_big.attrs['rows_per_scan'] = 1 - ds4_big.attrs['area'] = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 4, 4, + attrs=attrs.copy(), dims=("y", "x"), + coords={"y": [0, 1, 2, 3], "x": [0, 1, 2, 3]}) + ds4_big.attrs["name"] += "4" + ds4_big.attrs["resolution"] = 500 + ds4_big.attrs["rows_per_scan"] = 1 + ds4_big.attrs["area"] = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 4, 4, (-2000, -2000, 2000, 2000)) self.ds4_big = ds4_big @pytest.mark.parametrize( "init_kwargs", [ - {'high_resolution_band': "bad", 'neutral_resolution_band': "red"}, - {'high_resolution_band': "red", 'neutral_resolution_band': "bad"} + {"high_resolution_band": "bad", "neutral_resolution_band": "red"}, + {"high_resolution_band": "red", "neutral_resolution_band": "bad"} ] ) def test_bad_colors(self, init_kwargs): """Test that only valid band colors can be provided.""" from satpy.composites import RatioSharpenedRGB with pytest.raises(ValueError): - RatioSharpenedRGB(name='true_color', **init_kwargs) + RatioSharpenedRGB(name="true_color", **init_kwargs) def test_match_data_arrays(self): """Test that all areas have to be the same resolution.""" from satpy.composites import IncompatibleAreas, RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color') + comp = RatioSharpenedRGB(name="true_color") with pytest.raises(IncompatibleAreas): comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4_big,)) def test_more_than_three_datasets(self): """Test that only 3 datasets can be passed.""" from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color') + comp = RatioSharpenedRGB(name="true_color") with pytest.raises(ValueError): comp((self.ds1, self.ds2, self.ds3, self.ds1), optional_datasets=(self.ds4_big,)) def test_self_sharpened_no_high_res(self): """Test for exception when no high_res band is specified.""" from satpy.composites import SelfSharpenedRGB - comp = SelfSharpenedRGB(name='true_color', high_resolution_band=None) + comp = SelfSharpenedRGB(name="true_color", high_resolution_band=None) with pytest.raises(ValueError): comp((self.ds1, self.ds2, self.ds3)) @@ -266,7 +266,7 @@ def test_basic_no_sharpen(self): def test_ratio_sharpening(self, high_resolution_band, neutral_resolution_band, exp_r, exp_g, exp_b): """Test RatioSharpenedRGB by different groups of high_resolution_band and neutral_resolution_band.""" from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color', high_resolution_band=high_resolution_band, + comp = RatioSharpenedRGB(name="true_color", high_resolution_band=high_resolution_band, neutral_resolution_band=neutral_resolution_band) res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) @@ -291,7 +291,7 @@ def test_ratio_sharpening(self, high_resolution_band, neutral_resolution_band, e def test_self_sharpened_basic(self, exp_shape, exp_r, exp_g, exp_b): """Test that three datasets can be passed without optional high res.""" from satpy.composites import SelfSharpenedRGB - comp = SelfSharpenedRGB(name='true_color') + comp = SelfSharpenedRGB(name="true_color") res = comp((self.ds1, self.ds2, self.ds3)) data = res.values @@ -307,48 +307,48 @@ class TestDifferenceCompositor(unittest.TestCase): def setUp(self): """Create test data.""" from pyresample.geometry import AreaDefinition - area = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 2, 2, + area = 
AreaDefinition("test", "test", "test", + {"proj": "merc"}, 2, 2, (-2000, -2000, 2000, 2000)) - attrs = {'area': area, - 'start_time': datetime(2018, 1, 1, 18), - 'modifiers': tuple(), - 'resolution': 1000, - 'name': 'test_vis'} + attrs = {"area": area, + "start_time": datetime(2018, 1, 1, 18), + "modifiers": tuple(), + "resolution": 1000, + "name": "test_vis"} ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64), - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) self.ds1 = ds1 ds2 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 2, - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) - ds2.attrs['name'] += '2' + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) + ds2.attrs["name"] += "2" self.ds2 = ds2 # high res version ds2 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64) + 4, - attrs=attrs.copy(), dims=('y', 'x'), - coords={'y': [0, 1, 2, 3], 'x': [0, 1, 2, 3]}) - ds2.attrs['name'] += '2' - ds2.attrs['resolution'] = 500 - ds2.attrs['rows_per_scan'] = 1 - ds2.attrs['area'] = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 4, 4, + attrs=attrs.copy(), dims=("y", "x"), + coords={"y": [0, 1, 2, 3], "x": [0, 1, 2, 3]}) + ds2.attrs["name"] += "2" + ds2.attrs["resolution"] = 500 + ds2.attrs["rows_per_scan"] = 1 + ds2.attrs["area"] = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 4, 4, (-2000, -2000, 2000, 2000)) self.ds2_big = ds2 def test_basic_diff(self): """Test that a basic difference composite works.""" from satpy.composites import DifferenceCompositor - comp = DifferenceCompositor(name='diff', standard_name='temperature_difference') + comp = DifferenceCompositor(name="diff", standard_name="temperature_difference") res = comp((self.ds1, self.ds2)) np.testing.assert_allclose(res.values, -2) - assert res.attrs.get('standard_name') == 'temperature_difference' + assert res.attrs.get("standard_name") == "temperature_difference" def test_bad_areas_diff(self): """Test that a difference where resolutions are different fails.""" from satpy.composites import DifferenceCompositor, IncompatibleAreas - comp = DifferenceCompositor(name='diff') + comp = DifferenceCompositor(name="diff") # too many arguments self.assertRaises(ValueError, comp, (self.ds1, self.ds2, self.ds2_big)) # different resolution @@ -391,7 +391,7 @@ class TestDayNightCompositor(unittest.TestCase): def setUp(self): """Create test data.""" - bands = ['R', 'G', 'B'] + bands = ["R", "G", "B"] start_time = datetime(2018, 1, 1, 18, 0, 0) # RGB @@ -401,20 +401,20 @@ def setUp(self): a[:, 1, 0] = 0.3 a[:, 1, 1] = 0.4 a = da.from_array(a, a.shape) - self.data_a = xr.DataArray(a, attrs={'test': 'a', 'start_time': start_time}, - coords={'bands': bands}, dims=('bands', 'y', 'x')) + self.data_a = xr.DataArray(a, attrs={"test": "a", "start_time": start_time}, + coords={"bands": bands}, dims=("bands", "y", "x")) b = np.zeros((3, 2, 2), dtype=np.float64) b[:, 0, 0] = np.nan b[:, 0, 1] = 0.25 b[:, 1, 0] = 0.50 b[:, 1, 1] = 0.75 b = da.from_array(b, b.shape) - self.data_b = xr.DataArray(b, attrs={'test': 'b', 'start_time': start_time}, - coords={'bands': bands}, dims=('bands', 'y', 'x')) + self.data_b = xr.DataArray(b, attrs={"test": "b", "start_time": start_time}, + coords={"bands": bands}, dims=("bands", "y", "x")) sza = np.array([[80., 86.], [94., 100.]]) sza = da.from_array(sza, sza.shape) - self.sza = xr.DataArray(sza, dims=('y', 'x')) + self.sza = xr.DataArray(sza, 
dims=("y", "x")) # fake area my_area = AreaDefinition( @@ -423,15 +423,15 @@ def setUp(self): 2, 2, (-95.0, 40.0, -92.0, 43.0), ) - self.data_a.attrs['area'] = my_area - self.data_b.attrs['area'] = my_area + self.data_a.attrs["area"] = my_area + self.data_b.attrs["area"] = my_area # not used except to check that it matches the data arrays - self.sza.attrs['area'] = my_area + self.sza.attrs["area"] = my_area def test_daynight_sza(self): """Test compositor with both day and night portions when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_night") + comp = DayNightCompositor(name="dn_test", day_night="day_night") res = comp((self.data_a, self.data_b, self.sza)) res = res.compute() expected = np.array([[0., 0.22122352], [0.5, 1.]]) @@ -440,7 +440,7 @@ def test_daynight_sza(self): def test_daynight_area(self): """Test compositor both day and night portions when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_night") + comp = DayNightCompositor(name="dn_test", day_night="day_night") res = comp((self.data_a, self.data_b)) res = res.compute() expected_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) @@ -450,7 +450,7 @@ def test_daynight_area(self): def test_night_only_sza_with_alpha(self): """Test compositor with night portion with alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=True) + comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=True) res = comp((self.data_b, self.sza)) res = res.compute() expected_red_channel = np.array([[np.nan, 0.], [0.5, 1.]]) @@ -461,17 +461,17 @@ def test_night_only_sza_with_alpha(self): def test_night_only_sza_without_alpha(self): """Test compositor with night portion without alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=False) + comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=False) res = comp((self.data_a, self.sza)) res = res.compute() expected = np.array([[0., 0.11042631], [0.66835017, 1.]]) np.testing.assert_allclose(res.values[0], expected) - assert 'A' not in res.bands + assert "A" not in res.bands def test_night_only_area_with_alpha(self): """Test compositor with night portion with alpha band when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=True) + comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=True) res = comp((self.data_b,)) res = res.compute() expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]]) @@ -482,17 +482,17 @@ def test_night_only_area_with_alpha(self): def test_night_only_area_without_alpha(self): """Test compositor with night portion without alpha band when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=False) + comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=False) res = comp((self.data_b,)) res = res.compute() expected = np.array([[np.nan, 0.], [0., 0.]]) np.testing.assert_allclose(res.values[0], expected) - assert 'A' not in res.bands + assert "A" not in res.bands def 
test_day_only_sza_with_alpha(self): """Test compositor with day portion with alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) + comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_a, self.sza)) res = res.compute() expected_red_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) @@ -503,18 +503,18 @@ def test_day_only_sza_with_alpha(self): def test_day_only_sza_without_alpha(self): """Test compositor with day portion without alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=False) + comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=False) res = comp((self.data_a, self.sza)) res = res.compute() expected_channel_data = np.array([[0., 0.22122352], [0., 0.]]) for i in range(3): np.testing.assert_allclose(res.values[i], expected_channel_data) - assert 'A' not in res.bands + assert "A" not in res.bands def test_day_only_area_with_alpha(self): """Test compositor with day portion with alpha_band when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) + comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_a,)) res = res.compute() expected_l_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) @@ -525,7 +525,7 @@ def test_day_only_area_with_alpha(self): def test_day_only_area_with_alpha_and_missing_data(self): """Test compositor with day portion with alpha_band when SZA data is not provided and there is missing data.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) + comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_b,)) res = res.compute() expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]]) @@ -536,12 +536,12 @@ def test_day_only_area_with_alpha_and_missing_data(self): def test_day_only_area_without_alpha(self): """Test compositor with day portion without alpha_band when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=False) + comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=False) res = comp((self.data_a,)) res = res.compute() expected = np.array([[0., 0.33164983], [0.66835017, 1.]]) np.testing.assert_allclose(res.values[0], expected) - assert 'A' not in res.bands + assert "A" not in res.bands class TestFillingCompositor(unittest.TestCase): @@ -550,15 +550,15 @@ class TestFillingCompositor(unittest.TestCase): def test_fill(self): """Test filling.""" from satpy.composites import FillingCompositor - comp = FillingCompositor(name='fill_test') + comp = FillingCompositor(name="fill_test") filler = xr.DataArray(np.array([1, 2, 3, 4, 3, 2, 1])) red = xr.DataArray(np.array([1, 2, 3, np.nan, 3, 2, 1])) green = xr.DataArray(np.array([np.nan, 2, 3, 4, 3, 2, np.nan])) blue = xr.DataArray(np.array([4, 3, 2, 1, 2, 3, 4])) res = comp([filler, red, green, blue]) - np.testing.assert_allclose(res.sel(bands='R').data, filler.data) - np.testing.assert_allclose(res.sel(bands='G').data, filler.data) - 
np.testing.assert_allclose(res.sel(bands='B').data, blue.data) + np.testing.assert_allclose(res.sel(bands="R").data, filler.data) + np.testing.assert_allclose(res.sel(bands="G").data, filler.data) + np.testing.assert_allclose(res.sel(bands="B").data, blue.data) class TestMultiFiller(unittest.TestCase): @@ -567,7 +567,7 @@ class TestMultiFiller(unittest.TestCase): def test_fill(self): """Test filling.""" from satpy.composites import MultiFiller - comp = MultiFiller(name='fill_test') + comp = MultiFiller(name="fill_test") attrs = {"units": "K"} a = xr.DataArray(np.array([1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan]), attrs=attrs.copy()) b = xr.DataArray(np.array([np.nan, 2, 3, np.nan, np.nan, np.nan, np.nan]), attrs=attrs.copy()) @@ -587,29 +587,29 @@ class TestLuminanceSharpeningCompositor(unittest.TestCase): def test_compositor(self): """Test luminance sharpening compositor.""" from satpy.composites import LuminanceSharpeningCompositor - comp = LuminanceSharpeningCompositor(name='test') + comp = LuminanceSharpeningCompositor(name="test") # Three shades of grey rgb_arr = np.array([1, 50, 100, 200, 1, 50, 100, 200, 1, 50, 100, 200]) rgb = xr.DataArray(rgb_arr.reshape((3, 2, 2)), - dims=['bands', 'y', 'x'], coords={'bands': ['R', 'G', 'B']}) + dims=["bands", "y", "x"], coords={"bands": ["R", "G", "B"]}) # 100 % luminance -> all result values ~1.0 lum = xr.DataArray(np.array([[100., 100.], [100., 100.]]), - dims=['y', 'x']) + dims=["y", "x"]) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 1., atol=1e-9) # 50 % luminance, all result values ~0.5 lum = xr.DataArray(np.array([[50., 50.], [50., 50.]]), - dims=['y', 'x']) + dims=["y", "x"]) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 0.5, atol=1e-9) # 30 % luminance, all result values ~0.3 lum = xr.DataArray(np.array([[30., 30.], [30., 30.]]), - dims=['y', 'x']) + dims=["y", "x"]) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 0.3, atol=1e-9) # 0 % luminance, all values ~0.0 lum = xr.DataArray(np.array([[0., 0.], [0., 0.]]), - dims=['y', 'x']) + dims=["y", "x"]) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 0.0, atol=1e-9) @@ -621,29 +621,29 @@ class TestSandwichCompositor: @pytest.mark.parametrize( "input_shape,bands", [ - ((3, 2, 2), ['R', 'G', 'B']), - ((4, 2, 2), ['R', 'G', 'B', 'A']) + ((3, 2, 2), ["R", "G", "B"]), + ((4, 2, 2), ["R", "G", "B", "A"]) ] ) - @mock.patch('satpy.composites.enhance2dataset') + @mock.patch("satpy.composites.enhance2dataset") def test_compositor(self, e2d, input_shape, bands): """Test luminance sharpening compositor.""" from satpy.composites import SandwichCompositor rgb_arr = da.from_array(np.random.random(input_shape), chunks=2) - rgb = xr.DataArray(rgb_arr, dims=['bands', 'y', 'x'], - coords={'bands': bands}) + rgb = xr.DataArray(rgb_arr, dims=["bands", "y", "x"], + coords={"bands": bands}) lum_arr = da.from_array(100 * np.random.random((2, 2)), chunks=2) - lum = xr.DataArray(lum_arr, dims=['y', 'x']) + lum = xr.DataArray(lum_arr, dims=["y", "x"]) # Make enhance2dataset return unmodified dataset e2d.return_value = rgb - comp = SandwichCompositor(name='test') + comp = SandwichCompositor(name="test") res = comp([lum, rgb]) for band in rgb: - if band.bands != 'A': + if band.bands != "A": # Check compositor has modified this band np.testing.assert_allclose(res.loc[band.bands].to_numpy(), band.to_numpy() * lum_arr / 100.) 
@@ -661,32 +661,32 @@ class TestInlineComposites(unittest.TestCase): def test_inline_composites(self): """Test that inline composites are working.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors - comps = load_compositor_configs_for_sensors(['visir'])[0] + comps = load_compositor_configs_for_sensors(["visir"])[0] # Check that "fog" product has all its prerequisites defined - keys = comps['visir'].keys() - fog = [comps['visir'][dsid] for dsid in keys if "fog" == dsid['name']][0] - self.assertEqual(fog.attrs['prerequisites'][0]['name'], '_fog_dep_0') - self.assertEqual(fog.attrs['prerequisites'][1]['name'], '_fog_dep_1') - self.assertEqual(fog.attrs['prerequisites'][2], 10.8) + keys = comps["visir"].keys() + fog = [comps["visir"][dsid] for dsid in keys if "fog" == dsid["name"]][0] + self.assertEqual(fog.attrs["prerequisites"][0]["name"], "_fog_dep_0") + self.assertEqual(fog.attrs["prerequisites"][1]["name"], "_fog_dep_1") + self.assertEqual(fog.attrs["prerequisites"][2], 10.8) # Check that the sub-composite dependencies use wavelengths # (numeric values) - keys = comps['visir'].keys() - fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid['name']] - self.assertEqual(comps['visir'][fog_dep_ids[0]].attrs['prerequisites'], + keys = comps["visir"].keys() + fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid["name"]] + self.assertEqual(comps["visir"][fog_dep_ids[0]].attrs["prerequisites"], [12.0, 10.8]) - self.assertEqual(comps['visir'][fog_dep_ids[1]].attrs['prerequisites'], + self.assertEqual(comps["visir"][fog_dep_ids[1]].attrs["prerequisites"], [10.8, 8.7]) # Check the same for SEVIRI and verify channel names are used # in the sub-composite dependencies instead of wavelengths - comps = load_compositor_configs_for_sensors(['seviri'])[0] - keys = comps['seviri'].keys() - fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid['name']] - self.assertEqual(comps['seviri'][fog_dep_ids[0]].attrs['prerequisites'], - ['IR_120', 'IR_108']) - self.assertEqual(comps['seviri'][fog_dep_ids[1]].attrs['prerequisites'], - ['IR_108', 'IR_087']) + comps = load_compositor_configs_for_sensors(["seviri"])[0] + keys = comps["seviri"].keys() + fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid["name"]] + self.assertEqual(comps["seviri"][fog_dep_ids[0]].attrs["prerequisites"], + ["IR_120", "IR_108"]) + self.assertEqual(comps["seviri"][fog_dep_ids[1]].attrs["prerequisites"], + ["IR_108", "IR_087"]) class TestColormapCompositor(unittest.TestCase): @@ -695,7 +695,7 @@ class TestColormapCompositor(unittest.TestCase): def setUp(self): """Set up the test case.""" from satpy.composites import ColormapCompositor - self.colormap_compositor = ColormapCompositor('test_cmap_compositor') + self.colormap_compositor = ColormapCompositor("test_cmap_compositor") def test_build_colormap_with_int_data_and_without_meanings(self): """Test colormap building.""" @@ -707,8 +707,8 @@ def test_build_colormap_with_int_data_and_without_meanings(self): def test_build_colormap_with_int_data_and_with_meanings(self): """Test colormap building.""" palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), - dims=['value', 'band']) - palette.attrs['palette_meanings'] = [2, 3, 4] + dims=["value", "band"]) + palette.attrs["palette_meanings"] = [2, 3, 4] colormap, squeezed_palette = self.colormap_compositor.build_colormap(palette, np.uint8, {}) self.assertTrue(np.allclose(colormap.values, [2, 3, 4])) self.assertTrue(np.allclose(squeezed_palette, palette / 255.0)) @@ -720,12 
+720,12 @@ class TestPaletteCompositor(unittest.TestCase): def test_call(self): """Test palette compositing.""" from satpy.composites import PaletteCompositor - cmap_comp = PaletteCompositor('test_cmap_compositor') + cmap_comp = PaletteCompositor("test_cmap_compositor") palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), - dims=['value', 'band']) - palette.attrs['palette_meanings'] = [2, 3, 4] + dims=["value", "band"]) + palette.attrs["palette_meanings"] = [2, 3, 4] - data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, 4]], dtype=np.uint8)), dims=['y', 'x']) + data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, 4]], dtype=np.uint8)), dims=["y", "x"]) res = cmap_comp([data, palette]) exp = np.array([[[1., 0.498039, 0.], [0., 0.498039, 1.]], @@ -742,15 +742,15 @@ class TestColorizeCompositor(unittest.TestCase): def test_colorize_no_fill(self): """Test colorizing.""" from satpy.composites import ColorizeCompositor - colormap_composite = ColorizeCompositor('test_color_compositor') + colormap_composite = ColorizeCompositor("test_color_compositor") palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), - dims=['value', 'band']) - palette.attrs['palette_meanings'] = [2, 3, 4] + dims=["value", "band"]) + palette.attrs["palette_meanings"] = [2, 3, 4] data = xr.DataArray(np.array([[4, 3, 2], [2, 3, 4]], dtype=np.uint8), - dims=['y', 'x']) + dims=["y", "x"]) res = colormap_composite([data, palette]) exp = np.array([[[1., 0.498039, 0.], [0., 0.498039, 1.]], @@ -763,15 +763,15 @@ def test_colorize_no_fill(self): def test_colorize_with_interpolation(self): """Test colorizing with interpolation.""" from satpy.composites import ColorizeCompositor - colormap_composite = ColorizeCompositor('test_color_compositor') + colormap_composite = ColorizeCompositor("test_color_compositor") palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), - dims=['value', 'band']) - palette.attrs['palette_meanings'] = [2, 3, 4] + dims=["value", "band"]) + palette.attrs["palette_meanings"] = [2, 3, 4] data = xr.DataArray(da.from_array(np.array([[4, 3, 2.5], [2, 3.2, 4]])), - dims=['y', 'x'], - attrs={'valid_range': np.array([2, 4])}) + dims=["y", "x"], + attrs={"valid_range": np.array([2, 4])}) res = colormap_composite([data, palette]) exp = np.array([[[1.0, 0.498039, 0.246575], [0., 0.59309977, 1.0]], @@ -788,7 +788,7 @@ class TestCloudCompositorWithoutCloudfree: def setup_method(self): """Set up the test case.""" from satpy.composites.cloud_products import CloudCompositorWithoutCloudfree - self.colormap_composite = CloudCompositorWithoutCloudfree('test_cmap_compositor') + self.colormap_composite = CloudCompositorWithoutCloudfree("test_cmap_compositor") self.exp = np.array([[4, 3, 2], [2, 3, np.nan], [8, 7, 655350]]) self.exp_bad_oc = np.array([[4, 3, 2], @@ -797,45 +797,45 @@ def setup_method(self): def test_call_numpy_with_invalid_value_in_status(self): """Test the CloudCompositorWithoutCloudfree composite generation.""" - status = xr.DataArray(np.array([[0, 0, 0], [0, 0, 65535], [0, 0, 1]]), dims=['y', 'x'], - attrs={'_FillValue': 65535}) + status = xr.DataArray(np.array([[0, 0, 0], [0, 0, 65535], [0, 0, 1]]), dims=["y", "x"], + attrs={"_FillValue": 65535}) data = xr.DataArray(np.array([[4, 3, 2], [2, 3, np.nan], [8, 7, np.nan]], dtype=np.float32), - dims=['y', 'x'], - attrs={'_FillValue': 65535, - 'scaled_FillValue': 655350}) + dims=["y", "x"], + attrs={"_FillValue": 65535, + "scaled_FillValue": 655350}) res = 
self.colormap_composite([data, status]) np.testing.assert_allclose(res, self.exp, atol=1e-4) def test_call_dask_with_invalid_value_in_status(self): """Test the CloudCompositorWithoutCloudfree composite generation.""" - status = xr.DataArray(da.from_array(np.array([[0, 0, 0], [0, 0, 65535], [0, 0, 1]])), dims=['y', 'x'], - attrs={'_FillValue': 65535}) + status = xr.DataArray(da.from_array(np.array([[0, 0, 0], [0, 0, 65535], [0, 0, 1]])), dims=["y", "x"], + attrs={"_FillValue": 65535}) data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, np.nan], [8, 7, np.nan]], dtype=np.float32)), - dims=['y', 'x'], - attrs={'_FillValue': 99, - 'scaled_FillValue': 655350}) + dims=["y", "x"], + attrs={"_FillValue": 99, + "scaled_FillValue": 655350}) res = self.colormap_composite([data, status]) np.testing.assert_allclose(res, self.exp, atol=1e-4) def test_call_bad_optical_conditions(self): """Test the CloudCompositorWithoutCloudfree composite generation.""" - status = xr.DataArray(da.from_array(np.array([[0, 0, 0], [3, 3, 3], [0, 0, 1]])), dims=['y', 'x'], - attrs={'_FillValue': 65535, - "flag_meanings": 'bad_optical_conditions'}) + status = xr.DataArray(da.from_array(np.array([[0, 0, 0], [3, 3, 3], [0, 0, 1]])), dims=["y", "x"], + attrs={"_FillValue": 65535, + "flag_meanings": "bad_optical_conditions"}) data = xr.DataArray(np.array([[4, 3, 2], [2, 255, 4], [255, 7, 255]], dtype=np.uint8), - dims=['y', 'x'], - name='cmic_cre', - attrs={'_FillValue': 255, - 'scaled_FillValue': 255}) + dims=["y", "x"], + name="cmic_cre", + attrs={"_FillValue": 255, + "scaled_FillValue": 255}) res = self.colormap_composite([data, status]) np.testing.assert_allclose(res, self.exp_bad_oc, atol=1e-4) def test_bad_indata(self): """Test the CloudCompositorWithoutCloudfree composite generation without status.""" data = xr.DataArray(np.array([[4, 3, 2], [2, 3, 4], [255, 7, 255]], dtype=np.uint8), - dims=['y', 'x'], - attrs={'_FillValue': 255, - 'scaled_FillValue': 255}) + dims=["y", "x"], + attrs={"_FillValue": 255, + "scaled_FillValue": 255}) np.testing.assert_raises(ValueError, self.colormap_composite, [data]) @@ -852,36 +852,36 @@ def setup_method(self): self.exp_b = np.array([[4, 3, 2], [2, 3, 255], [np.nan, np.nan, np.nan]]) - self.colormap_composite = CloudCompositorCommonMask('test_cmap_compositor') + self.colormap_composite = CloudCompositorCommonMask("test_cmap_compositor") def test_call_numpy(self): """Test the CloudCompositorCommonMask with numpy.""" - mask = xr.DataArray(np.array([[0, 0, 0], [1, 1, 1], [255, 255, 255]]), dims=['y', 'x'], - attrs={'_FillValue': 255}) + mask = xr.DataArray(np.array([[0, 0, 0], [1, 1, 1], [255, 255, 255]]), dims=["y", "x"], + attrs={"_FillValue": 255}) data = xr.DataArray(np.array([[4, 3, 2], [2, 3, np.nan], [np.nan, np.nan, np.nan]], dtype=np.float32), - dims=['y', 'x'], - attrs={'_FillValue': 65535, - 'scaled_FillValue': 655350}) + dims=["y", "x"], + attrs={"_FillValue": 65535, + "scaled_FillValue": 655350}) res = self.colormap_composite([data, mask]) np.testing.assert_allclose(res, self.exp_a, atol=1e-4) def test_call_dask(self): """Test the CloudCompositorCommonMask with dask.""" - mask = xr.DataArray(da.from_array(np.array([[0, 0, 0], [1, 1, 1], [255, 255, 255]])), dims=['y', 'x'], - attrs={'_FillValue': 255}) + mask = xr.DataArray(da.from_array(np.array([[0, 0, 0], [1, 1, 1], [255, 255, 255]])), dims=["y", "x"], + attrs={"_FillValue": 255}) data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, 255], [255, 255, 255]], dtype=np.int16)), - dims=['y', 'x'], - 
attrs={'_FillValue': 255, - 'scaled_FillValue': 255}) + dims=["y", "x"], + attrs={"_FillValue": 255, + "scaled_FillValue": 255}) res = self.colormap_composite([data, mask]) np.testing.assert_allclose(res, self.exp_b, atol=1e-4) def test_bad_call(self): """Test the CloudCompositorCommonMask without mask.""" data = xr.DataArray(np.array([[4, 3, 2], [2, 3, 255], [255, 255, 255]], dtype=np.int16), - dims=['y', 'x'], - attrs={'_FillValue': 255, - 'scaled_FillValue': 255}) + dims=["y", "x"], + attrs={"_FillValue": 255, + "scaled_FillValue": 255}) np.testing.assert_raises(ValueError, self.colormap_composite, [data]) @@ -891,16 +891,16 @@ class TestPrecipCloudsCompositor(unittest.TestCase): def test_call(self): """Test the precip composite generation.""" from satpy.composites.cloud_products import PrecipCloudsRGB - colormap_compositor = PrecipCloudsRGB('test_precip_compositor') + colormap_compositor = PrecipCloudsRGB("test_precip_compositor") data_light = xr.DataArray(np.array([[80, 70, 60, 0], [20, 30, 40, 255]], dtype=np.uint8), - dims=['y', 'x'], attrs={'_FillValue': 255}) + dims=["y", "x"], attrs={"_FillValue": 255}) data_moderate = xr.DataArray(np.array([[60, 50, 40, 0], [20, 30, 40, 255]], dtype=np.uint8), - dims=['y', 'x'], attrs={'_FillValue': 255}) + dims=["y", "x"], attrs={"_FillValue": 255}) data_intense = xr.DataArray(np.array([[40, 30, 20, 0], [20, 30, 40, 255]], dtype=np.uint8), - dims=['y', 'x'], attrs={'_FillValue': 255}) + dims=["y", "x"], attrs={"_FillValue": 255}) data_flags = xr.DataArray(np.array([[0, 0, 4, 0], [0, 0, 0, 0]], dtype=np.uint8), - dims=['y', 'x']) + dims=["y", "x"]) res = colormap_compositor([data_light, data_moderate, data_intense, data_flags]) exp = np.array([[[0.24313725, 0.18235294, 0.12156863, np.nan], @@ -919,35 +919,35 @@ class TestSingleBandCompositor(unittest.TestCase): def setUp(self): """Create test data.""" from satpy.composites import SingleBandCompositor - self.comp = SingleBandCompositor(name='test') + self.comp = SingleBandCompositor(name="test") all_valid = np.ones((2, 2)) - self.all_valid = xr.DataArray(all_valid, dims=['y', 'x']) + self.all_valid = xr.DataArray(all_valid, dims=["y", "x"]) def test_call(self): """Test calling the compositor.""" # Dataset with extra attributes all_valid = self.all_valid - all_valid.attrs['sensor'] = 'foo' + all_valid.attrs["sensor"] = "foo" attrs = { - 'foo': 'bar', - 'resolution': 333, - 'units': 'K', - 'sensor': {'fake_sensor1', 'fake_sensor2'}, - 'calibration': 'BT', - 'wavelength': 10.8 + "foo": "bar", + "resolution": 333, + "units": "K", + "sensor": {"fake_sensor1", "fake_sensor2"}, + "calibration": "BT", + "wavelength": 10.8 } - self.comp.attrs['resolution'] = None + self.comp.attrs["resolution"] = None res = self.comp([all_valid], **attrs) # Verify attributes - self.assertEqual(res.attrs.get('sensor'), 'foo') - self.assertTrue('foo' in res.attrs) - self.assertEqual(res.attrs.get('foo'), 'bar') - self.assertTrue('units' in res.attrs) - self.assertTrue('calibration' in res.attrs) - self.assertFalse('modifiers' in res.attrs) - self.assertEqual(res.attrs['wavelength'], 10.8) - self.assertEqual(res.attrs['resolution'], 333) + self.assertEqual(res.attrs.get("sensor"), "foo") + self.assertTrue("foo" in res.attrs) + self.assertEqual(res.attrs.get("foo"), "bar") + self.assertTrue("units" in res.attrs) + self.assertTrue("calibration" in res.attrs) + self.assertFalse("modifiers" in res.attrs) + self.assertEqual(res.attrs["wavelength"], 10.8) + self.assertEqual(res.attrs["resolution"], 333) class 
TestCategoricalDataCompositor(unittest.TestCase): @@ -955,9 +955,9 @@ class TestCategoricalDataCompositor(unittest.TestCase): def setUp(self): """Create test data.""" - attrs = {'name': 'foo'} + attrs = {"name": "foo"} data = xr.DataArray(da.from_array([[2., 1.], [3., 0.]]), attrs=attrs, - dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) + dims=("y", "x"), coords={"y": [0, 1], "x": [0, 1]}) self.data = data @@ -965,20 +965,20 @@ def test_basic_recategorization(self): """Test general functionality of compositor incl. attributes.""" from satpy.composites import CategoricalDataCompositor lut = [np.nan, 0, 1, 1] - name = 'bar' + name = "bar" comp = CategoricalDataCompositor(name=name, lut=lut) res = comp([self.data]) res = res.compute() expected = np.array([[1., 0.], [1., np.nan]]) np.testing.assert_equal(res.values, expected) - np.testing.assert_equal(res.attrs['name'], name) - np.testing.assert_equal(res.attrs['composite_lut'], lut) + np.testing.assert_equal(res.attrs["name"], name) + np.testing.assert_equal(res.attrs["composite_lut"], lut) def test_too_many_datasets(self): """Test that ValueError is raised if more than one dataset is provided.""" from satpy.composites import CategoricalDataCompositor lut = [np.nan, 0, 1, 1] - comp = CategoricalDataCompositor(name='foo', lut=lut) + comp = CategoricalDataCompositor(name="foo", lut=lut) np.testing.assert_raises(ValueError, comp, [self.data, self.data]) @@ -988,19 +988,19 @@ class TestGenericCompositor(unittest.TestCase): def setUp(self): """Create test data.""" from satpy.composites import GenericCompositor - self.comp = GenericCompositor(name='test') - self.comp2 = GenericCompositor(name='test2', common_channel_mask=False) + self.comp = GenericCompositor(name="test") + self.comp2 = GenericCompositor(name="test2", common_channel_mask=False) all_valid = np.ones((1, 2, 2)) - self.all_valid = xr.DataArray(all_valid, dims=['bands', 'y', 'x']) + self.all_valid = xr.DataArray(all_valid, dims=["bands", "y", "x"]) first_invalid = np.reshape(np.array([np.nan, 1., 1., 1.]), (1, 2, 2)) self.first_invalid = xr.DataArray(first_invalid, - dims=['bands', 'y', 'x']) + dims=["bands", "y", "x"]) second_invalid = np.reshape(np.array([1., np.nan, 1., 1.]), (1, 2, 2)) self.second_invalid = xr.DataArray(second_invalid, - dims=['bands', 'y', 'x']) + dims=["bands", "y", "x"]) wrong_shape = np.reshape(np.array([1., 1., 1.]), (1, 3, 1)) - self.wrong_shape = xr.DataArray(wrong_shape, dims=['bands', 'y', 'x']) + self.wrong_shape = xr.DataArray(wrong_shape, dims=["bands", "y", "x"]) def test_masking(self): """Test masking in generic compositor.""" @@ -1021,49 +1021,49 @@ def test_masking(self): def test_concat_datasets(self): """Test concatenation of datasets.""" from satpy.composites import IncompatibleAreas - res = self.comp._concat_datasets([self.all_valid], 'L') + res = self.comp._concat_datasets([self.all_valid], "L") num_bands = len(res.bands) self.assertEqual(num_bands, 1) self.assertEqual(res.shape[0], num_bands) - self.assertEqual(res.bands[0], 'L') - res = self.comp._concat_datasets([self.all_valid, self.all_valid], 'LA') + self.assertEqual(res.bands[0], "L") + res = self.comp._concat_datasets([self.all_valid, self.all_valid], "LA") num_bands = len(res.bands) self.assertEqual(num_bands, 2) self.assertEqual(res.shape[0], num_bands) - self.assertEqual(res.bands[0], 'L') - self.assertEqual(res.bands[1], 'A') + self.assertEqual(res.bands[0], "L") + self.assertEqual(res.bands[1], "A") self.assertRaises(IncompatibleAreas, self.comp._concat_datasets, - 
[self.all_valid, self.wrong_shape], 'LA') + [self.all_valid, self.wrong_shape], "LA") def test_get_sensors(self): """Test getting sensors from the dataset attributes.""" res = self.comp._get_sensors([self.all_valid]) self.assertIsNone(res) dset1 = self.all_valid - dset1.attrs['sensor'] = 'foo' + dset1.attrs["sensor"] = "foo" res = self.comp._get_sensors([dset1]) - self.assertEqual(res, 'foo') + self.assertEqual(res, "foo") dset2 = self.first_invalid - dset2.attrs['sensor'] = 'bar' + dset2.attrs["sensor"] = "bar" res = self.comp._get_sensors([dset1, dset2]) - self.assertIn('foo', res) - self.assertIn('bar', res) + self.assertIn("foo", res) + self.assertIn("bar", res) self.assertEqual(len(res), 2) self.assertIsInstance(res, set) - @mock.patch('satpy.composites.GenericCompositor._get_sensors') - @mock.patch('satpy.composites.combine_metadata') - @mock.patch('satpy.composites.check_times') - @mock.patch('satpy.composites.GenericCompositor.match_data_arrays') + @mock.patch("satpy.composites.GenericCompositor._get_sensors") + @mock.patch("satpy.composites.combine_metadata") + @mock.patch("satpy.composites.check_times") + @mock.patch("satpy.composites.GenericCompositor.match_data_arrays") def test_call_with_mock(self, match_data_arrays, check_times, combine_metadata, get_sensors): """Test calling generic compositor.""" from satpy.composites import IncompatibleAreas combine_metadata.return_value = dict() - get_sensors.return_value = 'foo' + get_sensors.return_value = "foo" # One dataset, no mode given res = self.comp([self.all_valid]) self.assertEqual(res.shape[0], 1) - self.assertEqual(res.attrs['mode'], 'L') + self.assertEqual(res.attrs["mode"], "L") match_data_arrays.assert_not_called() # This compositor has been initialized without common masking, so the # masking shouldn't have been called @@ -1088,25 +1088,25 @@ def test_call(self): """Test calling generic compositor.""" # Multiple datasets with extra attributes all_valid = self.all_valid - all_valid.attrs['sensor'] = 'foo' - attrs = {'foo': 'bar', 'resolution': 333} - self.comp.attrs['resolution'] = None + all_valid.attrs["sensor"] = "foo" + attrs = {"foo": "bar", "resolution": 333} + self.comp.attrs["resolution"] = None res = self.comp([self.all_valid, self.first_invalid], **attrs) # Verify attributes - self.assertEqual(res.attrs.get('sensor'), 'foo') - self.assertIn('foo', res.attrs) - self.assertEqual(res.attrs.get('foo'), 'bar') - self.assertNotIn('units', res.attrs) - self.assertNotIn('calibration', res.attrs) - self.assertNotIn('modifiers', res.attrs) - self.assertIsNone(res.attrs['wavelength']) - self.assertEqual(res.attrs['mode'], 'LA') - self.assertEqual(res.attrs['resolution'], 333) + self.assertEqual(res.attrs.get("sensor"), "foo") + self.assertIn("foo", res.attrs) + self.assertEqual(res.attrs.get("foo"), "bar") + self.assertNotIn("units", res.attrs) + self.assertNotIn("calibration", res.attrs) + self.assertNotIn("modifiers", res.attrs) + self.assertIsNone(res.attrs["wavelength"]) + self.assertEqual(res.attrs["mode"], "LA") + self.assertEqual(res.attrs["resolution"], 333) def test_deprecation_warning(self): """Test deprecation warning for dcprecated composite recipes.""" - warning_message = 'foo is a deprecated composite. Use composite bar instead.' - self.comp.attrs['deprecation_warning'] = warning_message + warning_message = "foo is a deprecated composite. Use composite bar instead." 
+ self.comp.attrs["deprecation_warning"] = warning_message with pytest.warns(UserWarning, match=warning_message): self.comp([self.all_valid]) @@ -1119,72 +1119,72 @@ def test_add_bands_l_rgb(self): from satpy.composites import add_bands # L + RGB -> RGB - data = xr.DataArray(da.ones((1, 3, 3)), dims=('bands', 'y', 'x'), - coords={'bands': ['L']}) - new_bands = xr.DataArray(da.array(['R', 'G', 'B']), dims=('bands'), - coords={'bands': ['R', 'G', 'B']}) + data = xr.DataArray(da.ones((1, 3, 3)), dims=("bands", "y", "x"), + coords={"bands": ["L"]}) + new_bands = xr.DataArray(da.array(["R", "G", "B"]), dims=("bands"), + coords={"bands": ["R", "G", "B"]}) res = add_bands(data, new_bands) - res_bands = ['R', 'G', 'B'] - self.assertEqual(res.attrs['mode'], ''.join(res_bands)) + res_bands = ["R", "G", "B"] + self.assertEqual(res.attrs["mode"], "".join(res_bands)) np.testing.assert_array_equal(res.bands, res_bands) - np.testing.assert_array_equal(res.coords['bands'], res_bands) + np.testing.assert_array_equal(res.coords["bands"], res_bands) def test_add_bands_l_rgba(self): """Test adding bands.""" from satpy.composites import add_bands # L + RGBA -> RGBA - data = xr.DataArray(da.ones((1, 3, 3)), dims=('bands', 'y', 'x'), - coords={'bands': ['L']}, attrs={'mode': 'L'}) - new_bands = xr.DataArray(da.array(['R', 'G', 'B', 'A']), dims=('bands'), - coords={'bands': ['R', 'G', 'B', 'A']}) + data = xr.DataArray(da.ones((1, 3, 3)), dims=("bands", "y", "x"), + coords={"bands": ["L"]}, attrs={"mode": "L"}) + new_bands = xr.DataArray(da.array(["R", "G", "B", "A"]), dims=("bands"), + coords={"bands": ["R", "G", "B", "A"]}) res = add_bands(data, new_bands) - res_bands = ['R', 'G', 'B', 'A'] - self.assertEqual(res.attrs['mode'], ''.join(res_bands)) + res_bands = ["R", "G", "B", "A"] + self.assertEqual(res.attrs["mode"], "".join(res_bands)) np.testing.assert_array_equal(res.bands, res_bands) - np.testing.assert_array_equal(res.coords['bands'], res_bands) + np.testing.assert_array_equal(res.coords["bands"], res_bands) def test_add_bands_la_rgb(self): """Test adding bands.""" from satpy.composites import add_bands # LA + RGB -> RGBA - data = xr.DataArray(da.ones((2, 3, 3)), dims=('bands', 'y', 'x'), - coords={'bands': ['L', 'A']}, attrs={'mode': 'LA'}) - new_bands = xr.DataArray(da.array(['R', 'G', 'B']), dims=('bands'), - coords={'bands': ['R', 'G', 'B']}) + data = xr.DataArray(da.ones((2, 3, 3)), dims=("bands", "y", "x"), + coords={"bands": ["L", "A"]}, attrs={"mode": "LA"}) + new_bands = xr.DataArray(da.array(["R", "G", "B"]), dims=("bands"), + coords={"bands": ["R", "G", "B"]}) res = add_bands(data, new_bands) - res_bands = ['R', 'G', 'B', 'A'] - self.assertEqual(res.attrs['mode'], ''.join(res_bands)) + res_bands = ["R", "G", "B", "A"] + self.assertEqual(res.attrs["mode"], "".join(res_bands)) np.testing.assert_array_equal(res.bands, res_bands) - np.testing.assert_array_equal(res.coords['bands'], res_bands) + np.testing.assert_array_equal(res.coords["bands"], res_bands) def test_add_bands_rgb_rbga(self): """Test adding bands.""" from satpy.composites import add_bands # RGB + RGBA -> RGBA - data = xr.DataArray(da.ones((3, 3, 3)), dims=('bands', 'y', 'x'), - coords={'bands': ['R', 'G', 'B']}, - attrs={'mode': 'RGB'}) - new_bands = xr.DataArray(da.array(['R', 'G', 'B', 'A']), dims=('bands'), - coords={'bands': ['R', 'G', 'B', 'A']}) + data = xr.DataArray(da.ones((3, 3, 3)), dims=("bands", "y", "x"), + coords={"bands": ["R", "G", "B"]}, + attrs={"mode": "RGB"}) + new_bands = xr.DataArray(da.array(["R", "G", "B", 
"A"]), dims=("bands"), + coords={"bands": ["R", "G", "B", "A"]}) res = add_bands(data, new_bands) - res_bands = ['R', 'G', 'B', 'A'] - self.assertEqual(res.attrs['mode'], ''.join(res_bands)) + res_bands = ["R", "G", "B", "A"] + self.assertEqual(res.attrs["mode"], "".join(res_bands)) np.testing.assert_array_equal(res.bands, res_bands) - np.testing.assert_array_equal(res.coords['bands'], res_bands) + np.testing.assert_array_equal(res.coords["bands"], res_bands) def test_add_bands_p_l(self): """Test adding bands.""" from satpy.composites import add_bands # P(RGBA) + L -> RGBA - data = xr.DataArray(da.ones((1, 3, 3)), dims=('bands', 'y', 'x'), - coords={'bands': ['P']}, - attrs={'mode': 'P'}) - new_bands = xr.DataArray(da.array(['L']), dims=('bands'), - coords={'bands': ['L']}) + data = xr.DataArray(da.ones((1, 3, 3)), dims=("bands", "y", "x"), + coords={"bands": ["P"]}, + attrs={"mode": "P"}) + new_bands = xr.DataArray(da.array(["L"]), dims=("bands"), + coords={"bands": ["L"]}) with pytest.raises(NotImplementedError): add_bands(data, new_bands) @@ -1192,7 +1192,7 @@ def test_add_bands_p_l(self): class TestStaticImageCompositor(unittest.TestCase): """Test case for the static compositor.""" - @mock.patch('satpy.resample.get_area_def') + @mock.patch("satpy.resample.get_area_def") def test_init(self, get_area_def): """Test the initializiation of static compositor.""" from satpy.composites import StaticImageCompositor @@ -1213,14 +1213,14 @@ def test_init(self, get_area_def): self.assertEqual(comp.area, "bar") get_area_def.assert_called_once_with("euro4") - @mock.patch('satpy.aux_download.retrieve') - @mock.patch('satpy.aux_download.register_file') - @mock.patch('satpy.Scene') + @mock.patch("satpy.aux_download.retrieve") + @mock.patch("satpy.aux_download.register_file") + @mock.patch("satpy.Scene") def test_call(self, Scene, register, retrieve): # noqa """Test the static compositing.""" from satpy.composites import StaticImageCompositor - satpy.config.set(data_dir=os.path.join(os.path.sep, 'path', 'to', 'image')) + satpy.config.set(data_dir=os.path.join(os.path.sep, "path", "to", "image")) remote_tif = "http://example.com/foo.tif" class MockScene(dict): @@ -1230,20 +1230,20 @@ def load(self, arg): img = mock.MagicMock() img.attrs = {} scn = MockScene() - scn['image'] = img + scn["image"] = img Scene.return_value = scn # absolute path to local file comp = StaticImageCompositor("name", filename="/foo.tif", area="euro4") res = comp() - Scene.assert_called_once_with(reader='generic_image', - filenames=['/foo.tif']) + Scene.assert_called_once_with(reader="generic_image", + filenames=["/foo.tif"]) register.assert_not_called() retrieve.assert_not_called() self.assertIn("start_time", res.attrs) self.assertIn("end_time", res.attrs) - self.assertIsNone(res.attrs['sensor']) - self.assertNotIn('modifiers', res.attrs) - self.assertNotIn('calibration', res.attrs) + self.assertIsNone(res.attrs["sensor"]) + self.assertNotIn("modifiers", res.attrs) + self.assertNotIn("calibration", res.attrs) # remote file with local cached version Scene.reset_mock() @@ -1251,28 +1251,28 @@ def load(self, arg): retrieve.return_value = "data_dir/foo.tif" comp = StaticImageCompositor("name", url=remote_tif, area="euro4") res = comp() - Scene.assert_called_once_with(reader='generic_image', - filenames=['data_dir/foo.tif']) + Scene.assert_called_once_with(reader="generic_image", + filenames=["data_dir/foo.tif"]) self.assertIn("start_time", res.attrs) self.assertIn("end_time", res.attrs) - self.assertIsNone(res.attrs['sensor']) - 
self.assertNotIn('modifiers', res.attrs) - self.assertNotIn('calibration', res.attrs) + self.assertIsNone(res.attrs["sensor"]) + self.assertNotIn("modifiers", res.attrs) + self.assertNotIn("calibration", res.attrs) # Non-georeferenced image, no area given - img.attrs.pop('area') + img.attrs.pop("area") comp = StaticImageCompositor("name", filename="/foo.tif") with self.assertRaises(AttributeError): comp() # Non-georeferenced image, area given - comp = StaticImageCompositor("name", filename="/foo.tif", area='euro4') + comp = StaticImageCompositor("name", filename="/foo.tif", area="euro4") res = comp() - self.assertEqual(res.attrs['area'].area_id, 'euro4') + self.assertEqual(res.attrs["area"].area_id, "euro4") # Filename contains environment variable os.environ["TEST_IMAGE_PATH"] = "/path/to/image" - comp = StaticImageCompositor("name", filename="${TEST_IMAGE_PATH}/foo.tif", area='euro4') + comp = StaticImageCompositor("name", filename="${TEST_IMAGE_PATH}/foo.tif", area="euro4") self.assertEqual(comp._cache_filename, "/path/to/image/foo.tif") # URL and filename without absolute path @@ -1281,12 +1281,12 @@ def load(self, arg): self.assertEqual(comp._cache_filename, "bar.tif") # No URL, filename without absolute path, use default data_dir from config - with mock.patch('os.path.exists') as exists: + with mock.patch("os.path.exists") as exists: exists.return_value = True comp = StaticImageCompositor("name", filename="foo.tif") self.assertEqual(comp._url, None) self.assertEqual(comp._cache_filename, - os.path.join(os.path.sep, 'path', 'to', 'image', 'foo.tif')) + os.path.join(os.path.sep, "path", "to", "image", "foo.tif")) def _enhance2dataset(dataset, convert_p=False): @@ -1315,21 +1315,21 @@ def setup_class(cls): } cls.foreground_data = foreground_data - @mock.patch('satpy.composites.enhance2dataset', _enhance2dataset) + @mock.patch("satpy.composites.enhance2dataset", _enhance2dataset) @pytest.mark.parametrize( - ('foreground_bands', 'background_bands', 'exp_bands', 'exp_result'), + ("foreground_bands", "background_bands", "exp_bands", "exp_result"), [ - ('L', 'L', 'L', np.array([[1.0, 0.5], [0.0, 1.0]])), - ('LA', 'LA', 'L', np.array([[1.0, 0.75], [0.5, 1.0]])), - ('RGB', 'RGB', 'RGB', np.array([ + ("L", "L", "L", np.array([[1.0, 0.5], [0.0, 1.0]])), + ("LA", "LA", "L", np.array([[1.0, 0.75], [0.5, 1.0]])), + ("RGB", "RGB", "RGB", np.array([ [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]]])), - ('RGBA', 'RGBA', 'RGB', np.array([ + ("RGBA", "RGBA", "RGB", np.array([ [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]]])), - ('RGBA', 'RGB', 'RGB', np.array([ + ("RGBA", "RGB", "RGB", np.array([ [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]]])), @@ -1342,43 +1342,43 @@ def test_call(self, foreground_bands, background_bands, exp_bands, exp_result): # L mode images foreground_data = self.foreground_data[foreground_bands] - attrs = {'mode': foreground_bands, 'area': 'foo'} + attrs = {"mode": foreground_bands, "area": "foo"} foreground = xr.DataArray(da.from_array(foreground_data), - dims=('bands', 'y', 'x'), - coords={'bands': [c for c in attrs['mode']]}, + dims=("bands", "y", "x"), + coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs) - attrs = {'mode': background_bands, 'area': 'foo'} - background = xr.DataArray(da.ones((len(background_bands), 2, 2)), dims=('bands', 'y', 'x'), - coords={'bands': [c for c in attrs['mode']]}, + attrs = {"mode": background_bands, "area": "foo"} + background = 
xr.DataArray(da.ones((len(background_bands), 2, 2)), dims=("bands", "y", "x"), + coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs) res = comp([foreground, background]) - assert res.attrs['area'] == 'foo' + assert res.attrs["area"] == "foo" np.testing.assert_allclose(res, exp_result) - assert res.attrs['mode'] == exp_bands + assert res.attrs["mode"] == exp_bands - @mock.patch('satpy.composites.enhance2dataset', _enhance2dataset) + @mock.patch("satpy.composites.enhance2dataset", _enhance2dataset) def test_multiple_sensors(self): """Test the background compositing from multiple sensor data.""" from satpy.composites import BackgroundCompositor comp = BackgroundCompositor("name") # L mode images - attrs = {'mode': 'L', 'area': 'foo'} + attrs = {"mode": "L", "area": "foo"} foreground_data = self.foreground_data["L"] foreground = xr.DataArray(da.from_array(foreground_data), - dims=('bands', 'y', 'x'), - coords={'bands': [c for c in attrs['mode']]}, + dims=("bands", "y", "x"), + coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs.copy()) - foreground.attrs['sensor'] = 'abi' - background = xr.DataArray(da.ones((1, 2, 2)), dims=('bands', 'y', 'x'), - coords={'bands': [c for c in attrs['mode']]}, + foreground.attrs["sensor"] = "abi" + background = xr.DataArray(da.ones((1, 2, 2)), dims=("bands", "y", "x"), + coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs.copy()) - background.attrs['sensor'] = 'glm' + background.attrs["sensor"] = "glm" res = comp([foreground, background]) - assert res.attrs['area'] == 'foo' + assert res.attrs["area"] == "foo" np.testing.assert_allclose(res, np.array([[1., 0.5], [0., 1.]])) - assert res.attrs['mode'] == 'L' - assert res.attrs['sensor'] == {'abi', 'glm'} + assert res.attrs["mode"] == "L" + assert res.attrs["sensor"] == {"abi", "glm"} class TestMaskingCompositor: @@ -1387,39 +1387,39 @@ class TestMaskingCompositor: @pytest.fixture def conditions_v1(self): """Masking conditions with string values.""" - return [{'method': 'equal', - 'value': 'Cloud-free_land', - 'transparency': 100}, - {'method': 'equal', - 'value': 'Cloud-free_sea', - 'transparency': 50}] + return [{"method": "equal", + "value": "Cloud-free_land", + "transparency": 100}, + {"method": "equal", + "value": "Cloud-free_sea", + "transparency": 50}] @pytest.fixture def conditions_v2(self): """Masking conditions with numerical values.""" - return [{'method': 'equal', - 'value': 1, - 'transparency': 100}, - {'method': 'equal', - 'value': 2, - 'transparency': 50}] + return [{"method": "equal", + "value": 1, + "transparency": 100}, + {"method": "equal", + "value": 2, + "transparency": 50}] @pytest.fixture def test_data(self): """Test data to use with masking compositors.""" - return xr.DataArray(da.random.random((3, 3)), dims=['y', 'x']) + return xr.DataArray(da.random.random((3, 3)), dims=["y", "x"]) @pytest.fixture def test_ct_data(self): """Test 2D CT data array.""" - flag_meanings = ['Cloud-free_land', 'Cloud-free_sea'] + flag_meanings = ["Cloud-free_land", "Cloud-free_sea"] flag_values = da.array([1, 2]) ct_data = da.array([[1, 2, 2], [2, 1, 2], [2, 2, 1]]) - ct_data = xr.DataArray(ct_data, dims=['y', 'x']) - ct_data.attrs['flag_meanings'] = flag_meanings - ct_data.attrs['flag_values'] = flag_values + ct_data = xr.DataArray(ct_data, dims=["y", "x"]) + ct_data.attrs["flag_meanings"] = flag_meanings + ct_data.attrs["flag_values"] = flag_values return ct_data @pytest.fixture @@ -1439,7 +1439,7 @@ def reference_alpha(self): ref_alpha = da.array([[0, 0.5, 0.5], [0.5, 0, 0.5], [0.5, 0.5, 
0]]) - return xr.DataArray(ref_alpha, dims=['y', 'x']) + return xr.DataArray(ref_alpha, dims=["y", "x"]) def test_init(self): """Test the initializiation of compositor.""" @@ -1451,10 +1451,10 @@ def test_init(self): # transparency defined transparency = {0: 100, 1: 50} - conditions = [{'method': 'equal', 'value': 0, 'transparency': 100}, - {'method': 'equal', 'value': 1, 'transparency': 50}] + conditions = [{"method": "equal", "value": 0, "transparency": 100}, + {"method": "equal", "value": 1, "transparency": 50}] comp = MaskingCompositor("name", transparency=transparency.copy()) - assert not hasattr(comp, 'transparency') + assert not hasattr(comp, "transparency") # Transparency should be converted to conditions assert comp.conditions == conditions @@ -1470,18 +1470,18 @@ def test_get_flag_value(self): mask = da.array([[1, 2, 2], [2, 1, 2], [2, 2, 1]]) - mask = xr.DataArray(mask, dims=['y', 'x']) - flag_meanings = ['Cloud-free_land', 'Cloud-free_sea'] - mask.attrs['flag_meanings'] = flag_meanings - mask.attrs['flag_values'] = flag_values + mask = xr.DataArray(mask, dims=["y", "x"]) + flag_meanings = ["Cloud-free_land", "Cloud-free_sea"] + mask.attrs["flag_meanings"] = flag_meanings + mask.attrs["flag_values"] = flag_values - assert _get_flag_value(mask, 'Cloud-free_land') == 1 - assert _get_flag_value(mask, 'Cloud-free_sea') == 2 + assert _get_flag_value(mask, "Cloud-free_land") == 1 + assert _get_flag_value(mask, "Cloud-free_sea") == 2 - flag_meanings_str = 'Cloud-free_land Cloud-free_sea' - mask.attrs['flag_meanings'] = flag_meanings_str - assert _get_flag_value(mask, 'Cloud-free_land') == 1 - assert _get_flag_value(mask, 'Cloud-free_sea') == 2 + flag_meanings_str = "Cloud-free_land Cloud-free_sea" + mask.attrs["flag_meanings"] = flag_meanings_str + assert _get_flag_value(mask, "Cloud-free_land") == 1 + assert _get_flag_value(mask, "Cloud-free_sea") == 2 @pytest.mark.parametrize("mode", ["LA", "RGBA"]) def test_call_numerical_transparency_data( @@ -1502,7 +1502,7 @@ def test_call_numerical_transparency_data( assert res.mode == mode for m in mode.rstrip("A"): np.testing.assert_allclose(res.sel(bands=m), reference_data) - np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) + np.testing.assert_allclose(res.sel(bands="A"), reference_alpha) def test_call_named_fields(self, conditions_v2, test_data, test_ct_data, reference_data, reference_alpha): @@ -1514,8 +1514,8 @@ def test_call_named_fields(self, conditions_v2, test_data, test_ct_data, comp = MaskingCompositor("name", conditions=conditions_v2) res = comp([test_data, test_ct_data]) assert res.mode == "LA" - np.testing.assert_allclose(res.sel(bands='L'), reference_data) - np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) + np.testing.assert_allclose(res.sel(bands="L"), reference_data) + np.testing.assert_allclose(res.sel(bands="A"), reference_alpha) def test_call_named_fields_string( self, conditions_v2, test_data, test_ct_data, reference_data, @@ -1524,14 +1524,14 @@ def test_call_named_fields_string( from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler - flag_meanings_str = 'Cloud-free_land Cloud-free_sea' - test_ct_data.attrs['flag_meanings'] = flag_meanings_str + flag_meanings_str = "Cloud-free_land Cloud-free_sea" + test_ct_data.attrs["flag_meanings"] = flag_meanings_str with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v2) res = comp([test_data, test_ct_data]) assert res.mode == "LA" - 
np.testing.assert_allclose(res.sel(bands='L'), reference_data) - np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) + np.testing.assert_allclose(res.sel(bands="L"), reference_data) + np.testing.assert_allclose(res.sel(bands="A"), reference_alpha) def test_method_isnan(self, test_data, test_ct_data, test_ct_data_v3): @@ -1539,27 +1539,27 @@ def test_method_isnan(self, test_data, from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler - conditions_v3 = [{'method': 'isnan', 'transparency': 100}] + conditions_v3 = [{"method": "isnan", "transparency": 100}] # The data are set to NaN where ct is NaN reference_data_v3 = test_data.where(test_ct_data == 1) reference_alpha_v3 = da.array([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]) - reference_alpha_v3 = xr.DataArray(reference_alpha_v3, dims=['y', 'x']) + reference_alpha_v3 = xr.DataArray(reference_alpha_v3, dims=["y", "x"]) with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v3) res = comp([test_data, test_ct_data_v3]) assert res.mode == "LA" - np.testing.assert_allclose(res.sel(bands='L'), reference_data_v3) - np.testing.assert_allclose(res.sel(bands='A'), reference_alpha_v3) + np.testing.assert_allclose(res.sel(bands="L"), reference_data_v3) + np.testing.assert_allclose(res.sel(bands="A"), reference_alpha_v3) def test_method_absolute_import(self, test_data, test_ct_data_v3): """Test "absolute_import" as method.""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler - conditions_v4 = [{'method': 'absolute_import', 'transparency': 'satpy.resample'}] + conditions_v4 = [{"method": "absolute_import", "transparency": "satpy.resample"}] # This should raise AttributeError with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v4) @@ -1573,50 +1573,50 @@ def test_rgb_dataset(self, conditions_v1, test_ct_data, reference_alpha): # 3D data array data = xr.DataArray(da.random.random((3, 3, 3)), - dims=['bands', 'y', 'x'], - coords={'bands': ['R', 'G', 'B'], - 'y': np.arange(3), - 'x': np.arange(3)}) + dims=["bands", "y", "x"], + coords={"bands": ["R", "G", "B"], + "y": np.arange(3), + "x": np.arange(3)}) with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v1) res = comp([data, test_ct_data]) assert res.mode == "RGBA" - np.testing.assert_allclose(res.sel(bands='R'), - data.sel(bands='R').where(test_ct_data > 1)) - np.testing.assert_allclose(res.sel(bands='G'), - data.sel(bands='G').where(test_ct_data > 1)) - np.testing.assert_allclose(res.sel(bands='B'), - data.sel(bands='B').where(test_ct_data > 1)) - np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) + np.testing.assert_allclose(res.sel(bands="R"), + data.sel(bands="R").where(test_ct_data > 1)) + np.testing.assert_allclose(res.sel(bands="G"), + data.sel(bands="G").where(test_ct_data > 1)) + np.testing.assert_allclose(res.sel(bands="B"), + data.sel(bands="B").where(test_ct_data > 1)) + np.testing.assert_allclose(res.sel(bands="A"), reference_alpha) def test_rgba_dataset(self, conditions_v2, test_ct_data, reference_alpha): """Test RGBA dataset.""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler data = xr.DataArray(da.random.random((4, 3, 3)), - dims=['bands', 'y', 'x'], - coords={'bands': ['R', 'G', 'B', 'A'], - 'y': np.arange(3), - 'x': np.arange(3)}) + dims=["bands", 
"y", "x"], + coords={"bands": ["R", "G", "B", "A"], + "y": np.arange(3), + "x": np.arange(3)}) with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v2) res = comp([data, test_ct_data]) assert res.mode == "RGBA" - np.testing.assert_allclose(res.sel(bands='R'), - data.sel(bands='R').where(test_ct_data > 1)) - np.testing.assert_allclose(res.sel(bands='G'), - data.sel(bands='G').where(test_ct_data > 1)) - np.testing.assert_allclose(res.sel(bands='B'), - data.sel(bands='B').where(test_ct_data > 1)) + np.testing.assert_allclose(res.sel(bands="R"), + data.sel(bands="R").where(test_ct_data > 1)) + np.testing.assert_allclose(res.sel(bands="G"), + data.sel(bands="G").where(test_ct_data > 1)) + np.testing.assert_allclose(res.sel(bands="B"), + data.sel(bands="B").where(test_ct_data > 1)) # The compositor should drop the original alpha band - np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) + np.testing.assert_allclose(res.sel(bands="A"), reference_alpha) def test_incorrect_method(self, test_data, test_ct_data): """Test incorrect method.""" from satpy.composites import MaskingCompositor - conditions = [{'method': 'foo', 'value': 0, 'transparency': 100}] + conditions = [{"method": "foo", "value": 0, "transparency": 100}] comp = MaskingCompositor("name", conditions=conditions) with pytest.raises(AttributeError): comp([test_data, test_ct_data]) @@ -1646,12 +1646,12 @@ def setUp(self): self.ch08_w = 3.0 self.ch06_w = 4.0 - @mock.patch('satpy.composites.NaturalEnh.__repr__') - @mock.patch('satpy.composites.NaturalEnh.match_data_arrays') + @mock.patch("satpy.composites.NaturalEnh.__repr__") + @mock.patch("satpy.composites.NaturalEnh.match_data_arrays") def test_natural_enh(self, match_data_arrays, repr_): """Test NaturalEnh compositor.""" from satpy.composites import NaturalEnh - repr_.return_value = '' + repr_.return_value = "" projectables = [self.ch1, self.ch2, self.ch3] def temp_func(*args): @@ -1676,57 +1676,57 @@ def temp_func(*args): class TestEnhance2Dataset(unittest.TestCase): """Test the enhance2dataset utility.""" - @mock.patch('satpy.composites.get_enhanced_image') + @mock.patch("satpy.composites.get_enhanced_image") def test_enhance_p_to_rgb(self, get_enhanced_image): """Test enhancing a paletted dataset in RGB mode.""" from trollimage.xrimage import XRImage - img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=('bands', 'y', 'x'), coords={'bands': ['P']})) + img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=("bands", "y", "x"), coords={"bands": ["P"]})) img.palette = ((0, 0, 0), (4, 4, 4), (8, 8, 8)) get_enhanced_image.return_value = img from satpy.composites import enhance2dataset dataset = xr.DataArray(np.ones((1, 20, 20))) res = enhance2dataset(dataset, convert_p=True) - assert res.attrs['mode'] == 'RGB' + assert res.attrs["mode"] == "RGB" - @mock.patch('satpy.composites.get_enhanced_image') + @mock.patch("satpy.composites.get_enhanced_image") def test_enhance_p_to_rgba(self, get_enhanced_image): """Test enhancing a paletted dataset in RGBA mode.""" from trollimage.xrimage import XRImage - img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=('bands', 'y', 'x'), coords={'bands': ['P']})) + img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=("bands", "y", "x"), coords={"bands": ["P"]})) img.palette = ((0, 0, 0, 255), (4, 4, 4, 255), (8, 8, 8, 255)) get_enhanced_image.return_value = img from satpy.composites import enhance2dataset dataset = xr.DataArray(np.ones((1, 20, 20))) res = 
enhance2dataset(dataset, convert_p=True) - assert res.attrs['mode'] == 'RGBA' + assert res.attrs["mode"] == "RGBA" - @mock.patch('satpy.composites.get_enhanced_image') + @mock.patch("satpy.composites.get_enhanced_image") def test_enhance_p(self, get_enhanced_image): """Test enhancing a paletted dataset in P mode.""" from trollimage.xrimage import XRImage - img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=('bands', 'y', 'x'), coords={'bands': ['P']})) + img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=("bands", "y", "x"), coords={"bands": ["P"]})) img.palette = ((0, 0, 0, 255), (4, 4, 4, 255), (8, 8, 8, 255)) get_enhanced_image.return_value = img from satpy.composites import enhance2dataset dataset = xr.DataArray(np.ones((1, 20, 20))) res = enhance2dataset(dataset) - assert res.attrs['mode'] == 'P' + assert res.attrs["mode"] == "P" assert res.max().values == 2 - @mock.patch('satpy.composites.get_enhanced_image') + @mock.patch("satpy.composites.get_enhanced_image") def test_enhance_l(self, get_enhanced_image): """Test enhancing a paletted dataset in P mode.""" from trollimage.xrimage import XRImage - img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=('bands', 'y', 'x'), coords={'bands': ['L']})) + img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=("bands", "y", "x"), coords={"bands": ["L"]})) get_enhanced_image.return_value = img from satpy.composites import enhance2dataset dataset = xr.DataArray(np.ones((1, 20, 20))) res = enhance2dataset(dataset) - assert res.attrs['mode'] == 'L' + assert res.attrs["mode"] == "L" assert res.max().values == 1 @@ -1736,29 +1736,29 @@ class TestInferMode(unittest.TestCase): def test_bands_coords_is_used(self): """Test that the `bands` coord is used.""" from satpy.composites import GenericCompositor - arr = xr.DataArray(np.ones((1, 5, 5)), dims=('bands', 'x', 'y'), coords={'bands': ['P']}) - assert GenericCompositor.infer_mode(arr) == 'P' + arr = xr.DataArray(np.ones((1, 5, 5)), dims=("bands", "x", "y"), coords={"bands": ["P"]}) + assert GenericCompositor.infer_mode(arr) == "P" - arr = xr.DataArray(np.ones((3, 5, 5)), dims=('bands', 'x', 'y'), coords={'bands': ['Y', 'Cb', 'Cr']}) - assert GenericCompositor.infer_mode(arr) == 'YCbCr' + arr = xr.DataArray(np.ones((3, 5, 5)), dims=("bands", "x", "y"), coords={"bands": ["Y", "Cb", "Cr"]}) + assert GenericCompositor.infer_mode(arr) == "YCbCr" def test_mode_is_used(self): """Test that the `mode` attribute is used.""" from satpy.composites import GenericCompositor - arr = xr.DataArray(np.ones((1, 5, 5)), dims=('bands', 'x', 'y'), attrs={'mode': 'P'}) - assert GenericCompositor.infer_mode(arr) == 'P' + arr = xr.DataArray(np.ones((1, 5, 5)), dims=("bands", "x", "y"), attrs={"mode": "P"}) + assert GenericCompositor.infer_mode(arr) == "P" def test_band_size_is_used(self): """Test that the band size is used.""" from satpy.composites import GenericCompositor - arr = xr.DataArray(np.ones((2, 5, 5)), dims=('bands', 'x', 'y')) - assert GenericCompositor.infer_mode(arr) == 'LA' + arr = xr.DataArray(np.ones((2, 5, 5)), dims=("bands", "x", "y")) + assert GenericCompositor.infer_mode(arr) == "LA" def test_no_bands_is_l(self): """Test that default (no band) is L.""" from satpy.composites import GenericCompositor - arr = xr.DataArray(np.ones((5, 5)), dims=('x', 'y')) - assert GenericCompositor.infer_mode(arr) == 'L' + arr = xr.DataArray(np.ones((5, 5)), dims=("x", "y")) + assert GenericCompositor.infer_mode(arr) == "L" class TestLongitudeMaskingCompositor(unittest.TestCase): @@ -1772,26 +1772,26 @@ 
def test_masking(self): lons = np.array([-180., -100., -50., 0., 50., 100., 180.]) area.get_lonlats = mock.MagicMock(return_value=[lons, []]) a = xr.DataArray(np.array([1, 2, 3, 4, 5, 6, 7]), - attrs={'area': area, 'units': 'K'}) + attrs={"area": area, "units": "K"}) - comp = LongitudeMaskingCompositor(name='test', lon_min=-40., lon_max=120.) + comp = LongitudeMaskingCompositor(name="test", lon_min=-40., lon_max=120.) expected = xr.DataArray(np.array([np.nan, np.nan, np.nan, 4, 5, 6, np.nan])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) assert "units" in res.attrs assert res.attrs["units"] == "K" - comp = LongitudeMaskingCompositor(name='test', lon_min=-40.) + comp = LongitudeMaskingCompositor(name="test", lon_min=-40.) expected = xr.DataArray(np.array([np.nan, np.nan, np.nan, 4, 5, 6, 7])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) - comp = LongitudeMaskingCompositor(name='test', lon_max=120.) + comp = LongitudeMaskingCompositor(name="test", lon_max=120.) expected = xr.DataArray(np.array([1, 2, 3, 4, 5, 6, np.nan])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) - comp = LongitudeMaskingCompositor(name='test', lon_min=120., lon_max=-40.) + comp = LongitudeMaskingCompositor(name="test", lon_min=120., lon_max=-40.) expected = xr.DataArray(np.array([1, 2, 3, np.nan, np.nan, np.nan, 7])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) diff --git a/satpy/tests/test_config.py b/satpy/tests/test_config.py index 10d3205223..5cb1c047d2 100644 --- a/satpy/tests/test_config.py +++ b/satpy/tests/test_config.py @@ -59,7 +59,7 @@ def test_areas_pyproj(self): swath_def = SwathDefinition(lons, lats) all_areas = parse_area_file(get_area_file()) for area_obj in all_areas: - if hasattr(area_obj, 'freeze'): + if hasattr(area_obj, "freeze"): try: area_obj = area_obj.freeze(lonslats=swath_def) except RuntimeError: @@ -74,7 +74,7 @@ def test_areas_rasterio(self): from rasterio.crs import CRS except ImportError: return unittest.skip("Missing rasterio dependency") - if not hasattr(CRS, 'from_dict'): + if not hasattr(CRS, "from_dict"): return unittest.skip("RasterIO 1.0+ required") import numpy as np @@ -91,7 +91,7 @@ def test_areas_rasterio(self): swath_def = SwathDefinition(lons, lats) all_areas = parse_area_file(get_area_file()) for area_obj in all_areas: - if hasattr(area_obj, 'freeze'): + if hasattr(area_obj, "freeze"): try: area_obj = area_obj.freeze(lonslats=swath_def) except RuntimeError: @@ -115,8 +115,8 @@ def fake_plugin_etc_path( etc_path, entry_points, module_paths = _get_entry_points_and_etc_paths(tmp_path, entry_point_names) fake_iter_entry_points = _create_fake_iter_entry_points(entry_points) fake_importlib_files = _create_fake_importlib_files(module_paths) - with mock.patch('satpy._config.entry_points', fake_iter_entry_points), \ - mock.patch('satpy._config.impr_files', fake_importlib_files): + with mock.patch("satpy._config.entry_points", fake_iter_entry_points), \ + mock.patch("satpy._config.impr_files", fake_importlib_files): yield etc_path @@ -298,7 +298,7 @@ def test_get_plugin_configs(self, fake_composite_plugin_etc_path): from satpy._config import get_entry_points_config_dirs with satpy.config.set(config_path=[]): - dirs = get_entry_points_config_dirs('satpy.composites') + dirs = get_entry_points_config_dirs("satpy.composites") assert dirs == [str(fake_composite_plugin_etc_path)] def test_load_entry_point_composite(self, fake_composite_plugin_etc_path): @@ -393,16 +393,16 @@ def 
test_custom_config_file(self): import satpy my_config_dict = { - 'cache_dir': "/path/to/cache", + "cache_dir": "/path/to/cache", } try: - with tempfile.NamedTemporaryFile(mode='w+t', suffix='.yaml', delete=False) as tfile: + with tempfile.NamedTemporaryFile(mode="w+t", suffix=".yaml", delete=False) as tfile: yaml.dump(my_config_dict, tfile) tfile.close() - with mock.patch.dict('os.environ', {'SATPY_CONFIG': tfile.name}): + with mock.patch.dict("os.environ", {"SATPY_CONFIG": tfile.name}): reload(satpy._config) reload(satpy) - assert satpy.config.get('cache_dir') == '/path/to/cache' + assert satpy.config.get("cache_dir") == "/path/to/cache" finally: os.remove(tfile.name) @@ -412,15 +412,15 @@ def test_deprecated_env_vars(self): import satpy old_vars = { - 'PPP_CONFIG_DIR': '/my/ppp/config/dir', - 'SATPY_ANCPATH': '/my/ancpath', + "PPP_CONFIG_DIR": "/my/ppp/config/dir", + "SATPY_ANCPATH": "/my/ancpath", } - with mock.patch.dict('os.environ', old_vars): + with mock.patch.dict("os.environ", old_vars): reload(satpy._config) reload(satpy) - assert satpy.config.get('data_dir') == '/my/ancpath' - assert satpy.config.get('config_path') == ['/my/ppp/config/dir'] + assert satpy.config.get("data_dir") == "/my/ancpath" + assert satpy.config.get("config_path") == ["/my/ppp/config/dir"] def test_config_path_multiple(self): """Test that multiple config paths are accepted.""" @@ -429,13 +429,13 @@ def test_config_path_multiple(self): import satpy exp_paths, env_paths = _os_specific_multipaths() old_vars = { - 'SATPY_CONFIG_PATH': env_paths, + "SATPY_CONFIG_PATH": env_paths, } - with mock.patch.dict('os.environ', old_vars): + with mock.patch.dict("os.environ", old_vars): reload(satpy._config) reload(satpy) - assert satpy.config.get('config_path') == exp_paths + assert satpy.config.get("config_path") == exp_paths def test_config_path_multiple_load(self): """Test that config paths from subprocesses load properly. @@ -449,10 +449,10 @@ def test_config_path_multiple_load(self): import satpy exp_paths, env_paths = _os_specific_multipaths() old_vars = { - 'SATPY_CONFIG_PATH': env_paths, + "SATPY_CONFIG_PATH": env_paths, } - with mock.patch.dict('os.environ', old_vars): + with mock.patch.dict("os.environ", old_vars): # these reloads will update env variable "SATPY_CONFIG_PATH" reload(satpy._config) reload(satpy) @@ -460,7 +460,7 @@ def test_config_path_multiple_load(self): # load the updated env variable and parse it again. 
reload(satpy._config) reload(satpy) - assert satpy.config.get('config_path') == exp_paths + assert satpy.config.get("config_path") == exp_paths def test_bad_str_config_path(self): """Test that a str config path isn't allowed.""" @@ -468,17 +468,17 @@ def test_bad_str_config_path(self): import satpy old_vars = { - 'SATPY_CONFIG_PATH': '/my/configs1', + "SATPY_CONFIG_PATH": "/my/configs1", } # single path from env var still works - with mock.patch.dict('os.environ', old_vars): + with mock.patch.dict("os.environ", old_vars): reload(satpy._config) reload(satpy) - assert satpy.config.get('config_path') == ['/my/configs1'] + assert satpy.config.get("config_path") == ["/my/configs1"] # strings are not allowed, lists are - with satpy.config.set(config_path='/single/string/paths/are/bad'): + with satpy.config.set(config_path="/single/string/paths/are/bad"): pytest.raises(ValueError, satpy._config.get_config_path_safe) def test_tmp_dir_is_writable(self): @@ -503,7 +503,7 @@ def _is_writable(directory): def _os_specific_multipaths(): - exp_paths = ['/my/configs1', '/my/configs2', '/my/configs3'] + exp_paths = ["/my/configs1", "/my/configs2", "/my/configs3"] if sys.platform.startswith("win"): exp_paths = ["C:" + p for p in exp_paths] path_str = os.pathsep.join(exp_paths) diff --git a/satpy/tests/test_data_download.py b/satpy/tests/test_data_download.py index 8f2984bd9b..85cd420951 100644 --- a/satpy/tests/test_data_download.py +++ b/satpy/tests/test_data_download.py @@ -42,16 +42,16 @@ def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwar if not prerequisites or len(prerequisites) != 1: raise ValueError("Unexpected number of prereqs") super().__init__(name, prerequisites, optional_prerequisites, **kwargs) - self.register_data_files({'url': kwargs['url'], - 'filename': kwargs['filename'], - 'known_hash': kwargs['known_hash']}) + self.register_data_files({"url": kwargs["url"], + "filename": kwargs["filename"], + "known_hash": kwargs["known_hash"]}) def _setup_custom_composite_config(base_dir): from satpy.composites import StaticImageCompositor from satpy.modifiers.atmosphere import ReflectanceCorrector composite_config = base_dir.mkdir("composites").join("visir.yaml") - with open(composite_config, 'w') as comp_file: + with open(composite_config, "w") as comp_file: yaml.dump({ "sensor_name": "visir", "modifiers": { @@ -79,7 +79,7 @@ def _setup_custom_composite_config(base_dir): def _setup_custom_reader_config(base_dir): reader_config = base_dir.mkdir("readers").join("fake.yaml") - with open(reader_config, 'wt') as comp_file: + with open(reader_config, "wt") as comp_file: # abstract base classes can't be converted so we do raw string comp_file.write(""" reader: @@ -97,7 +97,7 @@ def _setup_custom_reader_config(base_dir): def _setup_custom_writer_config(base_dir): writer_config = base_dir.mkdir("writers").join("fake.yaml") - with open(writer_config, 'wt') as comp_file: + with open(writer_config, "wt") as comp_file: # abstract base classes can't be converted so we do raw string comp_file.write(""" writer: @@ -113,8 +113,8 @@ def _setup_custom_writer_config(base_dir): def _assert_reader_files_downloaded(readers, found_files): - r_cond1 = 'readers/README.rst' in found_files - r_cond2 = 'readers/README2.rst' in found_files + r_cond1 = "readers/README.rst" in found_files + r_cond2 = "readers/README2.rst" in found_files if readers is not None and not readers: r_cond1 = not r_cond1 r_cond2 = not r_cond2 @@ -123,8 +123,8 @@ def _assert_reader_files_downloaded(readers, found_files): def 
_assert_writer_files_downloaded(writers, found_files): - w_cond1 = 'writers/README.rst' in found_files - w_cond2 = 'writers/README2.rst' in found_files + w_cond1 = "writers/README.rst" in found_files + w_cond2 = "writers/README2.rst" in found_files if writers is not None and not writers: w_cond1 = not w_cond1 w_cond2 = not w_cond2 @@ -133,15 +133,15 @@ def _assert_writer_files_downloaded(writers, found_files): def _assert_comp_files_downloaded(comp_sensors, found_files): - comp_cond = 'composites/README.rst' in found_files + comp_cond = "composites/README.rst" in found_files if comp_sensors is not None and not comp_sensors: comp_cond = not comp_cond assert comp_cond def _assert_mod_files_downloaded(comp_sensors, found_files): - mod_cond = 'modifiers/README.rst' in found_files - unfriendly_cond = 'modifiers/unfriendly.rst' in found_files + mod_cond = "modifiers/README.rst" in found_files + unfriendly_cond = "modifiers/unfriendly.rst" in found_files if comp_sensors is not None and not comp_sensors: mod_cond = not mod_cond assert mod_cond @@ -158,15 +158,15 @@ def _setup_custom_configs(self, tmpdir): _setup_custom_writer_config(tmpdir) self.tmpdir = tmpdir - @pytest.mark.parametrize('comp_sensors', [[], None, ['visir']]) - @pytest.mark.parametrize('writers', [[], None, ['fake']]) - @pytest.mark.parametrize('readers', [[], None, ['fake']]) + @pytest.mark.parametrize("comp_sensors", [[], None, ["visir"]]) + @pytest.mark.parametrize("writers", [[], None, ["fake"]]) + @pytest.mark.parametrize("readers", [[], None, ["fake"]]) def test_find_registerable(self, readers, writers, comp_sensors): """Test that find_registerable finds some things.""" import satpy from satpy.aux_download import find_registerable_files with satpy.config.set(config_path=[self.tmpdir]), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', {}): + mock.patch("satpy.aux_download._FILE_REGISTRY", {}): found_files = find_registerable_files( readers=readers, writers=writers, composite_sensors=comp_sensors, @@ -183,7 +183,7 @@ def test_limited_find_registerable(self): from satpy.aux_download import find_registerable_files file_registry = {} with satpy.config.set(config_path=[self.tmpdir]), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry): + mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry): found_files = find_registerable_files( readers=[], writers=[], composite_sensors=[], ) @@ -195,8 +195,8 @@ def test_retrieve(self): from satpy.aux_download import find_registerable_files, retrieve file_registry = {} with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir)), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry): - comp_file = 'composites/README.rst' + mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry): + comp_file = "composites/README.rst" found_files = find_registerable_files() assert comp_file in found_files assert not self.tmpdir.join(comp_file).exists() @@ -209,8 +209,8 @@ def test_offline_retrieve(self): from satpy.aux_download import find_registerable_files, retrieve file_registry = {} with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir), download_aux=True), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry): - comp_file = 'composites/README.rst' + mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry): + comp_file = "composites/README.rst" found_files = find_registerable_files() assert comp_file in found_files @@ -242,10 +242,10 @@ def test_retrieve_all(self): file_registry = {} file_urls = {} with 
satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir)), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry), \ - mock.patch('satpy.aux_download._FILE_URLS', file_urls), \ - mock.patch('satpy.aux_download.find_registerable_files'): - comp_file = 'composites/README.rst' + mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry), \ + mock.patch("satpy.aux_download._FILE_URLS", file_urls), \ + mock.patch("satpy.aux_download.find_registerable_files"): + comp_file = "composites/README.rst" file_registry[comp_file] = None file_urls[comp_file] = README_URL assert not self.tmpdir.join(comp_file).exists() @@ -260,13 +260,13 @@ def test_no_downloads_in_tests(self): file_registry = {} with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir), download_aux=True), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry): - cache_key = 'myfile.rst' + mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry): + cache_key = "myfile.rst" register_file(README_URL, cache_key) assert not self.tmpdir.join(cache_key).exists() pytest.raises(RuntimeError, retrieve, cache_key) # touch the file so it gets created - open(self.tmpdir.join(cache_key), 'w').close() + open(self.tmpdir.join(cache_key), "w").close() # offline downloading should still be allowed with satpy.config.set(download_aux=False): retrieve(cache_key) @@ -278,10 +278,10 @@ def test_download_script(self): file_registry = {} file_urls = {} with satpy.config.set(config_path=[self.tmpdir]), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry), \ - mock.patch('satpy.aux_download._FILE_URLS', file_urls), \ - mock.patch('satpy.aux_download.find_registerable_files'): - comp_file = 'composites/README.rst' + mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry), \ + mock.patch("satpy.aux_download._FILE_URLS", file_urls), \ + mock.patch("satpy.aux_download.find_registerable_files"): + comp_file = "composites/README.rst" file_registry[comp_file] = None file_urls[comp_file] = README_URL assert not self.tmpdir.join(comp_file).exists() diff --git a/satpy/tests/test_dataset.py b/satpy/tests/test_dataset.py index 0bc1de2982..b8df391d30 100644 --- a/satpy/tests/test_dataset.py +++ b/satpy/tests/test_dataset.py @@ -36,19 +36,19 @@ def test_basic_init(self): from satpy.dataset.dataid import minimal_default_keys_config as mdkc did = DataID(dikc, name="a") - assert did['name'] == 'a' - assert did['modifiers'] == tuple() + assert did["name"] == "a" + assert did["modifiers"] == tuple() DataID(dikc, name="a", wavelength=0.86) DataID(dikc, name="a", resolution=1000) - DataID(dikc, name="a", calibration='radiance') + DataID(dikc, name="a", calibration="radiance") DataID(dikc, name="a", wavelength=0.86, resolution=250, - calibration='radiance') + calibration="radiance") DataID(dikc, name="a", wavelength=0.86, resolution=250, - calibration='radiance', modifiers=('sunz_corrected',)) + calibration="radiance", modifiers=("sunz_corrected",)) with pytest.raises(ValueError): DataID(dikc, wavelength=0.86) - did = DataID(mdkc, name='comp24', resolution=500) - assert did['resolution'] == 500 + did = DataID(mdkc, name="comp24", resolution=500) + assert did["resolution"] == 500 def test_init_bad_modifiers(self): """Test that modifiers are a tuple.""" @@ -72,13 +72,13 @@ def test_bad_calibration(self): from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc with pytest.raises(ValueError): - DataID(dikc, name='C05', calibration='_bad_') + DataID(dikc, 
name="C05", calibration="_bad_") def test_is_modified(self): """Test that modifications are detected properly.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc - d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=('hej',)) + d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=("hej",)) d2 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=tuple()) assert d1.is_modified() @@ -88,11 +88,11 @@ def test_create_less_modified_query(self): """Test that modifications are popped correctly.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc - d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=('hej',)) + d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=("hej",)) d2 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=tuple()) - assert not d1.create_less_modified_query()['modifiers'] - assert not d2.create_less_modified_query()['modifiers'] + assert not d1.create_less_modified_query()["modifiers"] + assert not d2.create_less_modified_query()["modifiers"] class TestCombineMetadata(unittest.TestCase): @@ -101,11 +101,11 @@ class TestCombineMetadata(unittest.TestCase): def setUp(self): """Set up the test case.""" self.datetime_dts = ( - {'start_time': datetime(2018, 2, 1, 11, 58, 0)}, - {'start_time': datetime(2018, 2, 1, 11, 59, 0)}, - {'start_time': datetime(2018, 2, 1, 12, 0, 0)}, - {'start_time': datetime(2018, 2, 1, 12, 1, 0)}, - {'start_time': datetime(2018, 2, 1, 12, 2, 0)}, + {"start_time": datetime(2018, 2, 1, 11, 58, 0)}, + {"start_time": datetime(2018, 2, 1, 11, 59, 0)}, + {"start_time": datetime(2018, 2, 1, 12, 0, 0)}, + {"start_time": datetime(2018, 2, 1, 12, 1, 0)}, + {"start_time": datetime(2018, 2, 1, 12, 2, 0)}, ) def test_average_datetimes(self): @@ -125,14 +125,14 @@ def test_combine_times_with_averaging(self): """Test the combine_metadata with times with averaging.""" from satpy.dataset.metadata import combine_metadata ret = combine_metadata(*self.datetime_dts) - self.assertEqual(self.datetime_dts[2]['start_time'], ret['start_time']) + self.assertEqual(self.datetime_dts[2]["start_time"], ret["start_time"]) def test_combine_times_without_averaging(self): """Test the combine_metadata with times without averaging.""" from satpy.dataset.metadata import combine_metadata ret = combine_metadata(*self.datetime_dts, average_times=False) # times are not equal so don't include it in the final result - self.assertNotIn('start_time', ret) + self.assertNotIn("start_time", ret) def test_combine_arrays(self): """Test the combine_metadata with arrays.""" @@ -175,44 +175,44 @@ def test_combine_lists_identical(self): """Test combine metadata with identical lists.""" from satpy.dataset.metadata import combine_metadata metadatas = [ - {'prerequisites': [1, 2, 3, 4]}, - {'prerequisites': [1, 2, 3, 4]}, + {"prerequisites": [1, 2, 3, 4]}, + {"prerequisites": [1, 2, 3, 4]}, ] res = combine_metadata(*metadatas) - assert res['prerequisites'] == [1, 2, 3, 4] + assert res["prerequisites"] == [1, 2, 3, 4] def test_combine_lists_same_size_diff_values(self): """Test combine metadata with lists with different values.""" from satpy.dataset.metadata import combine_metadata metadatas = [ - {'prerequisites': [1, 2, 3, 4]}, - {'prerequisites': [1, 2, 3, 5]}, + {"prerequisites": [1, 2, 3, 4]}, + {"prerequisites": [1, 2, 3, 5]}, ] res = combine_metadata(*metadatas) - assert 'prerequisites' not in res + assert "prerequisites" not 
in res def test_combine_lists_different_size(self): """Test combine metadata with different size lists.""" from satpy.dataset.metadata import combine_metadata metadatas = [ - {'prerequisites': [1, 2, 3, 4]}, - {'prerequisites': []}, + {"prerequisites": [1, 2, 3, 4]}, + {"prerequisites": []}, ] res = combine_metadata(*metadatas) - assert 'prerequisites' not in res + assert "prerequisites" not in res metadatas = [ - {'prerequisites': [1, 2, 3, 4]}, - {'prerequisites': [1, 2, 3]}, + {"prerequisites": [1, 2, 3, 4]}, + {"prerequisites": [1, 2, 3]}, ] res = combine_metadata(*metadatas) - assert 'prerequisites' not in res + assert "prerequisites" not in res def test_combine_identical_numpy_scalars(self): """Test combining identical fill values.""" from satpy.dataset.metadata import combine_metadata - test_metadata = [{'_FillValue': np.uint16(42)}, {'_FillValue': np.uint16(42)}] - assert combine_metadata(*test_metadata) == {'_FillValue': 42} + test_metadata = [{"_FillValue": np.uint16(42)}, {"_FillValue": np.uint16(42)}] + assert combine_metadata(*test_metadata) == {"_FillValue": 42} def test_combine_empty_metadata(self): """Test combining empty metadata.""" @@ -223,96 +223,96 @@ def test_combine_empty_metadata(self): def test_combine_nans(self): """Test combining nan fill values.""" from satpy.dataset.metadata import combine_metadata - test_metadata = [{'_FillValue': np.nan}, {'_FillValue': np.nan}] - assert combine_metadata(*test_metadata) == {'_FillValue': np.nan} + test_metadata = [{"_FillValue": np.nan}, {"_FillValue": np.nan}] + assert combine_metadata(*test_metadata) == {"_FillValue": np.nan} def test_combine_numpy_arrays(self): """Test combining values that are numpy arrays.""" from satpy.dataset.metadata import combine_metadata - test_metadata = [{'valid_range': np.array([0., 0.00032], dtype=np.float32)}, - {'valid_range': np.array([0., 0.00032], dtype=np.float32)}, - {'valid_range': np.array([0., 0.00032], dtype=np.float32)}] + test_metadata = [{"valid_range": np.array([0., 0.00032], dtype=np.float32)}, + {"valid_range": np.array([0., 0.00032], dtype=np.float32)}, + {"valid_range": np.array([0., 0.00032], dtype=np.float32)}] result = combine_metadata(*test_metadata) - assert np.allclose(result['valid_range'], np.array([0., 0.00032], dtype=np.float32)) + assert np.allclose(result["valid_range"], np.array([0., 0.00032], dtype=np.float32)) def test_combine_dask_arrays(self): """Test combining values that are dask arrays.""" import dask.array as da from satpy.dataset.metadata import combine_metadata - test_metadata = [{'valid_range': da.from_array(np.array([0., 0.00032], dtype=np.float32))}, - {'valid_range': da.from_array(np.array([0., 0.00032], dtype=np.float32))}] + test_metadata = [{"valid_range": da.from_array(np.array([0., 0.00032], dtype=np.float32))}, + {"valid_range": da.from_array(np.array([0., 0.00032], dtype=np.float32))}] result = combine_metadata(*test_metadata) - assert 'valid_range' not in result + assert "valid_range" not in result def test_combine_real_world_mda(self): """Test with real data.""" - mda_objects = ({'_FillValue': np.nan, - 'valid_range': np.array([0., 0.00032], dtype=np.float32), - 'ancillary_variables': ['cpp_status_flag', - 'cpp_conditions', - 'cpp_quality', - 'cpp_reff_pal', - '-'], - 'platform_name': 'NOAA-20', - 'sensor': {'viirs'}, - 'raw_metadata': {'foo': {'bar': np.array([1, 2, 3])}}}, - {'_FillValue': np.nan, - 'valid_range': np.array([0., 0.00032], dtype=np.float32), - 'ancillary_variables': ['cpp_status_flag', - 'cpp_conditions', - 'cpp_quality', - 
'cpp_reff_pal', - '-'], - 'platform_name': 'NOAA-20', - 'sensor': {'viirs'}, - 'raw_metadata': {'foo': {'bar': np.array([1, 2, 3])}}}) - - expected = {'_FillValue': np.nan, - 'valid_range': np.array([0., 0.00032], dtype=np.float32), - 'ancillary_variables': ['cpp_status_flag', - 'cpp_conditions', - 'cpp_quality', - 'cpp_reff_pal', - '-'], - 'platform_name': 'NOAA-20', - 'sensor': {'viirs'}, - 'raw_metadata': {'foo': {'bar': np.array([1, 2, 3])}}} + mda_objects = ({"_FillValue": np.nan, + "valid_range": np.array([0., 0.00032], dtype=np.float32), + "ancillary_variables": ["cpp_status_flag", + "cpp_conditions", + "cpp_quality", + "cpp_reff_pal", + "-"], + "platform_name": "NOAA-20", + "sensor": {"viirs"}, + "raw_metadata": {"foo": {"bar": np.array([1, 2, 3])}}}, + {"_FillValue": np.nan, + "valid_range": np.array([0., 0.00032], dtype=np.float32), + "ancillary_variables": ["cpp_status_flag", + "cpp_conditions", + "cpp_quality", + "cpp_reff_pal", + "-"], + "platform_name": "NOAA-20", + "sensor": {"viirs"}, + "raw_metadata": {"foo": {"bar": np.array([1, 2, 3])}}}) + + expected = {"_FillValue": np.nan, + "valid_range": np.array([0., 0.00032], dtype=np.float32), + "ancillary_variables": ["cpp_status_flag", + "cpp_conditions", + "cpp_quality", + "cpp_reff_pal", + "-"], + "platform_name": "NOAA-20", + "sensor": {"viirs"}, + "raw_metadata": {"foo": {"bar": np.array([1, 2, 3])}}} from satpy.dataset.metadata import combine_metadata result = combine_metadata(*mda_objects) - assert np.allclose(result.pop('_FillValue'), expected.pop('_FillValue'), equal_nan=True) - assert np.allclose(result.pop('valid_range'), expected.pop('valid_range')) - np.testing.assert_equal(result.pop('raw_metadata'), - expected.pop('raw_metadata')) + assert np.allclose(result.pop("_FillValue"), expected.pop("_FillValue"), equal_nan=True) + assert np.allclose(result.pop("valid_range"), expected.pop("valid_range")) + np.testing.assert_equal(result.pop("raw_metadata"), + expected.pop("raw_metadata")) assert result == expected def test_combine_one_metadata_object(self): """Test combining one metadata object.""" - mda_objects = ({'_FillValue': np.nan, - 'valid_range': np.array([0., 0.00032], dtype=np.float32), - 'ancillary_variables': ['cpp_status_flag', - 'cpp_conditions', - 'cpp_quality', - 'cpp_reff_pal', - '-'], - 'platform_name': 'NOAA-20', - 'sensor': {'viirs'}},) - - expected = {'_FillValue': np.nan, - 'valid_range': np.array([0., 0.00032], dtype=np.float32), - 'ancillary_variables': ['cpp_status_flag', - 'cpp_conditions', - 'cpp_quality', - 'cpp_reff_pal', - '-'], - 'platform_name': 'NOAA-20', - 'sensor': {'viirs'}} + mda_objects = ({"_FillValue": np.nan, + "valid_range": np.array([0., 0.00032], dtype=np.float32), + "ancillary_variables": ["cpp_status_flag", + "cpp_conditions", + "cpp_quality", + "cpp_reff_pal", + "-"], + "platform_name": "NOAA-20", + "sensor": {"viirs"}},) + + expected = {"_FillValue": np.nan, + "valid_range": np.array([0., 0.00032], dtype=np.float32), + "ancillary_variables": ["cpp_status_flag", + "cpp_conditions", + "cpp_quality", + "cpp_reff_pal", + "-"], + "platform_name": "NOAA-20", + "sensor": {"viirs"}} from satpy.dataset.metadata import combine_metadata result = combine_metadata(*mda_objects) - assert np.allclose(result.pop('_FillValue'), expected.pop('_FillValue'), equal_nan=True) - assert np.allclose(result.pop('valid_range'), expected.pop('valid_range')) + assert np.allclose(result.pop("_FillValue"), expected.pop("_FillValue"), equal_nan=True) + assert np.allclose(result.pop("valid_range"), 
expected.pop("valid_range")) assert result == expected @@ -320,29 +320,29 @@ def test_combine_dicts_close(): """Test combination of dictionaries whose values are close.""" from satpy.dataset.metadata import combine_metadata attrs = { - 'raw_metadata': { - 'a': 1, - 'b': 'foo', - 'c': [1, 2, 3], - 'd': { - 'e': np.str_('bar'), - 'f': datetime(2020, 1, 1, 12, 15, 30), - 'g': np.array([1, 2, 3]), + "raw_metadata": { + "a": 1, + "b": "foo", + "c": [1, 2, 3], + "d": { + "e": np.str_("bar"), + "f": datetime(2020, 1, 1, 12, 15, 30), + "g": np.array([1, 2, 3]), }, - 'h': np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)]) + "h": np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)]) } } attrs_close = { - 'raw_metadata': { - 'a': 1 + 1E-12, - 'b': 'foo', - 'c': np.array([1, 2, 3]) + 1E-12, - 'd': { - 'e': np.str_('bar'), - 'f': datetime(2020, 1, 1, 12, 15, 30), - 'g': np.array([1, 2, 3]) + 1E-12 + "raw_metadata": { + "a": 1 + 1E-12, + "b": "foo", + "c": np.array([1, 2, 3]) + 1E-12, + "d": { + "e": np.str_("bar"), + "f": datetime(2020, 1, 1, 12, 15, 30), + "g": np.array([1, 2, 3]) + 1E-12 }, - 'h': np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)]) + "h": np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)]) } } test_metadata = [attrs, attrs_close] @@ -354,22 +354,22 @@ def test_combine_dicts_close(): "test_mda", [ # a/b/c/d different - {'a': np.array([1, 2, 3]), 'd': 123}, - {'a': {'b': np.array([4, 5, 6]), 'c': 1.0}, 'd': 'foo'}, - {'a': {'b': np.array([1, 2, 3]), 'c': 2.0}, 'd': 'foo'}, - {'a': {'b': np.array([1, 2, 3]), 'c': 1.0}, 'd': 'bar'}, + {"a": np.array([1, 2, 3]), "d": 123}, + {"a": {"b": np.array([4, 5, 6]), "c": 1.0}, "d": "foo"}, + {"a": {"b": np.array([1, 2, 3]), "c": 2.0}, "d": "foo"}, + {"a": {"b": np.array([1, 2, 3]), "c": 1.0}, "d": "bar"}, # a/b/c/d type different np.array([1, 2, 3]), - {'a': {'b': 'baz', 'c': 1.0}, 'd': 'foo'}, - {'a': {'b': np.array([1, 2, 3]), 'c': 'baz'}, 'd': 'foo'}, - {'a': {'b': np.array([1, 2, 3]), 'c': 1.0}, 'd': 1.0} + {"a": {"b": "baz", "c": 1.0}, "d": "foo"}, + {"a": {"b": np.array([1, 2, 3]), "c": "baz"}, "d": "foo"}, + {"a": {"b": np.array([1, 2, 3]), "c": 1.0}, "d": 1.0} ] ) def test_combine_dicts_different(test_mda): """Test combination of dictionaries differing in various ways.""" from satpy.dataset.metadata import combine_metadata - mda = {'a': {'b': np.array([1, 2, 3]), 'c': 1.0}, 'd': 'foo'} - test_metadata = [{'raw_metadata': mda}, {'raw_metadata': test_mda}] + mda = {"a": {"b": np.array([1, 2, 3]), "c": 1.0}, "d": "foo"} + test_metadata = [{"raw_metadata": mda}, {"raw_metadata": test_mda}] result = combine_metadata(*test_metadata) assert not result @@ -380,57 +380,57 @@ def test_dataid(): # Check that enum is translated to type. did = make_dataid() - assert issubclass(did._id_keys['calibration']['type'], ValueList) - assert 'enum' not in did._id_keys['calibration'] + assert issubclass(did._id_keys["calibration"]["type"], ValueList) + assert "enum" not in did._id_keys["calibration"] # Check that None is never a valid value - did = make_dataid(name='cheese_shops', resolution=None) - assert 'resolution' not in did - assert 'None' not in did.__repr__() + did = make_dataid(name="cheese_shops", resolution=None) + assert "resolution" not in did + assert "None" not in did.__repr__() with pytest.raises(ValueError): make_dataid(name=None, resolution=1000) # Check that defaults are applied correctly - assert did['modifiers'] == ModifierTuple() + assert did["modifiers"] == ModifierTuple() # Check that from_dict creates a distinct instance... 
- did2 = did.from_dict(dict(name='cheese_shops', resolution=None)) + did2 = did.from_dict(dict(name="cheese_shops", resolution=None)) assert did is not did2 # ...But is equal assert did2 == did # Check that the instance is immutable with pytest.raises(TypeError): - did['resolution'] = 1000 + did["resolution"] = 1000 # Check that a missing required field crashes with pytest.raises(ValueError): make_dataid(resolution=1000) # Check to_dict - assert did.to_dict() == dict(name='cheese_shops', modifiers=tuple()) + assert did.to_dict() == dict(name="cheese_shops", modifiers=tuple()) # Check repr - did = make_dataid(name='VIS008', resolution=111) + did = make_dataid(name="VIS008", resolution=111) assert repr(did) == "DataID(name='VIS008', resolution=111, modifiers=())" # Check inequality - default_id_keys_config = {'name': None, - 'wavelength': { - 'type': WavelengthRange, + default_id_keys_config = {"name": None, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'counts' + "resolution": None, + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "counts" ] }, - 'modifiers': { - 'default': ModifierTuple(), - 'type': ModifierTuple, + "modifiers": { + "default": ModifierTuple(), + "type": ModifierTuple, }, } assert DataID(default_id_keys_config, wavelength=10) != DataID(default_id_keys_config, name="VIS006") @@ -439,44 +439,44 @@ def test_dataid(): def test_dataid_equal_if_enums_different(): """Check that dataids with different enums but same items are equal.""" from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange - id_keys_config1 = {'name': None, - 'wavelength': { - 'type': WavelengthRange, + id_keys_config1 = {"name": None, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'c1', - 'c2', - 'c3', + "resolution": None, + "calibration": { + "enum": [ + "c1", + "c2", + "c3", ] }, - 'modifiers': { - 'default': ModifierTuple(), - 'type': ModifierTuple, + "modifiers": { + "default": ModifierTuple(), + "type": ModifierTuple, }, } - id_keys_config2 = {'name': None, - 'wavelength': { - 'type': WavelengthRange, + id_keys_config2 = {"name": None, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'c1', - 'c1.5', - 'c2', - 'c2.5', - 'c3' + "resolution": None, + "calibration": { + "enum": [ + "c1", + "c1.5", + "c2", + "c2.5", + "c3" ] }, - 'modifiers': { - 'default': ModifierTuple(), - 'type': ModifierTuple, + "modifiers": { + "default": ModifierTuple(), + "type": ModifierTuple, }, } - assert DataID(id_keys_config1, name='ni', calibration='c2') == DataID(id_keys_config2, name="ni", calibration='c2') + assert DataID(id_keys_config1, name="ni", calibration="c2") == DataID(id_keys_config2, name="ni", calibration="c2") def test_dataid_copy(): @@ -497,7 +497,7 @@ def test_dataid_pickle(): import pickle from satpy.tests.utils import make_dataid - did = make_dataid(name='hi', wavelength=(10, 11, 12), resolution=1000, calibration='radiance') + did = make_dataid(name="hi", wavelength=(10, 11, 12), resolution=1000, calibration="radiance") assert did == pickle.loads(pickle.dumps(did)) @@ -512,7 +512,7 @@ def test_dataid_elements_picklable(): import pickle from satpy.tests.utils import make_dataid - did = make_dataid(name='hi', wavelength=(10, 11, 12), resolution=1000, calibration='radiance') + did = make_dataid(name="hi", wavelength=(10, 11, 12), 
resolution=1000, calibration="radiance") for value in did.values(): pickled_value = pickle.loads(pickle.dumps(value)) assert value == pickled_value @@ -525,10 +525,10 @@ def test_dataquery(self): """Test DataQuery objects.""" from satpy.dataset import DataQuery - DataQuery(name='cheese_shops') + DataQuery(name="cheese_shops") # Check repr - did = DataQuery(name='VIS008', resolution=111) + did = DataQuery(name="VIS008", resolution=111) assert repr(did) == "DataQuery(name='VIS008', resolution=111)" # Check inequality @@ -537,7 +537,7 @@ def test_dataquery(self): def test_is_modified(self): """Test that modifications are detected properly.""" from satpy.dataset import DataQuery - d1 = DataQuery(name="a", wavelength=0.2, modifiers=('hej',)) + d1 = DataQuery(name="a", wavelength=0.2, modifiers=("hej",)) d2 = DataQuery(name="a", wavelength=0.2, modifiers=tuple()) assert d1.is_modified() @@ -546,11 +546,11 @@ def test_is_modified(self): def test_create_less_modified_query(self): """Test that modifications are popped correctly.""" from satpy.dataset import DataQuery - d1 = DataQuery(name="a", wavelength=0.2, modifiers=('hej',)) + d1 = DataQuery(name="a", wavelength=0.2, modifiers=("hej",)) d2 = DataQuery(name="a", wavelength=0.2, modifiers=tuple()) - assert not d1.create_less_modified_query()['modifiers'] - assert not d2.create_less_modified_query()['modifiers'] + assert not d1.create_less_modified_query()["modifiers"] + assert not d2.create_less_modified_query()["modifiers"] class TestIDQueryInteractions(unittest.TestCase): @@ -559,56 +559,56 @@ class TestIDQueryInteractions(unittest.TestCase): def setUp(self) -> None: """Set up the test case.""" self.default_id_keys_config = { - 'name': { - 'required': True, + "name": { + "required": True, }, - 'wavelength': { - 'type': WavelengthRange, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'counts' + "resolution": None, + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "counts" ] }, - 'modifiers': { - 'default': ModifierTuple(), - 'type': ModifierTuple, + "modifiers": { + "default": ModifierTuple(), + "type": ModifierTuple, }, } def test_hash_equality(self): """Test hash equality.""" - dq = DataQuery(modifiers=tuple(), name='cheese_shops') - did = DataID(self.default_id_keys_config, name='cheese_shops') + dq = DataQuery(modifiers=tuple(), name="cheese_shops") + did = DataID(self.default_id_keys_config, name="cheese_shops") assert hash(dq) == hash(did) def test_id_filtering(self): """Check did filtering.""" - dq = DataQuery(modifiers=tuple(), name='cheese_shops') - did = DataID(self.default_id_keys_config, name='cheese_shops') - did2 = DataID(self.default_id_keys_config, name='ni') + dq = DataQuery(modifiers=tuple(), name="cheese_shops") + did = DataID(self.default_id_keys_config, name="cheese_shops") + did2 = DataID(self.default_id_keys_config, name="ni") res = dq.filter_dataids([did2, did]) assert len(res) == 1 assert res[0] == did dataid_container = [DataID(self.default_id_keys_config, - name='ds1', + name="ds1", resolution=250, - calibration='reflectance', + calibration="reflectance", modifiers=tuple())] dq = DataQuery(wavelength=0.22, modifiers=tuple()) assert len(dq.filter_dataids(dataid_container)) == 0 dataid_container = [DataID(minimal_default_keys_config, - name='natural_color')] - dq = DataQuery(name='natural_color', resolution=250) + name="natural_color")] + dq = 
DataQuery(name="natural_color", resolution=250) assert len(dq.filter_dataids(dataid_container)) == 1 - dq = make_dsq(wavelength=0.22, modifiers=('mod1',)) - did = make_cid(name='static_image') + dq = make_dsq(wavelength=0.22, modifiers=("mod1",)) + did = make_cid(name="static_image") assert len(dq.filter_dataids([did])) == 0 def test_inequality(self): @@ -617,70 +617,70 @@ def test_inequality(self): def test_sort_dataids(self): """Check dataid sorting.""" - dq = DataQuery(name='cheese_shops', wavelength=2, modifiers='*') - did = DataID(self.default_id_keys_config, name='cheese_shops', wavelength=(1, 2, 3)) - did2 = DataID(self.default_id_keys_config, name='cheese_shops', wavelength=(1.1, 2.1, 3.1)) + dq = DataQuery(name="cheese_shops", wavelength=2, modifiers="*") + did = DataID(self.default_id_keys_config, name="cheese_shops", wavelength=(1, 2, 3)) + did2 = DataID(self.default_id_keys_config, name="cheese_shops", wavelength=(1.1, 2.1, 3.1)) dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did, did2] assert np.allclose(distances, [0, 0.1]) - dq = DataQuery(name='cheese_shops') - did = DataID(self.default_id_keys_config, name='cheese_shops', resolution=200) - did2 = DataID(self.default_id_keys_config, name='cheese_shops', resolution=400) + dq = DataQuery(name="cheese_shops") + did = DataID(self.default_id_keys_config, name="cheese_shops", resolution=200) + did2 = DataID(self.default_id_keys_config, name="cheese_shops", resolution=400) dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did, did2] assert distances[0] < distances[1] - did = DataID(self.default_id_keys_config, name='cheese_shops', calibration='counts') - did2 = DataID(self.default_id_keys_config, name='cheese_shops', calibration='reflectance') + did = DataID(self.default_id_keys_config, name="cheese_shops", calibration="counts") + did2 = DataID(self.default_id_keys_config, name="cheese_shops", calibration="reflectance") dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did2, did] assert distances[0] < distances[1] - did = DataID(self.default_id_keys_config, name='cheese_shops', modifiers=tuple()) - did2 = DataID(self.default_id_keys_config, name='cheese_shops', modifiers=tuple(['out_of_stock'])) + did = DataID(self.default_id_keys_config, name="cheese_shops", modifiers=tuple()) + did2 = DataID(self.default_id_keys_config, name="cheese_shops", modifiers=tuple(["out_of_stock"])) dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did, did2] assert distances[0] < distances[1] def test_sort_dataids_with_different_set_of_keys(self): """Check sorting data ids when the query has a different set of keys.""" - dq = DataQuery(name='solar_zenith_angle', calibration='reflectance') - dids = [DataID(self.default_id_keys_config, name='solar_zenith_angle', resolution=1000, modifiers=()), - DataID(self.default_id_keys_config, name='solar_zenith_angle', resolution=500, modifiers=()), - DataID(self.default_id_keys_config, name='solar_zenith_angle', resolution=250, modifiers=())] + dq = DataQuery(name="solar_zenith_angle", calibration="reflectance") + dids = [DataID(self.default_id_keys_config, name="solar_zenith_angle", resolution=1000, modifiers=()), + DataID(self.default_id_keys_config, name="solar_zenith_angle", resolution=500, modifiers=()), + DataID(self.default_id_keys_config, name="solar_zenith_angle", resolution=250, modifiers=())] dsids, distances = dq.sort_dataids(dids) assert distances[0] < distances[1] assert distances[1] < distances[2] def 
test_seviri_hrv_has_priority_over_vis008(self): """Check that the HRV channel has priority over VIS008 when querying 0.8µm.""" - dids = [DataID(self.default_id_keys_config, name='HRV', - wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit='µm'), resolution=1000.134348869, + dids = [DataID(self.default_id_keys_config, name="HRV", + wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit="µm"), resolution=1000.134348869, calibration="reflectance", modifiers=()), - DataID(self.default_id_keys_config, name='HRV', - wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit='µm'), resolution=1000.134348869, + DataID(self.default_id_keys_config, name="HRV", + wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit="µm"), resolution=1000.134348869, calibration="radiance", modifiers=()), - DataID(self.default_id_keys_config, name='HRV', - wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit='µm'), resolution=1000.134348869, + DataID(self.default_id_keys_config, name="HRV", + wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit="µm"), resolution=1000.134348869, calibration="counts", modifiers=()), - DataID(self.default_id_keys_config, name='VIS006', - wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit='µm'), + DataID(self.default_id_keys_config, name="VIS006", + wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit="µm"), resolution=3000.403165817, calibration="reflectance", modifiers=()), - DataID(self.default_id_keys_config, name='VIS006', - wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit='µm'), + DataID(self.default_id_keys_config, name="VIS006", + wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit="µm"), resolution=3000.403165817, calibration="radiance", modifiers=()), - DataID(self.default_id_keys_config, name='VIS006', - wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit='µm'), + DataID(self.default_id_keys_config, name="VIS006", + wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit="µm"), resolution=3000.403165817, calibration="counts", modifiers=()), - DataID(self.default_id_keys_config, name='VIS008', - wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit='µm'), + DataID(self.default_id_keys_config, name="VIS008", + wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit="µm"), resolution=3000.403165817, calibration="reflectance", modifiers=()), - DataID(self.default_id_keys_config, name='VIS008', - wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit='µm'), + DataID(self.default_id_keys_config, name="VIS008", + wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit="µm"), resolution=3000.403165817, calibration="radiance", modifiers=()), - DataID(self.default_id_keys_config, name='VIS008', - wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit='µm'), + DataID(self.default_id_keys_config, name="VIS008", + wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit="µm"), resolution=3000.403165817, calibration="counts", modifiers=())] dq = DataQuery(wavelength=0.8) res, distances = dq.sort_dataids(dids) @@ -694,14 +694,14 @@ def test_frequency_quadruple_side_band_class_method_convert(): res = frq_qdsb.convert(57.37) assert res == 57.37 - res = frq_qdsb.convert({'central': 57.0, 'side': 0.322, 'sideside': 0.05, 'bandwidth': 0.036}) + res = frq_qdsb.convert({"central": 57.0, "side": 0.322, "sideside": 0.05, "bandwidth": 0.036}) assert res == FrequencyQuadrupleSideBand(57, 0.322, 0.05, 
0.036)


 def test_frequency_quadruple_side_band_channel_str():
     """Test the frequency quadruple side band object: test the band description."""
     frq_qdsb1 = FrequencyQuadrupleSideBand(57.0, 0.322, 0.05, 0.036)
-    frq_qdsb2 = FrequencyQuadrupleSideBand(57000, 322, 50, 36, 'MHz')
+    frq_qdsb2 = FrequencyQuadrupleSideBand(57000, 322, 50, 36, "MHz")

     assert str(frq_qdsb1) == "central=57.0 GHz ±0.322 ±0.05 width=0.036 GHz"
     assert str(frq_qdsb2) == "central=57000 MHz ±322 ±50 width=36 MHz"
@@ -735,8 +735,8 @@ def test_frequency_quadruple_side_band_channel_distances():
     frq_qdsb = FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.036)
     mydist = frq_qdsb.distance([57, 0.322, 0.05, 0.036])

-    frq_dict = {'central': 57, 'side': 0.322, 'sideside': 0.05,
-                'bandwidth': 0.036, 'unit': 'GHz'}
+    frq_dict = {"central": 57, "side": 0.322, "sideside": 0.05,
+                "bandwidth": 0.036, "unit": "GHz"}
     mydist = frq_qdsb.distance(frq_dict)
     assert mydist == np.inf

@@ -769,7 +769,7 @@ def test_frequency_quadruple_side_band_channel_containment():
     frq_qdsb = None
     assert (frq_qdsb in FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.05)) is False

-    assert '57' not in FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.05)
+    assert "57" not in FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.05)


 def test_frequency_double_side_band_class_method_convert():
@@ -779,14 +779,14 @@ def test_frequency_double_side_band_class_method_convert():
     res = frq_dsb.convert(185)
     assert res == 185

-    res = frq_dsb.convert({'central': 185, 'side': 7, 'bandwidth': 2})
+    res = frq_dsb.convert({"central": 185, "side": 7, "bandwidth": 2})
     assert res == FrequencyDoubleSideBand(185, 7, 2)


 def test_frequency_double_side_band_channel_str():
     """Test the frequency double side band object: test the band description."""
     frq_dsb1 = FrequencyDoubleSideBand(183, 7, 2)
-    frq_dsb2 = FrequencyDoubleSideBand(183000, 7000, 2000, 'MHz')
+    frq_dsb2 = FrequencyDoubleSideBand(183000, 7000, 2000, "MHz")

     assert str(frq_dsb1) == "central=183 GHz ±7 width=2 GHz"
     assert str(frq_dsb2) == "central=183000 MHz ±7000 width=2000 MHz"
@@ -846,12 +846,12 @@ def test_frequency_double_side_band_channel_containment():
     assert frq_range not in FrequencyDoubleSideBand(183, 4, 2)

     with pytest.raises(NotImplementedError):
-        assert frq_range in FrequencyDoubleSideBand(183, 6.5, 3, 'MHz')
+        assert frq_range in FrequencyDoubleSideBand(183, 6.5, 3, "MHz")

     frq_range = None
     assert (frq_range in FrequencyDoubleSideBand(183, 3, 2)) is False

-    assert '183' not in FrequencyDoubleSideBand(183, 3, 2)
+    assert "183" not in FrequencyDoubleSideBand(183, 3, 2)


 def test_frequency_range_class_method_convert():
@@ -861,14 +861,14 @@ def test_frequency_range_class_method_convert():
     res = frq_range.convert(89)
     assert res == 89

-    res = frq_range.convert({'central': 89, 'bandwidth': 2})
+    res = frq_range.convert({"central": 89, "bandwidth": 2})
     assert res == FrequencyRange(89, 2)


 def test_frequency_range_class_method_str():
     """Test the frequency range object: test the band description."""
     frq_range1 = FrequencyRange(89, 2)
-    frq_range2 = FrequencyRange(89000, 2000, 'MHz')
+    frq_range2 = FrequencyRange(89000, 2000, "MHz")

     assert str(frq_range1) == "central=89 GHz width=2 GHz"
     assert str(frq_range2) == "central=89000 MHz width=2000 MHz"
@@ -882,7 +882,7 @@ def test_frequency_range_channel_equality():
     assert 1.2 != frqr
     assert frqr == (2, 1)

-    assert frqr == (2, 1, 'GHz')
+    assert frqr == (2, 1, "GHz")


 def test_frequency_range_channel_containment():
@@ -892,12 +892,12 @@ def test_frequency_range_channel_containment():
     assert 2.8 not in frqr

     with pytest.raises(NotImplementedError):
-        assert frqr in FrequencyRange(89, 2, 'MHz')
+        assert frqr in FrequencyRange(89, 2, "MHz")

     frqr = None
     assert (frqr in FrequencyRange(89, 2)) is False

-    assert '89' not in FrequencyRange(89, 2)
+    assert "89" not in FrequencyRange(89, 2)


 def test_frequency_range_channel_distances():
@@ -920,7 +920,7 @@ def test_wavelength_range():
     assert 1.2 == wr
     assert .9 != wr
     assert wr == (1, 2, 3)
-    assert wr == (1, 2, 3, 'µm')
+    assert wr == (1, 2, 3, "µm")

     # Check containement
     assert 1.2 in wr
@@ -929,11 +929,11 @@ def test_wavelength_range():
     assert WavelengthRange(1.1, 2.2, 3.3) not in wr
     assert WavelengthRange(1.2, 2, 2.8) in wr
     assert WavelengthRange(10, 20, 30) not in wr
-    assert 'bla' not in wr
+    assert "bla" not in wr
     assert None not in wr
-    wr2 = WavelengthRange(1, 2, 3, 'µm')
+    wr2 = WavelengthRange(1, 2, 3, "µm")
     assert wr2 in wr
-    wr2 = WavelengthRange(1, 2, 3, 'nm')
+    wr2 = WavelengthRange(1, 2, 3, "nm")
     with pytest.raises(NotImplementedError):
         wr2 in wr  # noqa

diff --git a/satpy/tests/test_demo.py b/satpy/tests/test_demo.py
index 976b6bbd6e..7ed3a3ac43 100644
--- a/satpy/tests/test_demo.py
+++ b/satpy/tests/test_demo.py
@@ -58,7 +58,7 @@ def __call__(self, pattern):
         except IndexError:
             num_results = self.num_results[-1]
         self.current_call += 1
-        return [pattern + '.{:03d}'.format(idx) for idx in range(num_results)]
+        return [pattern + ".{:03d}".format(idx) for idx in range(num_results)]


 class TestDemo(unittest.TestCase):
@@ -80,26 +80,26 @@ def tearDown(self):
         except OSError:
             pass

-    @mock.patch('satpy.demo._google_cloud_platform.gcsfs')
+    @mock.patch("satpy.demo._google_cloud_platform.gcsfs")
     def test_get_us_midlatitude_cyclone_abi(self, gcsfs_mod):
         """Test data download function."""
         from satpy.demo import get_us_midlatitude_cyclone_abi
         gcsfs_mod.GCSFileSystem = mock.MagicMock()
         gcsfs_inst = mock.MagicMock()
         gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst
-        gcsfs_inst.glob.return_value = ['a.nc', 'b.nc']
+        gcsfs_inst.glob.return_value = ["a.nc", "b.nc"]
         # expected 16 files, got 2
         self.assertRaises(AssertionError, get_us_midlatitude_cyclone_abi)
         # unknown access method
-        self.assertRaises(NotImplementedError, get_us_midlatitude_cyclone_abi, method='unknown')
+        self.assertRaises(NotImplementedError, get_us_midlatitude_cyclone_abi, method="unknown")

-        gcsfs_inst.glob.return_value = ['a.nc'] * 16
+        gcsfs_inst.glob.return_value = ["a.nc"] * 16
         filenames = get_us_midlatitude_cyclone_abi()
-        expected = os.path.join('.', 'abi_l1b', '20190314_us_midlatitude_cyclone', 'a.nc')
+        expected = os.path.join(".", "abi_l1b", "20190314_us_midlatitude_cyclone", "a.nc")
         for fn in filenames:
             self.assertEqual(expected, fn)

-    @mock.patch('satpy.demo._google_cloud_platform.gcsfs')
+    @mock.patch("satpy.demo._google_cloud_platform.gcsfs")
     def test_get_hurricane_florence_abi(self, gcsfs_mod):
         """Test data download function."""
         from satpy.demo import get_hurricane_florence_abi
@@ -110,7 +110,7 @@ def test_get_hurricane_florence_abi(self, gcsfs_mod):
         gcsfs_inst.glob.side_effect = _GlobHelper([5, 0])
         # expected 16 files * 10 frames, got 16 * 5
         self.assertRaises(AssertionError, get_hurricane_florence_abi)
-        self.assertRaises(NotImplementedError, get_hurricane_florence_abi, method='unknown')
+        self.assertRaises(NotImplementedError, get_hurricane_florence_abi, method="unknown")

         gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16)
         filenames = get_hurricane_florence_abi()
@@ -132,63 +132,63 @@ class
TestGCPUtils(unittest.TestCase): """Test Google Cloud Platform utilities.""" - @mock.patch('satpy.demo._google_cloud_platform.urlopen') + @mock.patch("satpy.demo._google_cloud_platform.urlopen") def test_is_gcp_instance(self, uo): """Test is_google_cloud_instance.""" from satpy.demo._google_cloud_platform import URLError, is_google_cloud_instance uo.side_effect = URLError("Test Environment") self.assertFalse(is_google_cloud_instance()) - @mock.patch('satpy.demo._google_cloud_platform.gcsfs') + @mock.patch("satpy.demo._google_cloud_platform.gcsfs") def test_get_bucket_files(self, gcsfs_mod): """Test get_bucket_files basic cases.""" from satpy.demo._google_cloud_platform import get_bucket_files gcsfs_mod.GCSFileSystem = mock.MagicMock() gcsfs_inst = mock.MagicMock() gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst - gcsfs_inst.glob.return_value = ['a.nc', 'b.nc'] - filenames = get_bucket_files('*.nc', '.') - expected = [os.path.join('.', 'a.nc'), os.path.join('.', 'b.nc')] + gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] + filenames = get_bucket_files("*.nc", ".") + expected = [os.path.join(".", "a.nc"), os.path.join(".", "b.nc")] self.assertEqual(expected, filenames) gcsfs_inst.glob.side_effect = _GlobHelper(10) - filenames = get_bucket_files(['*.nc', '*.txt'], '.', pattern_slice=slice(2, 5)) + filenames = get_bucket_files(["*.nc", "*.txt"], ".", pattern_slice=slice(2, 5)) self.assertEqual(len(filenames), 3 * 2) gcsfs_inst.glob.side_effect = None # reset mock side effect - gcsfs_inst.glob.return_value = ['a.nc', 'b.nc'] - self.assertRaises(OSError, get_bucket_files, '*.nc', 'does_not_exist') + gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] + self.assertRaises(OSError, get_bucket_files, "*.nc", "does_not_exist") - open('a.nc', 'w').close() # touch the file + open("a.nc", "w").close() # touch the file gcsfs_inst.get.reset_mock() - gcsfs_inst.glob.return_value = ['a.nc'] - filenames = get_bucket_files('*.nc', '.') - self.assertEqual([os.path.join('.', 'a.nc')], filenames) + gcsfs_inst.glob.return_value = ["a.nc"] + filenames = get_bucket_files("*.nc", ".") + self.assertEqual([os.path.join(".", "a.nc")], filenames) gcsfs_inst.get.assert_not_called() # force redownload gcsfs_inst.get.reset_mock() - gcsfs_inst.glob.return_value = ['a.nc'] - filenames = get_bucket_files('*.nc', '.', force=True) - self.assertEqual([os.path.join('.', 'a.nc')], filenames) + gcsfs_inst.glob.return_value = ["a.nc"] + filenames = get_bucket_files("*.nc", ".", force=True) + self.assertEqual([os.path.join(".", "a.nc")], filenames) gcsfs_inst.get.assert_called_once() # if we don't get any results then we expect an exception gcsfs_inst.get.reset_mock() gcsfs_inst.glob.return_value = [] - self.assertRaises(OSError, get_bucket_files, '*.nc', '.') + self.assertRaises(OSError, get_bucket_files, "*.nc", ".") - @mock.patch('satpy.demo._google_cloud_platform.gcsfs', None) + @mock.patch("satpy.demo._google_cloud_platform.gcsfs", None) def test_no_gcsfs(self): """Test that 'gcsfs' is required.""" from satpy.demo._google_cloud_platform import get_bucket_files - self.assertRaises(RuntimeError, get_bucket_files, '*.nc', '.') + self.assertRaises(RuntimeError, get_bucket_files, "*.nc", ".") class TestAHIDemoDownload: """Test the AHI demo data download.""" - @mock.patch.dict(sys.modules, {'s3fs': mock.MagicMock()}) + @mock.patch.dict(sys.modules, {"s3fs": mock.MagicMock()}) def test_ahi_full_download(self): """Test that the himawari download works as expected.""" from tempfile import gettempdir @@ -197,7 +197,7 @@ def 
test_ahi_full_download(self): files = download_typhoon_surigae_ahi(base_dir=gettempdir()) assert len(files) == 160 - @mock.patch.dict(sys.modules, {'s3fs': mock.MagicMock()}) + @mock.patch.dict(sys.modules, {"s3fs": mock.MagicMock()}) def test_ahi_partial_download(self): """Test that the himawari download works as expected.""" from tempfile import gettempdir @@ -274,7 +274,7 @@ def iter_content(self, chunk_size): x = bytes_io.read(chunk_size) -@mock.patch('satpy.demo.utils.requests') +@mock.patch("satpy.demo.utils.requests") class TestVIIRSSDRDemoDownload: """Test VIIRS SDR downloading.""" @@ -415,7 +415,7 @@ def setUp(self): self.subdir = os.path.join(".", "seviri_hrit", "20180228_1500") self.files = generate_subset_of_filenames(base_dir=self.subdir) - self.patcher = mock.patch('satpy.demo.utils.requests.get', autospec=True) + self.patcher = mock.patch("satpy.demo.utils.requests.get", autospec=True) self.get_mock = self.patcher.start() _FakeRequest.requests_log = [] @@ -450,12 +450,12 @@ def test_download_a_subset_of_files(self): with mock_filesystem(): files = download_seviri_hrit_20180228_1500(subset={"HRV": [1, 2, 3], "IR_108": [1, 2], "EPI": None}) assert set(files) == set(os.path.join(self.subdir, filename) for filename in [ - 'H-000-MSG4__-MSG4________-_________-EPI______-201802281500-__', - 'H-000-MSG4__-MSG4________-HRV______-000001___-201802281500-__', - 'H-000-MSG4__-MSG4________-HRV______-000002___-201802281500-__', - 'H-000-MSG4__-MSG4________-HRV______-000003___-201802281500-__', - 'H-000-MSG4__-MSG4________-IR_108___-000001___-201802281500-__', - 'H-000-MSG4__-MSG4________-IR_108___-000002___-201802281500-__', + "H-000-MSG4__-MSG4________-_________-EPI______-201802281500-__", + "H-000-MSG4__-MSG4________-HRV______-000001___-201802281500-__", + "H-000-MSG4__-MSG4________-HRV______-000002___-201802281500-__", + "H-000-MSG4__-MSG4________-HRV______-000003___-201802281500-__", + "H-000-MSG4__-MSG4________-IR_108___-000001___-201802281500-__", + "H-000-MSG4__-MSG4________-IR_108___-000002___-201802281500-__", ]) def test_do_not_download_same_file_twice(self): diff --git a/satpy/tests/test_dependency_tree.py b/satpy/tests/test_dependency_tree.py index 415a927cc5..57b718963f 100644 --- a/satpy/tests/test_dependency_tree.py +++ b/satpy/tests/test_dependency_tree.py @@ -59,7 +59,7 @@ def setUp(self): self.dependency_tree.add_leaf(dependency_2_1, node_dependency_2) # We don't need to add the unmodified dependency a second time. 
- dependency_3 = make_dataid(name='ds2', resolution=250, calibration="reflectance", modifiers=tuple()) + dependency_3 = make_dataid(name="ds2", resolution=250, calibration="reflectance", modifiers=tuple()) self.dependency_tree.add_leaf(dependency_3, node_composite_1) @staticmethod @@ -78,7 +78,7 @@ def test_copy_preserves_all_nodes(self): new_dependency_tree.trunk()) # make sure that we can get access to sub-nodes - c13_id = make_cid(name='comp13') + c13_id = make_cid(name="comp13") assert self._nodes_equal(self.dependency_tree.trunk(limit_nodes_to=[c13_id]), new_dependency_tree.trunk(limit_nodes_to=[c13_id])) @@ -104,14 +104,14 @@ class TestMissingDependencies(unittest.TestCase): def test_new_missing_dependencies(self): """Test new MissingDependencies.""" from satpy.node import MissingDependencies - error = MissingDependencies('bla') - assert error.missing_dependencies == 'bla' + error = MissingDependencies("bla") + assert error.missing_dependencies == "bla" def test_new_missing_dependencies_with_message(self): """Test new MissingDependencies with a message.""" from satpy.node import MissingDependencies - error = MissingDependencies('bla', "This is a message") - assert 'This is a message' in str(error) + error = MissingDependencies("bla", "This is a message") + assert "This is a message" in str(error) class TestMultipleResolutionSameChannelDependency(unittest.TestCase): @@ -126,27 +126,27 @@ def test_modis_overview_1000m(self): from satpy.modifiers.geometry import SunZenithCorrector from satpy.readers.yaml_reader import FileYAMLReader - config_file = os.path.join(PACKAGE_CONFIG_PATH, 'readers', 'modis_l1b.yaml') + config_file = os.path.join(PACKAGE_CONFIG_PATH, "readers", "modis_l1b.yaml") self.reader_instance = FileYAMLReader.from_config_files(config_file) - overview = {'_satpy_id': make_dataid(name='overview'), - 'name': 'overview', - 'optional_prerequisites': [], - 'prerequisites': [DataQuery(name='1', modifiers=('sunz_corrected',)), - DataQuery(name='2', modifiers=('sunz_corrected',)), - DataQuery(name='31')], - 'standard_name': 'overview'} - compositors = {'modis': DatasetDict()} - compositors['modis']['overview'] = GenericCompositor(**overview) - - modifiers = {'modis': {'sunz_corrected': (SunZenithCorrector, - {'optional_prerequisites': ['solar_zenith_angle'], - 'name': 'sunz_corrected', - 'prerequisites': []})}} - dep_tree = DependencyTree({'modis_l1b': self.reader_instance}, compositors, modifiers) - dep_tree.populate_with_keys({'overview'}, DataQuery(resolution=1000)) + overview = {"_satpy_id": make_dataid(name="overview"), + "name": "overview", + "optional_prerequisites": [], + "prerequisites": [DataQuery(name="1", modifiers=("sunz_corrected",)), + DataQuery(name="2", modifiers=("sunz_corrected",)), + DataQuery(name="31")], + "standard_name": "overview"} + compositors = {"modis": DatasetDict()} + compositors["modis"]["overview"] = GenericCompositor(**overview) + + modifiers = {"modis": {"sunz_corrected": (SunZenithCorrector, + {"optional_prerequisites": ["solar_zenith_angle"], + "name": "sunz_corrected", + "prerequisites": []})}} + dep_tree = DependencyTree({"modis_l1b": self.reader_instance}, compositors, modifiers) + dep_tree.populate_with_keys({"overview"}, DataQuery(resolution=1000)) for key in dep_tree._all_nodes.keys(): - assert key.get('resolution', 1000) == 1000 + assert key.get("resolution", 1000) == 1000 class TestMultipleSensors(unittest.TestCase): @@ -194,18 +194,18 @@ def __call__(self, *args, **kwargs): # create the dictionary one element at a time to force 
"incorrect" order # (sensor2 comes before sensor1, but results should be alphabetical order) compositors = {} - compositors['sensor2'] = s2_comps = DatasetDict() - compositors['sensor1'] = s1_comps = DatasetDict() - c1_s2_id = make_cid(name='comp1', resolution=1000) - c1_s1_id = make_cid(name='comp1', resolution=500) + compositors["sensor2"] = s2_comps = DatasetDict() + compositors["sensor1"] = s1_comps = DatasetDict() + c1_s2_id = make_cid(name="comp1", resolution=1000) + c1_s1_id = make_cid(name="comp1", resolution=500) s2_comps[c1_s2_id] = comp1_sensor2 s1_comps[c1_s1_id] = comp1_sensor1 modifiers = {} - modifiers['sensor2'] = s2_mods = {} - modifiers['sensor1'] = s1_mods = {} - s2_mods['mod1'] = (_FakeModifier, {'ret_val': 2}) - s1_mods['mod1'] = (_FakeModifier, {'ret_val': 1}) + modifiers["sensor2"] = s2_mods = {} + modifiers["sensor1"] = s1_mods = {} + s2_mods["mod1"] = (_FakeModifier, {"ret_val": 2}) + s1_mods["mod1"] = (_FakeModifier, {"ret_val": 1}) self.dependency_tree = DependencyTree({}, compositors, modifiers) # manually add a leaf so we don't have to mock a reader @@ -214,7 +214,7 @@ def __call__(self, *args, **kwargs): def test_compositor_loaded_sensor_order(self): """Test that a compositor is loaded from the first alphabetical sensor.""" - self.dependency_tree.populate_with_keys({'comp1'}) + self.dependency_tree.populate_with_keys({"comp1"}) comp_nodes = self.dependency_tree.trunk() self.assertEqual(len(comp_nodes), 1) self.assertEqual(comp_nodes[0].name["resolution"], 500) @@ -222,7 +222,7 @@ def test_compositor_loaded_sensor_order(self): def test_modifier_loaded_sensor_order(self): """Test that a modifier is loaded from the first alphabetical sensor.""" from satpy import DataQuery - dq = DataQuery(name='ds5', modifiers=('mod1',)) + dq = DataQuery(name="ds5", modifiers=("mod1",)) self.dependency_tree.populate_with_keys({dq}) comp_nodes = self.dependency_tree.trunk() self.assertEqual(len(comp_nodes), 1) diff --git a/satpy/tests/test_file_handlers.py b/satpy/tests/test_file_handlers.py index 46f4a16784..4282bc86b1 100644 --- a/satpy/tests/test_file_handlers.py +++ b/satpy/tests/test_file_handlers.py @@ -32,7 +32,7 @@ def test_open_dataset(): """Test xr.open_dataset wrapper.""" fn = mock.MagicMock() str_file_path = "path/to/file.nc" - with mock.patch('xarray.open_dataset') as xr_open: + with mock.patch("xarray.open_dataset") as xr_open: _ = open_dataset(fn, decode_cf=True, chunks=500) fn.open.assert_called_once_with() xr_open.assert_called_once_with(fn.open(), decode_cf=True, chunks=500) @@ -48,105 +48,105 @@ class TestBaseFileHandler(unittest.TestCase): def setUp(self): """Set up the test.""" self.fh = BaseFileHandler( - 'filename', {'filename_info': 'bla'}, 'filetype_info') + "filename", {"filename_info": "bla"}, "filetype_info") def test_combine_times(self): """Combine times.""" - info1 = {'start_time': 1} - info2 = {'start_time': 2} + info1 = {"start_time": 1} + info2 = {"start_time": 2} res = self.fh.combine_info([info1, info2]) - exp = {'start_time': 1} + exp = {"start_time": 1} self.assertDictEqual(res, exp) res = self.fh.combine_info([info2, info1]) - exp = {'start_time': 1} + exp = {"start_time": 1} self.assertDictEqual(res, exp) - info1 = {'end_time': 1} - info2 = {'end_time': 2} + info1 = {"end_time": 1} + info2 = {"end_time": 2} res = self.fh.combine_info([info1, info2]) - exp = {'end_time': 2} + exp = {"end_time": 2} self.assertDictEqual(res, exp) res = self.fh.combine_info([info2, info1]) - exp = {'end_time': 2} + exp = {"end_time": 2} self.assertDictEqual(res, 
exp) def test_combine_orbits(self): """Combine orbits.""" - info1 = {'start_orbit': 1} - info2 = {'start_orbit': 2} + info1 = {"start_orbit": 1} + info2 = {"start_orbit": 2} res = self.fh.combine_info([info1, info2]) - exp = {'start_orbit': 1} + exp = {"start_orbit": 1} self.assertDictEqual(res, exp) res = self.fh.combine_info([info2, info1]) - exp = {'start_orbit': 1} + exp = {"start_orbit": 1} self.assertDictEqual(res, exp) - info1 = {'end_orbit': 1} - info2 = {'end_orbit': 2} + info1 = {"end_orbit": 1} + info2 = {"end_orbit": 2} res = self.fh.combine_info([info1, info2]) - exp = {'end_orbit': 2} + exp = {"end_orbit": 2} self.assertDictEqual(res, exp) res = self.fh.combine_info([info2, info1]) - exp = {'end_orbit': 2} + exp = {"end_orbit": 2} self.assertDictEqual(res, exp) - @mock.patch('satpy.readers.file_handlers.SwathDefinition') + @mock.patch("satpy.readers.file_handlers.SwathDefinition") def test_combine_area(self, sdef): """Combine area.""" area1 = mock.MagicMock() area1.lons = np.arange(5) area1.lats = np.arange(5) - area1.name = 'area1' + area1.name = "area1" area2 = mock.MagicMock() area2.lons = np.arange(5) area2.lats = np.arange(5) - area2.name = 'area2' + area2.name = "area2" - info1 = {'area': area1} - info2 = {'area': area2} + info1 = {"area": area1} + info2 = {"area": area2} self.fh.combine_info([info1, info2]) - self.assertTupleEqual(sdef.call_args[1]['lons'].shape, (2, 5)) - self.assertTupleEqual(sdef.call_args[1]['lats'].shape, (2, 5)) - self.assertEqual(sdef.return_value.name, 'area1_area2') + self.assertTupleEqual(sdef.call_args[1]["lons"].shape, (2, 5)) + self.assertTupleEqual(sdef.call_args[1]["lats"].shape, (2, 5)) + self.assertEqual(sdef.return_value.name, "area1_area2") def test_combine_orbital_parameters(self): """Combine orbital parameters.""" - info1 = {'orbital_parameters': {'projection_longitude': 1, - 'projection_latitude': 1, - 'projection_altitude': 1, - 'satellite_nominal_longitude': 1, - 'satellite_nominal_latitude': 1, - 'satellite_actual_longitude': 1, - 'satellite_actual_latitude': 1, - 'satellite_actual_altitude': 1, - 'nadir_longitude': 1, - 'nadir_latitude': 1, - 'only_in_1': False}} - info2 = {'orbital_parameters': {'projection_longitude': 2, - 'projection_latitude': 2, - 'projection_altitude': 2, - 'satellite_nominal_longitude': 2, - 'satellite_nominal_latitude': 2, - 'satellite_actual_longitude': 2, - 'satellite_actual_latitude': 2, - 'satellite_actual_altitude': 2, - 'nadir_longitude': 2, - 'nadir_latitude': 2, - 'only_in_2': True}} - exp = {'orbital_parameters': {'projection_longitude': 1.5, - 'projection_latitude': 1.5, - 'projection_altitude': 1.5, - 'satellite_nominal_longitude': 1.5, - 'satellite_nominal_latitude': 1.5, - 'satellite_actual_longitude': 1.5, - 'satellite_actual_latitude': 1.5, - 'satellite_actual_altitude': 1.5, - 'nadir_longitude': 1.5, - 'nadir_latitude': 1.5, - 'only_in_1': False, - 'only_in_2': True}} + info1 = {"orbital_parameters": {"projection_longitude": 1, + "projection_latitude": 1, + "projection_altitude": 1, + "satellite_nominal_longitude": 1, + "satellite_nominal_latitude": 1, + "satellite_actual_longitude": 1, + "satellite_actual_latitude": 1, + "satellite_actual_altitude": 1, + "nadir_longitude": 1, + "nadir_latitude": 1, + "only_in_1": False}} + info2 = {"orbital_parameters": {"projection_longitude": 2, + "projection_latitude": 2, + "projection_altitude": 2, + "satellite_nominal_longitude": 2, + "satellite_nominal_latitude": 2, + "satellite_actual_longitude": 2, + "satellite_actual_latitude": 2, + 
"satellite_actual_altitude": 2, + "nadir_longitude": 2, + "nadir_latitude": 2, + "only_in_2": True}} + exp = {"orbital_parameters": {"projection_longitude": 1.5, + "projection_latitude": 1.5, + "projection_altitude": 1.5, + "satellite_nominal_longitude": 1.5, + "satellite_nominal_latitude": 1.5, + "satellite_actual_longitude": 1.5, + "satellite_actual_latitude": 1.5, + "satellite_actual_altitude": 1.5, + "nadir_longitude": 1.5, + "nadir_latitude": 1.5, + "only_in_1": False, + "only_in_2": True}} res = self.fh.combine_info([info1, info2]) self.assertDictEqual(res, exp) @@ -159,34 +159,34 @@ def test_combine_orbital_parameters(self): def test_combine_time_parameters(self): """Combine times in 'time_parameters.""" time_params1 = { - 'nominal_start_time': datetime(2020, 1, 1, 12, 0, 0), - 'nominal_end_time': datetime(2020, 1, 1, 12, 2, 30), - 'observation_start_time': datetime(2020, 1, 1, 12, 0, 2, 23821), - 'observation_end_time': datetime(2020, 1, 1, 12, 2, 23, 12348), + "nominal_start_time": datetime(2020, 1, 1, 12, 0, 0), + "nominal_end_time": datetime(2020, 1, 1, 12, 2, 30), + "observation_start_time": datetime(2020, 1, 1, 12, 0, 2, 23821), + "observation_end_time": datetime(2020, 1, 1, 12, 2, 23, 12348), } time_params2 = {} time_shift = timedelta(seconds=1.5) for key, value in time_params1.items(): time_params2[key] = value + time_shift res = self.fh.combine_info([ - {'time_parameters': time_params1}, - {'time_parameters': time_params2} + {"time_parameters": time_params1}, + {"time_parameters": time_params2} ]) - res_time_params = res['time_parameters'] - assert res_time_params['nominal_start_time'] == datetime(2020, 1, 1, 12, 0, 0) - assert res_time_params['nominal_end_time'] == datetime(2020, 1, 1, 12, 2, 31, 500000) - assert res_time_params['observation_start_time'] == datetime(2020, 1, 1, 12, 0, 2, 23821) - assert res_time_params['observation_end_time'] == datetime(2020, 1, 1, 12, 2, 24, 512348) + res_time_params = res["time_parameters"] + assert res_time_params["nominal_start_time"] == datetime(2020, 1, 1, 12, 0, 0) + assert res_time_params["nominal_end_time"] == datetime(2020, 1, 1, 12, 2, 31, 500000) + assert res_time_params["observation_start_time"] == datetime(2020, 1, 1, 12, 0, 2, 23821) + assert res_time_params["observation_end_time"] == datetime(2020, 1, 1, 12, 2, 24, 512348) def test_file_is_kept_intact(self): """Test that the file object passed (string, path, or other) is kept intact.""" open_file = mock.MagicMock() - bfh = BaseFileHandler(open_file, {'filename_info': 'bla'}, 'filetype_info') + bfh = BaseFileHandler(open_file, {"filename_info": "bla"}, "filetype_info") assert bfh.filename == open_file from pathlib import Path - filename = Path('/bla/bla.nc') - bfh = BaseFileHandler(filename, {'filename_info': 'bla'}, 'filetype_info') + filename = Path("/bla/bla.nc") + bfh = BaseFileHandler(filename, {"filename_info": "bla"}, "filetype_info") assert isinstance(bfh.filename, Path) diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index c21a514808..04d32b7ecc 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -30,43 +30,43 @@ def _sunz_area_def(): """Get fake area for testing sunz generation.""" - area = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 2, 2, + area = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 2, 2, (-2000, -2000, 2000, 2000)) return area def _sunz_bigger_area_def(): """Get area that is twice the size of 'sunz_area_def'.""" - bigger_area = AreaDefinition('test', 'test', 'test', - 
{'proj': 'merc'}, 4, 4, + bigger_area = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 4, 4, (-2000, -2000, 2000, 2000)) return bigger_area def _sunz_stacked_area_def(): """Get fake stacked area for testing sunz generation.""" - area1 = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 2, 1, + area1 = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 2, 1, (-2000, 0, 2000, 2000)) - area2 = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 2, 1, + area2 = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 2, 1, (-2000, -2000, 2000, 0)) return StackedAreaDefinition(area1, area2) def _shared_sunz_attrs(area_def): - attrs = {'area': area_def, - 'start_time': datetime(2018, 1, 1, 18), - 'modifiers': tuple(), - 'name': 'test_vis'} + attrs = {"area": area_def, + "start_time": datetime(2018, 1, 1, 18), + "modifiers": tuple(), + "name": "test_vis"} return attrs def _get_ds1(attrs): ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64), - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) return ds1 @@ -89,8 +89,8 @@ def sunz_ds2(): """Generate larger fake dataset for sunz tests.""" attrs = _shared_sunz_attrs(_sunz_bigger_area_def()) ds2 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64), - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 0.5, 1, 1.5], 'x': [0, 0.5, 1, 1.5]}) + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 0.5, 1, 1.5], "x": [0, 0.5, 1, 1.5]}) return ds2 @@ -100,9 +100,9 @@ def sunz_sza(): sza = xr.DataArray( np.rad2deg(np.arccos(da.from_array([[0.0149581333, 0.0146694376], [0.0150812684, 0.0147925727]], chunks=2))), - attrs={'area': _sunz_area_def()}, - dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}, + attrs={"area": _sunz_area_def()}, + dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}, ) return sza @@ -113,47 +113,47 @@ class TestSunZenithCorrector: def test_basic_default_not_provided(self, sunz_ds1): """Test default limits when SZA isn't provided.""" from satpy.modifiers.geometry import SunZenithCorrector - comp = SunZenithCorrector(name='sza_test', modifiers=tuple()) - res = comp((sunz_ds1,), test_attr='test') + comp = SunZenithCorrector(name="sza_test", modifiers=tuple()) + res = comp((sunz_ds1,), test_attr="test") np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) - assert 'y' in res.coords - assert 'x' in res.coords - ds1 = sunz_ds1.copy().drop_vars(('y', 'x')) - res = comp((ds1,), test_attr='test') + assert "y" in res.coords + assert "x" in res.coords + ds1 = sunz_ds1.copy().drop_vars(("y", "x")) + res = comp((ds1,), test_attr="test") np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) - assert 'y' not in res.coords - assert 'x' not in res.coords + assert "y" not in res.coords + assert "x" not in res.coords def test_basic_lims_not_provided(self, sunz_ds1): """Test custom limits when SZA isn't provided.""" from satpy.modifiers.geometry import SunZenithCorrector - comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90) - res = comp((sunz_ds1,), test_attr='test') + comp = SunZenithCorrector(name="sza_test", modifiers=tuple(), correction_limit=90) + res = comp((sunz_ds1,), test_attr="test") np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]])) @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")]) def 
test_basic_default_provided(self, data_arr, sunz_sza): """Test default limits when SZA is provided.""" from satpy.modifiers.geometry import SunZenithCorrector - comp = SunZenithCorrector(name='sza_test', modifiers=tuple()) - res = comp((data_arr, sunz_sza), test_attr='test') + comp = SunZenithCorrector(name="sza_test", modifiers=tuple()) + res = comp((data_arr, sunz_sza), test_attr="test") np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")]) def test_basic_lims_provided(self, data_arr, sunz_sza): """Test custom limits when SZA is provided.""" from satpy.modifiers.geometry import SunZenithCorrector - comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90) - res = comp((data_arr, sunz_sza), test_attr='test') + comp = SunZenithCorrector(name="sza_test", modifiers=tuple(), correction_limit=90) + res = comp((data_arr, sunz_sza), test_attr="test") np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]])) def test_imcompatible_areas(self, sunz_ds2, sunz_sza): """Test sunz correction on incompatible areas.""" from satpy.composites import IncompatibleAreas from satpy.modifiers.geometry import SunZenithCorrector - comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90) + comp = SunZenithCorrector(name="sza_test", modifiers=tuple(), correction_limit=90) with pytest.raises(IncompatibleAreas): - comp((sunz_ds2, sunz_sza), test_attr='test') + comp((sunz_ds2, sunz_sza), test_attr="test") class TestNIRReflectance(unittest.TestCase): @@ -167,24 +167,24 @@ def setUp(self): area = mock.MagicMock(get_lonlats=self.get_lonlats) self.start_time = 1 - self.metadata = {'platform_name': 'Meteosat-11', - 'sensor': 'seviri', - 'name': 'IR_039', - 'area': area, - 'start_time': self.start_time} + self.metadata = {"platform_name": "Meteosat-11", + "sensor": "seviri", + "name": "IR_039", + "area": area, + "start_time": self.start_time} nir_arr = np.random.random((2, 2)) - self.nir = xr.DataArray(da.from_array(nir_arr), dims=['y', 'x']) + self.nir = xr.DataArray(da.from_array(nir_arr), dims=["y", "x"]) self.nir.attrs.update(self.metadata) ir_arr = 100 * np.random.random((2, 2)) - self.ir_ = xr.DataArray(da.from_array(ir_arr), dims=['y', 'x']) - self.ir_.attrs['area'] = area + self.ir_ = xr.DataArray(da.from_array(ir_arr), dims=["y", "x"]) + self.ir_.attrs["area"] = area self.sunz_arr = 100 * np.random.random((2, 2)) - self.sunz = xr.DataArray(da.from_array(self.sunz_arr), dims=['y', 'x']) - self.sunz.attrs['standard_name'] = 'solar_zenith_angle' - self.sunz.attrs['area'] = area + self.sunz = xr.DataArray(da.from_array(self.sunz_arr), dims=["y", "x"]) + self.sunz.attrs["standard_name"] = "solar_zenith_angle" + self.sunz.attrs["area"] = area self.da_sunz = da.from_array(self.sunz_arr) refl_arr = np.random.random((2, 2)) @@ -200,9 +200,9 @@ def fake_refl_from_tbs(self, sun_zenith, da_nir, da_tb11, tb_ir_co2=None): return self.refl_with_co2 return self.refl - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_provide_sunz_no_co2(self, calculator, apply_modifier_info, sza): """Test 
NIR reflectance compositor provided only sunz.""" calculator.return_value = mock.MagicMock( @@ -210,18 +210,18 @@ def test_provide_sunz_no_co2(self, calculator, apply_modifier_info, sza): sza.return_value = self.da_sunz from satpy.modifiers.spectral import NIRReflectance - comp = NIRReflectance(name='test') - info = {'modifiers': None} + comp = NIRReflectance(name="test") + info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) assert self.metadata.items() <= res.attrs.items() - assert res.attrs['units'] == '%' - assert res.attrs['sun_zenith_threshold'] is not None + assert res.attrs["units"] == "%" + assert res.attrs["sun_zenith_threshold"] is not None assert np.allclose(res.data, self.refl * 100).compute() - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_no_sunz_no_co2(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor with minimal parameters.""" calculator.return_value = mock.MagicMock( @@ -229,8 +229,8 @@ def test_no_sunz_no_co2(self, calculator, apply_modifier_info, sza): sza.return_value = self.da_sunz from satpy.modifiers.spectral import NIRReflectance - comp = NIRReflectance(name='test') - info = {'modifiers': None} + comp = NIRReflectance(name="test") + info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[], **info) # due to copying of DataArrays, self.get_lonlats is not the same as the one that was called @@ -240,9 +240,9 @@ def test_no_sunz_no_co2(self, calculator, apply_modifier_info, sza): self.refl_from_tbs.assert_called_with(self.da_sunz, self.nir.data, self.ir_.data, tb_ir_co2=None) assert np.allclose(res.data, self.refl * 100).compute() - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_no_sunz_with_co2(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor provided extra co2 info.""" calculator.return_value = mock.MagicMock( @@ -250,20 +250,20 @@ def test_no_sunz_with_co2(self, calculator, apply_modifier_info, sza): from satpy.modifiers.spectral import NIRReflectance sza.return_value = self.da_sunz - comp = NIRReflectance(name='test') - info = {'modifiers': None} + comp = NIRReflectance(name="test") + info = {"modifiers": None} co2_arr = np.random.random((2, 2)) - co2 = xr.DataArray(da.from_array(co2_arr), dims=['y', 'x']) - co2.attrs['wavelength'] = [12.0, 13.0, 14.0] - co2.attrs['units'] = 'K' + co2 = xr.DataArray(da.from_array(co2_arr), dims=["y", "x"]) + co2.attrs["wavelength"] = [12.0, 13.0, 14.0] + co2.attrs["units"] = "K" res = comp([self.nir, self.ir_], optional_datasets=[co2], **info) self.refl_from_tbs.assert_called_with(self.da_sunz, self.nir.data, self.ir_.data, tb_ir_co2=co2.data) assert np.allclose(res.data, self.refl_with_co2 * 100).compute() - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - 
@mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_provide_sunz_and_threshold(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor provided sunz and a sunz threshold.""" calculator.return_value = mock.MagicMock( @@ -271,32 +271,32 @@ def test_provide_sunz_and_threshold(self, calculator, apply_modifier_info, sza): from satpy.modifiers.spectral import NIRReflectance sza.return_value = self.da_sunz - comp = NIRReflectance(name='test', sunz_threshold=84.0) - info = {'modifiers': None} + comp = NIRReflectance(name="test", sunz_threshold=84.0) + info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) - self.assertEqual(res.attrs['sun_zenith_threshold'], 84.0) - calculator.assert_called_with('Meteosat-11', 'seviri', 'IR_039', + self.assertEqual(res.attrs["sun_zenith_threshold"], 84.0) + calculator.assert_called_with("Meteosat-11", "seviri", "IR_039", sunz_threshold=84.0, masking_limit=NIRReflectance.MASKING_LIMIT) - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_sunz_threshold_default_value_is_not_none(self, calculator, apply_modifier_info, sza): """Check that sun_zenith_threshold is not None.""" from satpy.modifiers.spectral import NIRReflectance - comp = NIRReflectance(name='test') - info = {'modifiers': None} + comp = NIRReflectance(name="test") + info = {"modifiers": None} calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) assert comp.sun_zenith_threshold is not None - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_provide_masking_limit(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor provided sunz and a sunz threshold.""" calculator.return_value = mock.MagicMock( @@ -304,23 +304,23 @@ def test_provide_masking_limit(self, calculator, apply_modifier_info, sza): from satpy.modifiers.spectral import NIRReflectance sza.return_value = self.da_sunz - comp = NIRReflectance(name='test', masking_limit=None) - info = {'modifiers': None} + comp = NIRReflectance(name="test", masking_limit=None) + info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) - self.assertIsNone(res.attrs['sun_zenith_masking_limit']) - calculator.assert_called_with('Meteosat-11', 'seviri', 'IR_039', + self.assertIsNone(res.attrs["sun_zenith_masking_limit"]) + calculator.assert_called_with("Meteosat-11", "seviri", "IR_039", sunz_threshold=NIRReflectance.TERMINATOR_LIMIT, masking_limit=None) - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + 
@mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_masking_limit_default_value_is_not_none(self, calculator, apply_modifier_info, sza): """Check that sun_zenith_threshold is not None.""" from satpy.modifiers.spectral import NIRReflectance - comp = NIRReflectance(name='test') - info = {'modifiers': None} + comp = NIRReflectance(name="test") + info = {"modifiers": None} calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) @@ -331,9 +331,9 @@ def test_masking_limit_default_value_is_not_none(self, calculator, apply_modifie class TestNIREmissivePartFromReflectance(unittest.TestCase): """Test the NIR Emissive part from reflectance compositor.""" - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_compositor(self, calculator, apply_modifier_info, sza): """Test the NIR emissive part from reflectance compositor.""" from satpy.modifiers.spectral import NIRReflectance @@ -353,12 +353,12 @@ def test_compositor(self, calculator, apply_modifier_info, sza): from satpy.modifiers.spectral import NIREmissivePartFromReflectance - comp = NIREmissivePartFromReflectance(name='test', sunz_threshold=86.0) - info = {'modifiers': None} + comp = NIREmissivePartFromReflectance(name="test", sunz_threshold=86.0) + info = {"modifiers": None} - platform = 'NOAA-20' - sensor = 'viirs' - chan_name = 'M12' + platform = "NOAA-20" + sensor = "viirs" + chan_name = "M12" get_lonlats = mock.MagicMock() lons, lats = 1, 2 @@ -366,29 +366,29 @@ def test_compositor(self, calculator, apply_modifier_info, sza): area = mock.MagicMock(get_lonlats=get_lonlats) nir_arr = np.random.random((2, 2)) - nir = xr.DataArray(da.from_array(nir_arr), dims=['y', 'x']) - nir.attrs['platform_name'] = platform - nir.attrs['sensor'] = sensor - nir.attrs['name'] = chan_name - nir.attrs['area'] = area + nir = xr.DataArray(da.from_array(nir_arr), dims=["y", "x"]) + nir.attrs["platform_name"] = platform + nir.attrs["sensor"] = sensor + nir.attrs["name"] = chan_name + nir.attrs["area"] = area ir_arr = np.random.random((2, 2)) - ir_ = xr.DataArray(da.from_array(ir_arr), dims=['y', 'x']) - ir_.attrs['area'] = area + ir_ = xr.DataArray(da.from_array(ir_arr), dims=["y", "x"]) + ir_.attrs["area"] = area sunz_arr = 100 * np.random.random((2, 2)) - sunz = xr.DataArray(da.from_array(sunz_arr), dims=['y', 'x']) - sunz.attrs['standard_name'] = 'solar_zenith_angle' - sunz.attrs['area'] = area + sunz = xr.DataArray(da.from_array(sunz_arr), dims=["y", "x"]) + sunz.attrs["standard_name"] = "solar_zenith_angle" + sunz.attrs["area"] = area sunz2 = da.from_array(sunz_arr) sza.return_value = sunz2 res = comp([nir, ir_], optional_datasets=[sunz], **info) - self.assertEqual(res.attrs['sun_zenith_threshold'], 86.0) - self.assertEqual(res.attrs['units'], 'K') - self.assertEqual(res.attrs['platform_name'], platform) - self.assertEqual(res.attrs['sensor'], sensor) - self.assertEqual(res.attrs['name'], chan_name) - calculator.assert_called_with('NOAA-20', 'viirs', 'M12', sunz_threshold=86.0, + self.assertEqual(res.attrs["sun_zenith_threshold"], 
86.0) + self.assertEqual(res.attrs["units"], "K") + self.assertEqual(res.attrs["platform_name"], platform) + self.assertEqual(res.attrs["sensor"], sensor) + self.assertEqual(res.attrs["name"], chan_name) + calculator.assert_called_with("NOAA-20", "viirs", "M12", sunz_threshold=86.0, masking_limit=NIRReflectance.MASKING_LIMIT) @@ -400,9 +400,9 @@ def _make_data_area(self): rows = 3 cols = 5 area = AreaDefinition( - 'some_area_name', 'On-the-fly area', 'geosabii', - {'a': '6378137.0', 'b': '6356752.31414', 'h': '35786023.0', 'lon_0': '-89.5', 'proj': 'geos', 'sweep': 'x', - 'units': 'm'}, + "some_area_name", "On-the-fly area", "geosabii", + {"a": "6378137.0", "b": "6356752.31414", "h": "35786023.0", "lon_0": "-89.5", "proj": "geos", "sweep": "x", + "units": "m"}, cols, rows, (-5434894.954752679, -5434894.964451744, 5434894.964451744, 5434894.954752679)) @@ -415,46 +415,46 @@ def _make_data_area(self): def _create_test_data(self, name, wavelength, resolution): area, dnb = self._make_data_area() input_band = xr.DataArray(dnb, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'platform_name': 'Himawari-8', - 'calibration': 'reflectance', 'units': '%', 'wavelength': wavelength, - 'name': name, 'resolution': resolution, 'sensor': 'ahi', - 'start_time': '2017-09-20 17:30:40.800000', - 'end_time': '2017-09-20 17:41:17.500000', - 'area': area, 'ancillary_variables': [], - 'orbital_parameters': { - 'satellite_nominal_longitude': -89.5, - 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_altitude': 35786023.4375, + "platform_name": "Himawari-8", + "calibration": "reflectance", "units": "%", "wavelength": wavelength, + "name": name, "resolution": resolution, "sensor": "ahi", + "start_time": "2017-09-20 17:30:40.800000", + "end_time": "2017-09-20 17:41:17.500000", + "area": area, "ancillary_variables": [], + "orbital_parameters": { + "satellite_nominal_longitude": -89.5, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_altitude": 35786023.4375, }, }) red_band = xr.DataArray(dnb, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'platform_name': 'Himawari-8', - 'calibration': 'reflectance', 'units': '%', 'wavelength': (0.62, 0.64, 0.66), - 'name': 'B03', 'resolution': 500, 'sensor': 'ahi', - 'start_time': '2017-09-20 17:30:40.800000', - 'end_time': '2017-09-20 17:41:17.500000', - 'area': area, 'ancillary_variables': [], - 'orbital_parameters': { - 'satellite_nominal_longitude': -89.5, - 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_altitude': 35786023.4375, + "platform_name": "Himawari-8", + "calibration": "reflectance", "units": "%", "wavelength": (0.62, 0.64, 0.66), + "name": "B03", "resolution": 500, "sensor": "ahi", + "start_time": "2017-09-20 17:30:40.800000", + "end_time": "2017-09-20 17:41:17.500000", + "area": area, "ancillary_variables": [], + "orbital_parameters": { + "satellite_nominal_longitude": -89.5, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_altitude": 35786023.4375, }, }) fake_angle_data = da.ones_like(dnb, dtype=np.float32) * 90.0 angle1 = xr.DataArray(fake_angle_data, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'platform_name': 'Himawari-8', - 'calibration': 'reflectance', 'units': '%', 'wavelength': wavelength, - 'name': "satellite_azimuth_angle", 'resolution': resolution, 'sensor': 'ahi', - 'start_time': '2017-09-20 17:30:40.800000', - 'end_time': '2017-09-20 17:41:17.500000', - 'area': area, 'ancillary_variables': [], + "platform_name": "Himawari-8", + "calibration": "reflectance", "units": "%", "wavelength": wavelength, + "name": 
"satellite_azimuth_angle", "resolution": resolution, "sensor": "ahi", + "start_time": "2017-09-20 17:30:40.800000", + "end_time": "2017-09-20 17:41:17.500000", + "area": area, "ancillary_variables": [], }) return input_band, red_band, angle1, angle1, angle1, angle1 @@ -480,15 +480,15 @@ def test_rayleigh_corrector(self, name, wavelength, resolution, aerosol_type, re reduce_strength, exp_mean, exp_unique): """Test PSPRayleighReflectance with fake data.""" from satpy.modifiers.atmosphere import PSPRayleighReflectance - ray_cor = PSPRayleighReflectance(name=name, atmosphere='us-standard', aerosol_types=aerosol_type, + ray_cor = PSPRayleighReflectance(name=name, atmosphere="us-standard", aerosol_types=aerosol_type, reduce_lim_low=reduce_lim_low, reduce_lim_high=reduce_lim_high, reduce_strength=reduce_strength) - assert ray_cor.attrs['name'] == name - assert ray_cor.attrs['atmosphere'] == 'us-standard' - assert ray_cor.attrs['aerosol_types'] == aerosol_type - assert ray_cor.attrs['reduce_lim_low'] == reduce_lim_low - assert ray_cor.attrs['reduce_lim_high'] == reduce_lim_high - assert ray_cor.attrs['reduce_strength'] == reduce_strength + assert ray_cor.attrs["name"] == name + assert ray_cor.attrs["atmosphere"] == "us-standard" + assert ray_cor.attrs["aerosol_types"] == aerosol_type + assert ray_cor.attrs["reduce_lim_low"] == reduce_lim_low + assert ray_cor.attrs["reduce_lim_high"] == reduce_lim_high + assert ray_cor.attrs["reduce_strength"] == reduce_strength input_band, red_band, *_ = self._create_test_data(name, wavelength, resolution) res = ray_cor([input_band, red_band]) @@ -507,7 +507,7 @@ def test_rayleigh_with_angles(self, as_optionals): """Test PSPRayleighReflectance with angles provided.""" from satpy.modifiers.atmosphere import PSPRayleighReflectance aerosol_type = "rayleigh_only" - ray_cor = PSPRayleighReflectance(name="B01", atmosphere='us-standard', aerosol_types=aerosol_type) + ray_cor = PSPRayleighReflectance(name="B01", atmosphere="us-standard", aerosol_types=aerosol_type) prereqs, opt_prereqs = self._get_angles_prereqs_and_opts(as_optionals) with mock.patch("satpy.modifiers.atmosphere.get_angles") as get_angles: res = ray_cor(prereqs, opt_prereqs) @@ -558,15 +558,15 @@ def test_call(self): "nadir_latitude": 0.0, } band = xr.DataArray(da.zeros((5, 5)), - attrs={'area': area, - 'start_time': stime, - 'name': 'name', - 'platform_name': 'platform', - 'sensor': 'sensor', - 'orbital_parameters': orb_params}, - dims=('y', 'x')) + attrs={"area": area, + "start_time": stime, + "name": "name", + "platform_name": "platform", + "sensor": "sensor", + "orbital_parameters": orb_params}, + dims=("y", "x")) # Perform atmospherical correction - psp = PSPAtmosphericalCorrection(name='dummy') + psp = PSPAtmosphericalCorrection(name="dummy") res = psp(projectables=[band]) res.compute() diff --git a/satpy/tests/test_node.py b/satpy/tests/test_node.py index 8a41082266..7475b04d24 100644 --- a/satpy/tests/test_node.py +++ b/satpy/tests/test_node.py @@ -62,7 +62,7 @@ class TestCompositorNode(unittest.TestCase): def setUp(self): """Set up the test case.""" - self.name = 'hej' + self.name = "hej" self.fake = FakeCompositor(self.name) self.c_node = CompositorNode(self.fake) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index db50900cad..3b2888565b 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -40,27 +40,27 @@ os.environ.pop("PPP_CONFIG_DIR", None) os.environ.pop("SATPY_CONFIG_PATH", None) -local_id_keys_config = {'name': { - 'required': True, 
+local_id_keys_config = {"name": { + "required": True, }, - 'wavelength': { - 'type': WavelengthRange, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'counts' + "resolution": None, + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "counts" ] }, - 'polarization': None, - 'level': None, - 'modifiers': { - 'required': True, - 'default': ModifierTuple(), - 'type': ModifierTuple, + "polarization": None, + "level": None, + "modifiers": { + "required": True, + "default": ModifierTuple(), + "type": ModifierTuple, }, } @@ -70,22 +70,22 @@ @pytest.fixture def viirs_file(tmp_path, monkeypatch): """Create a dummy viirs file.""" - filename = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' + filename = "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5" monkeypatch.chdir(tmp_path) # touch the file so it exists on disk - open(filename, 'w').close() + open(filename, "w").close() return filename @pytest.fixture def atms_file(tmp_path, monkeypatch): """Create a dummy atms file.""" - filename = 'SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5' + filename = "SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5" monkeypatch.chdir(tmp_path) # touch the file so it exists on disk - open(filename, 'w').close() + open(filename, "w").close() return filename @@ -120,11 +120,11 @@ def setUp(self): calibration="reflectance", polarization="H"): "4refl", make_dataid(name="test5", - modifiers=('mod1', 'mod2')): "5_2mod", + modifiers=("mod1", "mod2")): "5_2mod", make_dataid(name="test5", - modifiers=('mod2',)): "5_1mod", - make_dataid(name='test6', level=100): '6_100', - make_dataid(name='test6', level=200): '6_200', + modifiers=("mod2",)): "5_1mod", + make_dataid(name="test6", level=100): "6_100", + make_dataid(name="test6", level=200): "6_200", } self.test_dict = DatasetDict(regular_dict) @@ -154,31 +154,31 @@ def test_getitem(self): # access by near wavelength of another dataset self.assertEqual(d[1.65], "3") # access by name with multiple levels - self.assertEqual(d['test6'], '6_100') + self.assertEqual(d["test6"], "6_100") self.assertEqual(d[make_dsq(wavelength=1.5)], "2") self.assertEqual(d[make_dsq(wavelength=0.5, resolution=1000)], "1") self.assertEqual(d[make_dsq(wavelength=0.5, resolution=500)], "1h") - self.assertEqual(d[make_dsq(name='test6', level=100)], '6_100') - self.assertEqual(d[make_dsq(name='test6', level=200)], '6_200') + self.assertEqual(d[make_dsq(name="test6", level=100)], "6_100") + self.assertEqual(d[make_dsq(name="test6", level=200)], "6_200") # higher resolution is returned self.assertEqual(d[0.5], "1h") - self.assertEqual(d['test4'], '4refl') - self.assertEqual(d[make_dataid(name='test4', calibration='radiance')], '4rad') - self.assertRaises(KeyError, d.getitem, '1h') + self.assertEqual(d["test4"], "4refl") + self.assertEqual(d[make_dataid(name="test4", calibration="radiance")], "4rad") + self.assertRaises(KeyError, d.getitem, "1h") # test with full tuple - self.assertEqual(d[make_dsq(name='test', wavelength=(0, 0.5, 1), resolution=1000)], "1") + self.assertEqual(d[make_dsq(name="test", wavelength=(0, 0.5, 1), resolution=1000)], "1") def test_get_key(self): """Test 'get_key' special functions.""" from satpy.dataset import DataQuery d = self.test_dict - res1 = get_key(make_dataid(name='test4'), d, calibration='radiance') - 
res2 = get_key(make_dataid(name='test4'), d, calibration='radiance', + res1 = get_key(make_dataid(name="test4"), d, calibration="radiance") + res2 = get_key(make_dataid(name="test4"), d, calibration="radiance", num_results=0) - res3 = get_key(make_dataid(name='test4'), d, calibration='radiance', + res3 = get_key(make_dataid(name="test4"), d, calibration="radiance", num_results=3) self.assertEqual(len(res2), 1) self.assertEqual(len(res3), 1) @@ -186,43 +186,43 @@ def test_get_key(self): res3 = res3[0] self.assertEqual(res1, res2) self.assertEqual(res1, res3) - res1 = get_key('test4', d, query=DataQuery(polarization='V')) - self.assertEqual(res1, make_dataid(name='test4', calibration='radiance', - polarization='V')) + res1 = get_key("test4", d, query=DataQuery(polarization="V")) + self.assertEqual(res1, make_dataid(name="test4", calibration="radiance", + polarization="V")) res1 = get_key(0.5, d, query=DataQuery(resolution=500)) - self.assertEqual(res1, make_dataid(name='testh', + self.assertEqual(res1, make_dataid(name="testh", wavelength=(0, 0.5, 1), resolution=500)) - res1 = get_key('test6', d, query=DataQuery(level=100)) - self.assertEqual(res1, make_dataid(name='test6', + res1 = get_key("test6", d, query=DataQuery(level=100)) + self.assertEqual(res1, make_dataid(name="test6", level=100)) - res1 = get_key('test5', d) - res2 = get_key('test5', d, query=DataQuery(modifiers=('mod2',))) - res3 = get_key('test5', d, query=DataQuery(modifiers=('mod1', 'mod2',))) - self.assertEqual(res1, make_dataid(name='test5', - modifiers=('mod2',))) + res1 = get_key("test5", d) + res2 = get_key("test5", d, query=DataQuery(modifiers=("mod2",))) + res3 = get_key("test5", d, query=DataQuery(modifiers=("mod1", "mod2",))) + self.assertEqual(res1, make_dataid(name="test5", + modifiers=("mod2",))) self.assertEqual(res1, res2) self.assertNotEqual(res1, res3) # more than 1 result when default is to ask for 1 result - self.assertRaises(KeyError, get_key, 'test4', d, best=False) + self.assertRaises(KeyError, get_key, "test4", d, best=False) def test_contains(self): """Test DatasetDict contains method.""" d = self.test_dict - self.assertIn('test', d) - self.assertFalse(d.contains('test')) - self.assertNotIn('test_bad', d) + self.assertIn("test", d) + self.assertFalse(d.contains("test")) + self.assertNotIn("test_bad", d) self.assertIn(0.5, d) self.assertFalse(d.contains(0.5)) self.assertIn(1.5, d) self.assertIn(1.55, d) self.assertIn(1.65, d) - self.assertIn(make_dataid(name='test4', calibration='radiance'), d) - self.assertIn('test4', d) + self.assertIn(make_dataid(name="test4", calibration="radiance"), d) + self.assertIn("test4", d) def test_keys(self): """Test keys method of DatasetDict.""" @@ -232,21 +232,21 @@ def test_keys(self): self.assertTrue(all(isinstance(x, DataID) for x in d.keys())) name_keys = d.keys(names=True) self.assertListEqual(sorted(set(name_keys))[:4], [ - 'test', 'test2', 'test3', 'test4']) + "test", "test2", "test3", "test4"]) wl_keys = tuple(d.keys(wavelengths=True)) self.assertIn((0, 0.5, 1), wl_keys) - self.assertIn((1, 1.5, 2, 'µm'), wl_keys) - self.assertIn((1.2, 1.7, 2.2, 'µm'), wl_keys) + self.assertIn((1, 1.5, 2, "µm"), wl_keys) + self.assertIn((1.2, 1.7, 2.2, "µm"), wl_keys) self.assertIn(None, wl_keys) def test_setitem(self): """Test setitem method of DatasetDict.""" d = self.test_dict - d['new_ds'] = {'metadata': 'new_ds'} - self.assertEqual(d['new_ds']['metadata'], 'new_ds') - d[0.5] = {'calibration': 'radiance'} - self.assertEqual(d[0.5]['resolution'], 500) - 
self.assertEqual(d[0.5]['name'], 'testh') + d["new_ds"] = {"metadata": "new_ds"} + self.assertEqual(d["new_ds"]["metadata"], "new_ds") + d[0.5] = {"calibration": "radiance"} + self.assertEqual(d[0.5]["resolution"], 500) + self.assertEqual(d[0.5]["name"], "testh") class TestReaderLoader(unittest.TestCase): @@ -261,7 +261,7 @@ def setUp(self): from satpy.tests.reader_tests.test_viirs_sdr import FakeHDF5FileHandler2 # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(VIIRSSDRFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -281,21 +281,21 @@ def test_no_args(self): def test_filenames_only(self): """Test with filenames specified.""" from satpy.readers import load_readers - ri = load_readers(filenames=['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5']) - self.assertListEqual(list(ri.keys()), ['viirs_sdr']) + ri = load_readers(filenames=["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"]) + self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) def test_filenames_and_reader(self): """Test with filenames and reader specified.""" from satpy.readers import load_readers - ri = load_readers(reader='viirs_sdr', - filenames=['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5']) - self.assertListEqual(list(ri.keys()), ['viirs_sdr']) + ri = load_readers(reader="viirs_sdr", + filenames=["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"]) + self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) def test_bad_reader_name_with_filenames(self): """Test bad reader name with filenames provided.""" from satpy.readers import load_readers - self.assertRaises(ValueError, load_readers, reader='i_dont_exist', filenames=[ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + self.assertRaises(ValueError, load_readers, reader="i_dont_exist", filenames=[ + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) def test_filenames_as_path(self): @@ -304,28 +304,28 @@ def test_filenames_as_path(self): from satpy.readers import load_readers ri = load_readers(filenames=[ - Path('SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'), + Path("SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"), ]) - self.assertListEqual(list(ri.keys()), ['viirs_sdr']) + self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) def test_filenames_as_dict(self): """Test loading readers where filenames are organized by reader.""" from satpy.readers import load_readers filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], + "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], } ri = load_readers(filenames=filenames) - self.assertListEqual(list(ri.keys()), ['viirs_sdr']) + self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) def test_filenames_as_dict_bad_reader(self): """Test loading with filenames dict but one of the readers is bad.""" from satpy.readers import load_readers filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], - '__fake__': ['fake.txt'], + "viirs_sdr": 
["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], + "__fake__": ["fake.txt"], } self.assertRaisesRegex(ValueError, - r'(?=.*__fake__)(?!.*viirs)(^No reader.+)', + r"(?=.*__fake__)(?!.*viirs)(^No reader.+)", load_readers, filenames=filenames) def test_filenames_as_dict_with_reader(self): @@ -337,63 +337,63 @@ def test_filenames_as_dict_with_reader(self): """ from satpy.readers import load_readers filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], + "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], } - ri = load_readers(reader='viirs_sdr', filenames=filenames) - self.assertListEqual(list(ri.keys()), ['viirs_sdr']) + ri = load_readers(reader="viirs_sdr", filenames=filenames) + self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) def test_empty_filenames_as_dict(self): """Test passing filenames as a dictionary with an empty list of filenames.""" # only one reader from satpy.readers import load_readers filenames = { - 'viirs_sdr': [], + "viirs_sdr": [], } self.assertRaises(ValueError, load_readers, filenames=filenames) # two readers, one is empty filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], - 'viirs_l1b': [], + "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], + "viirs_l1b": [], } ri = load_readers(filenames) - self.assertListEqual(list(ri.keys()), ['viirs_sdr']) - - @mock.patch('satpy.readers.hrit_base.HRITFileHandler._get_hd') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler._get_header') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.start_time') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.end_time') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.read_prologue') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue') + self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) + + @mock.patch("satpy.readers.hrit_base.HRITFileHandler._get_hd") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler._get_header") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.start_time") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.end_time") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.read_prologue") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue") def test_missing_requirements(self, *mocks): """Test warnings and exceptions in case of missing requirements.""" from satpy.readers import load_readers # Filenames from a single scan - epi_pro_miss = ['H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__'] - epi_miss = epi_pro_miss + ['H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__'] - pro_miss = epi_pro_miss + ['H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__'] + epi_pro_miss = ["H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__"] + epi_miss = epi_pro_miss + ["H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__"] + pro_miss = epi_pro_miss + ["H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__"] with warnings.catch_warnings(): warnings.filterwarnings("ignore", message=r"No handler for reading requirement.*", category=UserWarning) for filenames in [epi_miss, pro_miss, epi_pro_miss]: - self.assertRaises(ValueError, load_readers, reader='seviri_l1b_hrit', filenames=filenames) + 
self.assertRaises(ValueError, load_readers, reader="seviri_l1b_hrit", filenames=filenames) # Filenames from multiple scans at_least_one_complete = [ # 09:00 scan is ok - 'H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__', - 'H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__', - 'H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__', + "H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__", + "H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__", + "H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__", # 10:00 scan is incomplete - 'H-000-MSG4__-MSG4________-IR_108___-000006___-201809051000-__', + "H-000-MSG4__-MSG4________-IR_108___-000006___-201809051000-__", ] with warnings.catch_warnings(): warnings.filterwarnings("ignore", message=r"No matching requirement file.*", category=UserWarning) try: - load_readers(filenames=at_least_one_complete, reader='seviri_l1b_hrit') + load_readers(filenames=at_least_one_complete, reader="seviri_l1b_hrit") except ValueError: - self.fail('If at least one set of filenames is complete, no ' - 'exception should be raised') + self.fail("If at least one set of filenames is complete, no " + "exception should be raised") def test_all_filtered(self): """Test behaviour if no file matches the filter parameters.""" @@ -401,13 +401,13 @@ def test_all_filtered(self): from satpy.readers import load_readers filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], + "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], } - filter_params = {'start_time': datetime.datetime(1970, 1, 1), - 'end_time': datetime.datetime(1970, 1, 2), - 'area': None} + filter_params = {"start_time": datetime.datetime(1970, 1, 1), + "end_time": datetime.datetime(1970, 1, 2), + "area": None} self.assertRaises(ValueError, load_readers, - filenames=filenames, reader_kwargs={'filter_parameters': filter_params}) + filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) def test_all_filtered_multiple(self): """Test behaviour if no file matches the filter parameters.""" @@ -415,13 +415,13 @@ def test_all_filtered_multiple(self): from satpy.readers import load_readers filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], - 'abi_l1b': ['OR_ABI-L1b-RadF-M3C01_G16_s20120561730408_e20120561741175_c20172631741218.nc'], + "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], + "abi_l1b": ["OR_ABI-L1b-RadF-M3C01_G16_s20120561730408_e20120561741175_c20172631741218.nc"], } - filter_params = {'start_time': datetime.datetime(1970, 1, 1), - 'end_time': datetime.datetime(1970, 1, 2)} + filter_params = {"start_time": datetime.datetime(1970, 1, 1), + "end_time": datetime.datetime(1970, 1, 2)} self.assertRaises(ValueError, load_readers, - filenames=filenames, reader_kwargs={'filter_parameters': filter_params}) + filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) def test_almost_all_filtered(self): """Test behaviour if only one reader has datasets.""" @@ -429,17 +429,17 @@ def test_almost_all_filtered(self): from satpy.readers import load_readers filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], - 'abi_l1b': ['OR_ABI-L1b-RadF-M3C01_G16_s20172631730408_e20172631741175_c20172631741218.nc'], + "viirs_sdr": 
["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], + "abi_l1b": ["OR_ABI-L1b-RadF-M3C01_G16_s20172631730408_e20172631741175_c20172631741218.nc"], } - filter_params = {'start_time': datetime.datetime(2012, 2, 25), - 'end_time': datetime.datetime(2012, 2, 26)} + filter_params = {"start_time": datetime.datetime(2012, 2, 25), + "end_time": datetime.datetime(2012, 2, 26)} # viirs has data that matches the request, abi doesn't - readers = load_readers(filenames=filenames, reader_kwargs={'filter_parameters': filter_params}) - self.assertIn('viirs_sdr', readers) + readers = load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) + self.assertIn("viirs_sdr", readers) # abi_l1b reader was created, but no datasets available - self.assertIn('abi_l1b', readers) - self.assertEqual(len(list(readers['abi_l1b'].available_dataset_ids)), 0) + self.assertIn("abi_l1b", readers) + self.assertEqual(len(list(readers["abi_l1b"].available_dataset_ids)), 0) class TestFindFilesAndReaders: @@ -451,7 +451,7 @@ def setup_method(self): from satpy.tests.reader_tests.test_viirs_sdr import FakeHDF5FileHandler2 # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(VIIRSSDRFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -461,31 +461,31 @@ def teardown_method(self): def test_reader_name(self, viirs_file): """Test with default base_dir and reader specified.""" - ri = find_files_and_readers(reader='viirs_sdr') - assert list(ri.keys()) == ['viirs_sdr'] - assert ri['viirs_sdr'] == [viirs_file] + ri = find_files_and_readers(reader="viirs_sdr") + assert list(ri.keys()) == ["viirs_sdr"] + assert ri["viirs_sdr"] == [viirs_file] def test_reader_other_name(self, monkeypatch, tmp_path): """Test with default base_dir and reader specified.""" - filename = 'S_NWC_CPP_npp_32505_20180204T1114116Z_20180204T1128227Z.nc' + filename = "S_NWC_CPP_npp_32505_20180204T1114116Z_20180204T1128227Z.nc" monkeypatch.chdir(tmp_path) # touch the file so it exists on disk - open(filename, 'w').close() + open(filename, "w").close() - ri = find_files_and_readers(reader='nwcsaf-pps_nc') - assert list(ri.keys()) == ['nwcsaf-pps_nc'] - assert ri['nwcsaf-pps_nc'] == [filename] + ri = find_files_and_readers(reader="nwcsaf-pps_nc") + assert list(ri.keys()) == ["nwcsaf-pps_nc"] + assert ri["nwcsaf-pps_nc"] == [filename] def test_reader_name_matched_start_end_time(self, viirs_file): """Test with start and end time matching the filename.""" from datetime import datetime - ri = find_files_and_readers(reader='viirs_sdr', + ri = find_files_and_readers(reader="viirs_sdr", start_time=datetime(2012, 2, 25, 18, 0, 0), end_time=datetime(2012, 2, 25, 19, 0, 0), ) - assert list(ri.keys()) == ['viirs_sdr'] - assert ri['viirs_sdr'] == [viirs_file] + assert list(ri.keys()) == ["viirs_sdr"] + assert ri["viirs_sdr"] == [viirs_file] def test_reader_name_matched_start_time(self, viirs_file): """Test with start matching the filename. 
@@ -494,9 +494,9 @@ def test_reader_name_matched_start_time(self, viirs_file): """ from datetime import datetime - ri = find_files_and_readers(reader='viirs_sdr', start_time=datetime(2012, 2, 25, 18, 1, 30)) - assert list(ri.keys()) == ['viirs_sdr'] - assert ri['viirs_sdr'] == [viirs_file] + ri = find_files_and_readers(reader="viirs_sdr", start_time=datetime(2012, 2, 25, 18, 1, 30)) + assert list(ri.keys()) == ["viirs_sdr"] + assert ri["viirs_sdr"] == [viirs_file] def test_reader_name_matched_end_time(self, viirs_file): """Test with end matching the filename. @@ -506,16 +506,16 @@ def test_reader_name_matched_end_time(self, viirs_file): """ from datetime import datetime - ri = find_files_and_readers(reader='viirs_sdr', end_time=datetime(2012, 2, 25, 18, 1, 30)) - assert list(ri.keys()) == ['viirs_sdr'] - assert ri['viirs_sdr'] == [viirs_file] + ri = find_files_and_readers(reader="viirs_sdr", end_time=datetime(2012, 2, 25, 18, 1, 30)) + assert list(ri.keys()) == ["viirs_sdr"] + assert ri["viirs_sdr"] == [viirs_file] def test_reader_name_unmatched_start_end_time(self, viirs_file): """Test with start and end time matching the filename.""" from datetime import datetime with pytest.raises(ValueError): - find_files_and_readers(reader='viirs_sdr', + find_files_and_readers(reader="viirs_sdr", start_time=datetime(2012, 2, 26, 18, 0, 0), end_time=datetime(2012, 2, 26, 19, 0, 0)) @@ -524,8 +524,8 @@ def test_no_parameters(self, viirs_file): from satpy.readers import find_files_and_readers ri = find_files_and_readers() - assert list(ri.keys()) == ['viirs_sdr'] - assert ri['viirs_sdr'] == [viirs_file] + assert list(ri.keys()) == ["viirs_sdr"] + assert ri["viirs_sdr"] == [viirs_file] def test_no_parameters_both_atms_and_viirs(self, viirs_file, atms_file): """Test with no limiting parameters when there area both atms and viirs files in the same directory.""" @@ -533,31 +533,31 @@ def test_no_parameters_both_atms_and_viirs(self, viirs_file, atms_file): ri = find_files_and_readers() - assert 'atms_sdr_hdf5' in list(ri.keys()) - assert 'viirs_sdr' in list(ri.keys()) - assert ri['atms_sdr_hdf5'] == [atms_file] - assert ri['viirs_sdr'] == [viirs_file] + assert "atms_sdr_hdf5" in list(ri.keys()) + assert "viirs_sdr" in list(ri.keys()) + assert ri["atms_sdr_hdf5"] == [atms_file] + assert ri["viirs_sdr"] == [viirs_file] def test_bad_sensor(self): """Test bad sensor doesn't find any files.""" with pytest.raises(ValueError): - find_files_and_readers(sensor='i_dont_exist') + find_files_and_readers(sensor="i_dont_exist") def test_sensor(self, viirs_file): """Test that readers for the current sensor are loaded.""" # we can't easily know how many readers satpy has that support # 'viirs' so we just pass it and hope that this works - ri = find_files_and_readers(sensor='viirs') - assert list(ri.keys()) == ['viirs_sdr'] - assert ri['viirs_sdr'] == [viirs_file] + ri = find_files_and_readers(sensor="viirs") + assert list(ri.keys()) == ["viirs_sdr"] + assert ri["viirs_sdr"] == [viirs_file] def test_sensor_no_files(self): """Test that readers for the current sensor are loaded.""" # we can't easily know how many readers satpy has that support # 'viirs' so we just pass it and hope that this works with pytest.raises(ValueError): - find_files_and_readers(sensor='viirs') - assert find_files_and_readers(sensor='viirs', missing_ok=True) == {} + find_files_and_readers(sensor="viirs") + assert find_files_and_readers(sensor="viirs", missing_ok=True) == {} def test_reader_load_failed(self): """Test that an exception is raised when a 
reader can't be loaded.""" @@ -566,10 +566,10 @@ def test_reader_load_failed(self): from satpy.readers import find_files_and_readers # touch the file so it exists on disk - with mock.patch('yaml.load') as load: + with mock.patch("yaml.load") as load: load.side_effect = yaml.YAMLError("Import problems") with pytest.raises(yaml.YAMLError): - find_files_and_readers(reader='viirs_sdr') + find_files_and_readers(reader="viirs_sdr") def test_pending_old_reader_name_mapping(self): """Test that requesting pending old reader names raises a warning.""" @@ -602,17 +602,17 @@ def test_filename_matches_reader_name(self): class IgnoreLoader(yaml.SafeLoader): def _ignore_all_tags(self, tag_suffix, node): - return tag_suffix + ' ' + node.value - IgnoreLoader.add_multi_constructor('', IgnoreLoader._ignore_all_tags) + return tag_suffix + " " + node.value + IgnoreLoader.add_multi_constructor("", IgnoreLoader._ignore_all_tags) from satpy._config import glob_config from satpy.readers import read_reader_config - for reader_config in glob_config('readers/*.yaml'): + for reader_config in glob_config("readers/*.yaml"): reader_fn = os.path.basename(reader_config) reader_fn_name = os.path.splitext(reader_fn)[0] reader_info = read_reader_config([reader_config], loader=IgnoreLoader) - assert reader_fn_name == reader_info['name'], \ + assert reader_fn_name == reader_info["name"], \ "Reader YAML filename doesn't match reader name in the YAML file." def test_available_readers(self): @@ -622,16 +622,16 @@ def test_available_readers(self): reader_names = available_readers() assert len(reader_names) > 0 assert isinstance(reader_names[0], str) - assert 'viirs_sdr' in reader_names # needs h5py - assert 'abi_l1b' in reader_names # needs netcdf4 + assert "viirs_sdr" in reader_names # needs h5py + assert "abi_l1b" in reader_names # needs netcdf4 assert reader_names == sorted(reader_names) reader_infos = available_readers(as_dict=True) assert len(reader_names) == len(reader_infos) assert isinstance(reader_infos[0], dict) for reader_info in reader_infos: - assert 'name' in reader_info - assert reader_infos == sorted(reader_infos, key=lambda reader_info: reader_info['name']) + assert "name" in reader_info + assert reader_infos == sorted(reader_infos, key=lambda reader_info: reader_info["name"]) def test_available_readers_base_loader(self, monkeypatch): """Test the 'available_readers' function for yaml loader type BaseLoader.""" @@ -641,20 +641,20 @@ def test_available_readers_base_loader(self, monkeypatch): from satpy._config import glob_config def patched_import_error(name, globals=None, locals=None, fromlist=(), level=0): - if name in ('netcdf4', ): + if name in ("netcdf4", ): raise ImportError(f"Mocked import error {name}") return real_import(name, globals=globals, locals=locals, fromlist=fromlist, level=level) - monkeypatch.delitem(sys.modules, 'netcdf4', raising=False) - monkeypatch.setattr(builtins, '__import__', patched_import_error) + monkeypatch.delitem(sys.modules, "netcdf4", raising=False) + monkeypatch.setattr(builtins, "__import__", patched_import_error) with pytest.raises(ImportError): import netcdf4 # noqa: F401 reader_names = available_readers(yaml_loader=yaml.BaseLoader) - assert 'abi_l1b' in reader_names # needs netcdf4 - assert 'viirs_l1b' in reader_names - assert len(reader_names) == len(list(glob_config('readers/*.yaml'))) + assert "abi_l1b" in reader_names # needs netcdf4 + assert "viirs_l1b" in reader_names + assert len(reader_names) == len(list(glob_config("readers/*.yaml"))) class 
TestGroupFiles(unittest.TestCase): @@ -677,7 +677,7 @@ def setUp(self): "OR_ABI-L1b-RadC-M3C02_G16_s20171171527203_e20171171529576_c20171171530008.nc", ] self.g16_files = input_files - self.g17_files = [x.replace('G16', 'G17') for x in input_files] + self.g17_files = [x.replace("G16", "G17") for x in input_files] self.noaa20_files = [ "GITCO_j01_d20180511_t2027292_e2028538_b02476_c20190530192858056873_noac_ops.h5", "GITCO_j01_d20180511_t2028550_e2030195_b02476_c20190530192932937427_noac_ops.h5", @@ -729,41 +729,41 @@ def test_bad_reader(self): from satpy.readers import group_files # touch the file so it exists on disk - with mock.patch('yaml.load') as load: + with mock.patch("yaml.load") as load: load.side_effect = yaml.YAMLError("Import problems") - self.assertRaises(yaml.YAMLError, group_files, [], reader='abi_l1b') + self.assertRaises(yaml.YAMLError, group_files, [], reader="abi_l1b") def test_default_behavior(self): """Test the default behavior with the 'abi_l1b' reader.""" from satpy.readers import group_files - groups = group_files(self.g16_files, reader='abi_l1b') + groups = group_files(self.g16_files, reader="abi_l1b") self.assertEqual(6, len(groups)) - self.assertEqual(2, len(groups[0]['abi_l1b'])) + self.assertEqual(2, len(groups[0]["abi_l1b"])) def test_default_behavior_set(self): """Test the default behavior with the 'abi_l1b' reader.""" from satpy.readers import group_files files = set(self.g16_files) num_files = len(files) - groups = group_files(files, reader='abi_l1b') + groups = group_files(files, reader="abi_l1b") # we didn't modify it self.assertEqual(len(files), num_files) self.assertEqual(6, len(groups)) - self.assertEqual(2, len(groups[0]['abi_l1b'])) + self.assertEqual(2, len(groups[0]["abi_l1b"])) def test_non_datetime_group_key(self): """Test what happens when the start_time isn't used for grouping.""" from satpy.readers import group_files - groups = group_files(self.g16_files, reader='abi_l1b', group_keys=('platform_shortname',)) + groups = group_files(self.g16_files, reader="abi_l1b", group_keys=("platform_shortname",)) self.assertEqual(1, len(groups)) - self.assertEqual(12, len(groups[0]['abi_l1b'])) + self.assertEqual(12, len(groups[0]["abi_l1b"])) def test_large_time_threshold(self): """Test what happens when the time threshold holds multiple files.""" from satpy.readers import group_files - groups = group_files(self.g16_files, reader='abi_l1b', time_threshold=60*8) + groups = group_files(self.g16_files, reader="abi_l1b", time_threshold=60*8) self.assertEqual(3, len(groups)) - self.assertEqual(4, len(groups[0]['abi_l1b'])) + self.assertEqual(4, len(groups[0]["abi_l1b"])) def test_two_instruments_files(self): """Test the behavior when two instruments files are provided. @@ -776,9 +776,9 @@ def test_two_instruments_files(self): """ from satpy.readers import group_files - groups = group_files(self.g16_files + self.g17_files, reader='abi_l1b', group_keys=('start_time',)) + groups = group_files(self.g16_files + self.g17_files, reader="abi_l1b", group_keys=("start_time",)) self.assertEqual(6, len(groups)) - self.assertEqual(4, len(groups[0]['abi_l1b'])) + self.assertEqual(4, len(groups[0]["abi_l1b"])) def test_two_instruments_files_split(self): """Test the default behavior when two instruments files are provided and split. 
@@ -788,51 +788,51 @@ def test_two_instruments_files_split(self): """ from satpy.readers import group_files - groups = group_files(self.g16_files + self.g17_files, reader='abi_l1b', - group_keys=('start_time', 'platform_shortname')) + groups = group_files(self.g16_files + self.g17_files, reader="abi_l1b", + group_keys=("start_time", "platform_shortname")) self.assertEqual(12, len(groups)) - self.assertEqual(2, len(groups[0]['abi_l1b'])) + self.assertEqual(2, len(groups[0]["abi_l1b"])) # default for abi_l1b should also behave like this - groups = group_files(self.g16_files + self.g17_files, reader='abi_l1b') + groups = group_files(self.g16_files + self.g17_files, reader="abi_l1b") self.assertEqual(12, len(groups)) - self.assertEqual(2, len(groups[0]['abi_l1b'])) + self.assertEqual(2, len(groups[0]["abi_l1b"])) def test_viirs_orbits(self): """Test a reader that doesn't use 'start_time' for default grouping.""" from satpy.readers import group_files - groups = group_files(self.noaa20_files + self.npp_files, reader='viirs_sdr') + groups = group_files(self.noaa20_files + self.npp_files, reader="viirs_sdr") self.assertEqual(2, len(groups)) # the noaa-20 files will be first because the orbit number is smaller # 5 granules * 3 file types - self.assertEqual(5 * 3, len(groups[0]['viirs_sdr'])) + self.assertEqual(5 * 3, len(groups[0]["viirs_sdr"])) # 3 granules * 2 file types - self.assertEqual(6, len(groups[1]['viirs_sdr'])) + self.assertEqual(6, len(groups[1]["viirs_sdr"])) def test_viirs_override_keys(self): """Test overriding a group keys to add 'start_time'.""" from satpy.readers import group_files - groups = group_files(self.noaa20_files + self.npp_files, reader='viirs_sdr', - group_keys=('start_time', 'orbit', 'platform_shortname')) + groups = group_files(self.noaa20_files + self.npp_files, reader="viirs_sdr", + group_keys=("start_time", "orbit", "platform_shortname")) self.assertEqual(8, len(groups)) - self.assertEqual(2, len(groups[0]['viirs_sdr'])) # NPP - self.assertEqual(2, len(groups[1]['viirs_sdr'])) # NPP - self.assertEqual(2, len(groups[2]['viirs_sdr'])) # NPP - self.assertEqual(3, len(groups[3]['viirs_sdr'])) # N20 - self.assertEqual(3, len(groups[4]['viirs_sdr'])) # N20 - self.assertEqual(3, len(groups[5]['viirs_sdr'])) # N20 - self.assertEqual(3, len(groups[6]['viirs_sdr'])) # N20 - self.assertEqual(3, len(groups[7]['viirs_sdr'])) # N20 + self.assertEqual(2, len(groups[0]["viirs_sdr"])) # NPP + self.assertEqual(2, len(groups[1]["viirs_sdr"])) # NPP + self.assertEqual(2, len(groups[2]["viirs_sdr"])) # NPP + self.assertEqual(3, len(groups[3]["viirs_sdr"])) # N20 + self.assertEqual(3, len(groups[4]["viirs_sdr"])) # N20 + self.assertEqual(3, len(groups[5]["viirs_sdr"])) # N20 + self.assertEqual(3, len(groups[6]["viirs_sdr"])) # N20 + self.assertEqual(3, len(groups[7]["viirs_sdr"])) # N20 # Ask for a larger time span with our groups - groups = group_files(self.noaa20_files + self.npp_files, reader='viirs_sdr', + groups = group_files(self.noaa20_files + self.npp_files, reader="viirs_sdr", time_threshold=60 * 60 * 2, - group_keys=('start_time', 'orbit', 'platform_shortname')) + group_keys=("start_time", "orbit", "platform_shortname")) self.assertEqual(2, len(groups)) # NPP is first because it has an earlier time # 3 granules * 2 file types - self.assertEqual(6, len(groups[0]['viirs_sdr'])) + self.assertEqual(6, len(groups[0]["viirs_sdr"])) # 5 granules * 3 file types - self.assertEqual(5 * 3, len(groups[1]['viirs_sdr'])) + self.assertEqual(5 * 3, len(groups[1]["viirs_sdr"])) def 
test_multi_readers(self): """Test passing multiple readers.""" @@ -946,14 +946,14 @@ def _generate_random_string(): def _assert_is_open_file_and_close(opened): try: - assert hasattr(opened, 'tell') + assert hasattr(opened, "tell") finally: opened.close() def _posixify_path(filename): drive, driveless_name = os.path.splitdrive(filename) - return driveless_name.replace('\\', '/') + return driveless_name.replace("\\", "/") class TestFSFile(unittest.TestCase): @@ -975,7 +975,7 @@ def setUp(self): self.local_filename2 = os.path.join(tempfile.gettempdir(), self.random_string2) Path(self.local_filename2).touch() self.zip_name = os.path.join(tempfile.gettempdir(), self.random_string2 + ".zip") - zip_file = zipfile.ZipFile(self.zip_name, 'w', zipfile.ZIP_DEFLATED) + zip_file = zipfile.ZipFile(self.zip_name, "w", zipfile.ZIP_DEFLATED) zip_file.write(self.local_filename2) zip_file.close() os.remove(self.local_filename2) @@ -1057,7 +1057,7 @@ def test_sorting_fsfiles(self): file2 = FSFile(self.local_filename) - extra_file = os.path.normpath('/somedir/bla') + extra_file = os.path.normpath("/somedir/bla") sorted_filenames = [os.fspath(file) for file in sorted([file1, file2, extra_file])] expected_filenames = sorted([extra_file, os.fspath(file1), os.fspath(file2)]) assert sorted_filenames == expected_filenames diff --git a/satpy/tests/test_regressions.py b/satpy/tests/test_regressions.py index f85d9c37be..1f0a4924f8 100644 --- a/satpy/tests/test_regressions.py +++ b/satpy/tests/test_regressions.py @@ -26,22 +26,22 @@ from satpy.tests.utils import make_dataid -abi_file_list = ['/data/OR_ABI-L1b-RadF-M3C01_G16_s20180722030423_e20180722041189_c20180722041235-118900_0.nc', - '/data/OR_ABI-L1b-RadF-M3C02_G16_s20180722030423_e20180722041190_c20180722041228-120000_0.nc', - '/data/OR_ABI-L1b-RadF-M3C03_G16_s20180722030423_e20180722041190_c20180722041237-119000_0.nc', - '/data/OR_ABI-L1b-RadF-M3C04_G16_s20180722030423_e20180722041189_c20180722041221.nc', - '/data/OR_ABI-L1b-RadF-M3C05_G16_s20180722030423_e20180722041190_c20180722041237-119101_0.nc', - '/data/OR_ABI-L1b-RadF-M3C06_G16_s20180722030423_e20180722041195_c20180722041227.nc', - '/data/OR_ABI-L1b-RadF-M3C07_G16_s20180722030423_e20180722041201_c20180722041238.nc', - '/data/OR_ABI-L1b-RadF-M3C08_G16_s20180722030423_e20180722041190_c20180722041238.nc', - '/data/OR_ABI-L1b-RadF-M3C09_G16_s20180722030423_e20180722041195_c20180722041256.nc', - '/data/OR_ABI-L1b-RadF-M3C10_G16_s20180722030423_e20180722041201_c20180722041250.nc', - '/data/OR_ABI-L1b-RadF-M3C11_G16_s20180722030423_e20180722041189_c20180722041254.nc', - '/data/OR_ABI-L1b-RadF-M3C12_G16_s20180722030423_e20180722041195_c20180722041256.nc', - '/data/OR_ABI-L1b-RadF-M3C13_G16_s20180722030423_e20180722041201_c20180722041259.nc', - '/data/OR_ABI-L1b-RadF-M3C14_G16_s20180722030423_e20180722041190_c20180722041258.nc', - '/data/OR_ABI-L1b-RadF-M3C15_G16_s20180722030423_e20180722041195_c20180722041259.nc', - '/data/OR_ABI-L1b-RadF-M3C16_G16_s20180722030423_e20180722041202_c20180722041259.nc'] +abi_file_list = ["/data/OR_ABI-L1b-RadF-M3C01_G16_s20180722030423_e20180722041189_c20180722041235-118900_0.nc", + "/data/OR_ABI-L1b-RadF-M3C02_G16_s20180722030423_e20180722041190_c20180722041228-120000_0.nc", + "/data/OR_ABI-L1b-RadF-M3C03_G16_s20180722030423_e20180722041190_c20180722041237-119000_0.nc", + "/data/OR_ABI-L1b-RadF-M3C04_G16_s20180722030423_e20180722041189_c20180722041221.nc", + "/data/OR_ABI-L1b-RadF-M3C05_G16_s20180722030423_e20180722041190_c20180722041237-119101_0.nc", + 
"/data/OR_ABI-L1b-RadF-M3C06_G16_s20180722030423_e20180722041195_c20180722041227.nc", + "/data/OR_ABI-L1b-RadF-M3C07_G16_s20180722030423_e20180722041201_c20180722041238.nc", + "/data/OR_ABI-L1b-RadF-M3C08_G16_s20180722030423_e20180722041190_c20180722041238.nc", + "/data/OR_ABI-L1b-RadF-M3C09_G16_s20180722030423_e20180722041195_c20180722041256.nc", + "/data/OR_ABI-L1b-RadF-M3C10_G16_s20180722030423_e20180722041201_c20180722041250.nc", + "/data/OR_ABI-L1b-RadF-M3C11_G16_s20180722030423_e20180722041189_c20180722041254.nc", + "/data/OR_ABI-L1b-RadF-M3C12_G16_s20180722030423_e20180722041195_c20180722041256.nc", + "/data/OR_ABI-L1b-RadF-M3C13_G16_s20180722030423_e20180722041201_c20180722041259.nc", + "/data/OR_ABI-L1b-RadF-M3C14_G16_s20180722030423_e20180722041190_c20180722041258.nc", + "/data/OR_ABI-L1b-RadF-M3C15_G16_s20180722030423_e20180722041195_c20180722041259.nc", + "/data/OR_ABI-L1b-RadF-M3C16_G16_s20180722030423_e20180722041202_c20180722041259.nc"] def generate_fake_abi_xr_dataset(filename, chunks=None, **kwargs): @@ -50,31 +50,31 @@ def generate_fake_abi_xr_dataset(filename, chunks=None, **kwargs): This is an incomplete copy of existing file structures. """ dataset = Dataset(attrs={ - 'time_coverage_start': '2018-03-13T20:30:42.3Z', - 'time_coverage_end': '2018-03-13T20:41:18.9Z', + "time_coverage_start": "2018-03-13T20:30:42.3Z", + "time_coverage_end": "2018-03-13T20:41:18.9Z", }) projection = DataArray( [-214748364], attrs={ - 'long_name': 'GOES-R ABI fixed grid projection', - 'grid_mapping_name': 'geostationary', - 'perspective_point_height': 35786023.0, - 'semi_major_axis': 6378137.0, - 'semi_minor_axis': 6356752.31414, - 'inverse_flattening': 298.2572221, - 'latitude_of_projection_origin': 0.0, - 'longitude_of_projection_origin': -75.0, - 'sweep_angle_axis': 'x' + "long_name": "GOES-R ABI fixed grid projection", + "grid_mapping_name": "geostationary", + "perspective_point_height": 35786023.0, + "semi_major_axis": 6378137.0, + "semi_minor_axis": 6356752.31414, + "inverse_flattening": 298.2572221, + "latitude_of_projection_origin": 0.0, + "longitude_of_projection_origin": -75.0, + "sweep_angle_axis": "x" }) - dataset['goes_imager_projection'] = projection + dataset["goes_imager_projection"] = projection - if 'C01' in filename or 'C03' in filename or 'C05' in filename: + if "C01" in filename or "C03" in filename or "C05" in filename: stop = 10847 step = 2 scale = 2.8e-05 offset = 0.151858 - elif 'C02' in filename: + elif "C02" in filename: stop = 21693 step = 4 scale = 1.4e-05 @@ -88,117 +88,117 @@ def generate_fake_abi_xr_dataset(filename, chunks=None, **kwargs): y = DataArray( da.arange(0, stop, step), attrs={ - 'scale_factor': -scale, - 'add_offset': offset, - 'units': 'rad', - 'axis': 'Y', - 'long_name': 'GOES fixed grid projection y-coordinate', - 'standard_name': 'projection_y_coordinate' + "scale_factor": -scale, + "add_offset": offset, + "units": "rad", + "axis": "Y", + "long_name": "GOES fixed grid projection y-coordinate", + "standard_name": "projection_y_coordinate" }, - dims=['y']) + dims=["y"]) - dataset['y'] = y + dataset["y"] = y x = DataArray( da.arange(0, stop, step), attrs={ - 'scale_factor': scale, - 'add_offset': -offset, - 'units': 'rad', - 'axis': 'X', - 'long_name': 'GOES fixed grid projection x-coordinate', - 'standard_name': 'projection_x_coordinate' + "scale_factor": scale, + "add_offset": -offset, + "units": "rad", + "axis": "X", + "long_name": "GOES fixed grid projection x-coordinate", + "standard_name": "projection_x_coordinate" }, - dims=['x']) + 
dims=["x"]) - dataset['x'] = x + dataset["x"] = x rad = DataArray( da.random.randint(0, 1025, size=[len(y), len(x)], dtype=np.int16, chunks=chunks), attrs={ - '_FillValue': np.array(1023), - 'long_name': 'ABI L1b Radiances', - 'standard_name': 'toa_outgoing_radiance_per_unit_wavelength', - '_Unsigned': 'true', - 'sensor_band_bit_depth': 10, - 'valid_range': np.array([0, 1022], dtype=np.int16), - 'scale_factor': 0.8121064, - 'add_offset': -25.936647, - 'units': 'W m-2 sr-1 um-1', - 'resolution': 'y: 0.000028 rad x: 0.000028 rad', - 'grid_mapping': 'goes_imager_projection', - 'cell_methods': 't: point area: point' + "_FillValue": np.array(1023), + "long_name": "ABI L1b Radiances", + "standard_name": "toa_outgoing_radiance_per_unit_wavelength", + "_Unsigned": "true", + "sensor_band_bit_depth": 10, + "valid_range": np.array([0, 1022], dtype=np.int16), + "scale_factor": 0.8121064, + "add_offset": -25.936647, + "units": "W m-2 sr-1 um-1", + "resolution": "y: 0.000028 rad x: 0.000028 rad", + "grid_mapping": "goes_imager_projection", + "cell_methods": "t: point area: point" }, - dims=['y', 'x'] + dims=["y", "x"] ) - dataset['Rad'] = rad + dataset["Rad"] = rad sublat = DataArray(0.0, attrs={ - 'long_name': 'nominal satellite subpoint latitude (platform latitude)', - 'standard_name': 'latitude', - '_FillValue': -999.0, - 'units': 'degrees_north'}) - dataset['nominal_satellite_subpoint_lat'] = sublat + "long_name": "nominal satellite subpoint latitude (platform latitude)", + "standard_name": "latitude", + "_FillValue": -999.0, + "units": "degrees_north"}) + dataset["nominal_satellite_subpoint_lat"] = sublat sublon = DataArray(-75.0, attrs={ - 'long_name': 'nominal satellite subpoint longitude (platform longitude)', - 'standard_name': 'longitude', - '_FillValue': -999.0, - 'units': 'degrees_east'}) + "long_name": "nominal satellite subpoint longitude (platform longitude)", + "standard_name": "longitude", + "_FillValue": -999.0, + "units": "degrees_east"}) - dataset['nominal_satellite_subpoint_lon'] = sublon + dataset["nominal_satellite_subpoint_lon"] = sublon satheight = DataArray(35786.023, attrs={ - 'long_name': 'nominal satellite height above GRS 80 ellipsoid (platform altitude)', - 'standard_name': 'height_above_reference_ellipsoid', - '_FillValue': -999.0, - 'units': 'km'}) + "long_name": "nominal satellite height above GRS 80 ellipsoid (platform altitude)", + "standard_name": "height_above_reference_ellipsoid", + "_FillValue": -999.0, + "units": "km"}) - dataset['nominal_satellite_height'] = satheight + dataset["nominal_satellite_height"] = satheight yaw_flip_flag = DataArray(0, attrs={ - 'long_name': 'Flag indicating the spacecraft is operating in yaw flip configuration', - '_Unsigned': 'true', - '_FillValue': np.array(-1), - 'valid_range': np.array([0, 1], dtype=np.int8), - 'units': '1', - 'flag_values': '0 1', - 'flag_meanings': 'false true'}) + "long_name": "Flag indicating the spacecraft is operating in yaw flip configuration", + "_Unsigned": "true", + "_FillValue": np.array(-1), + "valid_range": np.array([0, 1], dtype=np.int8), + "units": "1", + "flag_values": "0 1", + "flag_meanings": "false true"}) - dataset['yaw_flip_flag'] = yaw_flip_flag + dataset["yaw_flip_flag"] = yaw_flip_flag return dataset -@patch('xarray.open_dataset') +@patch("xarray.open_dataset") def test_1258(fake_open_dataset): """Save true_color from abi with radiance doesn't need two resamplings.""" from satpy import Scene fake_open_dataset.side_effect = generate_fake_abi_xr_dataset - scene = Scene(abi_file_list, 
reader='abi_l1b') - scene.load(['true_color_nocorr', 'C04'], calibration='radiance') - resampled_scene = scene.resample(scene.coarsest_area(), resampler='native') + scene = Scene(abi_file_list, reader="abi_l1b") + scene.load(["true_color_nocorr", "C04"], calibration="radiance") + resampled_scene = scene.resample(scene.coarsest_area(), resampler="native") assert len(resampled_scene.keys()) == 2 -@patch('xarray.open_dataset') +@patch("xarray.open_dataset") def test_1088(fake_open_dataset): """Check that copied arrays gets resampled.""" from satpy import Scene fake_open_dataset.side_effect = generate_fake_abi_xr_dataset - scene = Scene(abi_file_list, reader='abi_l1b') - scene.load(['C04'], calibration='radiance') + scene = Scene(abi_file_list, reader="abi_l1b") + scene.load(["C04"], calibration="radiance") - my_id = make_dataid(name='my_name', wavelength=(10, 11, 12)) - scene[my_id] = scene['C04'].copy() - resampled = scene.resample('eurol') + my_id = make_dataid(name="my_name", wavelength=(10, 11, 12)) + scene[my_id] = scene["C04"].copy() + resampled = scene.resample("eurol") assert resampled[my_id].shape == (2048, 2560) -@patch('xarray.open_dataset') +@patch("xarray.open_dataset") def test_no_enums(fake_open_dataset): """Check that no enums are inserted in the resulting attrs.""" from enum import Enum @@ -206,7 +206,7 @@ def test_no_enums(fake_open_dataset): from satpy import Scene fake_open_dataset.side_effect = generate_fake_abi_xr_dataset - scene = Scene(abi_file_list, reader='abi_l1b') - scene.load(['C04'], calibration='radiance') - for value in scene['C04'].attrs.values(): + scene = Scene(abi_file_list, reader="abi_l1b") + scene.load(["C04"], calibration="radiance") + for value in scene["C04"].attrs.values(): assert not isinstance(value, Enum) diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index aa9063b95c..a9a3b24a01 100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -37,7 +37,7 @@ def get_test_data(input_shape=(100, 50), output_shape=(200, 100), output_proj=None, - input_dims=('y', 'x')): + input_dims=("y", "x")): """Get common data objects used in testing. 
Returns: @@ -57,49 +57,49 @@ def get_test_data(input_shape=(100, 50), output_shape=(200, 100), output_proj=No from xarray import DataArray ds1 = DataArray(da.zeros(input_shape, chunks=85), dims=input_dims, - attrs={'name': 'test_data_name', 'test': 'test'}) - if input_dims and 'y' in input_dims: + attrs={"name": "test_data_name", "test": "test"}) + if input_dims and "y" in input_dims: ds1 = ds1.assign_coords(y=da.arange(input_shape[-2], chunks=85)) - if input_dims and 'x' in input_dims: + if input_dims and "x" in input_dims: ds1 = ds1.assign_coords(x=da.arange(input_shape[-1], chunks=85)) - if input_dims and 'bands' in input_dims: - ds1 = ds1.assign_coords(bands=list('RGBA'[:ds1.sizes['bands']])) + if input_dims and "bands" in input_dims: + ds1 = ds1.assign_coords(bands=list("RGBA"[:ds1.sizes["bands"]])) - input_proj_str = ('+proj=geos +lon_0=-95.0 +h=35786023.0 +a=6378137.0 ' - '+b=6356752.31414 +sweep=x +units=m +no_defs') + input_proj_str = ("+proj=geos +lon_0=-95.0 +h=35786023.0 +a=6378137.0 " + "+b=6356752.31414 +sweep=x +units=m +no_defs") source = AreaDefinition( - 'test_target', - 'test_target', - 'test_target', + "test_target", + "test_target", + "test_target", proj4_str_to_dict(input_proj_str), input_shape[1], # width input_shape[0], # height (-1000., -1500., 1000., 1500.)) - ds1.attrs['area'] = source + ds1.attrs["area"] = source crs = CRS.from_string(input_proj_str) ds1 = ds1.assign_coords(crs=crs) ds2 = ds1.copy() input_area_shape = tuple(ds1.sizes[dim] for dim in ds1.dims - if dim in ['y', 'x']) - geo_dims = ('y', 'x') if input_dims else None + if dim in ["y", "x"]) + geo_dims = ("y", "x") if input_dims else None lons = da.random.random(input_area_shape, chunks=50) lats = da.random.random(input_area_shape, chunks=50) swath_def = SwathDefinition( DataArray(lons, dims=geo_dims), DataArray(lats, dims=geo_dims)) - ds2.attrs['area'] = swath_def - crs = CRS.from_string('+proj=latlong +datum=WGS84 +ellps=WGS84') + ds2.attrs["area"] = swath_def + crs = CRS.from_string("+proj=latlong +datum=WGS84 +ellps=WGS84") ds2 = ds2.assign_coords(crs=crs) # set up target definition - output_proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') + output_proj_str = ("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. 
+lat_0=25 +lat_1=25 +units=m +no_defs") output_proj_str = output_proj or output_proj_str target = AreaDefinition( - 'test_target', - 'test_target', - 'test_target', + "test_target", + "test_target", + "test_target", proj4_str_to_dict(output_proj_str), output_shape[1], # width output_shape[0], # height @@ -116,14 +116,14 @@ def test_type_preserve(self): from pyresample.geometry import SwathDefinition from satpy.resample import resample_dataset - source_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)), dims=['y', 'x']), - xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)), dims=['y', 'x'])) - dest_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001, dims=['y', 'x']), - xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001, dims=['y', 'x'])) + source_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)), dims=["y", "x"]), + xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)), dims=["y", "x"])) + dest_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001, dims=["y", "x"]), + xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001, dims=["y", "x"])) expected_gap = np.array([[1, 2], [3, 255]]) - data = xr.DataArray(da.from_array(expected_gap, chunks=5), dims=['y', 'x']) - data.attrs['_FillValue'] = 255 - data.attrs['area'] = source_area + data = xr.DataArray(da.from_array(expected_gap, chunks=5), dims=["y", "x"]) + data.attrs["_FillValue"] = 255 + data.attrs["area"] = source_area res = resample_dataset(data, dest_area) self.assertEqual(res.dtype, data.dtype) self.assertTrue(np.all(res.values == expected_gap)) @@ -137,11 +137,11 @@ def test_type_preserve(self): class TestKDTreeResampler(unittest.TestCase): """Test the kd-tree resampler.""" - @mock.patch('satpy.resample.KDTreeResampler._check_numpy_cache') - @mock.patch('satpy.resample.xr.Dataset') - @mock.patch('satpy.resample.zarr.open') - @mock.patch('satpy.resample.KDTreeResampler._create_cache_filename') - @mock.patch('pyresample.kd_tree.XArrayResamplerNN') + @mock.patch("satpy.resample.KDTreeResampler._check_numpy_cache") + @mock.patch("satpy.resample.xr.Dataset") + @mock.patch("satpy.resample.zarr.open") + @mock.patch("satpy.resample.KDTreeResampler._create_cache_filename") + @mock.patch("pyresample.kd_tree.XArrayResamplerNN") def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, xr_dset, cnc): """Test the kd resampler.""" @@ -151,7 +151,7 @@ def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, xr_dset.return_value = mock_dset resampler = KDTreeResampler(source_swath, target_area) resampler.precompute( - mask=da.arange(5, chunks=5).astype(bool), cache_dir='.') + mask=da.arange(5, chunks=5).astype(bool), cache_dir=".") xr_resampler.assert_called_once() resampler.resampler.get_neighbour_info.assert_called() # swath definitions should not be cached @@ -166,7 +166,7 @@ def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, try: the_dir = tempfile.mkdtemp() resampler = KDTreeResampler(source_area, target_area) - create_filename.return_value = os.path.join(the_dir, 'test_cache.zarr') + create_filename.return_value = os.path.join(the_dir, "test_cache.zarr") zarr_open.side_effect = ValueError() resampler.precompute(cache_dir=the_dir) # assert data was saved to the on-disk cache @@ -216,8 +216,8 @@ def astype(self, dtype): resampler.compute(data, fill_value=fill_value) resampler.resampler.get_sample_from_neighbour_info.assert_called_with(data, fill_value) - 
@mock.patch('satpy.resample.np.load') - @mock.patch('satpy.resample.xr.Dataset') + @mock.patch("satpy.resample.np.load") + @mock.patch("satpy.resample.xr.Dataset") def test_check_numpy_cache(self, xr_Dataset, np_load): """Test that cache stored in .npz is converted to zarr.""" from satpy.resample import KDTreeResampler @@ -232,22 +232,22 @@ def test_check_numpy_cache(self, xr_Dataset, np_load): the_dir = tempfile.mkdtemp() kwargs = {} np_path = resampler._create_cache_filename(the_dir, - prefix='resample_lut-', - fmt='.npz', + prefix="resample_lut-", + fmt=".npz", mask=None, **kwargs) zarr_path = resampler._create_cache_filename(the_dir, - prefix='nn_lut-', - fmt='.zarr', + prefix="nn_lut-", + fmt=".zarr", mask=None, **kwargs) resampler._check_numpy_cache(the_dir) np_load.assert_not_called() zarr_out.to_zarr.assert_not_called() - with open(np_path, 'w') as fid: + with open(np_path, "w") as fid: fid.write("42") resampler._check_numpy_cache(the_dir) - np_load.assert_called_once_with(np_path, 'r') + np_load.assert_called_once_with(np_path, "r") zarr_out.to_zarr.assert_called_once_with(zarr_path) finally: shutil.rmtree(the_dir) @@ -259,9 +259,9 @@ def test_check_numpy_cache(self, xr_Dataset, np_load): class TestEWAResampler(unittest.TestCase): """Test EWA resampler class.""" - @mock.patch('satpy.resample.fornav') - @mock.patch('satpy.resample.ll2cr') - @mock.patch('satpy.resample.SwathDefinition.get_lonlats') + @mock.patch("satpy.resample.fornav") + @mock.patch("satpy.resample.ll2cr") + @mock.patch("satpy.resample.SwathDefinition.get_lonlats") def test_2d_ewa(self, get_lonlats, ll2cr, fornav): """Test EWA with a 2D dataset.""" import numpy as np @@ -278,11 +278,11 @@ def test_2d_ewa(self, get_lonlats, ll2cr, fornav): swath_data.data = swath_data.data.astype(np.float32) num_chunks = len(source_swath.lons.chunks[0]) * len(source_swath.lons.chunks[1]) - new_data = resample_dataset(swath_data, target_area, resampler='ewa') + new_data = resample_dataset(swath_data, target_area, resampler="ewa") self.assertTupleEqual(new_data.shape, (200, 100)) self.assertEqual(new_data.dtype, np.float32) - self.assertEqual(new_data.attrs['test'], 'test') - self.assertIs(new_data.attrs['area'], target_area) + self.assertEqual(new_data.attrs["test"], "test") + self.assertIs(new_data.attrs["area"], target_area) # make sure we can actually compute everything new_data.compute() lonlat_calls = get_lonlats.call_count @@ -291,26 +291,26 @@ def test_2d_ewa(self, get_lonlats, ll2cr, fornav): # resample a different dataset and make sure cache is used data = xr.DataArray( swath_data.data, - dims=('y', 'x'), attrs={'area': source_swath, 'test': 'test2', - 'name': 'test2'}) - new_data = resample_dataset(data, target_area, resampler='ewa') + dims=("y", "x"), attrs={"area": source_swath, "test": "test2", + "name": "test2"}) + new_data = resample_dataset(data, target_area, resampler="ewa") new_data.compute() # ll2cr will be called once more because of the computation self.assertEqual(ll2cr.call_count, ll2cr_calls + num_chunks) # but we should already have taken the lonlats from the SwathDefinition self.assertEqual(get_lonlats.call_count, lonlat_calls) - self.assertIn('y', new_data.coords) - self.assertIn('x', new_data.coords) - self.assertIn('crs', new_data.coords) - self.assertIsInstance(new_data.coords['crs'].item(), CRS) - self.assertIn('lambert', new_data.coords['crs'].item().coordinate_operation.method_name.lower()) - self.assertEqual(new_data.coords['y'].attrs['units'], 'meter') - 
self.assertEqual(new_data.coords['x'].attrs['units'], 'meter') - self.assertEqual(target_area.crs, new_data.coords['crs'].item()) - - @mock.patch('satpy.resample.fornav') - @mock.patch('satpy.resample.ll2cr') - @mock.patch('satpy.resample.SwathDefinition.get_lonlats') + self.assertIn("y", new_data.coords) + self.assertIn("x", new_data.coords) + self.assertIn("crs", new_data.coords) + self.assertIsInstance(new_data.coords["crs"].item(), CRS) + self.assertIn("lambert", new_data.coords["crs"].item().coordinate_operation.method_name.lower()) + self.assertEqual(new_data.coords["y"].attrs["units"], "meter") + self.assertEqual(new_data.coords["x"].attrs["units"], "meter") + self.assertEqual(target_area.crs, new_data.coords["crs"].item()) + + @mock.patch("satpy.resample.fornav") + @mock.patch("satpy.resample.ll2cr") + @mock.patch("satpy.resample.SwathDefinition.get_lonlats") def test_3d_ewa(self, get_lonlats, ll2cr, fornav): """Test EWA with a 3D dataset.""" import numpy as np @@ -318,7 +318,7 @@ def test_3d_ewa(self, get_lonlats, ll2cr, fornav): from satpy.resample import resample_dataset _, _, swath_data, source_swath, target_area = get_test_data( - input_shape=(3, 200, 100), input_dims=('bands', 'y', 'x')) + input_shape=(3, 200, 100), input_dims=("bands", "y", "x")) swath_data.data = swath_data.data.astype(np.float32) ll2cr.return_value = (100, np.zeros((10, 10), dtype=np.float32), @@ -328,11 +328,11 @@ def test_3d_ewa(self, get_lonlats, ll2cr, fornav): get_lonlats.return_value = (source_swath.lons, source_swath.lats) num_chunks = len(source_swath.lons.chunks[0]) * len(source_swath.lons.chunks[1]) - new_data = resample_dataset(swath_data, target_area, resampler='ewa') + new_data = resample_dataset(swath_data, target_area, resampler="ewa") self.assertTupleEqual(new_data.shape, (3, 200, 100)) self.assertEqual(new_data.dtype, np.float32) - self.assertEqual(new_data.attrs['test'], 'test') - self.assertIs(new_data.attrs['area'], target_area) + self.assertEqual(new_data.attrs["test"], "test") + self.assertIs(new_data.attrs["area"], target_area) # make sure we can actually compute everything new_data.compute() lonlat_calls = get_lonlats.call_count @@ -341,25 +341,25 @@ def test_3d_ewa(self, get_lonlats, ll2cr, fornav): # resample a different dataset and make sure cache is used swath_data = xr.DataArray( swath_data.data, - dims=('bands', 'y', 'x'), coords={'bands': ['R', 'G', 'B']}, - attrs={'area': source_swath, 'test': 'test'}) - new_data = resample_dataset(swath_data, target_area, resampler='ewa') + dims=("bands", "y", "x"), coords={"bands": ["R", "G", "B"]}, + attrs={"area": source_swath, "test": "test"}) + new_data = resample_dataset(swath_data, target_area, resampler="ewa") new_data.compute() # ll2cr will be called once more because of the computation self.assertEqual(ll2cr.call_count, ll2cr_calls + num_chunks) # but we should already have taken the lonlats from the SwathDefinition self.assertEqual(get_lonlats.call_count, lonlat_calls) - self.assertIn('y', new_data.coords) - self.assertIn('x', new_data.coords) - self.assertIn('bands', new_data.coords) - self.assertIn('crs', new_data.coords) - self.assertIsInstance(new_data.coords['crs'].item(), CRS) - self.assertIn('lambert', new_data.coords['crs'].item().coordinate_operation.method_name.lower()) - self.assertEqual(new_data.coords['y'].attrs['units'], 'meter') - self.assertEqual(new_data.coords['x'].attrs['units'], 'meter') - np.testing.assert_equal(new_data.coords['bands'].values, - ['R', 'G', 'B']) - self.assertEqual(target_area.crs, 
new_data.coords['crs'].item()) + self.assertIn("y", new_data.coords) + self.assertIn("x", new_data.coords) + self.assertIn("bands", new_data.coords) + self.assertIn("crs", new_data.coords) + self.assertIsInstance(new_data.coords["crs"].item(), CRS) + self.assertIn("lambert", new_data.coords["crs"].item().coordinate_operation.method_name.lower()) + self.assertEqual(new_data.coords["y"].attrs["units"], "meter") + self.assertEqual(new_data.coords["x"].attrs["units"], "meter") + np.testing.assert_equal(new_data.coords["bands"].values, + ["R", "G", "B"]) + self.assertEqual(target_area.crs, new_data.coords["crs"].item()) class TestNativeResampler: @@ -419,35 +419,35 @@ def test_expand_dims(self): assert new_data.shape == (200, 100) new_data2 = resampler.resample(ds1.compute()) np.testing.assert_equal(new_data.compute().data, new_data2.compute().data) - assert 'y' in new_data.coords - assert 'x' in new_data.coords - assert 'crs' in new_data.coords - assert isinstance(new_data.coords['crs'].item(), CRS) - assert 'lambert' in new_data.coords['crs'].item().coordinate_operation.method_name.lower() - assert new_data.coords['y'].attrs['units'] == 'meter' - assert new_data.coords['x'].attrs['units'] == 'meter' - assert target_area.crs == new_data.coords['crs'].item() + assert "y" in new_data.coords + assert "x" in new_data.coords + assert "crs" in new_data.coords + assert isinstance(new_data.coords["crs"].item(), CRS) + assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower() + assert new_data.coords["y"].attrs["units"] == "meter" + assert new_data.coords["x"].attrs["units"] == "meter" + assert target_area.crs == new_data.coords["crs"].item() def test_expand_dims_3d(self): """Test expanding native resampling with 3D data.""" ds1, source_area, _, _, target_area = get_test_data( - input_shape=(3, 100, 50), input_dims=('bands', 'y', 'x')) + input_shape=(3, 100, 50), input_dims=("bands", "y", "x")) # source geo def doesn't actually matter resampler = NativeResampler(source_area, target_area) new_data = resampler.resample(ds1) assert new_data.shape == (3, 200, 100) new_data2 = resampler.resample(ds1.compute()) np.testing.assert_equal(new_data.compute().data, new_data2.compute().data) - assert 'y' in new_data.coords - assert 'x' in new_data.coords - assert 'bands' in new_data.coords - np.testing.assert_equal(new_data.coords['bands'].values, ['R', 'G', 'B']) - assert 'crs' in new_data.coords - assert isinstance(new_data.coords['crs'].item(), CRS) - assert 'lambert' in new_data.coords['crs'].item().coordinate_operation.method_name.lower() - assert new_data.coords['y'].attrs['units'] == 'meter' - assert new_data.coords['x'].attrs['units'] == 'meter' - assert target_area.crs == new_data.coords['crs'].item() + assert "y" in new_data.coords + assert "x" in new_data.coords + assert "bands" in new_data.coords + np.testing.assert_equal(new_data.coords["bands"].values, ["R", "G", "B"]) + assert "crs" in new_data.coords + assert isinstance(new_data.coords["crs"].item(), CRS) + assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower() + assert new_data.coords["y"].attrs["units"] == "meter" + assert new_data.coords["x"].attrs["units"] == "meter" + assert target_area.crs == new_data.coords["crs"].item() def test_expand_without_dims(self): """Test expanding native resampling with no dimensions specified.""" @@ -458,10 +458,10 @@ def test_expand_without_dims(self): assert new_data.shape == (200, 100) new_data2 = resampler.resample(ds1.compute()) 
np.testing.assert_equal(new_data.compute().data, new_data2.compute().data) - assert 'crs' in new_data.coords - assert isinstance(new_data.coords['crs'].item(), CRS) - assert 'lambert' in new_data.coords['crs'].item().coordinate_operation.method_name.lower() - assert target_area.crs == new_data.coords['crs'].item() + assert "crs" in new_data.coords + assert isinstance(new_data.coords["crs"].item(), CRS) + assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower() + assert target_area.crs == new_data.coords["crs"].item() def test_expand_without_dims_4D(self): """Test expanding native resampling with 4D data with no dimensions specified.""" @@ -476,9 +476,9 @@ def test_expand_without_dims_4D(self): class TestBilinearResampler(unittest.TestCase): """Test the bilinear resampler.""" - @mock.patch('satpy.resample._move_existing_caches') - @mock.patch('satpy.resample.BilinearResampler._create_cache_filename') - @mock.patch('pyresample.bilinear.XArrayBilinearResampler') + @mock.patch("satpy.resample._move_existing_caches") + @mock.patch("satpy.resample.BilinearResampler._create_cache_filename") + @mock.patch("pyresample.bilinear.XArrayBilinearResampler") def test_bil_resampling(self, xr_resampler, create_filename, move_existing_caches): """Test the bilinear resampler.""" @@ -496,29 +496,29 @@ def test_bil_resampling(self, xr_resampler, create_filename, # Test that get_sample_from_bil_info is called properly fill_value = 8 resampler.resampler.get_sample_from_bil_info.return_value = \ - xr.DataArray(da.zeros(target_area.shape), dims=('y', 'x')) + xr.DataArray(da.zeros(target_area.shape), dims=("y", "x")) new_data = resampler.compute(data, fill_value=fill_value) resampler.resampler.get_sample_from_bil_info.assert_called_with( data, fill_value=fill_value, output_shape=target_area.shape) - self.assertIn('y', new_data.coords) - self.assertIn('x', new_data.coords) - self.assertIn('crs', new_data.coords) - self.assertIsInstance(new_data.coords['crs'].item(), CRS) - self.assertIn('lambert', new_data.coords['crs'].item().coordinate_operation.method_name.lower()) - self.assertEqual(new_data.coords['y'].attrs['units'], 'meter') - self.assertEqual(new_data.coords['x'].attrs['units'], 'meter') - self.assertEqual(target_area.crs, new_data.coords['crs'].item()) + self.assertIn("y", new_data.coords) + self.assertIn("x", new_data.coords) + self.assertIn("crs", new_data.coords) + self.assertIsInstance(new_data.coords["crs"].item(), CRS) + self.assertIn("lambert", new_data.coords["crs"].item().coordinate_operation.method_name.lower()) + self.assertEqual(new_data.coords["y"].attrs["units"], "meter") + self.assertEqual(new_data.coords["x"].attrs["units"], "meter") + self.assertEqual(target_area.crs, new_data.coords["crs"].item()) # Test that the resampling info is tried to read from the disk resampler = BilinearResampler(source_swath, target_area) - resampler.precompute(cache_dir='.') + resampler.precompute(cache_dir=".") resampler.resampler.load_resampling_info.assert_called() # Test caching the resampling info try: the_dir = tempfile.mkdtemp() resampler = BilinearResampler(source_area, target_area) - create_filename.return_value = os.path.join(the_dir, 'test_cache.zarr') + create_filename.return_value = os.path.join(the_dir, "test_cache.zarr") xr_resampler.return_value.load_resampling_info.side_effect = IOError resampler.precompute(cache_dir=the_dir) @@ -544,9 +544,9 @@ def test_bil_resampling(self, xr_resampler, create_filename, resampler = BilinearResampler(source_area, target_area) 
resampler.precompute(cache_dir=the_dir) resampler.save_bil_info(cache_dir=the_dir) - zarr_file = os.path.join(the_dir, 'test_cache.zarr') + zarr_file = os.path.join(the_dir, "test_cache.zarr") # Save again faking the cache file already exists - with mock.patch('os.path.exists') as exists: + with mock.patch("os.path.exists") as exists: exists.return_value = True resampler.save_bil_info(cache_dir=the_dir) move_existing_caches.assert_called_once_with(the_dir, zarr_file) @@ -559,18 +559,18 @@ def test_move_existing_caches(self): try: the_dir = tempfile.mkdtemp() # Test that existing cache file is moved away - zarr_file = os.path.join(the_dir, 'test.zarr') - with open(zarr_file, 'w') as fid: - fid.write('42') + zarr_file = os.path.join(the_dir, "test.zarr") + with open(zarr_file, "w") as fid: + fid.write("42") from satpy.resample import _move_existing_caches _move_existing_caches(the_dir, zarr_file) self.assertFalse(os.path.exists(zarr_file)) self.assertTrue(os.path.exists( - os.path.join(the_dir, 'moved_by_satpy', - 'test.zarr'))) + os.path.join(the_dir, "moved_by_satpy", + "test.zarr"))) # Run again to see that the existing dir doesn't matter - with open(zarr_file, 'w') as fid: - fid.write('42') + with open(zarr_file, "w") as fid: + fid.write("42") _move_existing_caches(the_dir, zarr_file) finally: shutil.rmtree(the_dir) @@ -585,69 +585,69 @@ def test_area_def_coordinates(self): from satpy.resample import add_crs_xy_coords area_def = AreaDefinition( - 'test', 'test', 'test', {'proj': 'lcc', 'lat_1': 25, 'lat_0': 25}, + "test", "test", "test", {"proj": "lcc", "lat_1": 25, "lat_0": 25}, 100, 200, [-100, -100, 100, 100] ) data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), - attrs={'area': area_def}, - dims=('y', 'x'), + attrs={"area": area_def}, + dims=("y", "x"), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) - self.assertIn('y', new_data_arr.coords) - self.assertIn('x', new_data_arr.coords) + self.assertIn("y", new_data_arr.coords) + self.assertIn("x", new_data_arr.coords) - self.assertIn('units', new_data_arr.coords['y'].attrs) + self.assertIn("units", new_data_arr.coords["y"].attrs) self.assertEqual( - new_data_arr.coords['y'].attrs['units'], 'meter') - self.assertIn('units', new_data_arr.coords['x'].attrs) + new_data_arr.coords["y"].attrs["units"], "meter") + self.assertIn("units", new_data_arr.coords["x"].attrs) self.assertEqual( - new_data_arr.coords['x'].attrs['units'], 'meter') - self.assertIn('crs', new_data_arr.coords) - self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS) - self.assertEqual(area_def.crs, new_data_arr.coords['crs'].item()) + new_data_arr.coords["x"].attrs["units"], "meter") + self.assertIn("crs", new_data_arr.coords) + self.assertIsInstance(new_data_arr.coords["crs"].item(), CRS) + self.assertEqual(area_def.crs, new_data_arr.coords["crs"].item()) # already has coords data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), - attrs={'area': area_def}, - dims=('y', 'x'), - coords={'y': np.arange(2, 202), 'x': np.arange(100)} + attrs={"area": area_def}, + dims=("y", "x"), + coords={"y": np.arange(2, 202), "x": np.arange(100)} ) new_data_arr = add_crs_xy_coords(data_arr, area_def) - self.assertIn('y', new_data_arr.coords) - self.assertNotIn('units', new_data_arr.coords['y'].attrs) - self.assertIn('x', new_data_arr.coords) - self.assertNotIn('units', new_data_arr.coords['x'].attrs) - np.testing.assert_equal(new_data_arr.coords['y'], np.arange(2, 202)) + self.assertIn("y", new_data_arr.coords) + self.assertNotIn("units", 
new_data_arr.coords["y"].attrs) + self.assertIn("x", new_data_arr.coords) + self.assertNotIn("units", new_data_arr.coords["x"].attrs) + np.testing.assert_equal(new_data_arr.coords["y"], np.arange(2, 202)) - self.assertIn('crs', new_data_arr.coords) - self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS) - self.assertEqual(area_def.crs, new_data_arr.coords['crs'].item()) + self.assertIn("crs", new_data_arr.coords) + self.assertIsInstance(new_data_arr.coords["crs"].item(), CRS) + self.assertEqual(area_def.crs, new_data_arr.coords["crs"].item()) # lat/lon area area_def = AreaDefinition( - 'test', 'test', 'test', {'proj': 'latlong'}, + "test", "test", "test", {"proj": "latlong"}, 100, 200, [-100, -100, 100, 100] ) data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), - attrs={'area': area_def}, - dims=('y', 'x'), + attrs={"area": area_def}, + dims=("y", "x"), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) - self.assertIn('y', new_data_arr.coords) - self.assertIn('x', new_data_arr.coords) + self.assertIn("y", new_data_arr.coords) + self.assertIn("x", new_data_arr.coords) - self.assertIn('units', new_data_arr.coords['y'].attrs) + self.assertIn("units", new_data_arr.coords["y"].attrs) self.assertEqual( - new_data_arr.coords['y'].attrs['units'], 'degrees_north') - self.assertIn('units', new_data_arr.coords['x'].attrs) + new_data_arr.coords["y"].attrs["units"], "degrees_north") + self.assertIn("units", new_data_arr.coords["x"].attrs) self.assertEqual( - new_data_arr.coords['x'].attrs['units'], 'degrees_east') - self.assertIn('crs', new_data_arr.coords) - self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS) - self.assertEqual(area_def.crs, new_data_arr.coords['crs'].item()) + new_data_arr.coords["x"].attrs["units"], "degrees_east") + self.assertIn("crs", new_data_arr.coords) + self.assertIsInstance(new_data_arr.coords["crs"].item(), CRS) + self.assertEqual(area_def.crs, new_data_arr.coords["crs"].item()) def test_swath_def_coordinates(self): """Test coordinates being added with an SwathDefinition.""" @@ -656,15 +656,15 @@ def test_swath_def_coordinates(self): from satpy.resample import add_crs_xy_coords lons_data = da.random.random((200, 100), chunks=50) lats_data = da.random.random((200, 100), chunks=50) - lons = xr.DataArray(lons_data, attrs={'units': 'degrees_east'}, - dims=('y', 'x')) - lats = xr.DataArray(lats_data, attrs={'units': 'degrees_north'}, - dims=('y', 'x')) + lons = xr.DataArray(lons_data, attrs={"units": "degrees_east"}, + dims=("y", "x")) + lats = xr.DataArray(lats_data, attrs={"units": "degrees_north"}, + dims=("y", "x")) area_def = SwathDefinition(lons, lats) data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), - attrs={'area': area_def}, - dims=('y', 'x'), + attrs={"area": area_def}, + dims=("y", "x"), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) # See https://github.com/pydata/xarray/issues/3068 @@ -679,11 +679,11 @@ def test_swath_def_coordinates(self): # new_data_arr.coords['latitude'].attrs['units'], 'degrees_north') # self.assertIsInstance(new_data_arr.coords['latitude'].data, da.Array) - self.assertIn('crs', new_data_arr.coords) - crs = new_data_arr.coords['crs'].item() + self.assertIn("crs", new_data_arr.coords) + crs = new_data_arr.coords["crs"].item() self.assertIsInstance(crs, CRS) assert crs.is_geographic - self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS) + self.assertIsInstance(new_data_arr.coords["crs"].item(), CRS) class TestBucketAvg(unittest.TestCase): @@ -706,7 +706,7 @@ def test_init(self): 
self.assertTrue(self.bucket.source_geo_def == self.source_geo_def) self.assertTrue(self.bucket.target_geo_def == self.target_geo_def) - @mock.patch('pyresample.bucket.BucketResampler') + @mock.patch("pyresample.bucket.BucketResampler") def test_precompute(self, bucket): """Test bucket resampler precomputation.""" bucket.return_value = True @@ -740,7 +740,7 @@ def test_compute(self): res = self._compute_mocked_bucket_avg(data, return_data=data[0, :, :], fill_value=2) self.assertEqual(res.shape, (3, 5, 5)) - @mock.patch('satpy.resample.PR_USE_SKIPNA', True) + @mock.patch("satpy.resample.PR_USE_SKIPNA", True) def test_compute_and_use_skipna_handling(self): """Test bucket resampler computation and use skipna handling.""" data = da.ones((5,)) @@ -763,7 +763,7 @@ def test_compute_and_use_skipna_handling(self): fill_value=2, skipna=True) - @mock.patch('satpy.resample.PR_USE_SKIPNA', False) + @mock.patch("satpy.resample.PR_USE_SKIPNA", False) def test_compute_and_not_use_skipna_handling(self): """Test bucket resampler computation and not use skipna handling.""" data = da.ones((5,)) @@ -792,7 +792,7 @@ def test_compute_and_not_use_skipna_handling(self): fill_value=2, mask_all_nan=False) - @mock.patch('pyresample.bucket.BucketResampler') + @mock.patch("pyresample.bucket.BucketResampler") def test_resample(self, pyresample_bucket): """Test bucket resamplers resample method.""" self.bucket.resampler = mock.MagicMock() @@ -800,38 +800,38 @@ def test_resample(self, pyresample_bucket): self.bucket.compute = mock.MagicMock() # 1D input data - data = xr.DataArray(da.ones((5,)), dims=('foo'), attrs={'bar': 'baz'}) + data = xr.DataArray(da.ones((5,)), dims=("foo"), attrs={"bar": "baz"}) self.bucket.compute.return_value = da.ones((5, 5)) res = self.bucket.resample(data) self.bucket.precompute.assert_called_once() self.bucket.compute.assert_called_once() self.assertEqual(res.shape, (5, 5)) - self.assertEqual(res.dims, ('y', 'x')) - self.assertTrue('bar' in res.attrs) - self.assertEqual(res.attrs['bar'], 'baz') + self.assertEqual(res.dims, ("y", "x")) + self.assertTrue("bar" in res.attrs) + self.assertEqual(res.attrs["bar"], "baz") # 2D input data - data = xr.DataArray(da.ones((5, 5)), dims=('foo', 'bar')) + data = xr.DataArray(da.ones((5, 5)), dims=("foo", "bar")) self.bucket.compute.return_value = da.ones((5, 5)) res = self.bucket.resample(data) self.assertEqual(res.shape, (5, 5)) - self.assertEqual(res.dims, ('y', 'x')) + self.assertEqual(res.dims, ("y", "x")) # 3D input data with 'bands' dim - data = xr.DataArray(da.ones((1, 5, 5)), dims=('bands', 'foo', 'bar'), - coords={'bands': ['L']}) + data = xr.DataArray(da.ones((1, 5, 5)), dims=("bands", "foo", "bar"), + coords={"bands": ["L"]}) self.bucket.compute.return_value = da.ones((1, 5, 5)) res = self.bucket.resample(data) self.assertEqual(res.shape, (1, 5, 5)) - self.assertEqual(res.dims, ('bands', 'y', 'x')) - self.assertEqual(res.coords['bands'], ['L']) + self.assertEqual(res.dims, ("bands", "y", "x")) + self.assertEqual(res.coords["bands"], ["L"]) # 3D input data with misc dim names - data = xr.DataArray(da.ones((3, 5, 5)), dims=('foo', 'bar', 'baz')) + data = xr.DataArray(da.ones((3, 5, 5)), dims=("foo", "bar", "baz")) self.bucket.compute.return_value = da.ones((3, 5, 5)) res = self.bucket.resample(data) self.assertEqual(res.shape, (3, 5, 5)) - self.assertEqual(res.dims, ('foo', 'bar', 'baz')) + self.assertEqual(res.dims, ("foo", "bar", "baz")) class TestBucketSum(unittest.TestCase): @@ -871,7 +871,7 @@ def test_compute(self): res = 
self._compute_mocked_bucket_sum(data, return_data=data[0, :, :]) self.assertEqual(res.shape, (3, 5, 5)) - @mock.patch('satpy.resample.PR_USE_SKIPNA', True) + @mock.patch("satpy.resample.PR_USE_SKIPNA", True) def test_compute_and_use_skipna_handling(self): """Test bucket resampler computation and use skipna handling.""" data = da.ones((5,)) @@ -891,7 +891,7 @@ def test_compute_and_use_skipna_handling(self): data, skipna=True) - @mock.patch('satpy.resample.PR_USE_SKIPNA', False) + @mock.patch("satpy.resample.PR_USE_SKIPNA", False) def test_compute_and_not_use_skipna_handling(self): """Test bucket resampler computation and not use skipna handling.""" data = da.ones((5,)) @@ -995,7 +995,7 @@ def test_compute(self): with self.assertRaises(ValueError): _ = self.bucket.compute(data) - @mock.patch('pyresample.bucket.BucketResampler') + @mock.patch("pyresample.bucket.BucketResampler") def test_resample(self, pyresample_bucket): """Test fraction bucket resamplers resample method.""" self.bucket.resampler = mock.MagicMock() @@ -1003,10 +1003,10 @@ def test_resample(self, pyresample_bucket): self.bucket.compute = mock.MagicMock() # Fractions return a dict - data = xr.DataArray(da.ones((1, 5, 5)), dims=('bands', 'y', 'x')) + data = xr.DataArray(da.ones((1, 5, 5)), dims=("bands", "y", "x")) arr = da.ones((5, 5)) self.bucket.compute.return_value = {0: arr, 1: arr, 2: arr} res = self.bucket.resample(data) - self.assertTrue('categories' in res.coords) - self.assertTrue('categories' in res.dims) - self.assertTrue(np.all(res.coords['categories'] == np.array([0, 1, 2]))) + self.assertTrue("categories" in res.coords) + self.assertTrue("categories" in res.dims) + self.assertTrue(np.all(res.coords["categories"] == np.array([0, 1, 2]))) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 56dbe25324..29d940fbdc 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -205,21 +205,21 @@ def test_xyz2angle(self): def test_proj_units_to_meters(self): """Test proj units to meters conversion.""" - prj = '+asd=123123123123' + prj = "+asd=123123123123" res = proj_units_to_meters(prj) self.assertEqual(res, prj) - prj = '+a=6378.137' + prj = "+a=6378.137" res = proj_units_to_meters(prj) - self.assertEqual(res, '+a=6378137.000') - prj = '+a=6378.137 +units=km' + self.assertEqual(res, "+a=6378137.000") + prj = "+a=6378.137 +units=km" res = proj_units_to_meters(prj) - self.assertEqual(res, '+a=6378137.000') - prj = '+a=6378.137 +b=6378.137' + self.assertEqual(res, "+a=6378137.000") + prj = "+a=6378.137 +b=6378.137" res = proj_units_to_meters(prj) - self.assertEqual(res, '+a=6378137.000 +b=6378137.000') - prj = '+a=6378.137 +b=6378.137 +h=35785.863' + self.assertEqual(res, "+a=6378137.000 +b=6378137.000") + prj = "+a=6378.137 +b=6378.137 +h=35785.863" res = proj_units_to_meters(prj) - self.assertEqual(res, '+a=6378137.000 +b=6378137.000 +h=35785863.000') + self.assertEqual(res, "+a=6378137.000 +b=6378137.000 +h=35785863.000") class TestGetSatPos: @@ -243,21 +243,21 @@ class TestGetSatPos: def test_get_satpos(self, included_prefixes, preference, expected_result): """Test getting the satellite position.""" all_orb_params = { - 'nadir_longitude': 1, - 'satellite_actual_longitude': 1.1, - 'satellite_nominal_longitude': 1.2, - 'projection_longitude': 1.3, - 'nadir_latitude': 2, - 'satellite_actual_latitude': 2.1, - 'satellite_nominal_latitude': 2.2, - 'projection_latitude': 2.3, - 'satellite_actual_altitude': 3, - 'satellite_nominal_altitude': 3.1, - 'projection_altitude': 3.2 + 
"nadir_longitude": 1, + "satellite_actual_longitude": 1.1, + "satellite_nominal_longitude": 1.2, + "projection_longitude": 1.3, + "nadir_latitude": 2, + "satellite_actual_latitude": 2.1, + "satellite_nominal_latitude": 2.2, + "projection_latitude": 2.3, + "satellite_actual_altitude": 3, + "satellite_nominal_altitude": 3.1, + "projection_altitude": 3.2 } orb_params = {key: value for key, value in all_orb_params.items() if any(in_prefix in key for in_prefix in included_prefixes)} - data_arr = xr.DataArray((), attrs={'orbital_parameters': orb_params}) + data_arr = xr.DataArray((), attrs={"orbital_parameters": orb_params}) with warnings.catch_warnings(record=True) as caught_warnings: lon, lat, alt = get_satpos(data_arr, preference=preference) @@ -273,8 +273,8 @@ def test_get_satpos(self, included_prefixes, preference, expected_result): "attrs", ( {}, - {'orbital_parameters': {'projection_longitude': 1}}, - {'satellite_altitude': 1} + {"orbital_parameters": {"projection_longitude": 1}}, + {"satellite_altitude": 1} ) ) def test_get_satpos_fails_with_informative_error(self, attrs): @@ -321,7 +321,7 @@ def test_make_fake_scene(): sc = make_fake_scene({ "six": np.arange(25).reshape(5, 5)}) assert len(sc.keys()) == 1 - assert sc.keys().pop()['name'] == "six" + assert sc.keys().pop()["name"] == "six" assert sc["six"].attrs["area"].shape == (5, 5) sc = make_fake_scene({ "seven": np.arange(3*7).reshape(3, 7), @@ -353,12 +353,12 @@ def test_basic_check_satpy(self): def test_specific_check_satpy(self): """Test 'check_satpy' with specific features provided.""" from satpy.utils import check_satpy - with mock.patch('satpy.utils.print') as print_mock: - check_satpy(readers=['viirs_sdr'], extras=('cartopy', '__fake')) + with mock.patch("satpy.utils.print") as print_mock: + check_satpy(readers=["viirs_sdr"], extras=("cartopy", "__fake")) checked_fake = False for call in print_mock.mock_calls: - if len(call[1]) > 0 and '__fake' in call[1][0]: - self.assertNotIn('ok', call[1][1]) + if len(call[1]) > 0 and "__fake" in call[1][0]: + self.assertNotIn("ok", call[1][1]) checked_fake = True self.assertTrue(checked_fake, "Did not find __fake module " "mentioned in checks") @@ -586,7 +586,7 @@ def test_convert_remote_files_to_fsspec_windows_paths(): assert res == filenames -@mock.patch('fsspec.open_files') +@mock.patch("fsspec.open_files") def test_convert_remote_files_to_fsspec_storage_options(open_files): """Test convertion of remote files to fsspec objects. 
@@ -595,7 +595,7 @@ def test_convert_remote_files_to_fsspec_storage_options(open_files): from satpy.utils import convert_remote_files_to_fsspec filenames = ["s3://tmp/file1.nc"] - storage_options = {'anon': True} + storage_options = {"anon": True} _ = convert_remote_files_to_fsspec(filenames, storage_options=storage_options) diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py index 986687b0d6..6e1ce7f2e2 100644 --- a/satpy/tests/test_writers.py +++ b/satpy/tests/test_writers.py @@ -39,10 +39,10 @@ def test_to_image_1d(self): """Conversion to image.""" # 1D from satpy.writers import to_image - p = xr.DataArray(np.arange(25), dims=['y']) + p = xr.DataArray(np.arange(25), dims=["y"]) self.assertRaises(ValueError, to_image, p) - @mock.patch('satpy.writers.XRImage') + @mock.patch("satpy.writers.XRImage") def test_to_image_2d(self, mock_geoimage): """Conversion to image.""" from satpy.writers import to_image @@ -51,33 +51,33 @@ def test_to_image_2d(self, mock_geoimage): data = np.arange(25).reshape((5, 5)) p = xr.DataArray(data, attrs=dict(mode="L", fill_value=0, palette=[0, 1, 2, 3, 4, 5]), - dims=['y', 'x']) + dims=["y", "x"]) to_image(p) np.testing.assert_array_equal( data, mock_geoimage.call_args[0][0].values) mock_geoimage.reset_mock() - @mock.patch('satpy.writers.XRImage') + @mock.patch("satpy.writers.XRImage") def test_to_image_3d(self, mock_geoimage): """Conversion to image.""" # 3D from satpy.writers import to_image data = np.arange(75).reshape((3, 5, 5)) - p = xr.DataArray(data, dims=['bands', 'y', 'x']) - p['bands'] = ['R', 'G', 'B'] + p = xr.DataArray(data, dims=["bands", "y", "x"]) + p["bands"] = ["R", "G", "B"] to_image(p) np.testing.assert_array_equal(data[0], mock_geoimage.call_args[0][0][0]) np.testing.assert_array_equal(data[1], mock_geoimage.call_args[0][0][1]) np.testing.assert_array_equal(data[2], mock_geoimage.call_args[0][0][2]) - @mock.patch('satpy.writers.get_enhanced_image') + @mock.patch("satpy.writers.get_enhanced_image") def test_show(self, mock_get_image): """Check showing.""" from satpy.writers import show data = np.arange(25).reshape((5, 5)) - p = xr.DataArray(data, dims=['y', 'x']) + p = xr.DataArray(data, dims=["y", "x"]) show(p) self.assertTrue(mock_get_image.return_value.show.called) @@ -128,7 +128,7 @@ def setup_class(cls): base_dir = os.path.dirname(fn) if base_dir: os.makedirs(base_dir, exist_ok=True) - with open(fn, 'w') as f: + with open(fn, "w") as f: f.write(content) # create fake test image writer @@ -136,7 +136,7 @@ def setup_class(cls): class CustomImageWriter(ImageWriter): def __init__(self, **kwargs): - super(CustomImageWriter, self).__init__(name='test', config_files=[], **kwargs) + super(CustomImageWriter, self).__init__(name="test", config_files=[], **kwargs) self.img = None def save_image(self, img, **kwargs): @@ -148,7 +148,7 @@ def teardown_class(cls): """Remove fake user configurations.""" for fn, _content in cls.TEST_CONFIGS.items(): base_dir = os.path.dirname(fn) - if base_dir not in ['.', ''] and os.path.isdir(base_dir): + if base_dir not in [".", ""] and os.path.isdir(base_dir): shutil.rmtree(base_dir) elif os.path.isfile(fn): os.remove(fn) @@ -157,8 +157,8 @@ def teardown_class(cls): class TestComplexSensorEnhancerConfigs(_BaseCustomEnhancementConfigTests): """Test enhancement configs that use or expect multiple sensors.""" - ENH_FN = 'test_sensor1.yaml' - ENH_FN2 = 'test_sensor2.yaml' + ENH_FN = "test_sensor1.yaml" + ENH_FN2 = "test_sensor2.yaml" TEST_CONFIGS = { ENH_FN: """ @@ -203,11 +203,11 @@ def 
test_multisensor_choice(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs={ - 'name': 'test1', - 'sensor': {'test_sensor2', 'test_sensor1'}, - 'mode': 'L' + "name": "test1", + "sensor": {"test_sensor2", "test_sensor1"}, + "mode": "L" }, - dims=['y', 'x']) + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) @@ -226,11 +226,11 @@ def test_multisensor_exact(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs={ - 'name': 'my_comp', - 'sensor': {'test_sensor2', 'test_sensor1'}, - 'mode': 'L' + "name": "my_comp", + "sensor": {"test_sensor2", "test_sensor1"}, + "mode": "L" }, - dims=['y', 'x']) + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) @@ -248,8 +248,8 @@ def test_enhance_bad_query_value(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(name=["I", "am", "invalid"], sensor='test_sensor2', mode='L'), - dims=['y', 'x']) + attrs=dict(name=["I", "am", "invalid"], sensor="test_sensor2", mode="L"), + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None with pytest.raises(KeyError, match="No .* found for None"): @@ -259,11 +259,11 @@ def test_enhance_bad_query_value(self): class TestEnhancerUserConfigs(_BaseCustomEnhancementConfigTests): """Test `Enhancer` functionality when user's custom configurations are present.""" - ENH_FN = 'test_sensor.yaml' - ENH_ENH_FN = os.path.join('enhancements', ENH_FN) - ENH_FN2 = 'test_sensor2.yaml' - ENH_ENH_FN2 = os.path.join('enhancements', ENH_FN2) - ENH_FN3 = 'test_empty.yaml' + ENH_FN = "test_sensor.yaml" + ENH_ENH_FN = os.path.join("enhancements", ENH_FN) + ENH_FN2 = "test_sensor2.yaml" + ENH_ENH_FN2 = os.path.join("enhancements", ENH_FN2) + ENH_FN3 = "test_empty.yaml" TEST_CONFIGS = { ENH_FN: """ @@ -303,8 +303,8 @@ def test_enhance_empty_config(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(sensor='test_empty', mode='L'), - dims=['y', 'x']) + attrs=dict(sensor="test_empty", mode="L"), + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None get_enhanced_image(ds, enhance=e) @@ -317,8 +317,8 @@ def test_enhance_with_sensor_no_entry(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(sensor='test_sensor2', mode='L'), - dims=['y', 'x']) + attrs=dict(sensor="test_sensor2", mode="L"), + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None get_enhanced_image(ds, enhance=e) @@ -332,8 +332,8 @@ def test_no_enhance(self): from satpy.writers import get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(name='test1', sensor='test_sensor', mode='L'), - dims=['y', 'x']) + attrs=dict(name="test1", sensor="test_sensor", mode="L"), + dims=["y", "x"]) img = get_enhanced_image(ds, enhance=False) np.testing.assert_allclose(img.data.data.compute().squeeze(), ds.data) @@ -341,8 +341,8 @@ def test_writer_no_enhance(self): """Test turning off enhancements with writer.""" from xarray import DataArray ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(name='test1', sensor='test_sensor', mode='L'), - dims=['y', 'x']) + attrs=dict(name="test1", sensor="test_sensor", mode="L"), + dims=["y", "x"]) writer = 
self.CustomImageWriter(enhance=False) writer.save_datasets((ds,), compute=False) img = writer.img @@ -354,8 +354,8 @@ def test_writer_custom_enhance(self): from satpy.writers import Enhancer ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(name='test1', sensor='test_sensor', mode='L'), - dims=['y', 'x']) + attrs=dict(name="test1", sensor="test_sensor", mode="L"), + dims=["y", "x"]) enhance = Enhancer() writer = self.CustomImageWriter(enhance=enhance) writer.save_datasets((ds,), compute=False) @@ -368,8 +368,8 @@ def test_enhance_with_sensor_entry(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(name='test1', sensor='test_sensor', mode='L'), - dims=['y', 'x']) + attrs=dict(name="test1", sensor="test_sensor", mode="L"), + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) @@ -380,8 +380,8 @@ def test_enhance_with_sensor_entry(self): 1.) ds = DataArray(da.arange(1, 11., chunks=5).reshape((2, 5)), - attrs=dict(name='test1', sensor='test_sensor', mode='L'), - dims=['y', 'x']) + attrs=dict(name="test1", sensor="test_sensor", mode="L"), + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) @@ -396,9 +396,9 @@ def test_enhance_with_sensor_entry2(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(name='test1', units='kelvin', - sensor='test_sensor', mode='L'), - dims=['y', 'x']) + attrs=dict(name="test1", units="kelvin", + sensor="test_sensor", mode="L"), + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) @@ -411,7 +411,7 @@ def test_enhance_with_sensor_entry2(self): class TestReaderEnhancerConfigs(_BaseCustomEnhancementConfigTests): """Test enhancement configs that use reader name.""" - ENH_FN = 'test_sensor1.yaml' + ENH_FN = "test_sensor1.yaml" # NOTE: The sections are ordered in a special way so that if 'reader' key # isn't provided that we'll get the section we didn't want and all tests @@ -452,11 +452,11 @@ def _get_test_data_array(self): from xarray import DataArray ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs={ - 'name': 'test1', - 'sensor': 'test_sensor1', - 'mode': 'L', + "name": "test1", + "sensor": "test_sensor1", + "mode": "L", }, - dims=['y', 'x']) + dims=["y", "x"]) return ds def _get_enhanced_image(self, data_arr): @@ -512,17 +512,17 @@ def test_filename_matches_writer_name(self): class IgnoreLoader(yaml.SafeLoader): def _ignore_all_tags(self, tag_suffix, node): - return tag_suffix + ' ' + node.value - IgnoreLoader.add_multi_constructor('', IgnoreLoader._ignore_all_tags) + return tag_suffix + " " + node.value + IgnoreLoader.add_multi_constructor("", IgnoreLoader._ignore_all_tags) from satpy._config import glob_config from satpy.writers import read_writer_config - for writer_config in glob_config('writers/*.yaml'): + for writer_config in glob_config("writers/*.yaml"): writer_fn = os.path.basename(writer_config) writer_fn_name = os.path.splitext(writer_fn)[0] writer_info = read_writer_config([writer_config], loader=IgnoreLoader) - self.assertEqual(writer_fn_name, writer_info['name'], + self.assertEqual(writer_fn_name, writer_info["name"], "Writer YAML filename doesn't match writer " "name in the YAML file.") @@ -532,13 +532,13 @@ def test_available_writers(self): writer_names = available_writers() self.assertGreater(len(writer_names), 0) 
self.assertIsInstance(writer_names[0], str) - self.assertIn('geotiff', writer_names) + self.assertIn("geotiff", writer_names) writer_infos = available_writers(as_dict=True) self.assertEqual(len(writer_names), len(writer_infos)) self.assertIsInstance(writer_infos[0], dict) for writer_info in writer_infos: - self.assertIn('name', writer_info) + self.assertIn("name", writer_info) class TestComputeWriterResults(unittest.TestCase): @@ -553,12 +553,12 @@ def setUp(self): ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime(2018, 1, 1, 0, 0, 0)} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime(2018, 1, 1, 0, 0, 0)} ) self.scn = Scene() - self.scn['test'] = ds1 + self.scn["test"] = ds1 # Temp dir self.base_dir = tempfile.mkdtemp() @@ -578,10 +578,10 @@ def test_empty(self): def test_simple_image(self): """Test writing to PNG file.""" from satpy.writers import compute_writer_results - fname = os.path.join(self.base_dir, 'simple_image.png') + fname = os.path.join(self.base_dir, "simple_image.png") res = self.scn.save_datasets(filename=fname, - datasets=['test'], - writer='simple_image', + datasets=["test"], + writer="simple_image", compute=False) compute_writer_results([res]) self.assertTrue(os.path.isfile(fname)) @@ -589,10 +589,10 @@ def test_simple_image(self): def test_geotiff(self): """Test writing to mitiff file.""" from satpy.writers import compute_writer_results - fname = os.path.join(self.base_dir, 'geotiff.tif') + fname = os.path.join(self.base_dir, "geotiff.tif") res = self.scn.save_datasets(filename=fname, - datasets=['test'], - writer='geotiff', compute=False) + datasets=["test"], + writer="geotiff", compute=False) compute_writer_results([res]) self.assertTrue(os.path.isfile(fname)) @@ -619,14 +619,14 @@ def test_geotiff(self): def test_multiple_geotiff(self): """Test writing to mitiff file.""" from satpy.writers import compute_writer_results - fname1 = os.path.join(self.base_dir, 'geotiff1.tif') + fname1 = os.path.join(self.base_dir, "geotiff1.tif") res1 = self.scn.save_datasets(filename=fname1, - datasets=['test'], - writer='geotiff', compute=False) - fname2 = os.path.join(self.base_dir, 'geotiff2.tif') + datasets=["test"], + writer="geotiff", compute=False) + fname2 = os.path.join(self.base_dir, "geotiff2.tif") res2 = self.scn.save_datasets(filename=fname2, - datasets=['test'], - writer='geotiff', compute=False) + datasets=["test"], + writer="geotiff", compute=False) compute_writer_results([res1, res2]) self.assertTrue(os.path.isfile(fname1)) self.assertTrue(os.path.isfile(fname2)) @@ -634,14 +634,14 @@ def test_multiple_geotiff(self): def test_multiple_simple(self): """Test writing to geotiff files.""" from satpy.writers import compute_writer_results - fname1 = os.path.join(self.base_dir, 'simple_image1.png') + fname1 = os.path.join(self.base_dir, "simple_image1.png") res1 = self.scn.save_datasets(filename=fname1, - datasets=['test'], - writer='simple_image', compute=False) - fname2 = os.path.join(self.base_dir, 'simple_image2.png') + datasets=["test"], + writer="simple_image", compute=False) + fname2 = os.path.join(self.base_dir, "simple_image2.png") res2 = self.scn.save_datasets(filename=fname2, - datasets=['test'], - writer='simple_image', compute=False) + datasets=["test"], + writer="simple_image", compute=False) compute_writer_results([res1, res2]) self.assertTrue(os.path.isfile(fname1)) self.assertTrue(os.path.isfile(fname2)) @@ -649,14 +649,14 @@ def test_multiple_simple(self): def 
test_mixed(self): """Test writing to multiple mixed-type files.""" from satpy.writers import compute_writer_results - fname1 = os.path.join(self.base_dir, 'simple_image3.png') + fname1 = os.path.join(self.base_dir, "simple_image3.png") res1 = self.scn.save_datasets(filename=fname1, - datasets=['test'], - writer='simple_image', compute=False) - fname2 = os.path.join(self.base_dir, 'geotiff3.tif') + datasets=["test"], + writer="simple_image", compute=False) + fname2 = os.path.join(self.base_dir, "geotiff3.tif") res2 = self.scn.save_datasets(filename=fname2, - datasets=['test'], - writer='geotiff', compute=False) + datasets=["test"], + writer="geotiff", compute=False) res3 = [] compute_writer_results([res1, res2, res3]) self.assertTrue(os.path.isfile(fname1)) @@ -675,18 +675,18 @@ def setup_method(self): ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'name': 'test', - 'start_time': datetime(2018, 1, 1, 0, 0, 0), - 'sensor': 'fake_sensor', + "name": "test", + "start_time": datetime(2018, 1, 1, 0, 0, 0), + "sensor": "fake_sensor", } ) ds2 = ds1.copy() - ds2.attrs['sensor'] = {'fake_sensor1', 'fake_sensor2'} + ds2.attrs["sensor"] = {"fake_sensor1", "fake_sensor2"} self.scn = Scene() - self.scn['test'] = ds1 - self.scn['test2'] = ds2 + self.scn["test"] = ds1 + self.scn["test2"] = ds2 # Temp dir self.base_dir = tempfile.mkdtemp() @@ -700,16 +700,16 @@ def teardown_method(self): def test_save_dataset_static_filename(self): """Test saving a dataset with a static filename specified.""" - self.scn.save_datasets(base_dir=self.base_dir, filename='geotiff.tif') - assert os.path.isfile(os.path.join(self.base_dir, 'geotiff.tif')) + self.scn.save_datasets(base_dir=self.base_dir, filename="geotiff.tif") + assert os.path.isfile(os.path.join(self.base_dir, "geotiff.tif")) @pytest.mark.parametrize( - ('fmt_fn', 'exp_fns'), + ("fmt_fn", "exp_fns"), [ - ('geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif', - ['geotiff_test_20180101_000000.tif', 'geotiff_test2_20180101_000000.tif']), - ('geotiff_{name}_{sensor}.tif', - ['geotiff_test_fake_sensor.tif', 'geotiff_test2_fake_sensor1-fake_sensor2.tif']), + ("geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif", + ["geotiff_test_20180101_000000.tif", "geotiff_test2_20180101_000000.tif"]), + ("geotiff_{name}_{sensor}.tif", + ["geotiff_test_fake_sensor.tif", "geotiff_test2_fake_sensor1-fake_sensor2.tif"]), ] ) def test_save_dataset_dynamic_filename(self, fmt_fn, exp_fns): @@ -721,14 +721,14 @@ def test_save_dataset_dynamic_filename(self, fmt_fn, exp_fns): def test_save_dataset_dynamic_filename_with_dir(self): """Test saving a dataset with a format filename that includes a directory.""" - fmt_fn = os.path.join('{start_time:%Y%m%d}', 'geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif') - exp_fn = os.path.join('20180101', 'geotiff_test_20180101_000000.tif') + fmt_fn = os.path.join("{start_time:%Y%m%d}", "geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif") + exp_fn = os.path.join("20180101", "geotiff_test_20180101_000000.tif") self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn) assert os.path.isfile(os.path.join(self.base_dir, exp_fn)) # change the filename pattern but keep the same directory - fmt_fn2 = os.path.join('{start_time:%Y%m%d}', 'geotiff_{name}_{start_time:%Y%m%d_%H}.tif') - exp_fn2 = os.path.join('20180101', 'geotiff_test_20180101_00.tif') + fmt_fn2 = os.path.join("{start_time:%Y%m%d}", "geotiff_{name}_{start_time:%Y%m%d_%H}.tif") + exp_fn2 = os.path.join("20180101", "geotiff_test_20180101_00.tif") 
self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn2) assert os.path.isfile(os.path.join(self.base_dir, exp_fn2)) # the original file should still exist @@ -743,53 +743,53 @@ def setUp(self): from pyresample.geometry import AreaDefinition from trollimage.xrimage import XRImage - proj_dict = {'proj': 'lcc', 'datum': 'WGS84', 'ellps': 'WGS84', - 'lon_0': -95., 'lat_0': 25, 'lat_1': 25, - 'units': 'm', 'no_defs': True} + proj_dict = {"proj": "lcc", "datum": "WGS84", "ellps": "WGS84", + "lon_0": -95., "lat_0": 25, "lat_1": 25, + "units": "m", "no_defs": True} self.area_def = AreaDefinition( - 'test', 'test', 'test', proj_dict, + "test", "test", "test", proj_dict, 200, 400, (-1000., -1500., 1000., 1500.), ) self.orig_rgb_img = XRImage( xr.DataArray(da.arange(75., chunks=10).reshape(3, 5, 5) / 75., - dims=('bands', 'y', 'x'), - coords={'bands': ['R', 'G', 'B']}, - attrs={'name': 'test_ds', 'area': self.area_def}) + dims=("bands", "y", "x"), + coords={"bands": ["R", "G", "B"]}, + attrs={"name": "test_ds", "area": self.area_def}) ) self.orig_l_img = XRImage( xr.DataArray(da.arange(25., chunks=10).reshape(5, 5) / 75., - dims=('y', 'x'), - attrs={'name': 'test_ds', 'area': self.area_def}) + dims=("y", "x"), + attrs={"name": "test_ds", "area": self.area_def}) ) self.decorate = { - 'decorate': [ - {'logo': {'logo_path': '', 'height': 143, 'bg': 'white', 'bg_opacity': 255}}, - {'text': { - 'txt': 'TEST', - 'align': {'top_bottom': 'bottom', 'left_right': 'right'}, - 'font': '', - 'font_size': 22, - 'height': 30, - 'bg': 'black', - 'bg_opacity': 255, - 'line': 'white'}}, - {'scale': { - 'colormap': greys, - 'extend': False, - 'width': 1670, 'height': 110, - 'tick_marks': 5, 'minor_tick_marks': 1, - 'cursor': [0, 0], 'bg': 'white', - 'title': 'TEST TITLE OF SCALE', - 'fontsize': 110, 'align': 'cc' + "decorate": [ + {"logo": {"logo_path": "", "height": 143, "bg": "white", "bg_opacity": 255}}, + {"text": { + "txt": "TEST", + "align": {"top_bottom": "bottom", "left_right": "right"}, + "font": "", + "font_size": 22, + "height": 30, + "bg": "black", + "bg_opacity": 255, + "line": "white"}}, + {"scale": { + "colormap": greys, + "extend": False, + "width": 1670, "height": 110, + "tick_marks": 5, "minor_tick_marks": 1, + "cursor": [0, 0], "bg": "white", + "title": "TEST TITLE OF SCALE", + "fontsize": 110, "align": "cc" }} ] } import_mock = mock.MagicMock() - modules = {'pycoast': import_mock.pycoast, - 'pydecorate': import_mock.pydecorate} - self.module_patcher = mock.patch.dict('sys.modules', modules) + modules = {"pycoast": import_mock.pycoast, + "pydecorate": import_mock.pydecorate} + self.module_patcher = mock.patch.dict("sys.modules", modules) self.module_patcher.start() def tearDown(self): @@ -801,21 +801,21 @@ def test_add_overlay_basic_rgb(self): from pycoast import ContourWriterAGG from satpy.writers import _burn_overlay, add_overlay - coast_dir = '/path/to/coast/data' + coast_dir = "/path/to/coast/data" with mock.patch.object(self.orig_rgb_img, "apply_pil") as apply_pil: apply_pil.return_value = self.orig_rgb_img new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir, fill_value=0) self.assertEqual(self.orig_rgb_img.mode, new_img.mode) new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir) - self.assertEqual(self.orig_rgb_img.mode + 'A', new_img.mode) + self.assertEqual(self.orig_rgb_img.mode + "A", new_img.mode) with mock.patch.object(self.orig_rgb_img, "convert") as convert: convert.return_value = self.orig_rgb_img - overlays = {'coasts': {'outline': 'red'}} + 
overlays = {"coasts": {"outline": "red"}} new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir, overlays=overlays, fill_value=0) pil_args = None - pil_kwargs = {'fill_value': 0} + pil_kwargs = {"fill_value": 0} fun_args = (self.orig_rgb_img.data.area, ContourWriterAGG.return_value, overlays) fun_kwargs = None apply_pil.assert_called_with(_burn_overlay, self.orig_rgb_img.mode, @@ -824,11 +824,11 @@ def test_add_overlay_basic_rgb(self): # test legacy call - grid = {'minor_is_tick': True} - color = 'red' - expected_overlays = {'coasts': {'outline': color, 'width': 0.5, 'level': 1}, - 'borders': {'outline': color, 'width': 0.5, 'level': 1}, - 'grid': grid} + grid = {"minor_is_tick": True} + color = "red" + expected_overlays = {"coasts": {"outline": color, "width": 0.5, "level": 1}, + "borders": {"outline": color, "width": 0.5, "level": 1}, + "grid": grid} with warnings.catch_warnings(record=True) as wns: warnings.simplefilter("always") new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir, @@ -838,7 +838,7 @@ def test_add_overlay_basic_rgb(self): assert "deprecated" in str(wns[0].message) pil_args = None - pil_kwargs = {'fill_value': 0} + pil_kwargs = {"fill_value": 0} fun_args = (self.orig_rgb_img.data.area, ContourWriterAGG.return_value, expected_overlays) fun_kwargs = None apply_pil.assert_called_with(_burn_overlay, self.orig_rgb_img.mode, @@ -848,22 +848,22 @@ def test_add_overlay_basic_rgb(self): def test_add_overlay_basic_l(self): """Test basic add_overlay usage with L data.""" from satpy.writers import add_overlay - new_img = add_overlay(self.orig_l_img, self.area_def, '', fill_value=0) - self.assertEqual('RGB', new_img.mode) - new_img = add_overlay(self.orig_l_img, self.area_def, '') - self.assertEqual('RGBA', new_img.mode) + new_img = add_overlay(self.orig_l_img, self.area_def, "", fill_value=0) + self.assertEqual("RGB", new_img.mode) + new_img = add_overlay(self.orig_l_img, self.area_def, "") + self.assertEqual("RGBA", new_img.mode) def test_add_decorate_basic_rgb(self): """Test basic add_decorate usage with RGB data.""" from satpy.writers import add_decorate new_img = add_decorate(self.orig_rgb_img, **self.decorate) - self.assertEqual('RGBA', new_img.mode) + self.assertEqual("RGBA", new_img.mode) def test_add_decorate_basic_l(self): """Test basic add_decorate usage with L data.""" from satpy.writers import add_decorate new_img = add_decorate(self.orig_l_img, **self.decorate) - self.assertEqual('RGBA', new_img.mode) + self.assertEqual("RGBA", new_img.mode) def test_group_results_by_output_file(tmp_path): diff --git a/satpy/tests/test_yaml_reader.py b/satpy/tests/test_yaml_reader.py index 0d2e057f32..b829f46d23 100644 --- a/satpy/tests/test_yaml_reader.py +++ b/satpy/tests/test_yaml_reader.py @@ -37,47 +37,47 @@ from satpy.tests.utils import make_dataid MHS_YAML_READER_DICT = { - 'reader': {'name': 'mhs_l1c_aapp', - 'description': 'AAPP l1c Reader for AMSU-B/MHS data', - 'sensors': ['mhs'], - 'default_channels': [1, 2, 3, 4, 5], - 'data_identification_keys': {'name': {'required': True}, - 'frequency_double_sideband': - {'type': FrequencyDoubleSideBand}, - 'frequency_range': {'type': FrequencyRange}, - 'resolution': None, - 'polarization': {'enum': ['H', 'V']}, - 'calibration': {'enum': ['brightness_temperature'], 'transitive': True}, - 'modifiers': {'required': True, - 'default': [], - 'type': ModifierTuple}}, - 'config_files': ('satpy/etc/readers/mhs_l1c_aapp.yaml',)}, - 'datasets': {'1': {'name': '1', - 'frequency_range': {'central': 89.0, 'bandwidth': 2.8, 
'unit': 'GHz'}, - 'polarization': 'V', - 'resolution': 16000, - 'calibration': {'brightness_temperature': {'standard_name': 'toa_brightness_temperature'}}, - 'coordinates': ['longitude', 'latitude'], - 'file_type': 'mhs_aapp_l1c'}, - '2': {'name': '2', - 'frequency_range': {'central': 157.0, 'bandwidth': 2.8, 'unit': 'GHz'}, - 'polarization': 'V', - 'resolution': 16000, - 'calibration': {'brightness_temperature': {'standard_name': 'toa_brightness_temperature'}}, - 'coordinates': ['longitude', 'latitude'], - 'file_type': 'mhs_aapp_l1c'}, - '3': {'name': '3', - 'frequency_double_sideband': {'unit': 'GHz', - 'central': 183.31, - 'side': 1.0, - 'bandwidth': 1.0}, - 'polarization': 'V', - 'resolution': 16000, - 'calibration': {'brightness_temperature': {'standard_name': 'toa_brightness_temperature'}}, - 'coordinates': ['longitude', 'latitude'], - 'file_type': 'mhs_aapp_l1c'}}, - 'file_types': {'mhs_aapp_l1c': {'file_reader': BaseFileHandler, - 'file_patterns': [ + "reader": {"name": "mhs_l1c_aapp", + "description": "AAPP l1c Reader for AMSU-B/MHS data", + "sensors": ["mhs"], + "default_channels": [1, 2, 3, 4, 5], + "data_identification_keys": {"name": {"required": True}, + "frequency_double_sideband": + {"type": FrequencyDoubleSideBand}, + "frequency_range": {"type": FrequencyRange}, + "resolution": None, + "polarization": {"enum": ["H", "V"]}, + "calibration": {"enum": ["brightness_temperature"], "transitive": True}, + "modifiers": {"required": True, + "default": [], + "type": ModifierTuple}}, + "config_files": ("satpy/etc/readers/mhs_l1c_aapp.yaml",)}, + "datasets": {"1": {"name": "1", + "frequency_range": {"central": 89.0, "bandwidth": 2.8, "unit": "GHz"}, + "polarization": "V", + "resolution": 16000, + "calibration": {"brightness_temperature": {"standard_name": "toa_brightness_temperature"}}, + "coordinates": ["longitude", "latitude"], + "file_type": "mhs_aapp_l1c"}, + "2": {"name": "2", + "frequency_range": {"central": 157.0, "bandwidth": 2.8, "unit": "GHz"}, + "polarization": "V", + "resolution": 16000, + "calibration": {"brightness_temperature": {"standard_name": "toa_brightness_temperature"}}, + "coordinates": ["longitude", "latitude"], + "file_type": "mhs_aapp_l1c"}, + "3": {"name": "3", + "frequency_double_sideband": {"unit": "GHz", + "central": 183.31, + "side": 1.0, + "bandwidth": 1.0}, + "polarization": "V", + "resolution": 16000, + "calibration": {"brightness_temperature": {"standard_name": "toa_brightness_temperature"}}, + "coordinates": ["longitude", "latitude"], + "file_type": "mhs_aapp_l1c"}}, + "file_types": {"mhs_aapp_l1c": {"file_reader": BaseFileHandler, + "file_patterns": [ 'mhsl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c']}}} # noqa @@ -91,7 +91,7 @@ def __init__(self, start_time, end_time): self._end_time = end_time self.get_bounding_box = MagicMock() fake_ds = MagicMock() - fake_ds.return_value.dims = ['x', 'y'] + fake_ds.return_value.dims = ["x", "y"] self.get_dataset = fake_ds self.combine_info = MagicMock() @@ -111,38 +111,38 @@ class TestUtils(unittest.TestCase): def test_get_filebase(self): """Check the get_filebase function.""" - base_dir = os.path.join(os.path.expanduser('~'), 'data', - 'satellite', 'Sentinel-3') - base_data = ('S3A_OL_1_EFR____20161020T081224_20161020T081524_' - '20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3') + base_dir = os.path.join(os.path.expanduser("~"), "data", + "satellite", "Sentinel-3") + base_data = ("S3A_OL_1_EFR____20161020T081224_20161020T081524_" + 
"20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3") base_dir = os.path.join(base_dir, base_data) - pattern = ('{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s' - '}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre' - 'ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati' - 've_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:' - '2s}_{collection:3s}.SEN3/geo_coordinates.nc') - pattern = os.path.join(*pattern.split('/')) - filename = os.path.join(base_dir, 'Oa05_radiance.nc') - expected = os.path.join(base_data, 'Oa05_radiance.nc') + pattern = ("{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s" + "}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre" + "ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati" + "ve_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:" + "2s}_{collection:3s}.SEN3/geo_coordinates.nc") + pattern = os.path.join(*pattern.split("/")) + filename = os.path.join(base_dir, "Oa05_radiance.nc") + expected = os.path.join(base_data, "Oa05_radiance.nc") self.assertEqual(yr._get_filebase(filename, pattern), expected) def test_match_filenames(self): """Check that matching filenames works.""" # just a fake path for testing that doesn't have to exist - base_dir = os.path.join(os.path.expanduser('~'), 'data', - 'satellite', 'Sentinel-3') - base_data = ('S3A_OL_1_EFR____20161020T081224_20161020T081524_' - '20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3') + base_dir = os.path.join(os.path.expanduser("~"), "data", + "satellite", "Sentinel-3") + base_data = ("S3A_OL_1_EFR____20161020T081224_20161020T081524_" + "20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3") base_dir = os.path.join(base_dir, base_data) - pattern = ('{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s' - '}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre' - 'ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati' - 've_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:' - '2s}_{collection:3s}.SEN3/geo_coordinates.nc') - pattern = os.path.join(*pattern.split('/')) - filenames = [os.path.join(base_dir, 'Oa05_radiance.nc'), - os.path.join(base_dir, 'geo_coordinates.nc')] - expected = os.path.join(base_dir, 'geo_coordinates.nc') + pattern = ("{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s" + "}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre" + "ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati" + "ve_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:" + "2s}_{collection:3s}.SEN3/geo_coordinates.nc") + pattern = os.path.join(*pattern.split("/")) + filenames = [os.path.join(base_dir, "Oa05_radiance.nc"), + os.path.join(base_dir, "geo_coordinates.nc")] + expected = os.path.join(base_dir, "geo_coordinates.nc") self.assertEqual(yr._match_filenames(filenames, pattern), {expected}) def test_match_filenames_windows_forward_slash(self): @@ -152,28 +152,28 @@ def test_match_filenames_windows_forward_slash(self): """ # just a fake path for testing that doesn't have to exist - base_dir = os.path.join(os.path.expanduser('~'), 'data', - 'satellite', 'Sentinel-3') - base_data = ('S3A_OL_1_EFR____20161020T081224_20161020T081524_' - '20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3') + base_dir = os.path.join(os.path.expanduser("~"), "data", + "satellite", "Sentinel-3") + base_data = ("S3A_OL_1_EFR____20161020T081224_20161020T081524_" + "20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3") base_dir = os.path.join(base_dir, base_data) - pattern = ('{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s' 
- '}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre' - 'ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati' - 've_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:' - '2s}_{collection:3s}.SEN3/geo_coordinates.nc') - pattern = os.path.join(*pattern.split('/')) - filenames = [os.path.join(base_dir, 'Oa05_radiance.nc').replace(os.sep, '/'), - os.path.join(base_dir, 'geo_coordinates.nc').replace(os.sep, '/')] - expected = os.path.join(base_dir, 'geo_coordinates.nc').replace(os.sep, '/') + pattern = ("{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s" + "}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre" + "ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati" + "ve_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:" + "2s}_{collection:3s}.SEN3/geo_coordinates.nc") + pattern = os.path.join(*pattern.split("/")) + filenames = [os.path.join(base_dir, "Oa05_radiance.nc").replace(os.sep, "/"), + os.path.join(base_dir, "geo_coordinates.nc").replace(os.sep, "/")] + expected = os.path.join(base_dir, "geo_coordinates.nc").replace(os.sep, "/") self.assertEqual(yr._match_filenames(filenames, pattern), {expected}) def test_listify_string(self): """Check listify_string.""" self.assertEqual(yr.listify_string(None), []) - self.assertEqual(yr.listify_string('some string'), ['some string']) - self.assertEqual(yr.listify_string(['some', 'string']), - ['some', 'string']) + self.assertEqual(yr.listify_string("some string"), ["some string"]) + self.assertEqual(yr.listify_string(["some", "string"]), + ["some", "string"]) class DummyReader(BaseFileHandler): @@ -203,47 +203,47 @@ class TestFileFileYAMLReaderMultiplePatterns(unittest.TestCase): def setUp(self): """Prepare a reader instance with a fake config.""" - patterns = ['a{something:3s}.bla', - 'a0{something:2s}.bla'] - res_dict = {'reader': {'name': 'fake', - 'sensors': ['canon']}, - 'file_types': {'ftype1': {'name': 'ft1', - 'file_patterns': patterns, - 'file_reader': DummyReader}}, - 'datasets': {'ch1': {'name': 'ch01', - 'wavelength': [0.5, 0.6, 0.7], - 'calibration': 'reflectance', - 'file_type': 'ftype1', - 'coordinates': ['lons', 'lats']}, - 'ch2': {'name': 'ch02', - 'wavelength': [0.7, 0.75, 0.8], - 'calibration': 'counts', - 'file_type': 'ftype1', - 'coordinates': ['lons', 'lats']}, - 'lons': {'name': 'lons', - 'file_type': 'ftype2'}, - 'lats': {'name': 'lats', - 'file_type': 'ftype2'}}} + patterns = ["a{something:3s}.bla", + "a0{something:2s}.bla"] + res_dict = {"reader": {"name": "fake", + "sensors": ["canon"]}, + "file_types": {"ftype1": {"name": "ft1", + "file_patterns": patterns, + "file_reader": DummyReader}}, + "datasets": {"ch1": {"name": "ch01", + "wavelength": [0.5, 0.6, 0.7], + "calibration": "reflectance", + "file_type": "ftype1", + "coordinates": ["lons", "lats"]}, + "ch2": {"name": "ch02", + "wavelength": [0.7, 0.75, 0.8], + "calibration": "counts", + "file_type": "ftype1", + "coordinates": ["lons", "lats"]}, + "lons": {"name": "lons", + "file_type": "ftype2"}, + "lats": {"name": "lats", + "file_type": "ftype2"}}} self.config = res_dict self.reader = yr.FileYAMLReader(self.config, filter_parameters={ - 'start_time': datetime(2000, 1, 1), - 'end_time': datetime(2000, 1, 2)}) + "start_time": datetime(2000, 1, 1), + "end_time": datetime(2000, 1, 2)}) def test_select_from_pathnames(self): """Check select_files_from_pathnames.""" - filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] + filelist = ["a001.bla", "a002.bla", "abcd.bla", "k001.bla", "a003.bli"] res = 
self.reader.select_files_from_pathnames(filelist) - for expected in ['a001.bla', 'a002.bla', 'abcd.bla']: + for expected in ["a001.bla", "a002.bla", "abcd.bla"]: self.assertIn(expected, res) self.assertEqual(len(res), 3) def test_fn_items_for_ft(self): """Check filename_items_for_filetype.""" - filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] - ft_info = self.config['file_types']['ftype1'] + filelist = ["a001.bla", "a002.bla", "abcd.bla", "k001.bla", "a003.bli"] + ft_info = self.config["file_types"]["ftype1"] fiter = self.reader.filename_items_for_filetype(filelist, ft_info) filenames = dict(fname for fname in fiter) @@ -251,11 +251,11 @@ def test_fn_items_for_ft(self): def test_create_filehandlers(self): """Check create_filehandlers.""" - filelist = ['a001.bla', 'a002.bla', 'a001.bla', 'a002.bla', - 'abcd.bla', 'k001.bla', 'a003.bli'] + filelist = ["a001.bla", "a002.bla", "a001.bla", "a002.bla", + "abcd.bla", "k001.bla", "a003.bli"] self.reader.create_filehandlers(filelist) - self.assertEqual(len(self.reader.file_handlers['ftype1']), 3) + self.assertEqual(len(self.reader.file_handlers["ftype1"]), 3) def test_serializable(self): """Check that a reader is serializable by dask. @@ -264,8 +264,8 @@ def test_serializable(self): readers. """ from distributed.protocol import deserialize, serialize - filelist = ['a001.bla', 'a002.bla', 'a001.bla', 'a002.bla', - 'abcd.bla', 'k001.bla', 'a003.bli'] + filelist = ["a001.bla", "a002.bla", "a001.bla", "a002.bla", + "abcd.bla", "k001.bla", "a003.bli"] self.reader.create_filehandlers(filelist) cloned_reader = deserialize(*serialize(self.reader)) @@ -281,8 +281,8 @@ def setUp(self): self.config = MHS_YAML_READER_DICT self.reader = yr.FileYAMLReader(MHS_YAML_READER_DICT, filter_parameters={ - 'start_time': datetime(2000, 1, 1), - 'end_time': datetime(2000, 1, 2), + "start_time": datetime(2000, 1, 1), + "end_time": datetime(2000, 1, 2), }) def test_custom_type_with_dict_contents_gets_parsed_correctly(self): @@ -298,70 +298,70 @@ class TestFileFileYAMLReader(unittest.TestCase): def setUp(self): """Prepare a reader instance with a fake config.""" - patterns = ['a{something:3s}.bla'] - res_dict = {'reader': {'name': 'fake', - 'sensors': ['canon']}, - 'file_types': {'ftype1': {'name': 'ft1', - 'file_reader': BaseFileHandler, - 'file_patterns': patterns}}, - 'datasets': {'ch1': {'name': 'ch01', - 'wavelength': [0.5, 0.6, 0.7], - 'calibration': 'reflectance', - 'file_type': 'ftype1', - 'coordinates': ['lons', 'lats']}, - 'ch2': {'name': 'ch02', - 'wavelength': [0.7, 0.75, 0.8], - 'calibration': 'counts', - 'file_type': 'ftype1', - 'coordinates': ['lons', 'lats']}, - 'lons': {'name': 'lons', - 'file_type': 'ftype2'}, - 'lats': {'name': 'lats', - 'file_type': 'ftype2'}}} + patterns = ["a{something:3s}.bla"] + res_dict = {"reader": {"name": "fake", + "sensors": ["canon"]}, + "file_types": {"ftype1": {"name": "ft1", + "file_reader": BaseFileHandler, + "file_patterns": patterns}}, + "datasets": {"ch1": {"name": "ch01", + "wavelength": [0.5, 0.6, 0.7], + "calibration": "reflectance", + "file_type": "ftype1", + "coordinates": ["lons", "lats"]}, + "ch2": {"name": "ch02", + "wavelength": [0.7, 0.75, 0.8], + "calibration": "counts", + "file_type": "ftype1", + "coordinates": ["lons", "lats"]}, + "lons": {"name": "lons", + "file_type": "ftype2"}, + "lats": {"name": "lats", + "file_type": "ftype2"}}} self.config = res_dict self.reader = yr.FileYAMLReader(res_dict, filter_parameters={ - 'start_time': datetime(2000, 1, 1), - 'end_time': datetime(2000, 1, 
2),
+                                            "start_time": datetime(2000, 1, 1),
+                                            "end_time": datetime(2000, 1, 2),
                                         })

     def test_deprecated_passing_config_files(self):
         """Test that we get an exception when config files are passed to init."""
-        self.assertRaises(ValueError, yr.FileYAMLReader, '/path/to/some/file.yaml')
+        self.assertRaises(ValueError, yr.FileYAMLReader, "/path/to/some/file.yaml")

     def test_all_data_ids(self):
         """Check that all datasets ids are returned."""
         for dataid in self.reader.all_dataset_ids:
-            name = dataid['name'].replace('0', '')
-            assert self.config['datasets'][name]['name'] == dataid['name']
-            if 'wavelength' in self.config['datasets'][name]:
-                assert self.config['datasets'][name]['wavelength'] == list(dataid['wavelength'])[:3]
-            if 'calibration' in self.config['datasets'][name]:
-                assert self.config['datasets'][name]['calibration'] == dataid['calibration']
+            name = dataid["name"].replace("0", "")
+            assert self.config["datasets"][name]["name"] == dataid["name"]
+            if "wavelength" in self.config["datasets"][name]:
+                assert self.config["datasets"][name]["wavelength"] == list(dataid["wavelength"])[:3]
+            if "calibration" in self.config["datasets"][name]:
+                assert self.config["datasets"][name]["calibration"] == dataid["calibration"]

     def test_all_dataset_names(self):
         """Get all dataset names."""
         self.assertSetEqual(self.reader.all_dataset_names,
-                            set(['ch01', 'ch02', 'lons', 'lats']))
+                            set(["ch01", "ch02", "lons", "lats"]))

     def test_available_dataset_ids(self):
         """Get ids of the available datasets."""
-        loadables = self.reader.select_files_from_pathnames(['a001.bla'])
+        loadables = self.reader.select_files_from_pathnames(["a001.bla"])
         self.reader.create_filehandlers(loadables)
         self.assertSetEqual(set(self.reader.available_dataset_ids),
-                            {make_dataid(name='ch02',
+                            {make_dataid(name="ch02",
                                          wavelength=(0.7, 0.75, 0.8),
-                                         calibration='counts',
+                                         calibration="counts",
                                          modifiers=()),
-                             make_dataid(name='ch01',
+                             make_dataid(name="ch01",
                                          wavelength=(0.5, 0.6, 0.7),
-                                         calibration='reflectance',
+                                         calibration="reflectance",
                                          modifiers=())})

     def test_available_dataset_names(self):
         """Get ids of the available datasets."""
-        loadables = self.reader.select_files_from_pathnames(['a001.bla'])
+        loadables = self.reader.select_files_from_pathnames(["a001.bla"])
         self.reader.create_filehandlers(loadables)
         self.assertSetEqual(set(self.reader.available_dataset_names),
                             set(["ch01", "ch02"]))
@@ -389,15 +389,15 @@ def test_filter_fh_by_time(self):
             res = self.reader.time_matches(fh.start_time, None)
             self.assertEqual(res, idx not in [0, 1, 4, 5])

-    @patch('satpy.readers.yaml_reader.get_area_def')
-    @patch('satpy.readers.yaml_reader.AreaDefBoundary')
-    @patch('satpy.readers.yaml_reader.Boundary')
+    @patch("satpy.readers.yaml_reader.get_area_def")
+    @patch("satpy.readers.yaml_reader.AreaDefBoundary")
+    @patch("satpy.readers.yaml_reader.Boundary")
     def test_file_covers_area(self, bnd, adb, gad):
         """Test that area coverage is checked properly."""
         file_handler = FakeFH(datetime(1999, 12, 31, 10, 0),
                               datetime(2000, 1, 3, 12, 30))

-        self.reader.filter_parameters['area'] = True
+        self.reader.filter_parameters["area"] = True
         bnd.return_value.contour_poly.intersection.return_value = True
         adb.return_value.contour_poly.intersection.return_value = True
         res = self.reader.check_file_covers_area(file_handler, True)
@@ -409,7 +409,7 @@ def test_file_covers_area(self, bnd, adb, gad):
         self.assertFalse(res)

         file_handler.get_bounding_box.side_effect = NotImplementedError()
-        self.reader.filter_parameters['area'] = True
+        self.reader.filter_parameters["area"] = True
         res = self.reader.check_file_covers_area(file_handler, True)
         self.assertTrue(res)
@@ -441,9 +441,9 @@ def get_end_time():
                       datetime(2000, 1, 3, 12, 30))
         self.reader.file_handlers = {
-            '0': [fh1, fh2, fh3, fh4, fh5],
-            '1': [fh0, fh1, fh2, fh3, fh4, fh5],
-            '2': [fh2, fh3],
+            "0": [fh1, fh2, fh3, fh4, fh5],
+            "1": [fh0, fh1, fh2, fh3, fh4, fh5],
+            "2": [fh2, fh3],
         }

         self.assertEqual(self.reader.start_time, datetime(1999, 12, 30, 0, 0))
@@ -451,24 +451,24 @@ def get_end_time():

     def test_select_from_pathnames(self):
         """Check select_files_from_pathnames."""
-        filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli']
+        filelist = ["a001.bla", "a002.bla", "abcd.bla", "k001.bla", "a003.bli"]

         res = self.reader.select_files_from_pathnames(filelist)
-        for expected in ['a001.bla', 'a002.bla', 'abcd.bla']:
+        for expected in ["a001.bla", "a002.bla", "abcd.bla"]:
             self.assertIn(expected, res)

         self.assertEqual(0, len(self.reader.select_files_from_pathnames([])))

     def test_select_from_directory(self):
         """Check select_files_from_directory."""
-        filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli']
+        filelist = ["a001.bla", "a002.bla", "abcd.bla", "k001.bla", "a003.bli"]
         dpath = mkdtemp()
         for fname in filelist:
-            with open(os.path.join(dpath, fname), 'w'):
+            with open(os.path.join(dpath, fname), "w"):
                 pass

         res = self.reader.select_files_from_directory(dpath)
-        for expected in ['a001.bla', 'a002.bla', 'abcd.bla']:
+        for expected in ["a001.bla", "a002.bla", "abcd.bla"]:
             self.assertIn(os.path.join(dpath, expected), res)

         for fname in filelist:
@@ -490,10 +490,10 @@ def glob(self, pattern):

     def test_supports_sensor(self):
         """Check supports_sensor."""
-        self.assertTrue(self.reader.supports_sensor('canon'))
-        self.assertFalse(self.reader.supports_sensor('nikon'))
+        self.assertTrue(self.reader.supports_sensor("canon"))
+        self.assertFalse(self.reader.supports_sensor("nikon"))

-    @patch('satpy.readers.yaml_reader.StackedAreaDefinition')
+    @patch("satpy.readers.yaml_reader.StackedAreaDefinition")
     def test_load_area_def(self, sad):
         """Test loading the area def for the reader."""
         dataid = MagicMock()
@@ -509,35 +509,35 @@ def test_load_area_def(self, sad):

     def test_preferred_filetype(self):
         """Test finding the preferred filetype."""
-        self.reader.file_handlers = {'a': 'a', 'b': 'b', 'c': 'c'}
-        self.assertEqual(self.reader._preferred_filetype(['c', 'a']), 'c')
-        self.assertEqual(self.reader._preferred_filetype(['a', 'c']), 'a')
-        self.assertEqual(self.reader._preferred_filetype(['d', 'e']), None)
+        self.reader.file_handlers = {"a": "a", "b": "b", "c": "c"}
+        self.assertEqual(self.reader._preferred_filetype(["c", "a"]), "c")
+        self.assertEqual(self.reader._preferred_filetype(["a", "c"]), "a")
+        self.assertEqual(self.reader._preferred_filetype(["d", "e"]), None)

     def test_get_coordinates_for_dataset_key(self):
         """Test getting coordinates for a key."""
-        ds_q = DataQuery(name='ch01', wavelength=(0.5, 0.6, 0.7, 'µm'),
-                         calibration='reflectance', modifiers=())
+        ds_q = DataQuery(name="ch01", wavelength=(0.5, 0.6, 0.7, "µm"),
+                         calibration="reflectance", modifiers=())
         res = self.reader._get_coordinates_for_dataset_key(ds_q)
         self.assertListEqual(res,
-                             [make_dataid(name='lons'),
-                              make_dataid(name='lats')])
+                             [make_dataid(name="lons"),
+                              make_dataid(name="lats")])

     def test_get_coordinates_for_dataset_key_without(self):
         """Test getting coordinates for a key without coordinates."""
-        ds_id = make_dataid(name='lons',
+        ds_id = make_dataid(name="lons",
modifiers=()) res = self.reader._get_coordinates_for_dataset_key(ds_id) self.assertListEqual(res, []) def test_get_coordinates_for_dataset_keys(self): """Test getting coordinates for keys.""" - ds_id1 = make_dataid(name='ch01', wavelength=(0.5, 0.6, 0.7), - calibration='reflectance', modifiers=()) - ds_id2 = make_dataid(name='ch02', wavelength=(0.7, 0.75, 0.8), - calibration='counts', modifiers=()) - lons = make_dataid(name='lons', modifiers=()) - lats = make_dataid(name='lats', modifiers=()) + ds_id1 = make_dataid(name="ch01", wavelength=(0.5, 0.6, 0.7), + calibration="reflectance", modifiers=()) + ds_id2 = make_dataid(name="ch02", wavelength=(0.7, 0.75, 0.8), + calibration="counts", modifiers=()) + lons = make_dataid(name="lons", modifiers=()) + lats = make_dataid(name="lats", modifiers=()) res = self.reader._get_coordinates_for_dataset_keys([ds_id1, ds_id2, lons]) @@ -547,16 +547,16 @@ def test_get_coordinates_for_dataset_keys(self): def test_get_file_handlers(self): """Test getting filehandler to load a dataset.""" - ds_id1 = make_dataid(name='ch01', wavelength=(0.5, 0.6, 0.7), - calibration='reflectance', modifiers=()) - self.reader.file_handlers = {'ftype1': 'bla'} + ds_id1 = make_dataid(name="ch01", wavelength=(0.5, 0.6, 0.7), + calibration="reflectance", modifiers=()) + self.reader.file_handlers = {"ftype1": "bla"} - self.assertEqual(self.reader._get_file_handlers(ds_id1), 'bla') + self.assertEqual(self.reader._get_file_handlers(ds_id1), "bla") - lons = make_dataid(name='lons', modifiers=()) + lons = make_dataid(name="lons", modifiers=()) self.assertEqual(self.reader._get_file_handlers(lons), None) - @patch('satpy.readers.yaml_reader.xr') + @patch("satpy.readers.yaml_reader.xr") def test_load_entire_dataset(self, xarray): """Check loading an entire dataset.""" file_handlers = [FakeFH(None, None), FakeFH(None, None), @@ -572,69 +572,69 @@ class TestFileYAMLReaderLoading(unittest.TestCase): def setUp(self): """Prepare a reader instance with a fake config.""" - patterns = ['a{something:3s}.bla'] - res_dict = {'reader': {'name': 'fake', - 'sensors': ['canon']}, - 'file_types': {'ftype1': {'name': 'ft1', - 'file_reader': BaseFileHandler, - 'file_patterns': patterns}}, - 'datasets': {'ch1': {'name': 'ch01', - 'wavelength': [0.5, 0.6, 0.7], - 'calibration': 'reflectance', - 'file_type': 'ftype1'}, + patterns = ["a{something:3s}.bla"] + res_dict = {"reader": {"name": "fake", + "sensors": ["canon"]}, + "file_types": {"ftype1": {"name": "ft1", + "file_reader": BaseFileHandler, + "file_patterns": patterns}}, + "datasets": {"ch1": {"name": "ch01", + "wavelength": [0.5, 0.6, 0.7], + "calibration": "reflectance", + "file_type": "ftype1"}, }} self.config = res_dict self.reader = yr.FileYAMLReader(res_dict, filter_parameters={ - 'start_time': datetime(2000, 1, 1), - 'end_time': datetime(2000, 1, 2), + "start_time": datetime(2000, 1, 1), + "end_time": datetime(2000, 1, 2), }) fake_fh = FakeFH(None, None) self.lons = xr.DataArray(np.ones((2, 2)) * 2, - dims=['y', 'x'], - attrs={'standard_name': 'longitude', - 'name': 'longitude'}) + dims=["y", "x"], + attrs={"standard_name": "longitude", + "name": "longitude"}) self.lats = xr.DataArray(np.ones((2, 2)) * 2, - dims=['y', 'x'], - attrs={'standard_name': 'latitude', - 'name': 'latitude'}) + dims=["y", "x"], + attrs={"standard_name": "latitude", + "name": "latitude"}) self.data = None def _assign_array(dsid, *_args, **_kwargs): - if dsid['name'] == 'longitude': + if dsid["name"] == "longitude": return self.lons - if dsid['name'] == 'latitude': + if 
dsid["name"] == "latitude": return self.lats return self.data fake_fh.get_dataset.side_effect = _assign_array - self.reader.file_handlers = {'ftype1': [fake_fh]} + self.reader.file_handlers = {"ftype1": [fake_fh]} def test_load_dataset_with_builtin_coords(self): """Test loading a dataset with builtin coordinates.""" self.data = xr.DataArray(np.ones((2, 2)), - coords={'longitude': self.lons, - 'latitude': self.lats}, - dims=['y', 'x']) + coords={"longitude": self.lons, + "latitude": self.lats}, + dims=["y", "x"]) self._check_area_for_ch01() def test_load_dataset_with_builtin_coords_in_wrong_order(self): """Test loading a dataset with builtin coordinates in the wrong order.""" self.data = xr.DataArray(np.ones((2, 2)), - coords={'latitude': self.lats, - 'longitude': self.lons}, - dims=['y', 'x']) + coords={"latitude": self.lats, + "longitude": self.lons}, + dims=["y", "x"]) self._check_area_for_ch01() def _check_area_for_ch01(self): - res = self.reader.load(['ch01']) - assert 'area' in res['ch01'].attrs - np.testing.assert_array_equal(res['ch01'].attrs['area'].lons, self.lons) - np.testing.assert_array_equal(res['ch01'].attrs['area'].lats, self.lats) - assert res['ch01'].attrs.get("reader") == "fake" + res = self.reader.load(["ch01"]) + assert "area" in res["ch01"].attrs + np.testing.assert_array_equal(res["ch01"].attrs["area"].lons, self.lons) + np.testing.assert_array_equal(res["ch01"].attrs["area"].lats, self.lats) + assert res["ch01"].attrs.get("reader") == "fake" class TestFileFileYAMLReaderMultipleFileTypes(unittest.TestCase): @@ -648,36 +648,36 @@ def setUp(self): # # For test completeness add one channel (ch3) which is only available # in ftype1. - patterns1 = ['a.nc'] - patterns2 = ['b.nc'] - patterns3 = ['geo.nc'] - res_dict = {'reader': {'name': 'fake', - 'sensors': ['canon']}, - 'file_types': {'ftype1': {'name': 'ft1', - 'file_patterns': patterns1}, - 'ftype2': {'name': 'ft2', - 'file_patterns': patterns2}, - 'ftype3': {'name': 'ft3', - 'file_patterns': patterns3}}, - 'datasets': {'ch1': {'name': 'ch01', - 'wavelength': [0.5, 0.6, 0.7], - 'calibration': 'reflectance', - 'file_type': ['ftype1', 'ftype2'], - 'coordinates': ['lons', 'lats']}, - 'ch2': {'name': 'ch02', - 'wavelength': [0.7, 0.75, 0.8], - 'calibration': 'counts', - 'file_type': ['ftype1', 'ftype2'], - 'coordinates': ['lons', 'lats']}, - 'ch3': {'name': 'ch03', - 'wavelength': [0.8, 0.85, 0.9], - 'calibration': 'counts', - 'file_type': 'ftype1', - 'coordinates': ['lons', 'lats']}, - 'lons': {'name': 'lons', - 'file_type': ['ftype1', 'ftype3']}, - 'lats': {'name': 'lats', - 'file_type': ['ftype1', 'ftype3']}}} + patterns1 = ["a.nc"] + patterns2 = ["b.nc"] + patterns3 = ["geo.nc"] + res_dict = {"reader": {"name": "fake", + "sensors": ["canon"]}, + "file_types": {"ftype1": {"name": "ft1", + "file_patterns": patterns1}, + "ftype2": {"name": "ft2", + "file_patterns": patterns2}, + "ftype3": {"name": "ft3", + "file_patterns": patterns3}}, + "datasets": {"ch1": {"name": "ch01", + "wavelength": [0.5, 0.6, 0.7], + "calibration": "reflectance", + "file_type": ["ftype1", "ftype2"], + "coordinates": ["lons", "lats"]}, + "ch2": {"name": "ch02", + "wavelength": [0.7, 0.75, 0.8], + "calibration": "counts", + "file_type": ["ftype1", "ftype2"], + "coordinates": ["lons", "lats"]}, + "ch3": {"name": "ch03", + "wavelength": [0.8, 0.85, 0.9], + "calibration": "counts", + "file_type": "ftype1", + "coordinates": ["lons", "lats"]}, + "lons": {"name": "lons", + "file_type": ["ftype1", "ftype3"]}, + "lats": {"name": "lats", + "file_type": 
["ftype1", "ftype3"]}}} self.config = res_dict self.reader = yr.FileYAMLReader(self.config) @@ -687,13 +687,13 @@ def test_update_ds_ids_from_file_handlers(self): from functools import partial orig_ids = self.reader.all_ids - for ftype, resol in zip(('ftype1', 'ftype2'), (1, 2)): + for ftype, resol in zip(("ftype1", "ftype2"), (1, 2)): # need to copy this because the dataset infos will be modified _orig_ids = {key: val.copy() for key, val in orig_ids.items()} with patch.dict(self.reader.all_ids, _orig_ids, clear=True), \ patch.dict(self.reader.available_ids, {}, clear=True): # Add a file handler with resolution property - fh = MagicMock(filetype_info={'file_type': ftype}, + fh = MagicMock(filetype_info={"file_type": ftype}, resolution=resol) fh.available_datasets = partial(available_datasets, fh) fh.file_type_matches = partial(file_type_matches, fh) @@ -707,11 +707,11 @@ def test_update_ds_ids_from_file_handlers(self): # Make sure the resolution property has been transferred # correctly from the file handler to the dataset ID for ds_id, ds_info in self.reader.all_ids.items(): - file_types = ds_info['file_type'] + file_types = ds_info["file_type"] if not isinstance(file_types, list): file_types = [file_types] if ftype in file_types: - self.assertEqual(resol, ds_id['resolution']) + self.assertEqual(resol, ds_id["resolution"]) # Test methods @@ -725,10 +725,10 @@ def available_datasets(self, configured_datasets=None): if is_avail is not None: yield is_avail, ds_info - matches = self.file_type_matches(ds_info['file_type']) - if matches and ds_info.get('resolution') != res: + matches = self.file_type_matches(ds_info["file_type"]) + if matches and ds_info.get("resolution") != res: new_info = ds_info.copy() - new_info['resolution'] = res + new_info["resolution"] = res yield True, new_info elif is_avail is None: yield is_avail, ds_info @@ -736,9 +736,9 @@ def available_datasets(self, configured_datasets=None): def file_type_matches(self, ds_ftype): """Fake file_type_matches for testing multiple file types.""" - if isinstance(ds_ftype, str) and ds_ftype == self.filetype_info['file_type']: + if isinstance(ds_ftype, str) and ds_ftype == self.filetype_info["file_type"]: return True - if self.filetype_info['file_type'] in ds_ftype: + if self.filetype_info["file_type"] in ds_ftype: return True return None @@ -764,94 +764,94 @@ def test_load_dataset_with_area_for_single_areas(self, ldwa): original_array = np.arange(6).reshape((2, 3)) area_def = AreaDefinition( - 'test', - 'test', - 'test', - {'proj': 'geos', - 'h': 35785831, - 'type': 'crs'}, + "test", + "test", + "test", + {"proj": "geos", + "h": 35785831, + "type": "crs"}, 3, 2, original_area_extent, ) dummy_ds_xr = xr.DataArray(original_array, - coords={'y': np.arange(2), - 'x': np.arange(3), - 'time': ("y", np.arange(2))}, - attrs={'area': area_def}, - dims=('y', 'x')) + coords={"y": np.arange(2), + "x": np.arange(3), + "time": ("y", np.arange(2))}, + attrs={"area": area_def}, + dims=("y", "x")) # assign the dummy xr as return for the super _load_dataset_with_area method ldwa.return_value = dummy_ds_xr # check no input, nothing should change res = reader._load_dataset_with_area(dsid, coords) np.testing.assert_equal(res.values, original_array) - np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) - np.testing.assert_equal(res.coords['y'], np.arange(2)) - np.testing.assert_equal(res.coords['x'], np.arange(3)) - np.testing.assert_equal(res.coords['time'], np.arange(2)) + np.testing.assert_equal(res.attrs["area"].area_extent, 
original_area_extent) + np.testing.assert_equal(res.coords["y"], np.arange(2)) + np.testing.assert_equal(res.coords["x"], np.arange(3)) + np.testing.assert_equal(res.coords["time"], np.arange(2)) # check wrong input with self.assertRaises(ValueError): - _ = reader._load_dataset_with_area(dsid, coords, 'wronginput') + _ = reader._load_dataset_with_area(dsid, coords, "wronginput") # check native orientation, nothing should change - res = reader._load_dataset_with_area(dsid, coords, 'native') + res = reader._load_dataset_with_area(dsid, coords, "native") np.testing.assert_equal(res.values, original_array) - np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) - np.testing.assert_equal(res.coords['y'], np.arange(2)) - np.testing.assert_equal(res.coords['x'], np.arange(3)) - np.testing.assert_equal(res.coords['time'], np.arange(2)) + np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) + np.testing.assert_equal(res.coords["y"], np.arange(2)) + np.testing.assert_equal(res.coords["x"], np.arange(3)) + np.testing.assert_equal(res.coords["time"], np.arange(2)) # check upright orientation, nothing should change since area is already upright - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, original_array) - np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) - np.testing.assert_equal(res.coords['y'], np.arange(2)) - np.testing.assert_equal(res.coords['x'], np.arange(3)) - np.testing.assert_equal(res.coords['time'], np.arange(2)) + np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) + np.testing.assert_equal(res.coords["y"], np.arange(2)) + np.testing.assert_equal(res.coords["x"], np.arange(3)) + np.testing.assert_equal(res.coords["time"], np.arange(2)) # check that left-right image is flipped correctly - dummy_ds_xr.attrs['area'] = area_def.copy(area_extent=(1500, -1000, -1500, 1000)) + dummy_ds_xr.attrs["area"] = area_def.copy(area_extent=(1500, -1000, -1500, 1000)) ldwa.return_value = dummy_ds_xr.copy() - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, np.fliplr(original_array)) - np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) - np.testing.assert_equal(res.coords['y'], np.arange(2)) - np.testing.assert_equal(res.coords['x'], np.flip(np.arange(3))) - np.testing.assert_equal(res.coords['time'], np.arange(2)) + np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) + np.testing.assert_equal(res.coords["y"], np.arange(2)) + np.testing.assert_equal(res.coords["x"], np.flip(np.arange(3))) + np.testing.assert_equal(res.coords["time"], np.arange(2)) # check that upside down image is flipped correctly - dummy_ds_xr.attrs['area'] = area_def.copy(area_extent=(-1500, 1000, 1500, -1000)) + dummy_ds_xr.attrs["area"] = area_def.copy(area_extent=(-1500, 1000, 1500, -1000)) ldwa.return_value = dummy_ds_xr.copy() - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, np.flipud(original_array)) - np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) - np.testing.assert_equal(res.coords['y'], np.flip(np.arange(2))) - np.testing.assert_equal(res.coords['x'], np.arange(3)) - np.testing.assert_equal(res.coords['time'], np.flip(np.arange(2))) + 
np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) + np.testing.assert_equal(res.coords["y"], np.flip(np.arange(2))) + np.testing.assert_equal(res.coords["x"], np.arange(3)) + np.testing.assert_equal(res.coords["time"], np.flip(np.arange(2))) # check different projection than geos, nothing should be changed area_def = AreaDefinition( - 'test', - 'test', - 'test', - {'proj': 'lcc', - 'lat_1': 25.0, - 'type': 'crs'}, + "test", + "test", + "test", + {"proj": "lcc", + "lat_1": 25.0, + "type": "crs"}, 3, 2, original_area_extent, ) dummy_ds_xr = xr.DataArray(original_array, - dims=('y', 'x'), - attrs={'area': area_def}) + dims=("y", "x"), + attrs={"area": area_def}) ldwa.return_value = dummy_ds_xr - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, original_array) - np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) + np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch.object(yr.FileYAMLReader, "_load_dataset_with_area") @@ -871,12 +871,12 @@ def test_load_dataset_with_area_for_stacked_areas(self, ldwa): original_array = np.arange(12).reshape((4, 3)) area_def0 = AreaDefinition( - 'test', - 'test', - 'test', - {'proj': 'geos', - 'h': 35785831, - 'type': 'crs'}, + "test", + "test", + "test", + {"proj": "geos", + "h": 35785831, + "type": "crs"}, 3, 2, original_area_extents[0], @@ -884,36 +884,36 @@ def test_load_dataset_with_area_for_stacked_areas(self, ldwa): area_def1 = area_def0.copy(area_extent=original_area_extents[1]) dummy_ds_xr = xr.DataArray(original_array, - dims=('y', 'x'), - coords={'y': np.arange(4), - 'x': np.arange(3), - 'time': ("y", np.arange(4))}, - attrs={'area': StackedAreaDefinition(area_def0, area_def1)}) + dims=("y", "x"), + coords={"y": np.arange(4), + "x": np.arange(3), + "time": ("y", np.arange(4))}, + attrs={"area": StackedAreaDefinition(area_def0, area_def1)}) # check that left-right image is flipped correctly - dummy_ds_xr.attrs['area'].defs[0] = area_def0.copy(area_extent=(1500, -1000, -1500, 1000)) - dummy_ds_xr.attrs['area'].defs[1] = area_def1.copy(area_extent=(7000, 5000, 3000, 8000)) + dummy_ds_xr.attrs["area"].defs[0] = area_def0.copy(area_extent=(1500, -1000, -1500, 1000)) + dummy_ds_xr.attrs["area"].defs[1] = area_def1.copy(area_extent=(7000, 5000, 3000, 8000)) ldwa.return_value = dummy_ds_xr.copy() - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, np.fliplr(original_array)) - np.testing.assert_equal(res.attrs['area'].defs[0].area_extent, original_area_extents[0]) - np.testing.assert_equal(res.attrs['area'].defs[1].area_extent, original_area_extents[1]) - np.testing.assert_equal(res.coords['y'], np.arange(4)) - np.testing.assert_equal(res.coords['x'], np.flip(np.arange(3))) - np.testing.assert_equal(res.coords['time'], np.arange(4)) + np.testing.assert_equal(res.attrs["area"].defs[0].area_extent, original_area_extents[0]) + np.testing.assert_equal(res.attrs["area"].defs[1].area_extent, original_area_extents[1]) + np.testing.assert_equal(res.coords["y"], np.arange(4)) + np.testing.assert_equal(res.coords["x"], np.flip(np.arange(3))) + np.testing.assert_equal(res.coords["time"], np.arange(4)) # check that upside down image is flipped correctly - dummy_ds_xr.attrs['area'].defs[0] = area_def0.copy(area_extent=(-1500, 
1000, 1500, -1000)) - dummy_ds_xr.attrs['area'].defs[1] = area_def1.copy(area_extent=(3000, 8000, 7000, 5000)) + dummy_ds_xr.attrs["area"].defs[0] = area_def0.copy(area_extent=(-1500, 1000, 1500, -1000)) + dummy_ds_xr.attrs["area"].defs[1] = area_def1.copy(area_extent=(3000, 8000, 7000, 5000)) ldwa.return_value = dummy_ds_xr.copy() - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, np.flipud(original_array)) # note that the order of the stacked areadefs is flipped here, as expected - np.testing.assert_equal(res.attrs['area'].defs[1].area_extent, original_area_extents[0]) - np.testing.assert_equal(res.attrs['area'].defs[0].area_extent, original_area_extents[1]) - np.testing.assert_equal(res.coords['y'], np.flip(np.arange(4))) - np.testing.assert_equal(res.coords['x'], np.arange(3)) - np.testing.assert_equal(res.coords['time'], np.flip(np.arange(4))) + np.testing.assert_equal(res.attrs["area"].defs[1].area_extent, original_area_extents[0]) + np.testing.assert_equal(res.attrs["area"].defs[0].area_extent, original_area_extents[1]) + np.testing.assert_equal(res.coords["y"], np.flip(np.arange(4))) + np.testing.assert_equal(res.coords["x"], np.arange(3)) + np.testing.assert_equal(res.coords["time"], np.flip(np.arange(4))) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch.object(yr.FileYAMLReader, "_load_dataset_with_area") @@ -936,15 +936,15 @@ def test_load_dataset_with_area_for_swath_def_data(self, ldwa): swath_def = SwathDefinition(lons, lats) dummy_ds_xr = xr.DataArray(original_array, - coords={'y': dim}, - attrs={'area': swath_def}, - dims=('y',)) + coords={"y": dim}, + attrs={"area": swath_def}, + dims=("y",)) # assign the dummy xr as return for the super _load_dataset_with_area method ldwa.return_value = dummy_ds_xr # returned dataset should be unchanged since datasets with a swath definition are not flippable - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, original_array) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @@ -963,21 +963,21 @@ def test_load_dataset_with_area_for_data_without_area(self, ldwa): dim = np.arange(3) dummy_ds_xr = xr.DataArray(original_array, - coords={'y': dim}, + coords={"y": dim}, attrs={}, - dims=('y',)) + dims=("y",)) # assign the dummy xr as return for the super _load_dataset_with_area method ldwa.return_value = dummy_ds_xr # returned dataset should be unchanged since datasets without area information are not flippable - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, original_array) def _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info): seg_area = MagicMock() - seg_area.crs = 'some_crs' + seg_area.crs = "some_crs" seg_area.area_extent = aex seg_area.shape = ashape get_area_def = MagicMock() @@ -987,9 +987,9 @@ def _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_p get_segment_position_info.return_value = chk_pos_info fh = MagicMock() - filetype_info = {'expected_segments': expected_segments, - 'file_type': 'filetype1'} - filename_info = {'segment': segment} + filetype_info = {"expected_segments": expected_segments, + "file_type": "filetype1"} + filename_info = {"segment": segment} fh.filetype_info = filetype_info fh.filename_info = filename_info 
fh.get_area_def = get_area_def @@ -1011,44 +1011,44 @@ def test_get_expected_segments(self, cfh): fake_fh = MagicMock() fake_fh.filename_info = {} fake_fh.filetype_info = {} - cfh.return_value = {'ft1': [fake_fh]} + cfh.return_value = {"ft1": [fake_fh]} # default (1) - created_fhs = reader.create_filehandlers(['fake.nc']) - es = created_fhs['ft1'][0].filetype_info['expected_segments'] + created_fhs = reader.create_filehandlers(["fake.nc"]) + es = created_fhs["ft1"][0].filetype_info["expected_segments"] self.assertEqual(es, 1) # YAML defined for each file type - fake_fh.filetype_info['expected_segments'] = 2 - created_fhs = reader.create_filehandlers(['fake.nc']) - es = created_fhs['ft1'][0].filetype_info['expected_segments'] + fake_fh.filetype_info["expected_segments"] = 2 + created_fhs = reader.create_filehandlers(["fake.nc"]) + es = created_fhs["ft1"][0].filetype_info["expected_segments"] self.assertEqual(es, 2) # defined both in the filename and the YAML metadata # YAML has priority - fake_fh.filename_info = {'total_segments': 3} - fake_fh.filetype_info = {'expected_segments': 2} - created_fhs = reader.create_filehandlers(['fake.nc']) - es = created_fhs['ft1'][0].filetype_info['expected_segments'] + fake_fh.filename_info = {"total_segments": 3} + fake_fh.filetype_info = {"expected_segments": 2} + created_fhs = reader.create_filehandlers(["fake.nc"]) + es = created_fhs["ft1"][0].filetype_info["expected_segments"] self.assertEqual(es, 2) # defined in the filename - fake_fh.filename_info = {'total_segments': 3} + fake_fh.filename_info = {"total_segments": 3} fake_fh.filetype_info = {} - created_fhs = reader.create_filehandlers(['fake.nc']) - es = created_fhs['ft1'][0].filetype_info['expected_segments'] + created_fhs = reader.create_filehandlers(["fake.nc"]) + es = created_fhs["ft1"][0].filetype_info["expected_segments"] self.assertEqual(es, 3) # check correct FCI segment (aka chunk in the FCI world) number reading into segment - fake_fh.filename_info = {'count_in_repeat_cycle': 5} - created_fhs = reader.create_filehandlers(['fake.nc']) - es = created_fhs['ft1'][0].filename_info['segment'] + fake_fh.filename_info = {"count_in_repeat_cycle": 5} + created_fhs = reader.create_filehandlers(["fake.nc"]) + es = created_fhs["ft1"][0].filename_info["segment"] self.assertEqual(es, 5) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) - @patch('satpy.readers.yaml_reader.FileYAMLReader._load_dataset') - @patch('satpy.readers.yaml_reader.xr') - @patch('satpy.readers.yaml_reader._find_missing_segments') + @patch("satpy.readers.yaml_reader.FileYAMLReader._load_dataset") + @patch("satpy.readers.yaml_reader.xr") + @patch("satpy.readers.yaml_reader._find_missing_segments") def test_load_dataset(self, mss, xr, parent_load_dataset): """Test _load_dataset().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader @@ -1066,7 +1066,7 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): # Setup input, and output of mocked functions counter = 9 expected_segments = 8 - seg = MagicMock(dims=['y', 'x']) + seg = MagicMock(dims=["y", "x"]) slice_list = expected_segments * [seg, ] failure = False projectable = MagicMock() @@ -1139,10 +1139,10 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): file_handlers) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) - @patch('satpy.readers.yaml_reader._load_area_def') - @patch('satpy.readers.yaml_reader._stack_area_defs') - @patch('satpy.readers.yaml_reader.GEOSegmentYAMLReader._pad_earlier_segments_area') - 
@patch('satpy.readers.yaml_reader.GEOSegmentYAMLReader._pad_later_segments_area') + @patch("satpy.readers.yaml_reader._load_area_def") + @patch("satpy.readers.yaml_reader._stack_area_defs") + @patch("satpy.readers.yaml_reader.GEOSegmentYAMLReader._pad_earlier_segments_area") + @patch("satpy.readers.yaml_reader.GEOSegmentYAMLReader._pad_later_segments_area") def test_load_area_def(self, pesa, plsa, sad, parent_load_area_def): """Test _load_area_def().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader @@ -1160,7 +1160,7 @@ def test_load_area_def(self, pesa, plsa, sad, parent_load_area_def): parent_load_area_def.assert_called_once_with(dataid, file_handlers) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) - @patch('satpy.readers.yaml_reader.AreaDefinition') + @patch("satpy.readers.yaml_reader.AreaDefinition") def test_pad_later_segments_area(self, AreaDefinition): """Test _pad_later_segments_area().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader @@ -1172,16 +1172,16 @@ def test_pad_later_segments_area(self, AreaDefinition): ashape = [200, 500] fh_1, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, None) file_handlers = [fh_1] - dataid = 'dataid' + dataid = "dataid" res = reader._pad_later_segments_area(file_handlers, dataid) self.assertEqual(len(res), 2) seg2_extent = (0, 1500, 200, 1000) - expected_call = ('fill', 'fill', 'fill', 'some_crs', 500, 200, + expected_call = ("fill", "fill", "fill", "some_crs", 500, 200, seg2_extent) AreaDefinition.assert_called_once_with(*expected_call) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) - @patch('satpy.readers.yaml_reader.AreaDefinition') + @patch("satpy.readers.yaml_reader.AreaDefinition") def test_pad_earlier_segments_area(self, AreaDefinition): """Test _pad_earlier_segments_area().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader @@ -1194,12 +1194,12 @@ def test_pad_earlier_segments_area(self, AreaDefinition): fh_2, seg2_area = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, None) file_handlers = [fh_2] - dataid = 'dataid' + dataid = "dataid" area_defs = {2: seg2_area} res = reader._pad_earlier_segments_area(file_handlers, dataid, area_defs) self.assertEqual(len(res), 2) seg1_extent = (0, 500, 200, 0) - expected_call = ('fill', 'fill', 'fill', 'some_crs', 500, 200, + expected_call = ("fill", "fill", "fill", "some_crs", 500, 200, seg1_extent) AreaDefinition.assert_called_once_with(*expected_call) @@ -1208,15 +1208,15 @@ def test_find_missing_segments(self): from satpy.readers.yaml_reader import _find_missing_segments as fms # Dataset with only one segment - filename_info = {'segment': 1} + filename_info = {"segment": 1} fh_seg1 = MagicMock(filename_info=filename_info) - projectable = 'projectable' + projectable = "projectable" get_dataset = MagicMock() get_dataset.return_value = projectable fh_seg1.get_dataset = get_dataset file_handlers = [fh_seg1] - ds_info = {'file_type': []} - dataid = 'dataid' + ds_info = {"file_type": []} + dataid = "dataid" res = fms(file_handlers, ds_info, dataid) counter, expected_segments, slice_list, failure, proj = res self.assertEqual(counter, 2) @@ -1226,18 +1226,18 @@ def test_find_missing_segments(self): self.assertTrue(proj is projectable) # Three expected segments, first and last missing - filename_info = {'segment': 2} - filetype_info = {'expected_segments': 3, - 'file_type': 'foo'} + filename_info = {"segment": 2} + filetype_info = {"expected_segments": 3, + "file_type": "foo"} fh_seg2 = 
MagicMock(filename_info=filename_info, filetype_info=filetype_info) - projectable = 'projectable' + projectable = "projectable" get_dataset = MagicMock() get_dataset.return_value = projectable fh_seg2.get_dataset = get_dataset file_handlers = [fh_seg2] - ds_info = {'file_type': ['foo']} - dataid = 'dataid' + ds_info = {"file_type": ["foo"]} + dataid = "dataid" res = fms(file_handlers, ds_info, dataid) counter, expected_segments, slice_list, failure, proj = res self.assertEqual(counter, 3) @@ -1261,28 +1261,28 @@ def GVSYReader(): @pytest.fixture def fake_geswh(): """Get a fixture of the patched _get_empty_segment_with_height.""" - with patch('satpy.readers.yaml_reader._get_empty_segment_with_height') as geswh: + with patch("satpy.readers.yaml_reader._get_empty_segment_with_height") as geswh: yield geswh @pytest.fixture def fake_xr(): """Get a fixture of the patched xarray.""" - with patch('satpy.readers.yaml_reader.xr') as xr: + with patch("satpy.readers.yaml_reader.xr") as xr: yield xr @pytest.fixture def fake_mss(): """Get a fixture of the patched _find_missing_segments.""" - with patch('satpy.readers.yaml_reader._find_missing_segments') as mss: + with patch("satpy.readers.yaml_reader._find_missing_segments") as mss: yield mss @pytest.fixture def fake_adef(): """Get a fixture of the patched AreaDefinition.""" - with patch('satpy.readers.yaml_reader.AreaDefinition') as adef: + with patch("satpy.readers.yaml_reader.AreaDefinition") as adef: yield adef @@ -1293,14 +1293,14 @@ def test_get_empty_segment(self, GVSYReader, fake_mss, fake_xr, fake_geswh): """Test execution of (overridden) get_empty_segment inside _load_dataset.""" # Setup input, and output of mocked functions for first segment missing chk_pos_info = { - '1km': {'start_position_row': 0, - 'end_position_row': 0, - 'segment_height': 0, - 'grid_width': 11136}, - '2km': {'start_position_row': 140, - 'end_position_row': None, - 'segment_height': 278, - 'grid_width': 5568} + "1km": {"start_position_row": 0, + "end_position_row": 0, + "segment_height": 0, + "grid_width": 11136}, + "2km": {"start_position_row": 140, + "end_position_row": None, + "segment_height": 278, + "grid_width": 5568} } expected_segments = 2 segment = 2 @@ -1308,10 +1308,10 @@ def test_get_empty_segment(self, GVSYReader, fake_mss, fake_xr, fake_geswh): ashape = [278, 5568] fh_2, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) - GVSYReader.file_handlers = {'filetype1': [fh_2]} + GVSYReader.file_handlers = {"filetype1": [fh_2]} counter = 2 - seg = MagicMock(dims=['y', 'x']) + seg = MagicMock(dims=["y", "x"]) slice_list = [None, seg] failure = False projectable = MagicMock() @@ -1325,20 +1325,20 @@ def test_get_empty_segment(self, GVSYReader, fake_mss, fake_xr, fake_geswh): failure, projectable) GVSYReader._load_dataset(dataid, ds_info, [fh_2]) # the return of get_empty_segment - fake_geswh.assert_called_once_with(empty_segment, 139, dim='y') + fake_geswh.assert_called_once_with(empty_segment, 139, dim="y") def test_pad_earlier_segments_area(self, GVSYReader, fake_adef): """Test _pad_earlier_segments_area() for the variable segment case.""" # setting to 0 or None values that shouldn't be relevant chk_pos_info = { - '1km': {'start_position_row': 0, - 'end_position_row': 0, - 'segment_height': 0, - 'grid_width': 11136}, - '2km': {'start_position_row': 140, - 'end_position_row': None, - 'segment_height': 278, - 'grid_width': 5568} + "1km": {"start_position_row": 0, + "end_position_row": 0, + "segment_height": 0, + "grid_width": 
11136}, + "2km": {"start_position_row": 140, + "end_position_row": None, + "segment_height": 278, + "grid_width": 5568} } expected_segments = 2 segment = 2 @@ -1346,8 +1346,8 @@ def test_pad_earlier_segments_area(self, GVSYReader, fake_adef): ashape = [278, 5568] fh_2, seg2_area = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) - GVSYReader.file_handlers = {'filetype1': [fh_2]} - dataid = 'dataid' + GVSYReader.file_handlers = {"filetype1": [fh_2]} + dataid = "dataid" area_defs = {2: seg2_area} res = GVSYReader._pad_earlier_segments_area([fh_2], dataid, area_defs) assert len(res) == 2 @@ -1358,29 +1358,29 @@ def test_pad_earlier_segments_area(self, GVSYReader, fake_adef): # half of the previous size (1000-500)/2=250. # The new area extent lower-left row is therefore 500-250=250 seg1_extent = (0, 500, 200, 250) - expected_call = ('fill', 'fill', 'fill', 'some_crs', 5568, 139, + expected_call = ("fill", "fill", "fill", "some_crs", 5568, 139, seg1_extent) fake_adef.assert_called_once_with(*expected_call) def test_pad_later_segments_area(self, GVSYReader, fake_adef): """Test _pad_later_segments_area() in the variable padding case.""" chk_pos_info = { - '1km': {'start_position_row': None, - 'end_position_row': 11136 - 278, - 'segment_height': 556, - 'grid_width': 11136}, - '2km': {'start_position_row': 0, - 'end_position_row': 0, - 'segment_height': 0, - 'grid_width': 5568}} + "1km": {"start_position_row": None, + "end_position_row": 11136 - 278, + "segment_height": 556, + "grid_width": 11136}, + "2km": {"start_position_row": 0, + "end_position_row": 0, + "segment_height": 0, + "grid_width": 5568}} expected_segments = 2 segment = 1 aex = [0, 1000, 200, 500] ashape = [556, 11136] fh_1, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) - GVSYReader.file_handlers = {'filetype1': [fh_1]} - dataid = 'dataid' + GVSYReader.file_handlers = {"filetype1": [fh_1]} + dataid = "dataid" res = GVSYReader._pad_later_segments_area([fh_1], dataid) assert len(res) == 2 @@ -1389,7 +1389,7 @@ def test_pad_later_segments_area(self, GVSYReader, fake_adef): # therefore, the new vertical area extent should be half of the previous size (1000-500)/2=250. 
# The new area extent lower-left row is therefore 1000+250=1250 seg2_extent = (0, 1250, 200, 1000) - expected_call = ('fill', 'fill', 'fill', 'some_crs', 11136, 278, + expected_call = ("fill", "fill", "fill", "some_crs", 11136, 278, seg2_extent) fake_adef.assert_called_once_with(*expected_call) @@ -1406,45 +1406,45 @@ def side_effect_areadef(a, b, c, crs, width, height, aex): fake_adef.side_effect = side_effect_areadef chk_pos_info = { - '1km': {'start_position_row': 11136 - 600 - 100 + 1, - 'end_position_row': 11136 - 600, - 'segment_height': 100, - 'grid_width': 11136}, - '2km': {'start_position_row': 0, - 'end_position_row': 0, - 'segment_height': 0, - 'grid_width': 5568}} + "1km": {"start_position_row": 11136 - 600 - 100 + 1, + "end_position_row": 11136 - 600, + "segment_height": 100, + "grid_width": 11136}, + "2km": {"start_position_row": 0, + "end_position_row": 0, + "segment_height": 0, + "grid_width": 5568}} expected_segments = 8 segment = 1 aex = [0, 1000, 200, 500] ashape = [100, 11136] fh_1, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) chk_pos_info = { - '1km': {'start_position_row': 11136 - 300 - 100 + 1, - 'end_position_row': 11136 - 300, - 'segment_height': 100, - 'grid_width': 11136}, - '2km': {'start_position_row': 0, - 'end_position_row': 0, - 'segment_height': 0, - 'grid_width': 5568}} + "1km": {"start_position_row": 11136 - 300 - 100 + 1, + "end_position_row": 11136 - 300, + "segment_height": 100, + "grid_width": 11136}, + "2km": {"start_position_row": 0, + "end_position_row": 0, + "segment_height": 0, + "grid_width": 5568}} segment = 4 fh_4, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) chk_pos_info = { - '1km': {'start_position_row': 11136 - 100 + 1, - 'end_position_row': None, - 'segment_height': 100, - 'grid_width': 11136}, - '2km': {'start_position_row': 0, - 'end_position_row': 0, - 'segment_height': 0, - 'grid_width': 5568}} + "1km": {"start_position_row": 11136 - 100 + 1, + "end_position_row": None, + "segment_height": 100, + "grid_width": 11136}, + "2km": {"start_position_row": 0, + "end_position_row": 0, + "segment_height": 0, + "grid_width": 5568}} segment = 8 fh_8, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) - GVSYReader.file_handlers = {'filetype1': [fh_1, fh_4, fh_8]} - dataid = 'dataid' + GVSYReader.file_handlers = {"filetype1": [fh_1, fh_4, fh_8]} + dataid = "dataid" res = GVSYReader._pad_later_segments_area([fh_1, fh_4, fh_8], dataid) assert len(res) == 8 @@ -1473,15 +1473,15 @@ def side_effect_areadef(a, b, c, crs, width, height, aex): # The second padded segment has 67px height -> 500*67/100=335 area extent height ->1330+335=1665 # The first padded segment has 67px height -> 500*67/100=335 area extent height ->1665+335=2000 assert fake_adef.call_count == 5 - expected_call1 = ('fill', 'fill', 'fill', 'some_crs', 11136, 100, + expected_call1 = ("fill", "fill", "fill", "some_crs", 11136, 100, (0, 1500.0, 200, 1000)) - expected_call2 = ('fill', 'fill', 'fill', 'some_crs', 11136, 100, + expected_call2 = ("fill", "fill", "fill", "some_crs", 11136, 100, (0, 2000.0, 200, 1500)) - expected_call3 = ('fill', 'fill', 'fill', 'some_crs', 11136, 66, + expected_call3 = ("fill", "fill", "fill", "some_crs", 11136, 66, (0, 1330.0, 200, 1000)) - expected_call4 = ('fill', 'fill', 'fill', 'some_crs', 11136, 67, + expected_call4 = ("fill", "fill", "fill", "some_crs", 11136, 67, (0, 1665.0, 200, 1330.0)) - expected_call5 = ('fill', 'fill', 
'fill', 'some_crs', 11136, 67, + expected_call5 = ("fill", "fill", "fill", "some_crs", 11136, 67, (0, 2000.0, 200, 1665.0)) fake_adef.side_effect = None @@ -1496,22 +1496,22 @@ def test_get_empty_segment_with_height(self): """Test _get_empty_segment_with_height().""" from satpy.readers.yaml_reader import _get_empty_segment_with_height as geswh - dim = 'y' + dim = "y" # check expansion of empty segment - empty_segment = xr.DataArray(np.ones((139, 5568)), dims=['y', 'x']) + empty_segment = xr.DataArray(np.ones((139, 5568)), dims=["y", "x"]) new_height = 140 new_empty_segment = geswh(empty_segment, new_height, dim) assert new_empty_segment.shape == (140, 5568) # check reduction of empty segment - empty_segment = xr.DataArray(np.ones((140, 5568)), dims=['y', 'x']) + empty_segment = xr.DataArray(np.ones((140, 5568)), dims=["y", "x"]) new_height = 139 new_empty_segment = geswh(empty_segment, new_height, dim) assert new_empty_segment.shape == (139, 5568) # check that empty segment is not modified if it has the right height already - empty_segment = xr.DataArray(np.ones((140, 5568)), dims=['y', 'x']) + empty_segment = xr.DataArray(np.ones((140, 5568)), dims=["y", "x"]) new_height = 140 new_empty_segment = geswh(empty_segment, new_height, dim) assert new_empty_segment is empty_segment diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index c87cd1055c..70f1ec80e5 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -69,7 +69,7 @@ def wrapper(self, *args, **kwargs): def convert_file_content_to_data_array(file_content, attrs=tuple(), - dims=('z', 'y', 'x')): + dims=("z", "y", "x")): """Help old reader tests that still use numpy arrays. A lot of old reader tests still use numpy arrays and depend on the @@ -98,8 +98,8 @@ def convert_file_content_to_data_array(file_content, attrs=tuple(), for key, val in file_content.items(): da_attrs = {} for a in attrs: - if key + '/attr/' + a in file_content: - da_attrs[a] = file_content[key + '/attr/' + a] + if key + "/attr/" + a in file_content: + da_attrs[a] = file_content[key + "/attr/" + a] if isinstance(val, np.ndarray): val = da.from_array(val, chunks=4096) @@ -120,14 +120,14 @@ def _filter_datasets(all_ds, names_or_ids): str_filter = [ds_name for ds_name in names_or_ids if isinstance(ds_name, str)] id_filter = [ds_id for ds_id in names_or_ids if not isinstance(ds_id, str)] for ds_id in all_ds: - if ds_id in id_filter or ds_id['name'] in str_filter: + if ds_id in id_filter or ds_id["name"] in str_filter: yield ds_id def _swath_def_of_data_arrays(rows, cols): return SwathDefinition( - DataArray(da.zeros((rows, cols)), dims=('y', 'x')), - DataArray(da.zeros((rows, cols)), dims=('y', 'x')), + DataArray(da.zeros((rows, cols)), dims=("y", "x")), + DataArray(da.zeros((rows, cols)), dims=("y", "x")), ) @@ -136,14 +136,14 @@ class FakeModifier(ModifierBase): def _handle_res_change(self, datasets, info): # assume this is used on the 500m version of ds5 - info['resolution'] = 250 + info["resolution"] = 250 rep_data_arr = datasets[0] - y_size = rep_data_arr.sizes['y'] - x_size = rep_data_arr.sizes['x'] + y_size = rep_data_arr.sizes["y"] + x_size = rep_data_arr.sizes["x"] data = da.zeros((y_size * 2, x_size * 2)) - if isinstance(rep_data_arr.attrs['area'], SwathDefinition): + if isinstance(rep_data_arr.attrs["area"], SwathDefinition): area = _swath_def_of_data_arrays(y_size * 2, x_size * 2) - info['area'] = area + info["area"] = area else: raise NotImplementedError("'res_change' modifier can't handle " "AreaDefinition changes yet.") @@ -151,20 +151,20 @@ 
def _handle_res_change(self, datasets, info): def __call__(self, datasets, optional_datasets=None, **kwargs): """Modify provided data depending on the modifier name and input data.""" - if self.attrs['optional_prerequisites']: - for opt_dep in self.attrs['optional_prerequisites']: - opt_dep_name = opt_dep if isinstance(opt_dep, str) else opt_dep.get('name', '') - if 'NOPE' in opt_dep_name or 'fail' in opt_dep_name: + if self.attrs["optional_prerequisites"]: + for opt_dep in self.attrs["optional_prerequisites"]: + opt_dep_name = opt_dep if isinstance(opt_dep, str) else opt_dep.get("name", "") + if "NOPE" in opt_dep_name or "fail" in opt_dep_name: continue assert (optional_datasets is not None and len(optional_datasets)) - resolution = datasets[0].attrs.get('resolution') - mod_name = self.attrs['modifiers'][-1] + resolution = datasets[0].attrs.get("resolution") + mod_name = self.attrs["modifiers"][-1] data = datasets[0].data i = datasets[0].attrs.copy() - if mod_name == 'res_change' and resolution is not None: + if mod_name == "res_change" and resolution is not None: data = self._handle_res_change(datasets, i) - elif 'incomp_areas' in mod_name: + elif "incomp_areas" in mod_name: raise IncompatibleAreas( "Test modifier 'incomp_areas' always raises IncompatibleAreas") self.apply_modifier_info(datasets[0].attrs, i) @@ -184,27 +184,27 @@ def __call__(self, projectables, nonprojectables=None, **kwargs): if nonprojectables: self.match_data_arrays(nonprojectables) info = self.attrs.copy() - if self.attrs['name'] in ('comp14', 'comp26'): + if self.attrs["name"] in ("comp14", "comp26"): # used as a test when composites update the dataset id with # information from prereqs - info['resolution'] = 555 - if self.attrs['name'] in ('comp24', 'comp25'): + info["resolution"] = 555 + if self.attrs["name"] in ("comp24", "comp25"): # other composites that copy the resolution from inputs - info['resolution'] = projectables[0].attrs.get('resolution') - if len(projectables) != len(self.attrs['prerequisites']): + info["resolution"] = projectables[0].attrs.get("resolution") + if len(projectables) != len(self.attrs["prerequisites"]): raise ValueError("Not enough prerequisite datasets passed") info.update(kwargs) if projectables: - info['area'] = projectables[0].attrs['area'] + info["area"] = projectables[0].attrs["area"] dim_sizes = projectables[0].sizes else: # static_image - dim_sizes = {'y': 4, 'x': 5} - return DataArray(data=da.zeros((dim_sizes['y'], dim_sizes['x'], 3)), + dim_sizes = {"y": 4, "x": 5} + return DataArray(data=da.zeros((dim_sizes["y"], dim_sizes["x"], 3)), attrs=info, - dims=['y', 'x', 'bands'], - coords={'bands': ['R', 'G', 'B']}) + dims=["y", "x", "bands"], + coords={"bands": ["R", "G", "B"]}) class FakeFileHandler(BaseFileHandler): @@ -228,21 +228,21 @@ def end_time(self): @property def sensor_names(self): """Get sensor name from filetype configuration.""" - sensor = self.filetype_info.get('sensor', 'fake_sensor') + sensor = self.filetype_info.get("sensor", "fake_sensor") return {sensor} def get_dataset(self, data_id: DataID, ds_info: dict): """Get fake DataArray for testing.""" - if data_id['name'] == 'ds9_fail_load': + if data_id["name"] == "ds9_fail_load": raise KeyError("Can't load '{}' because it is supposed to " - "fail.".format(data_id['name'])) + "fail.".format(data_id["name"])) attrs = data_id.to_dict() attrs.update(ds_info) - attrs['sensor'] = self.filetype_info.get('sensor', 'fake_sensor') - attrs['platform_name'] = 'fake_platform' - attrs['start_time'] = self.start_time - 
attrs['end_time'] = self.end_time
-        res = attrs.get('resolution', 250)
+        attrs["sensor"] = self.filetype_info.get("sensor", "fake_sensor")
+        attrs["platform_name"] = "fake_platform"
+        attrs["start_time"] = self.start_time
+        attrs["end_time"] = self.end_time
+        res = attrs.get("resolution", 250)
         rows = cols = {
             250: 20,
             500: 10,
@@ -250,7 +250,7 @@ def get_dataset(self, data_id: DataID, ds_info: dict):
         }.get(res, 5)
         return DataArray(data=da.zeros((rows, cols)),
                          attrs=attrs,
-                         dims=['y', 'x'])
+                         dims=["y", "x"])

     def available_datasets(self, configured_datasets=None):
         """Report YAML datasets available unless 'not_available' is specified during creation."""
@@ -262,7 +262,7 @@ def available_datasets(self, configured_datasets=None):
                 # file handler so let's yield early
                 yield is_avail, ds_info
                 continue
-            ft_matches = self.file_type_matches(ds_info['file_type'])
+            ft_matches = self.file_type_matches(ds_info["file_type"])
             if not ft_matches:
                 yield None, ds_info
                 continue
diff --git a/satpy/tests/writer_tests/test_awips_tiled.py b/satpy/tests/writer_tests/test_awips_tiled.py
index a47552a708..eab72e8f5b 100644
--- a/satpy/tests/writer_tests/test_awips_tiled.py
+++ b/satpy/tests/writer_tests/test_awips_tiled.py
@@ -42,10 +42,10 @@


 def _check_production_location(ds):
-    if 'production_site' in ds.attrs:
-        prod_loc_name = 'production_site'
-    elif 'production_location' in ds.attrs:
-        prod_loc_name = 'producton_location'
+    if "production_site" in ds.attrs:
+        prod_loc_name = "production_site"
+    elif "production_location" in ds.attrs:
+        prod_loc_name = "producton_location"
     else:
         return

@@ -62,67 +62,67 @@ def check_required_properties(unmasked_ds, masked_ds):


 def _check_required_common_attributes(ds):
     """Check common properties of the created AWIPS tiles for validity."""
-    for attr_name in ('tile_row_offset', 'tile_column_offset',
-                      'product_tile_height', 'product_tile_width',
-                      'number_product_tiles',
-                      'product_rows', 'product_columns'):
+    for attr_name in ("tile_row_offset", "tile_column_offset",
+                      "product_tile_height", "product_tile_width",
+                      "number_product_tiles",
+                      "product_rows", "product_columns"):
         assert attr_name in ds.attrs
     _check_production_location(ds)
     for data_arr in ds.data_vars.values():
         if data_arr.ndim == 0:
             # grid mapping variable
-            assert 'grid_mapping_name' in data_arr.attrs
+            assert "grid_mapping_name" in data_arr.attrs
             continue
-        assert data_arr.encoding.get('zlib', False)
-        assert 'grid_mapping' in data_arr.attrs
-        assert data_arr.attrs['grid_mapping'] in ds
-        assert 'units' in data_arr.attrs
+        assert data_arr.encoding.get("zlib", False)
+        assert "grid_mapping" in data_arr.attrs
+        assert data_arr.attrs["grid_mapping"] in ds
+        assert "units" in data_arr.attrs
         if data_arr.name != "DQF":
             assert data_arr.dtype == np.int16
             assert data_arr.attrs["_Unsigned"] == "true"


 def _check_scaled_x_coordinate_variable(ds, masked_ds):
-    assert 'x' in ds.coords
-    x_coord = ds.coords['x']
+    assert "x" in ds.coords
+    x_coord = ds.coords["x"]
     np.testing.assert_equal(np.diff(x_coord), 1)
     x_attrs = x_coord.attrs
-    assert x_attrs.get('standard_name') == 'projection_x_coordinate'
-    assert x_attrs.get('units') == 'meters'
-    assert 'scale_factor' in x_attrs
-    assert x_attrs['scale_factor'] > 0
-    assert 'add_offset' in x_attrs
+    assert x_attrs.get("standard_name") == "projection_x_coordinate"
+    assert x_attrs.get("units") == "meters"
+    assert "scale_factor" in x_attrs
+    assert x_attrs["scale_factor"] > 0
+    assert "add_offset" in x_attrs

-    unscaled_x = masked_ds.coords['x'].values
+    unscaled_x = 
masked_ds.coords["x"].values assert (np.diff(unscaled_x) > 0).all() def _check_scaled_y_coordinate_variable(ds, masked_ds): - assert 'y' in ds.coords - y_coord = ds.coords['y'] + assert "y" in ds.coords + y_coord = ds.coords["y"] np.testing.assert_equal(np.diff(y_coord), 1) y_attrs = y_coord.attrs - assert y_attrs.get('standard_name') == 'projection_y_coordinate' - assert y_attrs.get('units') == 'meters' - assert 'scale_factor' in y_attrs - assert y_attrs['scale_factor'] < 0 - assert 'add_offset' in y_attrs + assert y_attrs.get("standard_name") == "projection_y_coordinate" + assert y_attrs.get("units") == "meters" + assert "scale_factor" in y_attrs + assert y_attrs["scale_factor"] < 0 + assert "add_offset" in y_attrs - unscaled_y = masked_ds.coords['y'].values + unscaled_y = masked_ds.coords["y"].values assert (np.diff(unscaled_y) < 0).all() def _get_test_area(shape=(200, 100), crs=None, extents=None): from pyresample.geometry import AreaDefinition if crs is None: - crs = CRS('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') + crs = CRS("+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs") if extents is None: extents = (-1000., -1500., 1000., 1500.) area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", crs, shape[1], shape[0], @@ -138,11 +138,11 @@ def _get_test_data(shape=(200, 100), chunks=50): def _get_test_lcc_data(dask_arr, area_def, extra_attrs=None): attrs = dict( - name='test_ds', - platform_name='PLAT', - sensor='SENSOR', - units='1', - standard_name='toa_bidirectional_reflectance', + name="test_ds", + platform_name="PLAT", + sensor="SENSOR", + units="1", + standard_name="toa_bidirectional_reflectance", area=area_def, start_time=START_TIME, end_time=END_TIME @@ -151,7 +151,7 @@ def _get_test_lcc_data(dask_arr, area_def, extra_attrs=None): attrs.update(extra_attrs) ds = xr.DataArray( dask_arr, - dims=('y', 'x') if dask_arr.ndim == 2 else ('bands', 'y', 'x'), + dims=("y", "x") if dask_arr.ndim == 2 else ("bands", "y", "x"), attrs=attrs, ) return update_resampled_coords(ds, ds, area_def) @@ -165,13 +165,13 @@ def test_init(self, tmp_path): from satpy.writers.awips_tiled import AWIPSTiledWriter AWIPSTiledWriter(base_dir=str(tmp_path)) - @pytest.mark.parametrize('use_save_dataset', + @pytest.mark.parametrize("use_save_dataset", [(False,), (True,)]) @pytest.mark.parametrize( - ('extra_attrs', 'expected_filename'), + ("extra_attrs", "expected_filename"), [ - ({}, 'TESTS_AII_PLAT_SENSOR_test_ds_TEST_T001_20180101_1200.nc'), - ({'sensor': 'viirs', 'name': 'I01'}, 'TESTS_AII_PLAT_viirs_I01_TEST_T001_20180101_1200.nc'), + ({}, "TESTS_AII_PLAT_SENSOR_test_ds_TEST_T001_20180101_1200.nc"), + ({"sensor": "viirs", "name": "I01"}, "TESTS_AII_PLAT_viirs_I01_TEST_T001_20180101_1200.nc"), ] ) def test_basic_numbered_1_tile(self, extra_attrs, expected_filename, use_save_dataset, caplog, tmp_path): @@ -183,21 +183,21 @@ def test_basic_numbered_1_tile(self, extra_attrs, expected_filename, use_save_da with caplog.at_level(logging.DEBUG): w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) if use_save_dataset: - w.save_dataset(input_data_arr, sector_id='TEST', source_name='TESTS') + w.save_dataset(input_data_arr, sector_id="TEST", source_name="TESTS") else: - w.save_datasets([input_data_arr], sector_id='TEST', source_name='TESTS') + w.save_datasets([input_data_arr], sector_id="TEST", source_name="TESTS") assert "no routine matching" not in caplog.text assert "Can't format string" not in 
caplog.text - all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) + all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert len(all_files) == 1 assert os.path.basename(all_files[0]) == expected_filename for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) output_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, output_ds) - scale_factor = output_ds['data'].encoding['scale_factor'] - np.testing.assert_allclose(input_data_arr.values, output_ds['data'].data, + scale_factor = output_ds["data"].encoding["scale_factor"] + np.testing.assert_allclose(input_data_arr.values, output_ds["data"].data, atol=scale_factor / 2) def test_units_length_warning(self, tmp_path): @@ -208,8 +208,8 @@ def test_units_length_warning(self, tmp_path): input_data_arr = _get_test_lcc_data(data, area_def) input_data_arr.attrs["units"] = "this is a really long units string" w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) - with pytest.warns(UserWarning, match=r'.*this is a really long units string.*too long.*'): - w.save_dataset(input_data_arr, sector_id='TEST', source_name='TESTS') + with pytest.warns(UserWarning, match=r".*this is a really long units string.*too long.*"): + w.save_dataset(input_data_arr, sector_id="TEST", source_name="TESTS") @pytest.mark.parametrize( ("tile_count", "tile_size"), @@ -228,33 +228,33 @@ def test_basic_numbered_tiles(self, tile_count, tile_size, tmp_path): input_data_arr = _get_test_lcc_data(data, area_def) w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) save_kwargs = dict( - sector_id='TEST', + sector_id="TEST", source_name="TESTS", tile_count=tile_count, tile_size=tile_size, - extra_global_attrs={'my_global': 'TEST'} + extra_global_attrs={"my_global": "TEST"} ) should_error = tile_count is None and tile_size is None if should_error: with dask.config.set(scheduler=CustomScheduler(0)), \ - pytest.raises(ValueError, match=r'Either.*tile_count.*'): + pytest.raises(ValueError, match=r"Either.*tile_count.*"): w.save_datasets([input_data_arr], **save_kwargs) else: with dask.config.set(scheduler=CustomScheduler(1 * 2)): # precompute=*2 w.save_datasets([input_data_arr], **save_kwargs) - all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) + all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) expected_num_files = 0 if should_error else 9 assert len(all_files) == expected_num_files for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - assert unmasked_ds.attrs['my_global'] == 'TEST' - assert unmasked_ds.attrs['sector_id'] == 'TEST' - assert 'physical_element' in unmasked_ds.attrs - stime = input_data_arr.attrs['start_time'] - assert unmasked_ds.attrs['start_date_time'] == stime.strftime('%Y-%m-%dT%H:%M:%S') + assert unmasked_ds.attrs["my_global"] == "TEST" + assert unmasked_ds.attrs["sector_id"] == "TEST" + assert "physical_element" in unmasked_ds.attrs + stime = input_data_arr.attrs["start_time"] + assert unmasked_ds.attrs["start_date_time"] == stime.strftime("%Y-%m-%dT%H:%M:%S") def test_basic_lettered_tiles(self, tmp_path): """Test creating a lettered grid.""" @@ -265,14 +265,14 @@ def test_basic_lettered_tiles(self, tmp_path): extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) # tile_count should be ignored since we specified lettered_grid - w.save_datasets([ds], sector_id='LCC', source_name="TESTS", 
tile_count=(3, 3), lettered_grid=True) - all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) + w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) + all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert len(all_files) == 16 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - assert masked_ds.attrs['start_date_time'] == START_TIME.strftime('%Y-%m-%dT%H:%M:%S') + assert masked_ds.attrs["start_date_time"] == START_TIME.strftime("%Y-%m-%dT%H:%M:%S") def test_basic_lettered_tiles_diff_projection(self, tmp_path): """Test creating a lettered grid from data with differing projection..""" @@ -284,20 +284,20 @@ def test_basic_lettered_tiles_diff_projection(self, tmp_path): extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) # tile_count should be ignored since we specified lettered_grid - w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) - all_files = sorted(glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc'))) + w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) + all_files = sorted(glob(os.path.join(str(tmp_path), "TESTS_AII*.nc"))) assert len(all_files) == 24 assert "TC02" in all_files[0] # the first tile should be TC02 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - assert masked_ds.attrs['start_date_time'] == START_TIME.strftime('%Y-%m-%dT%H:%M:%S') + assert masked_ds.attrs["start_date_time"] == START_TIME.strftime("%Y-%m-%dT%H:%M:%S") def test_lettered_tiles_update_existing(self, tmp_path): """Test updating lettered tiles with additional data.""" from satpy.writers.awips_tiled import AWIPSTiledWriter - first_base_dir = os.path.join(str(tmp_path), 'first') + first_base_dir = os.path.join(str(tmp_path), "first") w = AWIPSTiledWriter(base_dir=first_base_dir, compress=True) shape = (2000, 1000) data = np.linspace(0., 1., shape[0] * shape[1], dtype=np.float32).reshape(shape) @@ -308,11 +308,11 @@ def test_lettered_tiles_update_existing(self, tmp_path): extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) # tile_count should be ignored since we specified lettered_grid - w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) - all_files = sorted(glob(os.path.join(first_base_dir, 'TESTS_AII*.nc'))) + w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) + all_files = sorted(glob(os.path.join(first_base_dir, "TESTS_AII*.nc"))) assert len(all_files) == 16 first_files = [] - second_base_dir = os.path.join(str(tmp_path), 'second') + second_base_dir = os.path.join(str(tmp_path), "second") os.makedirs(second_base_dir) for fn in all_files: new_fn = fn.replace(first_base_dir, second_base_dir) @@ -335,23 +335,23 @@ def test_lettered_tiles_update_existing(self, tmp_path): # file multiple times...sometimes. If we limit dask to one worker # it seems to work fine. 
with dask.config.set(num_workers=1): - w.save_datasets([ds2], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) - all_files = glob(os.path.join(second_base_dir, 'TESTS_AII*.nc')) + w.save_datasets([ds2], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) + all_files = glob(os.path.join(second_base_dir, "TESTS_AII*.nc")) # 16 original tiles + 4 new tiles assert len(all_files) == 20 # these tiles should be the right-most edge of the first image - first_right_edge_files = [x for x in first_files if 'P02' in x or 'P04' in x or 'V02' in x or 'V04' in x] + first_right_edge_files = [x for x in first_files if "P02" in x or "P04" in x or "V02" in x or "V04" in x] for new_file in first_right_edge_files: orig_file = new_file.replace(second_base_dir, first_base_dir) orig_nc = xr.open_dataset(orig_file) - orig_data = orig_nc['data'].values + orig_data = orig_nc["data"].values if not np.isnan(orig_data).any(): # we only care about the tiles that had NaNs originally continue new_nc = xr.open_dataset(new_file) - new_data = new_nc['data'].values + new_data = new_nc["data"].values # there should be at least some areas of the file # that old data was present and hasn't been replaced np.testing.assert_allclose(orig_data[:, :20], new_data[:, :20]) @@ -369,17 +369,17 @@ def test_lettered_tiles_sector_ref(self, tmp_path): area_def = _get_test_area(shape=(2000, 1000), extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) - w.save_datasets([ds], sector_id='LCC', source_name="TESTS", + w.save_datasets([ds], sector_id="LCC", source_name="TESTS", lettered_grid=True, use_sector_reference=True, use_end_time=True) - all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) + all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert len(all_files) == 16 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - expected_start = (START_TIME + timedelta(minutes=20)).strftime('%Y-%m-%dT%H:%M:%S') - assert masked_ds.attrs['start_date_time'] == expected_start + expected_start = (START_TIME + timedelta(minutes=20)).strftime("%Y-%m-%dT%H:%M:%S") + assert masked_ds.attrs["start_date_time"] == expected_start def test_lettered_tiles_no_fit(self, tmp_path): """Test creating a lettered grid with no data overlapping the grid.""" @@ -389,9 +389,9 @@ def test_lettered_tiles_no_fit(self, tmp_path): area_def = _get_test_area(shape=(2000, 1000), extents=(4000000., 5000000., 5000000., 6000000.)) ds = _get_test_lcc_data(data, area_def) - w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) + w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) # No files created - all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) + all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert not all_files def test_lettered_tiles_no_valid_data(self, tmp_path): @@ -402,9 +402,9 @@ def test_lettered_tiles_no_valid_data(self, tmp_path): area_def = _get_test_area(shape=(2000, 1000), extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) - w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) + w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) # No files created - all NaNs should result in no 
tiles being created - all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) + all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert not all_files def test_lettered_tiles_bad_filename(self, tmp_path): @@ -417,8 +417,8 @@ def test_lettered_tiles_bad_filename(self, tmp_path): ds = _get_test_lcc_data(data, area_def) with pytest.raises(KeyError): w.save_datasets([ds], - sector_id='LCC', - source_name='TESTS', + sector_id="LCC", + source_name="TESTS", tile_count=(3, 3), lettered_grid=True) @@ -429,17 +429,17 @@ def test_basic_numbered_tiles_rgb(self, tmp_path): data = da.from_array(np.linspace(0., 1., 60000, dtype=np.float32).reshape((3, 200, 100)), chunks=50) area_def = _get_test_area() ds = _get_test_lcc_data(data, area_def) - ds = ds.rename(dict((old, new) for old, new in zip(ds.dims, ['bands', 'y', 'x']))) - ds.coords['bands'] = ['R', 'G', 'B'] + ds = ds.rename(dict((old, new) for old, new in zip(ds.dims, ["bands", "y", "x"]))) + ds.coords["bands"] = ["R", "G", "B"] - w.save_datasets([ds], sector_id='TEST', source_name="TESTS", tile_count=(3, 3)) - chan_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*test_ds_R*.nc')) + w.save_datasets([ds], sector_id="TEST", source_name="TESTS", tile_count=(3, 3)) + chan_files = glob(os.path.join(str(tmp_path), "TESTS_AII*test_ds_R*.nc")) all_files = chan_files[:] assert len(chan_files) == 9 - chan_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*test_ds_G*.nc')) + chan_files = glob(os.path.join(str(tmp_path), "TESTS_AII*test_ds_G*.nc")) all_files.extend(chan_files) assert len(chan_files) == 9 - chan_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*test_ds_B*.nc')) + chan_files = glob(os.path.join(str(tmp_path), "TESTS_AII*test_ds_B*.nc")) assert len(chan_files) == 9 all_files.extend(chan_files) for fn in all_files: @@ -449,54 +449,54 @@ def test_basic_numbered_tiles_rgb(self, tmp_path): @pytest.mark.parametrize( "sector", - ['C', - 'F'] + ["C", + "F"] ) @pytest.mark.parametrize( "extra_kwargs", [ {}, - {'environment_prefix': 'AA'}, - {'environment_prefix': 'BB', 'filename': '{environment_prefix}_{name}_GLM_T{tile_number:04d}.nc'}, + {"environment_prefix": "AA"}, + {"environment_prefix": "BB", "filename": "{environment_prefix}_{name}_GLM_T{tile_number:04d}.nc"}, ] ) def test_multivar_numbered_tiles_glm(self, sector, extra_kwargs, tmp_path): """Test creating a tiles with multiple variables.""" from satpy.writers.awips_tiled import AWIPSTiledWriter - os.environ['ORGANIZATION'] = '1' * 50 + os.environ["ORGANIZATION"] = "1" * 50 w = AWIPSTiledWriter(base_dir=tmp_path, compress=True) data = _get_test_data() area_def = _get_test_area() ds1 = _get_test_lcc_data(data, area_def) ds1.attrs.update( dict( - name='total_energy', - platform_name='GOES-17', - sensor='SENSOR', - units='1', - scan_mode='M3', + name="total_energy", + platform_name="GOES-17", + sensor="SENSOR", + units="1", + scan_mode="M3", scene_abbr=sector, platform_shortname="G17" ) ) ds2 = ds1.copy() ds2.attrs.update({ - 'name': 'flash_extent_density', + "name": "flash_extent_density", }) ds3 = ds1.copy() ds3.attrs.update({ - 'name': 'average_flash_area', + "name": "average_flash_area", }) dqf = ds1.copy() dqf = (dqf * 255).astype(np.uint8) dqf.attrs = ds1.attrs.copy() dqf.attrs.update({ - 'name': 'DQF', - '_FillValue': 1, + "name": "DQF", + "_FillValue": 1, }) - w.save_datasets([ds1, ds2, ds3, dqf], sector_id='TEST', source_name="TESTS", - tile_count=(3, 3), template='glm_l2_rad{}'.format(sector.lower()), + w.save_datasets([ds1, ds2, ds3, dqf], sector_id="TEST", 
source_name="TESTS", + tile_count=(3, 3), template="glm_l2_rad{}".format(sector.lower()), **extra_kwargs) fn_glob = self._get_glm_glob_filename(extra_kwargs) all_files = glob(os.path.join(str(tmp_path), fn_glob)) @@ -505,15 +505,15 @@ def test_multivar_numbered_tiles_glm(self, sector, extra_kwargs, tmp_path): unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - if sector == 'C': - assert masked_ds.attrs['time_coverage_end'] == END_TIME.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + if sector == "C": + assert masked_ds.attrs["time_coverage_end"] == END_TIME.strftime("%Y-%m-%dT%H:%M:%S.%fZ") else: # 'F' - assert masked_ds.attrs['time_coverage_end'] == END_TIME.strftime('%Y-%m-%dT%H:%M:%SZ') + assert masked_ds.attrs["time_coverage_end"] == END_TIME.strftime("%Y-%m-%dT%H:%M:%SZ") @staticmethod def _get_glm_glob_filename(extra_kwargs): - if 'filename' in extra_kwargs: - return 'BB*_GLM*.nc' - elif 'environment_prefix' in extra_kwargs: - return 'AA*_GLM*.nc' - return 'DR*_GLM*.nc' + if "filename" in extra_kwargs: + return "BB*_GLM*.nc" + elif "environment_prefix" in extra_kwargs: + return "AA*_GLM*.nc" + return "DR*_GLM*.nc" diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index a325cb9cc8..6a51a71b36 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -139,11 +139,11 @@ def test_preprocess_dataarray_name(): from satpy.writers.cf_writer import _preprocess_dataarray_name scn = Scene() - scn['1'] = xr.DataArray([1, 2, 3]) - dataarray = scn['1'] + scn["1"] = xr.DataArray([1, 2, 3]) + dataarray = scn["1"] # If numeric_name_prefix is a string, test add the original_name attributes out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="TEST", include_orig_name=True) - assert out_da.attrs['original_name'] == '1' + assert out_da.attrs["original_name"] == "1" # If numeric_name_prefix is empty string, False or None, test do not add original_name attributes out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="", include_orig_name=True) @@ -163,16 +163,16 @@ def test_add_time_cf_attrs(): scn = Scene() test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) - times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01', - '2018-05-30T10:05:02', '2018-05-30T10:05:03'], dtype=np.datetime64) - scn['test-array'] = xr.DataArray(test_array, - dims=['y', 'x'], - coords={'time': ('y', times)}, + times = np.array(["2018-05-30T10:05:00", "2018-05-30T10:05:01", + "2018-05-30T10:05:02", "2018-05-30T10:05:03"], dtype=np.datetime64) + scn["test-array"] = xr.DataArray(test_array, + dims=["y", "x"], + coords={"time": ("y", times)}, attrs=dict(start_time=times[0], end_time=times[-1])) - ds = scn['test-array'].to_dataset(name='test-array') + ds = scn["test-array"].to_dataset(name="test-array") ds = add_time_bounds_dimension(ds) assert "bnds_1d" in ds.dims - assert ds.dims['bnds_1d'] == 2 + assert ds.dims["bnds_1d"] == 2 assert "time_bnds" in list(ds.data_vars) assert "bounds" in ds["time"].attrs assert "standard_name" in ds["time"].attrs @@ -194,23 +194,23 @@ def test_init(self): from satpy.writers import configs_for_writer from satpy.writers.cf_writer import CFWriter - CFWriter(config_files=list(configs_for_writer('cf'))[0]) + CFWriter(config_files=list(configs_for_writer("cf"))[0]) def test_save_array(self): """Test saving an array to netcdf/cf.""" scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = 
datetime(2018, 5, 30, 10, 15) - scn['test-array'] = xr.DataArray([1, 2, 3], + scn["test-array"] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time, - prerequisites=[make_dsq(name='hej')])) + prerequisites=[make_dsq(name="hej")])) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: - np.testing.assert_array_equal(f['test-array'][:], [1, 2, 3]) + np.testing.assert_array_equal(f["test-array"][:], [1, 2, 3]) expected_prereq = ("DataQuery(name='hej')") - assert f['test-array'].attrs['prerequisites'] == expected_prereq + assert f["test-array"].attrs["prerequisites"] == expected_prereq def test_save_array_coords(self): """Test saving array with coordinates.""" @@ -218,69 +218,69 @@ def test_save_array_coords(self): start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) coords = { - 'x': np.arange(3), - 'y': np.arange(1), + "x": np.arange(3), + "y": np.arange(1), } if CRS is not None: - proj_str = ('+proj=geos +lon_0=-95.0 +h=35786023.0 ' - '+a=6378137.0 +b=6356752.31414 +sweep=x ' - '+units=m +no_defs') - coords['crs'] = CRS.from_string(proj_str) - scn['test-array'] = xr.DataArray([[1, 2, 3]], - dims=('y', 'x'), + proj_str = ("+proj=geos +lon_0=-95.0 +h=35786023.0 " + "+a=6378137.0 +b=6356752.31414 +sweep=x " + "+units=m +no_defs") + coords["crs"] = CRS.from_string(proj_str) + scn["test-array"] = xr.DataArray([[1, 2, 3]], + dims=("y", "x"), coords=coords, attrs=dict(start_time=start_time, end_time=end_time, - prerequisites=[make_dsq(name='hej')])) + prerequisites=[make_dsq(name="hej")])) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: - np.testing.assert_array_equal(f['test-array'][:], [[1, 2, 3]]) - np.testing.assert_array_equal(f['x'][:], [0, 1, 2]) - np.testing.assert_array_equal(f['y'][:], [0]) - assert 'crs' not in f - assert '_FillValue' not in f['x'].attrs - assert '_FillValue' not in f['y'].attrs + np.testing.assert_array_equal(f["test-array"][:], [[1, 2, 3]]) + np.testing.assert_array_equal(f["x"][:], [0, 1, 2]) + np.testing.assert_array_equal(f["y"][:], [0]) + assert "crs" not in f + assert "_FillValue" not in f["x"].attrs + assert "_FillValue" not in f["y"].attrs expected_prereq = ("DataQuery(name='hej')") - assert f['test-array'].attrs['prerequisites'] == expected_prereq + assert f["test-array"].attrs["prerequisites"] == expected_prereq def test_save_dataset_a_digit(self): """Test saving an array to netcdf/cf where dataset name starting with a digit.""" scn = Scene() - scn['1'] = xr.DataArray([1, 2, 3]) + scn["1"] = xr.DataArray([1, 2, 3]) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: - np.testing.assert_array_equal(f['CHANNEL_1'][:], [1, 2, 3]) + np.testing.assert_array_equal(f["CHANNEL_1"][:], [1, 2, 3]) def test_save_dataset_a_digit_prefix(self): """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix.""" scn = Scene() - scn['1'] = xr.DataArray([1, 2, 3]) + scn["1"] = xr.DataArray([1, 2, 3]) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', numeric_name_prefix='TEST') + scn.save_datasets(filename=filename, writer="cf", numeric_name_prefix="TEST") with xr.open_dataset(filename) as f: - 
np.testing.assert_array_equal(f['TEST1'][:], [1, 2, 3]) + np.testing.assert_array_equal(f["TEST1"][:], [1, 2, 3]) def test_save_dataset_a_digit_prefix_include_attr(self): """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix include orig name.""" scn = Scene() - scn['1'] = xr.DataArray([1, 2, 3]) + scn["1"] = xr.DataArray([1, 2, 3]) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', include_orig_name=True, numeric_name_prefix='TEST') + scn.save_datasets(filename=filename, writer="cf", include_orig_name=True, numeric_name_prefix="TEST") with xr.open_dataset(filename) as f: - np.testing.assert_array_equal(f['TEST1'][:], [1, 2, 3]) - assert f['TEST1'].attrs['original_name'] == '1' + np.testing.assert_array_equal(f["TEST1"][:], [1, 2, 3]) + assert f["TEST1"].attrs["original_name"] == "1" def test_save_dataset_a_digit_no_prefix_include_attr(self): """Test saving an array to netcdf/cf dataset name starting with a digit with no prefix include orig name.""" scn = Scene() - scn['1'] = xr.DataArray([1, 2, 3]) + scn["1"] = xr.DataArray([1, 2, 3]) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', include_orig_name=True, numeric_name_prefix='') + scn.save_datasets(filename=filename, writer="cf", include_orig_name=True, numeric_name_prefix="") with xr.open_dataset(filename) as f: - np.testing.assert_array_equal(f['1'][:], [1, 2, 3]) - assert 'original_name' not in f['1'].attrs + np.testing.assert_array_equal(f["1"][:], [1, 2, 3]) + assert "original_name" not in f["1"].attrs def test_ancillary_variables(self): """Test ancillary_variables cited each other.""" @@ -291,16 +291,16 @@ def test_ancillary_variables(self): da = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time, - prerequisites=[make_dataid(name='hej')])) - scn['test-array-1'] = da - scn['test-array-2'] = da.copy() - scn['test-array-1'].attrs['ancillary_variables'] = [scn['test-array-2']] - scn['test-array-2'].attrs['ancillary_variables'] = [scn['test-array-1']] + prerequisites=[make_dataid(name="hej")])) + scn["test-array-1"] = da + scn["test-array-2"] = da.copy() + scn["test-array-1"].attrs["ancillary_variables"] = [scn["test-array-2"]] + scn["test-array-2"].attrs["ancillary_variables"] = [scn["test-array-1"]] with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: - assert f['test-array-1'].attrs['ancillary_variables'] == 'test-array-2' - assert f['test-array-2'].attrs['ancillary_variables'] == 'test-array-1' + assert f["test-array-1"].attrs["ancillary_variables"] == "test-array-2" + assert f["test-array-2"].attrs["ancillary_variables"] == "test-array-1" def test_groups(self): """Test creating a file with groups.""" @@ -319,34 +319,34 @@ def test_groups(self): time_hrv = [1, 2, 3] scn = Scene() - scn['VIS006'] = xr.DataArray(data_visir, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_vis006)}, - attrs={'name': 'VIS006', 'start_time': tstart, 'end_time': tend}) - scn['IR_108'] = xr.DataArray(data_visir, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_ir_108)}, - attrs={'name': 'IR_108', 'start_time': tstart, 'end_time': tend}) - scn['HRV'] = xr.DataArray(data_hrv, - dims=('y', 'x'), - coords={'y': y_hrv, 'x': x_hrv, 'acq_time': ('y', time_hrv)}, - attrs={'name': 'HRV', 'start_time': tstart, 'end_time': tend}) + scn["VIS006"] = 
xr.DataArray(data_visir, + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_vis006)}, + attrs={"name": "VIS006", "start_time": tstart, "end_time": tend}) + scn["IR_108"] = xr.DataArray(data_visir, + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_ir_108)}, + attrs={"name": "IR_108", "start_time": tstart, "end_time": tend}) + scn["HRV"] = xr.DataArray(data_hrv, + dims=("y", "x"), + coords={"y": y_hrv, "x": x_hrv, "acq_time": ("y", time_hrv)}, + attrs={"name": "HRV", "start_time": tstart, "end_time": tend}) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', groups={'visir': ['IR_108', 'VIS006'], 'hrv': ['HRV']}, + scn.save_datasets(filename=filename, writer="cf", groups={"visir": ["IR_108", "VIS006"], "hrv": ["HRV"]}, pretty=True) nc_root = xr.open_dataset(filename) - assert 'history' in nc_root.attrs + assert "history" in nc_root.attrs assert set(nc_root.variables.keys()) == set() - nc_visir = xr.open_dataset(filename, group='visir') - nc_hrv = xr.open_dataset(filename, group='hrv') - assert set(nc_visir.variables.keys()) == {'VIS006', 'IR_108', - 'y', 'x', 'VIS006_acq_time', 'IR_108_acq_time'} - assert set(nc_hrv.variables.keys()) == {'HRV', 'y', 'x', 'acq_time'} - for tst, ref in zip([nc_visir['VIS006'], nc_visir['IR_108'], nc_hrv['HRV']], - [scn['VIS006'], scn['IR_108'], scn['HRV']]): + nc_visir = xr.open_dataset(filename, group="visir") + nc_hrv = xr.open_dataset(filename, group="hrv") + assert set(nc_visir.variables.keys()) == {"VIS006", "IR_108", + "y", "x", "VIS006_acq_time", "IR_108_acq_time"} + assert set(nc_hrv.variables.keys()) == {"HRV", "y", "x", "acq_time"} + for tst, ref in zip([nc_visir["VIS006"], nc_visir["IR_108"], nc_hrv["HRV"]], + [scn["VIS006"], scn["IR_108"], scn["HRV"]]): np.testing.assert_array_equal(tst.data, ref.data) nc_root.close() nc_visir.close() @@ -355,7 +355,7 @@ def test_groups(self): # Different projection coordinates in one group are not supported with TempFile() as filename: with pytest.raises(ValueError): - scn.save_datasets(datasets=['VIS006', 'HRV'], filename=filename, writer='cf') + scn.save_datasets(datasets=["VIS006", "HRV"], filename=filename, writer="cf") def test_single_time_value(self): """Test setting a single time value.""" @@ -363,32 +363,32 @@ def test_single_time_value(self): start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]) - scn['test-array'] = xr.DataArray(test_array, - dims=['x', 'y'], - coords={'time': np.datetime64('2018-05-30T10:05:00')}, + scn["test-array"] = xr.DataArray(test_array, + dims=["x", "y"], + coords={"time": np.datetime64("2018-05-30T10:05:00")}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename, decode_cf=True) as f: - np.testing.assert_array_equal(f['time'], scn['test-array']['time']) - bounds_exp = np.array([[start_time, end_time]], dtype='datetime64[m]') - np.testing.assert_array_equal(f['time_bnds'], bounds_exp) + np.testing.assert_array_equal(f["time"], scn["test-array"]["time"]) + bounds_exp = np.array([[start_time, end_time]], dtype="datetime64[m]") + np.testing.assert_array_equal(f["time_bnds"], bounds_exp) def test_time_coordinate_on_a_swath(self): """Test that time dimension is not added on swath data with time already as a coordinate.""" scn = Scene() test_array = np.array([[1, 2], [3, 
4], [5, 6], [7, 8]]) - times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01', - '2018-05-30T10:05:02', '2018-05-30T10:05:03'], dtype=np.datetime64) - scn['test-array'] = xr.DataArray(test_array, - dims=['y', 'x'], - coords={'time': ('y', times)}, + times = np.array(["2018-05-30T10:05:00", "2018-05-30T10:05:01", + "2018-05-30T10:05:02", "2018-05-30T10:05:03"], dtype=np.datetime64) + scn["test-array"] = xr.DataArray(test_array, + dims=["y", "x"], + coords={"time": ("y", times)}, attrs=dict(start_time=times[0], end_time=times[-1])) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', pretty=True) + scn.save_datasets(filename=filename, writer="cf", pretty=True) with xr.open_dataset(filename, decode_cf=True) as f: - np.testing.assert_array_equal(f['time'], scn['test-array']['time']) + np.testing.assert_array_equal(f["time"], scn["test-array"]["time"]) def test_bounds(self): """Test setting time bounds.""" @@ -396,30 +396,30 @@ def test_bounds(self): start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) - scn['test-array'] = xr.DataArray(test_array, - dims=['x', 'y', 'time'], - coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, + scn["test-array"] = xr.DataArray(test_array, + dims=["x", "y", "time"], + coords={"time": [np.datetime64("2018-05-30T10:05:00")]}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") # Check decoded time coordinates & bounds with xr.open_dataset(filename, decode_cf=True) as f: - bounds_exp = np.array([[start_time, end_time]], dtype='datetime64[m]') - np.testing.assert_array_equal(f['time_bnds'], bounds_exp) - assert f['time'].attrs['bounds'] == 'time_bnds' + bounds_exp = np.array([[start_time, end_time]], dtype="datetime64[m]") + np.testing.assert_array_equal(f["time_bnds"], bounds_exp) + assert f["time"].attrs["bounds"] == "time_bnds" # Check raw time coordinates & bounds with xr.open_dataset(filename, decode_cf=False) as f: - np.testing.assert_almost_equal(f['time_bnds'], [[-0.0034722, 0.0069444]]) + np.testing.assert_almost_equal(f["time_bnds"], [[-0.0034722, 0.0069444]]) # User-specified time encoding should have preference with TempFile() as filename: - time_units = 'seconds since 2018-01-01' - scn.save_datasets(filename=filename, encoding={'time': {'units': time_units}}, - writer='cf') + time_units = "seconds since 2018-01-01" + scn.save_datasets(filename=filename, encoding={"time": {"units": time_units}}, + writer="cf") with xr.open_dataset(filename, decode_cf=False) as f: - np.testing.assert_array_equal(f['time_bnds'], [[12909600, 12910500]]) + np.testing.assert_array_equal(f["time_bnds"], [[12909600, 12910500]]) def test_bounds_minimum(self): """Test minimum bounds.""" @@ -430,21 +430,21 @@ def test_bounds_minimum(self): end_timeB = datetime(2018, 5, 30, 10, 15) # expected to be used test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) - scn['test-arrayA'] = xr.DataArray(test_arrayA, - dims=['x', 'y', 'time'], - coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, + scn["test-arrayA"] = xr.DataArray(test_arrayA, + dims=["x", "y", "time"], + coords={"time": [np.datetime64("2018-05-30T10:05:00")]}, attrs=dict(start_time=start_timeA, end_time=end_timeA)) - scn['test-arrayB'] = xr.DataArray(test_arrayB, - dims=['x', 'y', 'time'], - 
coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, + scn["test-arrayB"] = xr.DataArray(test_arrayB, + dims=["x", "y", "time"], + coords={"time": [np.datetime64("2018-05-30T10:05:00")]}, attrs=dict(start_time=start_timeB, end_time=end_timeB)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename, decode_cf=True) as f: - bounds_exp = np.array([[start_timeA, end_timeB]], dtype='datetime64[m]') - np.testing.assert_array_equal(f['time_bnds'], bounds_exp) + bounds_exp = np.array([[start_timeA, end_timeB]], dtype="datetime64[m]") + np.testing.assert_array_equal(f["time_bnds"], bounds_exp) def test_bounds_missing_time_info(self): """Test time bounds generation in case of missing time.""" @@ -453,19 +453,19 @@ def test_bounds_missing_time_info(self): end_timeA = datetime(2018, 5, 30, 10, 15) test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) - scn['test-arrayA'] = xr.DataArray(test_arrayA, - dims=['x', 'y', 'time'], - coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, + scn["test-arrayA"] = xr.DataArray(test_arrayA, + dims=["x", "y", "time"], + coords={"time": [np.datetime64("2018-05-30T10:05:00")]}, attrs=dict(start_time=start_timeA, end_time=end_timeA)) - scn['test-arrayB'] = xr.DataArray(test_arrayB, - dims=['x', 'y', 'time'], - coords={'time': [np.datetime64('2018-05-30T10:05:00')]}) + scn["test-arrayB"] = xr.DataArray(test_arrayB, + dims=["x", "y", "time"], + coords={"time": [np.datetime64("2018-05-30T10:05:00")]}) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename, decode_cf=True) as f: - bounds_exp = np.array([[start_timeA, end_timeA]], dtype='datetime64[m]') - np.testing.assert_array_equal(f['time_bnds'], bounds_exp) + bounds_exp = np.array([[start_timeA, end_timeA]], dtype="datetime64[m]") + np.testing.assert_array_equal(f["time_bnds"], bounds_exp) def test_unlimited_dims_kwarg(self): """Test specification of unlimited dimensions.""" @@ -473,51 +473,51 @@ def test_unlimited_dims_kwarg(self): start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]) - scn['test-array'] = xr.DataArray(test_array, - dims=['x', 'y'], - coords={'time': np.datetime64('2018-05-30T10:05:00')}, + scn["test-array"] = xr.DataArray(test_array, + dims=["x", "y"], + coords={"time": np.datetime64("2018-05-30T10:05:00")}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', unlimited_dims=['time']) + scn.save_datasets(filename=filename, writer="cf", unlimited_dims=["time"]) with xr.open_dataset(filename) as f: - assert set(f.encoding['unlimited_dims']) == {'time'} + assert set(f.encoding["unlimited_dims"]) == {"time"} def test_header_attrs(self): """Check global attributes are set.""" scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) - scn['test-array'] = xr.DataArray([1, 2, 3], + scn["test-array"] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: - header_attrs = {'sensor': 'SEVIRI', - 'orbit': 99999, - 'none': None, - 'list': [1, 2, 3], - 'set': {1, 2, 3}, - 'dict': {'a': 1, 'b': 2}, - 'nested': {'outer': {'inner1': 1, 'inner2': 2}}, - 'bool': True, - 'bool_': np.bool_(True)} + 
header_attrs = {"sensor": "SEVIRI", + "orbit": 99999, + "none": None, + "list": [1, 2, 3], + "set": {1, 2, 3}, + "dict": {"a": 1, "b": 2}, + "nested": {"outer": {"inner1": 1, "inner2": 2}}, + "bool": True, + "bool_": np.bool_(True)} scn.save_datasets(filename=filename, header_attrs=header_attrs, flatten_attrs=True, - writer='cf') + writer="cf") with xr.open_dataset(filename) as f: - assert 'history' in f.attrs - assert f.attrs['sensor'] == 'SEVIRI' - assert f.attrs['orbit'] == 99999 - np.testing.assert_array_equal(f.attrs['list'], [1, 2, 3]) - assert f.attrs['set'] == '{1, 2, 3}' - assert f.attrs['dict_a'] == 1 - assert f.attrs['dict_b'] == 2 - assert f.attrs['nested_outer_inner1'] == 1 - assert f.attrs['nested_outer_inner2'] == 2 - assert f.attrs['bool'] == 'true' - assert f.attrs['bool_'] == 'true' - assert 'none' not in f.attrs.keys() + assert "history" in f.attrs + assert f.attrs["sensor"] == "SEVIRI" + assert f.attrs["orbit"] == 99999 + np.testing.assert_array_equal(f.attrs["list"], [1, 2, 3]) + assert f.attrs["set"] == "{1, 2, 3}" + assert f.attrs["dict_a"] == 1 + assert f.attrs["dict_b"] == 2 + assert f.attrs["nested_outer_inner1"] == 1 + assert f.attrs["nested_outer_inner2"] == 2 + assert f.attrs["bool"] == "true" + assert f.attrs["bool_"] == "true" + assert "none" not in f.attrs.keys() def get_test_attrs(self): """Create some dataset attributes for testing purpose. @@ -526,79 +526,79 @@ def get_test_attrs(self): Attributes, encoded attributes, encoded and flattened attributes """ - attrs = {'name': 'IR_108', - 'start_time': datetime(2018, 1, 1, 0), - 'end_time': datetime(2018, 1, 1, 0, 15), - 'int': 1, - 'float': 1.0, - 'none': None, # should be dropped - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': True, - 'numpy_void': np.void(0), - 'numpy_bytes': np.bytes_('test'), - 'numpy_string': np.string_('test'), - 'list': [1, 2, np.float64(3)], - 'nested_list': ["1", ["2", [3]]], - 'bool': True, - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': np.array([True, False, True]), - 'array_2d': np.array([[1, 2], [3, 4]]), - 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), - 'dict': {'a': 1, 'b': 2}, - 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, - 'raw_metadata': OrderedDict([ - ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), - ('flag', np.bool_(True)), - ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) + attrs = {"name": "IR_108", + "start_time": datetime(2018, 1, 1, 0), + "end_time": datetime(2018, 1, 1, 0, 15), + "int": 1, + "float": 1.0, + "none": None, # should be dropped + "numpy_int": np.uint8(1), + "numpy_float": np.float32(1), + "numpy_bool": True, + "numpy_void": np.void(0), + "numpy_bytes": np.bytes_("test"), + "numpy_string": np.string_("test"), + "list": [1, 2, np.float64(3)], + "nested_list": ["1", ["2", [3]]], + "bool": True, + "array": np.array([1, 2, 3], dtype="uint8"), + "array_bool": np.array([True, False, True]), + "array_2d": np.array([[1, 2], [3, 4]]), + "array_3d": np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), + "dict": {"a": 1, "b": 2}, + "nested_dict": {"l1": {"l2": {"l3": np.array([1, 2, 3], dtype="uint8")}}}, + "raw_metadata": OrderedDict([ + ("recarray", np.zeros(3, dtype=[("x", "i4"), ("y", "u1")])), + ("flag", np.bool_(True)), + ("dict", OrderedDict([("a", 1), ("b", np.array([1, 2, 3], dtype="uint8"))])) ])} - encoded = {'name': 'IR_108', - 'start_time': '2018-01-01 00:00:00', - 'end_time': '2018-01-01 00:15:00', - 'int': 1, - 'float': 1.0, 
- 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': 'true', - 'numpy_void': '[]', - 'numpy_bytes': 'test', - 'numpy_string': 'test', - 'list': [1, 2, np.float64(3)], - 'nested_list': '["1", ["2", [3]]]', - 'bool': 'true', - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': ['true', 'false', 'true'], - 'array_2d': '[[1, 2], [3, 4]]', - 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', - 'dict': '{"a": 1, "b": 2}', - 'nested_dict': '{"l1": {"l2": {"l3": [1, 2, 3]}}}', - 'raw_metadata': '{"recarray": [[0, 0], [0, 0], [0, 0]], ' + encoded = {"name": "IR_108", + "start_time": "2018-01-01 00:00:00", + "end_time": "2018-01-01 00:15:00", + "int": 1, + "float": 1.0, + "numpy_int": np.uint8(1), + "numpy_float": np.float32(1), + "numpy_bool": "true", + "numpy_void": "[]", + "numpy_bytes": "test", + "numpy_string": "test", + "list": [1, 2, np.float64(3)], + "nested_list": '["1", ["2", [3]]]', + "bool": "true", + "array": np.array([1, 2, 3], dtype="uint8"), + "array_bool": ["true", "false", "true"], + "array_2d": "[[1, 2], [3, 4]]", + "array_3d": "[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]", + "dict": '{"a": 1, "b": 2}', + "nested_dict": '{"l1": {"l2": {"l3": [1, 2, 3]}}}', + "raw_metadata": '{"recarray": [[0, 0], [0, 0], [0, 0]], ' '"flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}'} - encoded_flat = {'name': 'IR_108', - 'start_time': '2018-01-01 00:00:00', - 'end_time': '2018-01-01 00:15:00', - 'int': 1, - 'float': 1.0, - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': 'true', - 'numpy_void': '[]', - 'numpy_bytes': 'test', - 'numpy_string': 'test', - 'list': [1, 2, np.float64(3)], - 'nested_list': '["1", ["2", [3]]]', - 'bool': 'true', - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': ['true', 'false', 'true'], - 'array_2d': '[[1, 2], [3, 4]]', - 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', - 'dict_a': 1, - 'dict_b': 2, - 'nested_dict_l1_l2_l3': np.array([1, 2, 3], dtype='uint8'), - 'raw_metadata_recarray': '[[0, 0], [0, 0], [0, 0]]', - 'raw_metadata_flag': 'true', - 'raw_metadata_dict_a': 1, - 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} + encoded_flat = {"name": "IR_108", + "start_time": "2018-01-01 00:00:00", + "end_time": "2018-01-01 00:15:00", + "int": 1, + "float": 1.0, + "numpy_int": np.uint8(1), + "numpy_float": np.float32(1), + "numpy_bool": "true", + "numpy_void": "[]", + "numpy_bytes": "test", + "numpy_string": "test", + "list": [1, 2, np.float64(3)], + "nested_list": '["1", ["2", [3]]]', + "bool": "true", + "array": np.array([1, 2, 3], dtype="uint8"), + "array_bool": ["true", "false", "true"], + "array_2d": "[[1, 2], [3, 4]]", + "array_3d": "[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]", + "dict_a": 1, + "dict_b": 2, + "nested_dict_l1_l2_l3": np.array([1, 2, 3], dtype="uint8"), + "raw_metadata_recarray": "[[0, 0], [0, 0], [0, 0]]", + "raw_metadata_flag": "true", + "raw_metadata_dict_a": 1, + "raw_metadata_dict_b": np.array([1, 2, 3], dtype="uint8")} return attrs, encoded, encoded_flat def assertDictWithArraysEqual(self, d1, d2): @@ -626,13 +626,13 @@ def test_encode_attrs_nc(self): self.assertDictWithArraysEqual(expected, encoded) # Test decoding of json-encoded attributes - raw_md_roundtrip = {'recarray': [[0, 0], [0, 0], [0, 0]], - 'flag': 'true', - 'dict': {'a': 1, 'b': [1, 2, 3]}} - assert json.loads(encoded['raw_metadata']) == raw_md_roundtrip - assert json.loads(encoded['array_3d']) == [[[1, 2], [3, 4]], [[1, 2], [3, 4]]] - assert json.loads(encoded['nested_dict']) == {"l1": {"l2": {"l3": [1, 2, 3]}}} 
- assert json.loads(encoded['nested_list']) == ["1", ["2", [3]]] + raw_md_roundtrip = {"recarray": [[0, 0], [0, 0], [0, 0]], + "flag": "true", + "dict": {"a": 1, "b": [1, 2, 3]}} + assert json.loads(encoded["raw_metadata"]) == raw_md_roundtrip + assert json.loads(encoded["array_3d"]) == [[[1, 2], [3, 4]], [[1, 2], [3, 4]]] + assert json.loads(encoded["nested_dict"]) == {"l1": {"l2": {"l3": [1, 2, 3]}}} + assert json.loads(encoded["nested_list"]) == ["1", ["2", [3]]] def test_da2cf(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" @@ -640,44 +640,44 @@ def test_da2cf(self): # Create set of test attributes attrs, attrs_expected, attrs_expected_flat = self.get_test_attrs() - attrs['area'] = 'some_area' - attrs['prerequisites'] = [make_dsq(name='hej')] - attrs['_satpy_id_name'] = 'myname' + attrs["area"] = "some_area" + attrs["prerequisites"] = [make_dsq(name="hej")] + attrs["_satpy_id_name"] = "myname" # Adjust expected attributes expected_prereq = ("DataQuery(name='hej')") - update = {'prerequisites': [expected_prereq], 'long_name': attrs['name']} + update = {"prerequisites": [expected_prereq], "long_name": attrs["name"]} attrs_expected.update(update) attrs_expected_flat.update(update) - attrs_expected.pop('name') - attrs_expected_flat.pop('name') + attrs_expected.pop("name") + attrs_expected_flat.pop("name") # Create test data array - arr = xr.DataArray(np.array([[1, 2], [3, 4]]), attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [1, 2], 'acq_time': ('y', [3, 4])}) + arr = xr.DataArray(np.array([[1, 2], [3, 4]]), attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [1, 2], "acq_time": ("y", [3, 4])}) # Test conversion to something cf-compliant res = CFWriter.da2cf(arr) - np.testing.assert_array_equal(res['x'], arr['x']) - np.testing.assert_array_equal(res['y'], arr['y']) - np.testing.assert_array_equal(res['acq_time'], arr['acq_time']) - assert res['x'].attrs == {'units': 'm', 'standard_name': 'projection_x_coordinate'} - assert res['y'].attrs == {'units': 'm', 'standard_name': 'projection_y_coordinate'} + np.testing.assert_array_equal(res["x"], arr["x"]) + np.testing.assert_array_equal(res["y"], arr["y"]) + np.testing.assert_array_equal(res["acq_time"], arr["acq_time"]) + assert res["x"].attrs == {"units": "m", "standard_name": "projection_x_coordinate"} + assert res["y"].attrs == {"units": "m", "standard_name": "projection_y_coordinate"} self.assertDictWithArraysEqual(res.attrs, attrs_expected) # Test attribute kwargs - res_flat = CFWriter.da2cf(arr, flatten_attrs=True, exclude_attrs=['int']) - attrs_expected_flat.pop('int') + res_flat = CFWriter.da2cf(arr, flatten_attrs=True, exclude_attrs=["int"]) + attrs_expected_flat.pop("int") self.assertDictWithArraysEqual(res_flat.attrs, attrs_expected_flat) def test_da2cf_one_dimensional_array(self): """Test the conversion of an 1d DataArray to a CF-compatible DataArray.""" from satpy.writers.cf_writer import CFWriter - arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=('y',), - coords={'y': [0, 1, 2, 3], 'acq_time': ('y', [0, 1, 2, 3])}) + arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=("y",), + coords={"y": [0, 1, 2, 3], "acq_time": ("y", [0, 1, 2, 3])}) _ = CFWriter.da2cf(arr) def test_collect_cf_dataarrays(self): @@ -685,10 +685,10 @@ def test_collect_cf_dataarrays(self): from satpy.writers.cf_writer import _collect_cf_dataset geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': 35785831., 'a': 
6378169., 'b': 6356583.8}, + area_id="geos", + description="geos", + proj_id="geos", + projection={"proj": "geos", "h": 35785831., "a": 6378169., "b": 6356583.8}, width=2, height=2, area_extent=[-1, -1, 1, 1]) @@ -699,38 +699,38 @@ def test_collect_cf_dataarrays(self): time = [1, 2] tstart = datetime(2019, 4, 1, 12, 0) tend = datetime(2019, 4, 1, 12, 15) - list_dataarrays = [xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, - attrs={'name': 'var1', 'start_time': tstart, 'end_time': tend, 'area': geos}), - xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, - attrs={'name': 'var2', 'long_name': 'variable 2'})] + list_dataarrays = [xr.DataArray(data=data, dims=("y", "x"), coords={"y": y, "x": x, "acq_time": ("y", time)}, + attrs={"name": "var1", "start_time": tstart, "end_time": tend, "area": geos}), + xr.DataArray(data=data, dims=("y", "x"), coords={"y": y, "x": x, "acq_time": ("y", time)}, + attrs={"name": "var2", "long_name": "variable 2"})] # Collect datasets ds = _collect_cf_dataset(list_dataarrays, include_lonlats=True) # Test results assert len(ds.keys()) == 3 - assert set(ds.keys()) == {'var1', 'var2', 'geos'} + assert set(ds.keys()) == {"var1", "var2", "geos"} - da_var1 = ds['var1'] - da_var2 = ds['var2'] - assert da_var1.name == 'var1' - assert da_var1.attrs['grid_mapping'] == 'geos' - assert da_var1.attrs['long_name'] == 'var1' + da_var1 = ds["var1"] + da_var2 = ds["var2"] + assert da_var1.name == "var1" + assert da_var1.attrs["grid_mapping"] == "geos" + assert da_var1.attrs["long_name"] == "var1" # variable 2 - assert 'grid_mapping' not in da_var2.attrs - assert da_var2.attrs['long_name'] == 'variable 2' + assert "grid_mapping" not in da_var2.attrs + assert da_var2.attrs["long_name"] == "variable 2" def test_assert_xy_unique(self): """Test that the x and y coordinates are unique.""" from satpy.writers.cf_writer import assert_xy_unique dummy = [[1, 2], [3, 4]] - datas = {'a': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), - 'b': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), - 'n': xr.DataArray(data=dummy, dims=('v', 'w'), coords={'v': [1, 2], 'w': [3, 4]})} + datas = {"a": xr.DataArray(data=dummy, dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}), + "b": xr.DataArray(data=dummy, dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}), + "n": xr.DataArray(data=dummy, dims=("v", "w"), coords={"v": [1, 2], "w": [3, 4]})} assert_xy_unique(datas) - datas['c'] = xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 3], 'x': [3, 4]}) + datas["c"] = xr.DataArray(data=dummy, dims=("y", "x"), coords={"y": [1, 3], "x": [3, 4]}) with pytest.raises(ValueError): assert_xy_unique(datas) @@ -743,31 +743,31 @@ def test_link_coords(self): lon2 = np.zeros((1, 2, 2)) lat = np.ones((2, 2)) datasets = { - 'var1': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon lat'}), - 'var2': xr.DataArray(data=data, dims=('y', 'x')), - 'var3': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon2 lat'}), - 'var4': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'not_exist lon lat'}), - 'lon': xr.DataArray(data=lon, dims=('y', 'x')), - 'lon2': xr.DataArray(data=lon2, dims=('time', 'y', 'x')), - 'lat': xr.DataArray(data=lat, dims=('y', 'x')) + "var1": xr.DataArray(data=data, dims=("y", "x"), attrs={"coordinates": "lon lat"}), + "var2": xr.DataArray(data=data, dims=("y", "x")), + "var3": xr.DataArray(data=data, dims=("y", "x"), 
attrs={"coordinates": "lon2 lat"}), + "var4": xr.DataArray(data=data, dims=("y", "x"), attrs={"coordinates": "not_exist lon lat"}), + "lon": xr.DataArray(data=lon, dims=("y", "x")), + "lon2": xr.DataArray(data=lon2, dims=("time", "y", "x")), + "lat": xr.DataArray(data=lat, dims=("y", "x")) } link_coords(datasets) # Check that link has been established correctly and 'coordinate' atrribute has been dropped - assert 'lon' in datasets['var1'].coords - assert 'lat' in datasets['var1'].coords - np.testing.assert_array_equal(datasets['var1']['lon'].data, lon) - np.testing.assert_array_equal(datasets['var1']['lat'].data, lat) - assert 'coordinates' not in datasets['var1'].attrs + assert "lon" in datasets["var1"].coords + assert "lat" in datasets["var1"].coords + np.testing.assert_array_equal(datasets["var1"]["lon"].data, lon) + np.testing.assert_array_equal(datasets["var1"]["lat"].data, lat) + assert "coordinates" not in datasets["var1"].attrs # There should be no link if there was no 'coordinate' attribute - assert 'lon' not in datasets['var2'].coords - assert 'lat' not in datasets['var2'].coords + assert "lon" not in datasets["var2"].coords + assert "lat" not in datasets["var2"].coords # The non-existent dimension or coordinate should be dropped - assert 'time' not in datasets['var3'].coords - assert 'not_exist' not in datasets['var4'].coords + assert "time" not in datasets["var3"].coords + assert "not_exist" not in datasets["var4"].coords def test_make_alt_coords_unique(self): """Test that created coordinate variables are unique.""" @@ -778,87 +778,87 @@ def test_make_alt_coords_unique(self): x = [1, 2] time1 = [1, 2] time2 = [3, 4] - datasets = {'var1': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x, 'acq_time': ('y', time1)}), - 'var2': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x, 'acq_time': ('y', time2)})} + datasets = {"var1": xr.DataArray(data=data, + dims=("y", "x"), + coords={"y": y, "x": x, "acq_time": ("y", time1)}), + "var2": xr.DataArray(data=data, + dims=("y", "x"), + coords={"y": y, "x": x, "acq_time": ("y", time2)})} # Test that dataset names are prepended to alternative coordinates res = make_alt_coords_unique(datasets) - np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) - np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) - assert 'acq_time' not in res['var1'].coords - assert 'acq_time' not in res['var2'].coords + np.testing.assert_array_equal(res["var1"]["var1_acq_time"], time1) + np.testing.assert_array_equal(res["var2"]["var2_acq_time"], time2) + assert "acq_time" not in res["var1"].coords + assert "acq_time" not in res["var2"].coords # Make sure nothing else is modified - np.testing.assert_array_equal(res['var1']['x'], x) - np.testing.assert_array_equal(res['var1']['y'], y) - np.testing.assert_array_equal(res['var2']['x'], x) - np.testing.assert_array_equal(res['var2']['y'], y) + np.testing.assert_array_equal(res["var1"]["x"], x) + np.testing.assert_array_equal(res["var1"]["y"], y) + np.testing.assert_array_equal(res["var2"]["x"], x) + np.testing.assert_array_equal(res["var2"]["y"], y) # Coords not unique -> Dataset names must be prepended, even if pretty=True with pytest.warns(UserWarning, match='Cannot pretty-format "acq_time"'): res = make_alt_coords_unique(datasets, pretty=True) - np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) - np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) - assert 'acq_time' not in res['var1'].coords - assert 'acq_time' not in 
res['var2'].coords + np.testing.assert_array_equal(res["var1"]["var1_acq_time"], time1) + np.testing.assert_array_equal(res["var2"]["var2_acq_time"], time2) + assert "acq_time" not in res["var1"].coords + assert "acq_time" not in res["var2"].coords # Coords unique and pretty=True -> Don't modify coordinate names - datasets['var2']['acq_time'] = ('y', time1) + datasets["var2"]["acq_time"] = ("y", time1) res = make_alt_coords_unique(datasets, pretty=True) - np.testing.assert_array_equal(res['var1']['acq_time'], time1) - np.testing.assert_array_equal(res['var2']['acq_time'], time1) - assert 'var1_acq_time' not in res['var1'].coords - assert 'var2_acq_time' not in res['var2'].coords + np.testing.assert_array_equal(res["var1"]["acq_time"], time1) + np.testing.assert_array_equal(res["var2"]["acq_time"], time1) + assert "var1_acq_time" not in res["var1"].coords + assert "var2_acq_time" not in res["var2"].coords def test_area2cf(self): """Test the conversion of an area to CF standards.""" from satpy.writers.cf_writer import area2cf - ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, - attrs={'name': 'var1'}) + ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}, + attrs={"name": "var1"}) # a) Area Definition and strict=False geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, + area_id="geos", + description="geos", + proj_id="geos", + projection={"proj": "geos", "h": 35785831., "a": 6378169., "b": 6356583.8}, width=2, height=2, area_extent=[-1, -1, 1, 1]) ds = ds_base.copy(deep=True) - ds.attrs['area'] = geos + ds.attrs["area"] = geos res = area2cf(ds, include_lonlats=False) assert len(res) == 2 assert res[0].size == 1 # grid mapping variable - assert res[0].name == res[1].attrs['grid_mapping'] + assert res[0].name == res[1].attrs["grid_mapping"] # b) Area Definition and include_lonlats=False ds = ds_base.copy(deep=True) - ds.attrs['area'] = geos + ds.attrs["area"] = geos res = area2cf(ds, include_lonlats=True) # same as above assert len(res) == 2 assert res[0].size == 1 # grid mapping variable - assert res[0].name == res[1].attrs['grid_mapping'] + assert res[0].name == res[1].attrs["grid_mapping"] # but now also have the lon/lats - assert 'longitude' in res[1].coords - assert 'latitude' in res[1].coords + assert "longitude" in res[1].coords + assert "latitude" in res[1].coords # c) Swath Definition swath = pyresample.geometry.SwathDefinition(lons=[[1, 1], [2, 2]], lats=[[1, 2], [1, 2]]) ds = ds_base.copy(deep=True) - ds.attrs['area'] = swath + ds.attrs["area"] = swath res = area2cf(ds, include_lonlats=False) assert len(res) == 1 - assert 'longitude' in res[0].coords - assert 'latitude' in res[0].coords - assert 'grid_mapping' not in res[0].attrs + assert "longitude" in res[0].coords + assert "latitude" in res[0].coords + assert "grid_mapping" not in res[0].attrs def test__add_grid_mapping(self): """Test the conversion from pyresample area object to CF grid mapping.""" @@ -873,175 +873,175 @@ def _gm_matches(gmapping, expected): else: np.testing.assert_almost_equal(test_val, attr_val, decimal=3) - ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, - attrs={'name': 'var1'}) + ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}, + attrs={"name": "var1"}) # a) Projection has a corresponding CF 
representation (e.g. geos) a = 6378169. b = 6356583.8 h = 35785831. geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': h, 'a': a, 'b': b, - 'lat_0': 0, 'lon_0': 0}, + area_id="geos", + description="geos", + proj_id="geos", + projection={"proj": "geos", "h": h, "a": a, "b": b, + "lat_0": 0, "lon_0": 0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) geos_expected = xr.DataArray(data=0, - attrs={'perspective_point_height': h, - 'latitude_of_projection_origin': 0, - 'longitude_of_projection_origin': 0, - 'grid_mapping_name': 'geostationary', - 'semi_major_axis': a, - 'semi_minor_axis': b, + attrs={"perspective_point_height": h, + "latitude_of_projection_origin": 0, + "longitude_of_projection_origin": 0, + "grid_mapping_name": "geostationary", + "semi_major_axis": a, + "semi_minor_axis": b, # 'sweep_angle_axis': None, }) ds = ds_base.copy() - ds.attrs['area'] = geos + ds.attrs["area"] = geos new_ds, grid_mapping = _add_grid_mapping(ds) - if 'sweep_angle_axis' in grid_mapping.attrs: + if "sweep_angle_axis" in grid_mapping.attrs: # older versions of pyproj might not include this - assert grid_mapping.attrs['sweep_angle_axis'] == 'y' + assert grid_mapping.attrs["sweep_angle_axis"] == "y" - assert new_ds.attrs['grid_mapping'] == 'geos' + assert new_ds.attrs["grid_mapping"] == "geos" _gm_matches(grid_mapping, geos_expected) # should not have been modified - assert 'grid_mapping' not in ds.attrs + assert "grid_mapping" not in ds.attrs # b) Projection does not have a corresponding CF representation (COSMO) cosmo7 = pyresample.geometry.AreaDefinition( - area_id='cosmo7', - description='cosmo7', - proj_id='cosmo7', - projection={'proj': 'ob_tran', 'ellps': 'WGS84', 'lat_0': 46, 'lon_0': 4.535, - 'o_proj': 'stere', 'o_lat_p': 90, 'o_lon_p': -5.465}, + area_id="cosmo7", + description="cosmo7", + proj_id="cosmo7", + projection={"proj": "ob_tran", "ellps": "WGS84", "lat_0": 46, "lon_0": 4.535, + "o_proj": "stere", "o_lat_p": 90, "o_lon_p": -5.465}, width=597, height=510, area_extent=[-1812933, -1003565, 814056, 1243448] ) ds = ds_base.copy() - ds.attrs['area'] = cosmo7 + ds.attrs["area"] = cosmo7 new_ds, grid_mapping = _add_grid_mapping(ds) - assert 'crs_wkt' in grid_mapping.attrs - wkt = grid_mapping.attrs['crs_wkt'] + assert "crs_wkt" in grid_mapping.attrs + wkt = grid_mapping.attrs["crs_wkt"] assert 'ELLIPSOID["WGS 84"' in wkt assert 'PARAMETER["lat_0",46' in wkt assert 'PARAMETER["lon_0",4.535' in wkt assert 'PARAMETER["o_lat_p",90' in wkt assert 'PARAMETER["o_lon_p",-5.465' in wkt - assert new_ds.attrs['grid_mapping'] == 'cosmo7' + assert new_ds.attrs["grid_mapping"] == "cosmo7" # c) Projection Transverse Mercator lat_0 = 36.5 lon_0 = 15.0 tmerc = pyresample.geometry.AreaDefinition( - area_id='tmerc', - description='tmerc', - proj_id='tmerc', - projection={'proj': 'tmerc', 'ellps': 'WGS84', 'lat_0': 36.5, 'lon_0': 15.0}, + area_id="tmerc", + description="tmerc", + proj_id="tmerc", + projection={"proj": "tmerc", "ellps": "WGS84", "lat_0": 36.5, "lon_0": 15.0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) tmerc_expected = xr.DataArray(data=0, - attrs={'latitude_of_projection_origin': lat_0, - 'longitude_of_central_meridian': lon_0, - 'grid_mapping_name': 'transverse_mercator', - 'reference_ellipsoid_name': 'WGS 84', - 'false_easting': 0., - 'false_northing': 0., + attrs={"latitude_of_projection_origin": lat_0, + "longitude_of_central_meridian": lon_0, + "grid_mapping_name": "transverse_mercator", + 
"reference_ellipsoid_name": "WGS 84", + "false_easting": 0., + "false_northing": 0., }) ds = ds_base.copy() - ds.attrs['area'] = tmerc + ds.attrs["area"] = tmerc new_ds, grid_mapping = _add_grid_mapping(ds) - assert new_ds.attrs['grid_mapping'] == 'tmerc' + assert new_ds.attrs["grid_mapping"] == "tmerc" _gm_matches(grid_mapping, tmerc_expected) # d) Projection that has a representation but no explicit a/b h = 35785831. geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': h, 'datum': 'WGS84', 'ellps': 'GRS80', - 'lat_0': 0, 'lon_0': 0}, + area_id="geos", + description="geos", + proj_id="geos", + projection={"proj": "geos", "h": h, "datum": "WGS84", "ellps": "GRS80", + "lat_0": 0, "lon_0": 0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) geos_expected = xr.DataArray(data=0, - attrs={'perspective_point_height': h, - 'latitude_of_projection_origin': 0, - 'longitude_of_projection_origin': 0, - 'grid_mapping_name': 'geostationary', + attrs={"perspective_point_height": h, + "latitude_of_projection_origin": 0, + "longitude_of_projection_origin": 0, + "grid_mapping_name": "geostationary", # 'semi_major_axis': 6378137.0, # 'semi_minor_axis': 6356752.314, # 'sweep_angle_axis': None, }) ds = ds_base.copy() - ds.attrs['area'] = geos + ds.attrs["area"] = geos new_ds, grid_mapping = _add_grid_mapping(ds) - assert new_ds.attrs['grid_mapping'] == 'geos' + assert new_ds.attrs["grid_mapping"] == "geos" _gm_matches(grid_mapping, geos_expected) # e) oblique Mercator area = pyresample.geometry.AreaDefinition( - area_id='omerc_otf', - description='On-the-fly omerc area', - proj_id='omerc', - projection={'alpha': '9.02638777018478', 'ellps': 'WGS84', 'gamma': '0', 'k': '1', - 'lat_0': '-0.256794486098476', 'lonc': '13.7888658224205', - 'proj': 'omerc', 'units': 'm'}, + area_id="omerc_otf", + description="On-the-fly omerc area", + proj_id="omerc", + projection={"alpha": "9.02638777018478", "ellps": "WGS84", "gamma": "0", "k": "1", + "lat_0": "-0.256794486098476", "lonc": "13.7888658224205", + "proj": "omerc", "units": "m"}, width=2837, height=5940, area_extent=[-1460463.0893, 3455291.3877, 1538407.1158, 9615788.8787] ) - omerc_dict = {'azimuth_of_central_line': 9.02638777018478, - 'false_easting': 0., - 'false_northing': 0., + omerc_dict = {"azimuth_of_central_line": 9.02638777018478, + "false_easting": 0., + "false_northing": 0., # 'gamma': 0, # this is not CF compliant - 'grid_mapping_name': "oblique_mercator", - 'latitude_of_projection_origin': -0.256794486098476, - 'longitude_of_projection_origin': 13.7888658224205, + "grid_mapping_name": "oblique_mercator", + "latitude_of_projection_origin": -0.256794486098476, + "longitude_of_projection_origin": 13.7888658224205, # 'prime_meridian_name': "Greenwich", - 'reference_ellipsoid_name': "WGS 84"} + "reference_ellipsoid_name": "WGS 84"} omerc_expected = xr.DataArray(data=0, attrs=omerc_dict) ds = ds_base.copy() - ds.attrs['area'] = area + ds.attrs["area"] = area new_ds, grid_mapping = _add_grid_mapping(ds) - assert new_ds.attrs['grid_mapping'] == 'omerc_otf' + assert new_ds.attrs["grid_mapping"] == "omerc_otf" _gm_matches(grid_mapping, omerc_expected) # f) Projection that has a representation but no explicit a/b h = 35785831. 
geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': h, 'datum': 'WGS84', 'ellps': 'GRS80', - 'lat_0': 0, 'lon_0': 0}, + area_id="geos", + description="geos", + proj_id="geos", + projection={"proj": "geos", "h": h, "datum": "WGS84", "ellps": "GRS80", + "lat_0": 0, "lon_0": 0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) geos_expected = xr.DataArray(data=0, - attrs={'perspective_point_height': h, - 'latitude_of_projection_origin': 0, - 'longitude_of_projection_origin': 0, - 'grid_mapping_name': 'geostationary', - 'reference_ellipsoid_name': 'WGS 84', + attrs={"perspective_point_height": h, + "latitude_of_projection_origin": 0, + "longitude_of_projection_origin": 0, + "grid_mapping_name": "geostationary", + "reference_ellipsoid_name": "WGS 84", }) ds = ds_base.copy() - ds.attrs['area'] = geos + ds.attrs["area"] = geos new_ds, grid_mapping = _add_grid_mapping(ds) - assert new_ds.attrs['grid_mapping'] == 'geos' + assert new_ds.attrs["grid_mapping"] == "geos" _gm_matches(grid_mapping, geos_expected) def test_add_lonlat_coords(self): @@ -1049,50 +1049,50 @@ def test_add_lonlat_coords(self): from satpy.writers.cf_writer import add_lonlat_coords area = pyresample.geometry.AreaDefinition( - 'seviri', - 'Native SEVIRI grid', - 'geos', + "seviri", + "Native SEVIRI grid", + "geos", "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", 2, 2, [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] ) lons_ref, lats_ref = area.get_lonlats() - dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), attrs={'area': area}) + dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=("y", "x"), attrs={"area": area}) res = add_lonlat_coords(dataarray) # original should be unmodified - assert 'longitude' not in dataarray.coords - assert set(res.coords) == {'longitude', 'latitude'} - lat = res['latitude'] - lon = res['longitude'] + assert "longitude" not in dataarray.coords + assert set(res.coords) == {"longitude", "latitude"} + lat = res["latitude"] + lon = res["longitude"] np.testing.assert_array_equal(lat.data, lats_ref) np.testing.assert_array_equal(lon.data, lons_ref) - assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() - assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() + assert {"name": "latitude", "standard_name": "latitude", "units": "degrees_north"}.items() <= lat.attrs.items() + assert {"name": "longitude", "standard_name": "longitude", "units": "degrees_east"}.items() <= lon.attrs.items() area = pyresample.geometry.AreaDefinition( - 'seviri', - 'Native SEVIRI grid', - 'geos', + "seviri", + "Native SEVIRI grid", + "geos", "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", 10, 10, [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] ) lons_ref, lats_ref = area.get_lonlats() dataarray = xr.DataArray(data=da.from_array(np.arange(3 * 10 * 10).reshape(3, 10, 10), chunks=(1, 5, 5)), - dims=('bands', 'y', 'x'), attrs={'area': area}) + dims=("bands", "y", "x"), attrs={"area": area}) res = add_lonlat_coords(dataarray) # original should be unmodified - assert 'longitude' not in dataarray.coords - assert set(res.coords) == {'longitude', 'latitude'} - lat = res['latitude'] - lon = res['longitude'] + assert "longitude" not in dataarray.coords + assert set(res.coords) == {"longitude", "latitude"} + lat = res["latitude"] + lon = 
res["longitude"] np.testing.assert_array_equal(lat.data, lats_ref) np.testing.assert_array_equal(lon.data, lons_ref) - assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() - assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() + assert {"name": "latitude", "standard_name": "latitude", "units": "degrees_north"}.items() <= lat.attrs.items() + assert {"name": "longitude", "standard_name": "longitude", "units": "degrees_east"}.items() <= lon.attrs.items() def test_load_module_with_old_pyproj(self): """Test that cf_writer can still be loaded with pyproj 1.9.6.""" @@ -1100,50 +1100,50 @@ def test_load_module_with_old_pyproj(self): import sys import pyproj # noqa 401 - old_version = sys.modules['pyproj'].__version__ - sys.modules['pyproj'].__version__ = "1.9.6" + old_version = sys.modules["pyproj"].__version__ + sys.modules["pyproj"].__version__ = "1.9.6" try: - importlib.reload(sys.modules['satpy.writers.cf_writer']) + importlib.reload(sys.modules["satpy.writers.cf_writer"]) finally: # Tear down - sys.modules['pyproj'].__version__ = old_version - importlib.reload(sys.modules['satpy.writers.cf_writer']) + sys.modules["pyproj"].__version__ = old_version + importlib.reload(sys.modules["satpy.writers.cf_writer"]) def test_global_attr_default_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) - scn['test-array'] = xr.DataArray([[1, 2, 3]], - dims=('y', 'x'), + scn["test-array"] = xr.DataArray([[1, 2, 3]], + dims=("y", "x"), attrs=dict(start_time=start_time, end_time=end_time, - prerequisites=[make_dsq(name='hej')])) + prerequisites=[make_dsq(name="hej")])) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: - assert f.attrs['Conventions'] == 'CF-1.7' - assert 'Created by pytroll/satpy on' in f.attrs['history'] + assert f.attrs["Conventions"] == "CF-1.7" + assert "Created by pytroll/satpy on" in f.attrs["history"] def test_global_attr_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) - scn['test-array'] = xr.DataArray([[1, 2, 3]], - dims=('y', 'x'), + scn["test-array"] = xr.DataArray([[1, 2, 3]], + dims=("y", "x"), attrs=dict(start_time=start_time, end_time=end_time, - prerequisites=[make_dsq(name='hej')])) + prerequisites=[make_dsq(name="hej")])) header_attrs = {} - header_attrs['history'] = ('TEST add history',) - header_attrs['Conventions'] = 'CF-1.7, ACDD-1.3' + header_attrs["history"] = ("TEST add history",) + header_attrs["Conventions"] = "CF-1.7, ACDD-1.3" with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', header_attrs=header_attrs) + scn.save_datasets(filename=filename, writer="cf", header_attrs=header_attrs) with xr.open_dataset(filename) as f: - assert f.attrs['Conventions'] == 'CF-1.7, ACDD-1.3' - assert 'TEST add history\n' in f.attrs['history'] - assert 'Created by pytroll/satpy on' in f.attrs['history'] + assert f.attrs["Conventions"] == "CF-1.7, ACDD-1.3" + assert "TEST add history\n" in f.attrs["history"] + assert "Created by pytroll/satpy on" in f.attrs["history"] class TestCFWriterData: @@ -1156,49 +1156,49 @@ def datasets(self): y = [1, 2] x = [1, 
2] geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, + area_id="geos", + description="geos", + proj_id="geos", + projection={"proj": "geos", "h": 35785831., "a": 6378169., "b": 6356583.8}, width=2, height=2, area_extent=[-1, -1, 1, 1]) datasets = { - 'var1': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'var2': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'lat': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'lon': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x})} - datasets['lat'].attrs['standard_name'] = 'latitude' - datasets['var1'].attrs['standard_name'] = 'dummy' - datasets['var2'].attrs['standard_name'] = 'dummy' - datasets['var2'].attrs['area'] = geos - datasets['var1'].attrs['area'] = geos - datasets['lat'].attrs['name'] = 'lat' - datasets['var1'].attrs['name'] = 'var1' - datasets['var2'].attrs['name'] = 'var2' - datasets['lon'].attrs['name'] = 'lon' + "var1": xr.DataArray(data=data, + dims=("y", "x"), + coords={"y": y, "x": x}), + "var2": xr.DataArray(data=data, + dims=("y", "x"), + coords={"y": y, "x": x}), + "lat": xr.DataArray(data=data, + dims=("y", "x"), + coords={"y": y, "x": x}), + "lon": xr.DataArray(data=data, + dims=("y", "x"), + coords={"y": y, "x": x})} + datasets["lat"].attrs["standard_name"] = "latitude" + datasets["var1"].attrs["standard_name"] = "dummy" + datasets["var2"].attrs["standard_name"] = "dummy" + datasets["var2"].attrs["area"] = geos + datasets["var1"].attrs["area"] = geos + datasets["lat"].attrs["name"] = "lat" + datasets["var1"].attrs["name"] = "var1" + datasets["var2"].attrs["name"] = "var2" + datasets["lon"].attrs["name"] = "lon" return datasets def test_is_lon_or_lat_dataarray(self, datasets): """Test the is_lon_or_lat_dataarray function.""" from satpy.writers.cf_writer import is_lon_or_lat_dataarray - assert is_lon_or_lat_dataarray(datasets['lat']) - assert not is_lon_or_lat_dataarray(datasets['var1']) + assert is_lon_or_lat_dataarray(datasets["lat"]) + assert not is_lon_or_lat_dataarray(datasets["var1"]) def test_has_projection_coords(self, datasets): """Test the has_projection_coords function.""" from satpy.writers.cf_writer import has_projection_coords assert has_projection_coords(datasets) - datasets['lat'].attrs['standard_name'] = 'dummy' + datasets["lat"].attrs["standard_name"] = "dummy" assert not has_projection_coords(datasets) def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): @@ -1206,7 +1206,7 @@ def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): from satpy.writers.cf_writer import _collect_cf_dataset datasets_list = [datasets[key] for key in datasets.keys()] - datasets_list_no_latlon = [datasets[key] for key in ['var1', 'var2']] + datasets_list_no_latlon = [datasets[key] for key in ["var1", "var2"]] # Collect datasets ds = _collect_cf_dataset(datasets_list, include_lonlats=True) @@ -1214,13 +1214,13 @@ def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): # Test results assert len(ds.keys()) == 5 - assert set(ds.keys()) == {'var1', 'var2', 'lon', 'lat', 'geos'} + assert set(ds.keys()) == {"var1", "var2", "lon", "lat", "geos"} with pytest.raises(KeyError): - ds['var1'].attrs["latitude"] + ds["var1"].attrs["latitude"] with pytest.raises(KeyError): - ds['var1'].attrs["longitude"] - assert ds2['var1']['latitude'].attrs['name'] == 'latitude' - assert 
ds2['var1']['longitude'].attrs['name'] == 'longitude' + ds["var1"].attrs["longitude"] + assert ds2["var1"]["latitude"].attrs["name"] == "latitude" + assert ds2["var1"]["longitude"].attrs["name"] == "longitude" class EncodingUpdateTest: @@ -1229,21 +1229,21 @@ class EncodingUpdateTest: @pytest.fixture def fake_ds(self): """Create fake data for testing.""" - ds = xr.Dataset({'foo': (('y', 'x'), [[1, 2], [3, 4]]), - 'bar': (('y', 'x'), [[3, 4], [5, 6]])}, - coords={'y': [1, 2], - 'x': [3, 4], - 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) + ds = xr.Dataset({"foo": (("y", "x"), [[1, 2], [3, 4]]), + "bar": (("y", "x"), [[3, 4], [5, 6]])}, + coords={"y": [1, 2], + "x": [3, 4], + "lon": (("y", "x"), [[7, 8], [9, 10]])}) return ds @pytest.fixture def fake_ds_digit(self): """Create fake data for testing.""" - ds_digit = xr.Dataset({'CHANNEL_1': (('y', 'x'), [[1, 2], [3, 4]]), - 'CHANNEL_2': (('y', 'x'), [[3, 4], [5, 6]])}, - coords={'y': [1, 2], - 'x': [3, 4], - 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) + ds_digit = xr.Dataset({"CHANNEL_1": (("y", "x"), [[1, 2], [3, 4]]), + "CHANNEL_2": (("y", "x"), [[3, 4], [5, 6]])}, + coords={"y": [1, 2], + "x": [3, 4], + "lon": (("y", "x"), [[7, 8], [9, 10]])}) return ds_digit def test_dataset_name_digit(self, fake_ds_digit): @@ -1252,18 +1252,18 @@ def test_dataset_name_digit(self, fake_ds_digit): # Dataset with name staring with digit ds_digit = fake_ds_digit - kwargs = {'encoding': {'1': {'dtype': 'float32'}, - '2': {'dtype': 'float32'}}, - 'other': 'kwargs'} - enc, other_kwargs = update_encoding(ds_digit, kwargs, numeric_name_prefix='CHANNEL_') + kwargs = {"encoding": {"1": {"dtype": "float32"}, + "2": {"dtype": "float32"}}, + "other": "kwargs"} + enc, other_kwargs = update_encoding(ds_digit, kwargs, numeric_name_prefix="CHANNEL_") expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'CHANNEL_1': {'dtype': 'float32'}, - 'CHANNEL_2': {'dtype': 'float32'} + "y": {"_FillValue": None}, + "x": {"_FillValue": None}, + "CHANNEL_1": {"dtype": "float32"}, + "CHANNEL_2": {"dtype": "float32"} } assert enc == expected_dict - assert other_kwargs == {'other': 'kwargs'} + assert other_kwargs == {"other": "kwargs"} def test_without_time(self, fake_ds): """Test data with no time dimension.""" @@ -1271,29 +1271,29 @@ def test_without_time(self, fake_ds): # Without time dimension ds = fake_ds.chunk(2) - kwargs = {'encoding': {'bar': {'chunksizes': (1, 1)}}, - 'other': 'kwargs'} + kwargs = {"encoding": {"bar": {"chunksizes": (1, 1)}}, + "other": "kwargs"} enc, other_kwargs = update_encoding(ds, kwargs) expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'lon': {'chunksizes': (2, 2)}, - 'foo': {'chunksizes': (2, 2)}, - 'bar': {'chunksizes': (1, 1)} + "y": {"_FillValue": None}, + "x": {"_FillValue": None}, + "lon": {"chunksizes": (2, 2)}, + "foo": {"chunksizes": (2, 2)}, + "bar": {"chunksizes": (1, 1)} } assert enc == expected_dict - assert other_kwargs == {'other': 'kwargs'} + assert other_kwargs == {"other": "kwargs"} # Chunksize may not exceed shape ds = fake_ds.chunk(8) - kwargs = {'encoding': {}, 'other': 'kwargs'} + kwargs = {"encoding": {}, "other": "kwargs"} enc, other_kwargs = update_encoding(ds, kwargs) expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'lon': {'chunksizes': (2, 2)}, - 'foo': {'chunksizes': (2, 2)}, - 'bar': {'chunksizes': (2, 2)} + "y": {"_FillValue": None}, + "x": {"_FillValue": None}, + "lon": {"chunksizes": (2, 2)}, + "foo": {"chunksizes": (2, 2)}, + "bar": {"chunksizes": (2, 
2)} } assert enc == expected_dict @@ -1302,26 +1302,26 @@ def test_with_time(self, fake_ds): from satpy.writers.cf_writer import update_encoding # With time dimension - ds = fake_ds.chunk(8).expand_dims({'time': [datetime(2009, 7, 1, 12, 15)]}) - kwargs = {'encoding': {'bar': {'chunksizes': (1, 1, 1)}}, - 'other': 'kwargs'} + ds = fake_ds.chunk(8).expand_dims({"time": [datetime(2009, 7, 1, 12, 15)]}) + kwargs = {"encoding": {"bar": {"chunksizes": (1, 1, 1)}}, + "other": "kwargs"} enc, other_kwargs = update_encoding(ds, kwargs) expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'lon': {'chunksizes': (2, 2)}, - 'foo': {'chunksizes': (1, 2, 2)}, - 'bar': {'chunksizes': (1, 1, 1)}, - 'time': {'_FillValue': None, - 'calendar': 'proleptic_gregorian', - 'units': 'days since 2009-07-01 12:15:00'}, - 'time_bnds': {'_FillValue': None, - 'calendar': 'proleptic_gregorian', - 'units': 'days since 2009-07-01 12:15:00'} + "y": {"_FillValue": None}, + "x": {"_FillValue": None}, + "lon": {"chunksizes": (2, 2)}, + "foo": {"chunksizes": (1, 2, 2)}, + "bar": {"chunksizes": (1, 1, 1)}, + "time": {"_FillValue": None, + "calendar": "proleptic_gregorian", + "units": "days since 2009-07-01 12:15:00"}, + "time_bnds": {"_FillValue": None, + "calendar": "proleptic_gregorian", + "units": "days since 2009-07-01 12:15:00"} } assert enc == expected_dict # User-defined encoding may not be altered - assert kwargs['encoding'] == {'bar': {'chunksizes': (1, 1, 1)}} + assert kwargs["encoding"] == {"bar": {"chunksizes": (1, 1, 1)}} class TestEncodingKwarg: @@ -1335,7 +1335,7 @@ def scene(self): "start_time": datetime(2018, 5, 30, 10, 0), "end_time": datetime(2018, 5, 30, 10, 15) } - scn['test-array'] = xr.DataArray([1., 2, 3], attrs=attrs) + scn["test-array"] = xr.DataArray([1., 2, 3], attrs=attrs) return scn @pytest.fixture(params=[True, False]) @@ -1347,11 +1347,11 @@ def compression_on(self, request): def encoding(self, compression_on): """Get encoding.""" enc = { - 'test-array': { - 'dtype': 'int8', - 'scale_factor': 0.1, - 'add_offset': 0.0, - '_FillValue': 3, + "test-array": { + "dtype": "int8", + "scale_factor": 0.1, + "add_offset": 0.0, + "_FillValue": 3, } } if compression_on: @@ -1384,15 +1384,15 @@ def expected(self, complevel_exp): def test_encoding_kwarg(self, scene, encoding, filename, expected): """Test 'encoding' keyword argument.""" - scene.save_datasets(filename=filename, encoding=encoding, writer='cf') + scene.save_datasets(filename=filename, encoding=encoding, writer="cf") self._assert_encoding_as_expected(filename, expected) def _assert_encoding_as_expected(self, filename, expected): with xr.open_dataset(filename, mask_and_scale=False) as f: - np.testing.assert_array_equal(f['test-array'][:], expected["data"]) - assert f['test-array'].attrs['scale_factor'] == expected["scale_factor"] - assert f['test-array'].attrs['_FillValue'] == expected["fill_value"] - assert f['test-array'].dtype == expected["dtype"] + np.testing.assert_array_equal(f["test-array"][:], expected["data"]) + assert f["test-array"].attrs["scale_factor"] == expected["scale_factor"] + assert f["test-array"].attrs["_FillValue"] == expected["fill_value"] + assert f["test-array"].dtype == expected["dtype"] assert f["test-array"].encoding["complevel"] == expected["complevel"] def test_warning_if_backends_dont_match(self, scene, filename, monkeypatch): @@ -1427,7 +1427,7 @@ def scene_with_encoding(self, scene, encoding): def test_encoding_attribute(self, scene_with_encoding, filename, expected): """Test 'encoding' dataset 
attribute.""" - scene_with_encoding.save_datasets(filename=filename, writer='cf') + scene_with_encoding.save_datasets(filename=filename, writer="cf") self._assert_encoding_as_expected(filename, expected) diff --git a/satpy/tests/writer_tests/test_geotiff.py b/satpy/tests/writer_tests/test_geotiff.py index bb6afc0c21..74fcd43609 100644 --- a/satpy/tests/writer_tests/test_geotiff.py +++ b/satpy/tests/writer_tests/test_geotiff.py @@ -34,9 +34,9 @@ def _get_test_datasets_2d(): """Create a single 2D test dataset.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), "units": "K"} ) return [ds1] @@ -56,10 +56,10 @@ def _get_test_datasets_3d(): """Create a single 3D test dataset.""" ds1 = xr.DataArray( da.zeros((3, 100, 200), chunks=50), - dims=('bands', 'y', 'x'), - coords={'bands': ['R', 'G', 'B']}, - attrs={'name': 'test', - 'start_time': datetime.utcnow()} + dims=("bands", "y", "x"), + coords={"bands": ["R", "G", "B"]}, + attrs={"name": "test", + "start_time": datetime.utcnow()} ) return [ds1] @@ -103,7 +103,7 @@ def test_simple_delayed_write(self, tmp_path): assert isinstance(res[0][0], da.Array) da.store(res[0], res[1]) for target in res[1]: - if hasattr(target, 'close'): + if hasattr(target, "close"): target.close() def test_colormap_write(self, tmp_path): @@ -138,43 +138,43 @@ def test_dtype_for_enhance_false(self, tmp_path): from satpy.writers.geotiff import GeoTIFFWriter datasets = _get_test_datasets_2d() w = GeoTIFFWriter(base_dir=tmp_path, enhance=False) - with mock.patch('satpy.writers.XRImage.save') as save_method: + with mock.patch("satpy.writers.XRImage.save") as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) - assert save_method.call_args[1]['dtype'] == np.float64 + assert save_method.call_args[1]["dtype"] == np.float64 def test_dtype_for_enhance_false_and_given_dtype(self, tmp_path): """Test that dtype of dataset is used if enhance=False and dtype=uint8.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = _get_test_datasets_2d() w = GeoTIFFWriter(base_dir=tmp_path, enhance=False, dtype=np.uint8) - with mock.patch('satpy.writers.XRImage.save') as save_method: + with mock.patch("satpy.writers.XRImage.save") as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) - assert save_method.call_args[1]['dtype'] == np.uint8 + assert save_method.call_args[1]["dtype"] == np.uint8 def test_fill_value_from_config(self, tmp_path): """Test fill_value coming from the writer config.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = _get_test_datasets_2d() w = GeoTIFFWriter(base_dir=tmp_path) - w.info['fill_value'] = 128 - with mock.patch('satpy.writers.XRImage.save') as save_method: + w.info["fill_value"] = 128 + with mock.patch("satpy.writers.XRImage.save") as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) - assert save_method.call_args[1]['fill_value'] == 128 + assert save_method.call_args[1]["fill_value"] == 128 def test_tags(self, tmp_path): """Test tags being added.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = _get_test_datasets_2d() - w = GeoTIFFWriter(tags={'test1': 1}, base_dir=tmp_path) - w.info['fill_value'] = 128 - with mock.patch('satpy.writers.XRImage.save') as save_method: + w = GeoTIFFWriter(tags={"test1": 1}, base_dir=tmp_path) + w.info["fill_value"] = 128 + with 
mock.patch("satpy.writers.XRImage.save") as save_method: save_method.return_value = None - w.save_datasets(datasets, tags={'test2': 2}, compute=False) - called_tags = save_method.call_args[1]['tags'] - assert called_tags == {'test1': 1, 'test2': 2} + w.save_datasets(datasets, tags={"test2": 2}, compute=False) + called_tags = save_method.call_args[1]["tags"] + assert called_tags == {"test1": 1, "test2": 2} @pytest.mark.parametrize( "input_func", @@ -195,11 +195,11 @@ def test_scale_offset(self, input_func, save_kwargs, tmp_path): """Test tags being added.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = input_func() - w = GeoTIFFWriter(tags={'test1': 1}, base_dir=tmp_path) - w.info['fill_value'] = 128 - with mock.patch('satpy.writers.XRImage.save') as save_method: + w = GeoTIFFWriter(tags={"test1": 1}, base_dir=tmp_path) + w.info["fill_value"] = 128 + with mock.patch("satpy.writers.XRImage.save") as save_method: save_method.return_value = None - w.save_datasets(datasets, tags={'test2': 2}, compute=False, **save_kwargs) + w.save_datasets(datasets, tags={"test2": 2}, compute=False, **save_kwargs) kwarg_name = "include_scale_offset_tags" if "include_scale_offset" in save_kwargs else "scale_offset_tags" kwarg_value = save_method.call_args[1].get(kwarg_name) assert kwarg_value is not None @@ -209,10 +209,10 @@ def test_tiled_value_from_config(self, tmp_path): from satpy.writers.geotiff import GeoTIFFWriter datasets = _get_test_datasets_2d() w = GeoTIFFWriter(base_dir=tmp_path) - with mock.patch('satpy.writers.XRImage.save') as save_method: + with mock.patch("satpy.writers.XRImage.save") as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) - assert save_method.call_args[1]['tiled'] + assert save_method.call_args[1]["tiled"] def test_float_write_with_unit_conversion(self, tmp_path): """Test that geotiffs can be written as floats and convert units.""" diff --git a/satpy/tests/writer_tests/test_mitiff.py b/satpy/tests/writer_tests/test_mitiff.py index 6369cddc51..4e5c8b7c9c 100644 --- a/satpy/tests/writer_tests/test_mitiff.py +++ b/satpy/tests/writer_tests/test_mitiff.py @@ -55,11 +55,11 @@ def _get_test_datasets(self): from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -67,48 +67,48 @@ def _get_test_datasets(self): ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': '1', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'TEST_SENSOR_NAME', - 'area': area_def, - 'prerequisites': ['1'], - 'calibration': 'reflectance', - 'metadata_requirements': { - 'order': ['1'], - 'config': { - '1': {'alias': '1-VIS0.63', - 'calibration': 'reflectance', - 'min-val': '0', - 'max-val': '100'}, + dims=("y", "x"), + attrs={"name": "1", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "TEST_SENSOR_NAME", + "area": area_def, + "prerequisites": ["1"], + "calibration": "reflectance", + "metadata_requirements": { + "order": ["1"], + "config": { + "1": {"alias": "1-VIS0.63", + "calibration": "reflectance", + "min-val": "0", + "max-val": "100"}, }, - 'translate': {'1': '1', + "translate": {"1": "1", }, - 'file_pattern': '1_{start_time:%Y%m%d_%H%M%S}.mitiff' + "file_pattern": "1_{start_time:%Y%m%d_%H%M%S}.mitiff" }} ) ds2 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': '4', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'TEST_SENSOR_NAME', - 'area': area_def, - 'prerequisites': ['4'], - 'calibration': 'brightness_temperature', - 'metadata_requirements': { - 'order': ['4'], - 'config': { - '4': {'alias': '4-IR10.8', - 'calibration': 'brightness_temperature', - 'min-val': '-150', - 'max-val': '50'}, + dims=("y", "x"), + attrs={"name": "4", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "TEST_SENSOR_NAME", + "area": area_def, + "prerequisites": ["4"], + "calibration": "brightness_temperature", + "metadata_requirements": { + "order": ["4"], + "config": { + "4": {"alias": "4-IR10.8", + "calibration": "brightness_temperature", + "min-val": "-150", + "max-val": "50"}, }, - 'translate': {'4': '4', + "translate": {"4": "4", }, - 'file_pattern': '4_{start_time:%Y%m%d_%H%M%S}.mitiff'} + "file_pattern": "4_{start_time:%Y%m%d_%H%M%S}.mitiff"} } ) return [ds1, ds2] @@ -122,11 +122,11 @@ def _get_test_datasets_sensor_set(self): from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -134,48 +134,48 @@ def _get_test_datasets_sensor_set(self): ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': '1', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': {'TEST_SENSOR_NAME'}, - 'area': area_def, - 'prerequisites': ['1'], - 'calibration': 'reflectance', - 'metadata_requirements': { - 'order': ['1'], - 'config': { - '1': {'alias': '1-VIS0.63', - 'calibration': 'reflectance', - 'min-val': '0', - 'max-val': '100'}, + dims=("y", "x"), + attrs={"name": "1", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": {"TEST_SENSOR_NAME"}, + "area": area_def, + "prerequisites": ["1"], + "calibration": "reflectance", + "metadata_requirements": { + "order": ["1"], + "config": { + "1": {"alias": "1-VIS0.63", + "calibration": "reflectance", + "min-val": "0", + "max-val": "100"}, }, - 'translate': {'1': '1', + "translate": {"1": "1", }, - 'file_pattern': '1_{start_time:%Y%m%d_%H%M%S}.mitiff' + "file_pattern": "1_{start_time:%Y%m%d_%H%M%S}.mitiff" }} ) ds2 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': '4', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': {'TEST_SENSOR_NAME'}, - 'area': area_def, - 'prerequisites': ['4'], - 'calibration': 'brightness_temperature', - 'metadata_requirements': { - 'order': ['4'], - 'config': { - '4': {'alias': '4-IR10.8', - 'calibration': 'brightness_temperature', - 'min-val': '-150', - 'max-val': '50'}, + dims=("y", "x"), + attrs={"name": "4", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": {"TEST_SENSOR_NAME"}, + "area": area_def, + "prerequisites": ["4"], + "calibration": "brightness_temperature", + "metadata_requirements": { + "order": ["4"], + "config": { + "4": {"alias": "4-IR10.8", + "calibration": "brightness_temperature", + "min-val": "-150", + "max-val": "50"}, }, - 'translate': {'4': '4', + "translate": {"4": "4", }, - 'file_pattern': '4_{start_time:%Y%m%d_%H%M%S}.mitiff'} + "file_pattern": "4_{start_time:%Y%m%d_%H%M%S}.mitiff"} } ) return [ds1, ds2] @@ -189,11 +189,11 @@ def _get_test_dataset(self, bands=3): from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -201,13 +201,13 @@ def _get_test_dataset(self, bands=3): ds1 = xr.DataArray( da.zeros((bands, 100, 200), chunks=50), - dims=('bands', 'y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'TEST_SENSOR_NAME', - 'area': area_def, - 'prerequisites': ['1', '2', '3']} + dims=("bands", "y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "TEST_SENSOR_NAME", + "area": area_def, + "prerequisites": ["1", "2", "3"]} ) return ds1 @@ -220,11 +220,11 @@ def _get_test_one_dataset(self): from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=geos +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. h=36000. +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=geos +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. h=36000. +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -232,13 +232,13 @@ def _get_test_one_dataset(self): ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'avhrr', - 'area': area_def, - 'prerequisites': [10.8]} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "avhrr", + "area": area_def, + "prerequisites": [10.8]} ) return ds1 @@ -251,11 +251,11 @@ def _get_test_one_dataset_sensor_set(self): from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=geos +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. h=36000. +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=geos +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. h=36000. +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -263,13 +263,13 @@ def _get_test_one_dataset_sensor_set(self): ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': {'avhrr'}, - 'area': area_def, - 'prerequisites': [10.8]} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": {"avhrr"}, + "area": area_def, + "prerequisites": [10.8]} ) return ds1 @@ -281,11 +281,11 @@ def _get_test_dataset_with_bad_values(self, bands=3): from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -298,13 +298,13 @@ def _get_test_dataset_with_bad_values(self, bands=3): rgb_data = np.stack([data, data, data]) ds1 = xr.DataArray(rgb_data, - dims=('bands', 'y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'TEST_SENSOR_NAME', - 'area': area_def, - 'prerequisites': ['1', '2', '3']}) + dims=("bands", "y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "TEST_SENSOR_NAME", + "area": area_def, + "prerequisites": ["1", "2", "3"]}) return ds1 def _get_test_dataset_calibration(self, bands=6): @@ -319,93 +319,93 @@ def _get_test_dataset_calibration(self, bands=6): from satpy.scene import Scene from satpy.tests.utils import make_dsq area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), ) prereqs = [ - make_dsq(name='1', calibration='reflectance'), - make_dsq(name='2', calibration='reflectance'), - make_dsq(name='3', calibration='brightness_temperature'), - make_dsq(name='4', calibration='brightness_temperature'), - make_dsq(name='5', calibration='brightness_temperature'), - make_dsq(name='6', calibration='reflectance') + make_dsq(name="1", calibration="reflectance"), + make_dsq(name="2", calibration="reflectance"), + make_dsq(name="3", calibration="brightness_temperature"), + make_dsq(name="4", calibration="brightness_temperature"), + make_dsq(name="5", calibration="brightness_temperature"), + make_dsq(name="6", calibration="reflectance") ] scene = Scene() scene["1"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'reflectance'}) + dims=("y", "x"), + attrs={"calibration": "reflectance"}) scene["2"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'reflectance'}) + dims=("y", "x"), + attrs={"calibration": "reflectance"}) scene["3"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'brightness_temperature'}) + dims=("y", "x"), + attrs={"calibration": "brightness_temperature"}) scene["4"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'brightness_temperature'}) + dims=("y", "x"), + attrs={"calibration": "brightness_temperature"}) scene["5"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'brightness_temperature'}) + dims=("y", "x"), + attrs={"calibration": "brightness_temperature"}) scene["6"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'reflectance'}) + dims=("y", "x"), + attrs={"calibration": "reflectance"}) - data = xr.concat(scene, 'bands', coords='minimal') + data = xr.concat(scene, "bands", coords="minimal") bands = [] calibration = [] for p in scene: - calibration.append(p.attrs['calibration']) - bands.append(p.attrs['name']) - data['bands'] = list(bands) - new_attrs = {'name': 'datasets', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'test-sensor', - 'area': area_def, - 'prerequisites': prereqs, - 'metadata_requirements': { - 'order': ['1', '2', '3', '4', '5', '6'], - 'config': { - '1': 
{'alias': '1-VIS0.63', - 'calibration': 'reflectance', - 'min-val': '0', - 'max-val': '100'}, - '2': {'alias': '2-VIS0.86', - 'calibration': 'reflectance', - 'min-val': '0', - 'max-val': '100'}, - '3': {'alias': '3(3B)-IR3.7', - 'calibration': 'brightness_temperature', - 'min-val': '-150', - 'max-val': '50'}, - '4': {'alias': '4-IR10.8', - 'calibration': 'brightness_temperature', - 'min-val': '-150', - 'max-val': '50'}, - '5': {'alias': '5-IR11.5', - 'calibration': 'brightness_temperature', - 'min-val': '-150', - 'max-val': '50'}, - '6': {'alias': '6(3A)-VIS1.6', - 'calibration': 'reflectance', - 'min-val': '0', - 'max-val': '100'} + calibration.append(p.attrs["calibration"]) + bands.append(p.attrs["name"]) + data["bands"] = list(bands) + new_attrs = {"name": "datasets", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "test-sensor", + "area": area_def, + "prerequisites": prereqs, + "metadata_requirements": { + "order": ["1", "2", "3", "4", "5", "6"], + "config": { + "1": {"alias": "1-VIS0.63", + "calibration": "reflectance", + "min-val": "0", + "max-val": "100"}, + "2": {"alias": "2-VIS0.86", + "calibration": "reflectance", + "min-val": "0", + "max-val": "100"}, + "3": {"alias": "3(3B)-IR3.7", + "calibration": "brightness_temperature", + "min-val": "-150", + "max-val": "50"}, + "4": {"alias": "4-IR10.8", + "calibration": "brightness_temperature", + "min-val": "-150", + "max-val": "50"}, + "5": {"alias": "5-IR11.5", + "calibration": "brightness_temperature", + "min-val": "-150", + "max-val": "50"}, + "6": {"alias": "6(3A)-VIS1.6", + "calibration": "reflectance", + "min-val": "0", + "max-val": "100"} }, - 'translate': {'1': '1', - '2': '2', - '3': '3', - '4': '4', - '5': '5', - '6': '6' + "translate": {"1": "1", + "2": "2", + "3": "3", + "4": "4", + "5": "5", + "6": "6" }, - 'file_pattern': 'test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff' + "file_pattern": "test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff" } } ds1 = xr.DataArray(data=data.data, attrs=new_attrs, @@ -424,43 +424,43 @@ def _get_test_dataset_calibration_one_dataset(self, bands=1): from satpy.scene import Scene from satpy.tests.utils import make_dsq area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), ) - prereqs = [make_dsq(name='4', calibration='brightness_temperature')] + prereqs = [make_dsq(name="4", calibration="brightness_temperature")] scene = Scene() scene["4"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'brightness_temperature'}) + dims=("y", "x"), + attrs={"calibration": "brightness_temperature"}) - data = scene['4'] + data = scene["4"] calibration = [] for p in scene: - calibration.append(p.attrs['calibration']) - new_attrs = {'name': 'datasets', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'test-sensor', - 'area': area_def, - 'prerequisites': prereqs, - 'metadata_requirements': { - 'order': ['4'], - 'config': { - '4': {'alias': 'BT', - 'calibration': 'brightness_temperature', - 'min-val': '-150', - 'max-val': '50'}, + calibration.append(p.attrs["calibration"]) + new_attrs = {"name": "datasets", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "test-sensor", + "area": area_def, + "prerequisites": prereqs, + "metadata_requirements": { + "order": ["4"], + "config": { + "4": {"alias": "BT", + "calibration": "brightness_temperature", + "min-val": "-150", + "max-val": "50"}, }, - 'translate': {'4': '4', + "translate": {"4": "4", }, - 'file_pattern': 'test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff' + "file_pattern": "test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff" } } ds1 = xr.DataArray(data=data.data, attrs=new_attrs, @@ -478,11 +478,11 @@ def _get_test_dataset_three_bands_two_prereq(self, bands=3): from satpy.tests.utils import make_dsq area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -490,15 +490,15 @@ def _get_test_dataset_three_bands_two_prereq(self, bands=3): ds1 = xr.DataArray( da.zeros((bands, 100, 200), chunks=50), - coords=[['R', 'G', 'B'], list(range(100)), list(range(200))], - dims=('bands', 'y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'TEST_SENSOR_NAME', - 'area': area_def, - 'prerequisites': [make_dsq(name='1', calibration='reflectance'), - make_dsq(name='2', calibration='reflectance')]} + coords=[["R", "G", "B"], list(range(100)), list(range(200))], + dims=("bands", "y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "TEST_SENSOR_NAME", + "area": area_def, + "prerequisites": [make_dsq(name="1", calibration="reflectance"), + make_dsq(name="2", calibration="reflectance")]} ) return ds1 @@ -513,11 +513,11 @@ def _get_test_dataset_three_bands_prereq(self, bands=3): from satpy.tests.utils import make_dsq area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -525,15 +525,15 @@ def _get_test_dataset_three_bands_prereq(self, bands=3): ds1 = xr.DataArray( da.zeros((bands, 100, 200), chunks=50), - coords=[['R', 'G', 'B'], list(range(100)), list(range(200))], - dims=('bands', 'y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'TEST_SENSOR_NAME', - 'area': area_def, - 'prerequisites': [make_dsq(wavelength=0.6, modifiers=('sunz_corrected',)), - make_dsq(wavelength=0.8, modifiers=('sunz_corrected',)), + coords=[["R", "G", "B"], list(range(100)), list(range(200))], + dims=("bands", "y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "TEST_SENSOR_NAME", + "area": area_def, + "prerequisites": [make_dsq(wavelength=0.6, modifiers=("sunz_corrected",)), + make_dsq(wavelength=0.8, modifiers=("sunz_corrected",)), 10.8]}) return ds1 @@ -547,7 +547,7 @@ def _read_back_mitiff_and_check(self, filename, expected, test_shape=(100, 200)) def _imagedescription_from_mitiff(self, filename): pillow_tif = Image.open(filename) IMAGEDESCRIPTION = 270 - imgdesc = (pillow_tif.tag_v2.get(IMAGEDESCRIPTION)).split('\n') + imgdesc = (pillow_tif.tag_v2.get(IMAGEDESCRIPTION)).split("\n") return imgdesc def test_init(self): @@ -569,8 +569,8 @@ def test_save_datasets(self): dataset = self._get_test_datasets() w = MITIFFWriter(base_dir=self.base_dir) w.save_datasets(dataset) - filename = (dataset[0].attrs['metadata_requirements']['file_pattern']).format( - start_time=dataset[0].attrs['start_time']) + filename = (dataset[0].attrs["metadata_requirements"]["file_pattern"]).format( + start_time=dataset[0].attrs["start_time"]) self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_save_datasets_sensor_set(self): @@ -580,8 +580,8 @@ def test_save_datasets_sensor_set(self): dataset = self._get_test_datasets_sensor_set() w = MITIFFWriter(base_dir=self.base_dir) w.save_datasets(dataset) - filename = (dataset[0].attrs['metadata_requirements']['file_pattern']).format( - start_time=dataset[0].attrs['start_time']) + filename = (dataset[0].attrs["metadata_requirements"]["file_pattern"]).format( + start_time=dataset[0].attrs["start_time"]) self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_save_one_dataset(self): @@ -592,8 +592,8 @@ def test_save_one_dataset(self): w.save_dataset(dataset) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, os.listdir(self.base_dir)[0])) for key in imgdesc: - if 'In this file' in key: - self.assertEqual(key, ' Channels: 1 In this file: 1') + if "In this file" in key: + self.assertEqual(key, " Channels: 1 In this file: 1") def test_save_one_dataset_sensor_set(self): """Test basic writer operation with one dataset ie. 
no bands.""" @@ -603,8 +603,8 @@ def test_save_one_dataset_sensor_set(self): w.save_dataset(dataset) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, os.listdir(self.base_dir)[0])) for key in imgdesc: - if 'In this file' in key: - self.assertEqual(key, ' Channels: 1 In this file: 1') + if "In this file" in key: + self.assertEqual(key, " Channels: 1 In this file: 1") def test_save_dataset_with_calibration(self): """Test writer operation with calibration.""" @@ -613,159 +613,159 @@ def test_save_dataset_with_calibration(self): expected_ir = np.full((100, 200), 255) expected_vis = np.full((100, 200), 0) expected = np.stack([expected_vis, expected_vis, expected_ir, expected_ir, expected_ir, expected_vis]) - expected_key_channel = ['Table_calibration: 1-VIS0.63, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 1.57 ' - '1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 8.24 ' - '8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 14.12 ' - '14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 19.61 ' - '20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 25.10 ' - '25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 30.59 ' - '30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 36.08 ' - '36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 41.57 ' - '41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 47.06 ' - '47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 52.55 ' - '52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 58.04 ' - '58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 63.53 ' - '63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 69.02 ' - '69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 74.51 ' - '74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 80.00 ' - '80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 85.49 ' - '85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 90.98 ' - '91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 96.47 ' - '96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]', - 'Table_calibration: 2-VIS0.86, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 1.57 ' - '1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 8.24 ' - '8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 14.12 ' - '14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 19.61 ' - '20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 25.10 ' - '25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 30.59 ' - '30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 36.08 ' - '36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 41.57 ' - '41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 47.06 ' - '47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 52.55 ' - '52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 58.04 ' - '58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 63.53 ' - '63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 
67.06 67.45 67.84 68.24 68.63 69.02 ' - '69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 74.51 ' - '74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 80.00 ' - '80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 85.49 ' - '85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 90.98 ' - '91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 96.47 ' - '96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]', - u'Table_calibration: 3(3B)-IR3.7, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 ' - '45.29 44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 ' - '34.31 33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 ' - '23.33 22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 ' - '12.35 11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 ' - '-0.20 -0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 ' - '-11.18 -11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 ' - '-20.59 -21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 ' - '-30.00 -30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 ' - '-39.41 -40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 ' - '-48.82 -49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 ' - '-58.24 -59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 ' - '-67.65 -68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 ' - '-77.06 -77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 ' - '-86.47 -87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 ' - '-95.88 -96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 ' - '-104.51 -105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 ' - '-112.35 -113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 ' - '-120.20 -120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 ' - '-128.04 -128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 ' - '-135.88 -136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 ' - '-143.73 -144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', - u'Table_calibration: 4-IR10.8, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 ' - '45.29 ' - '44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 ' - '33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 ' - '22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 ' - '11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 ' - '-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 ' - '-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 ' - '-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 ' - '-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 ' - '-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 ' - '-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 ' - '-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 ' - 
'-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 ' - '-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 ' - '-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 ' - '-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 ' - '-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 ' - '-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 ' - '-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 ' - '-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 ' - '-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 ' - '-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', - u'Table_calibration: 5-IR11.5, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 ' - '45.29 ' - '44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 ' - '33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 ' - '22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 ' - '11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 ' - '-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 ' - '-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 ' - '-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 ' - '-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 ' - '-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 ' - '-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 ' - '-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 ' - '-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 ' - '-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 ' - '-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 ' - '-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 ' - '-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 ' - '-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 ' - '-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 ' - '-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 ' - '-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 ' - '-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', - 'Table_calibration: 6(3A)-VIS1.6, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 ' - '1.57 1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 ' - '8.24 8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 ' - '14.12 14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 ' - '19.61 20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 ' - '25.10 25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 ' - '30.59 30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 ' - '36.08 36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 ' - '41.57 41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 
45.49 45.88 46.27 46.67 ' - '47.06 47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 ' - '52.55 52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 ' - '58.04 58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 ' - '63.53 63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 ' - '69.02 69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 ' - '74.51 74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 ' - '80.00 80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 ' - '85.49 85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 ' - '90.98 91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 ' - '96.47 96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]'] + expected_key_channel = ["Table_calibration: 1-VIS0.63, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 1.57 " + "1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 8.24 " + "8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 14.12 " + "14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 19.61 " + "20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 25.10 " + "25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 30.59 " + "30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 36.08 " + "36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 41.57 " + "41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 47.06 " + "47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 52.55 " + "52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 58.04 " + "58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 63.53 " + "63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 69.02 " + "69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 74.51 " + "74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 80.00 " + "80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 85.49 " + "85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 90.98 " + "91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 96.47 " + "96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]", + "Table_calibration: 2-VIS0.86, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 1.57 " + "1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 8.24 " + "8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 14.12 " + "14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 19.61 " + "20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 25.10 " + "25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 30.59 " + "30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 36.08 " + "36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 41.57 " + "41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 47.06 " + "47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 52.55 " + "52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 
57.25 57.65 58.04 " + "58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 63.53 " + "63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 69.02 " + "69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 74.51 " + "74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 80.00 " + "80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 85.49 " + "85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 90.98 " + "91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 96.47 " + "96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]", + u"Table_calibration: 3(3B)-IR3.7, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 " + "45.29 44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 " + "34.31 33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 " + "23.33 22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 " + "12.35 11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 " + "-0.20 -0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 " + "-11.18 -11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 " + "-20.59 -21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 " + "-30.00 -30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 " + "-39.41 -40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 " + "-48.82 -49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 " + "-58.24 -59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 " + "-67.65 -68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 " + "-77.06 -77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 " + "-86.47 -87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 " + "-95.88 -96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 " + "-104.51 -105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 " + "-112.35 -113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 " + "-120.20 -120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 " + "-128.04 -128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 " + "-135.88 -136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 " + "-143.73 -144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]", + u"Table_calibration: 4-IR10.8, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 " + "45.29 " + "44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 " + "33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 " + "22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 " + "11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 " + "-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 " + "-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 " + "-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 " + "-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 " + "-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 " + "-49.61 -50.39 
-51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 " + "-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 " + "-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 " + "-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 " + "-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 " + "-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 " + "-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 " + "-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 " + "-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 " + "-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 " + "-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 " + "-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]", + u"Table_calibration: 5-IR11.5, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 " + "45.29 " + "44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 " + "33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 " + "22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 " + "11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 " + "-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 " + "-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 " + "-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 " + "-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 " + "-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 " + "-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 " + "-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 " + "-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 " + "-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 " + "-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 " + "-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 " + "-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 " + "-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 " + "-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 " + "-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 " + "-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 " + "-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]", + "Table_calibration: 6(3A)-VIS1.6, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 " + "1.57 1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 " + "8.24 8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 " + "14.12 14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 " + "19.61 20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 " + "25.10 25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 " + "30.59 30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 
35.29 35.69 " + "36.08 36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 " + "41.57 41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 " + "47.06 47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 " + "52.55 52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 " + "58.04 58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 " + "63.53 63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 " + "69.02 69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 " + "74.51 74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 " + "80.00 80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 " + "85.49 85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 " + "90.98 91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 " + "96.47 96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]"] dataset = self._get_test_dataset_calibration() - w = MITIFFWriter(filename=dataset.attrs['metadata_requirements']['file_pattern'], base_dir=self.base_dir) + w = MITIFFWriter(filename=dataset.attrs["metadata_requirements"]["file_pattern"], base_dir=self.base_dir) w.save_dataset(dataset) - filename = (dataset.attrs['metadata_requirements']['file_pattern']).format( - start_time=dataset.attrs['start_time']) + filename = (dataset.attrs["metadata_requirements"]["file_pattern"]).format( + start_time=dataset.attrs["start_time"]) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, filename)) found_table_calibration = False number_of_calibrations = 0 for key in imgdesc: - if 'Table_calibration' in key: + if "Table_calibration" in key: found_table_calibration = True - if '1-VIS0.63' in key: + if "1-VIS0.63" in key: self.assertEqual(key, expected_key_channel[0]) number_of_calibrations += 1 - elif '2-VIS0.86' in key: + elif "2-VIS0.86" in key: self.assertEqual(key, expected_key_channel[1]) number_of_calibrations += 1 - elif '3(3B)-IR3.7' in key: + elif "3(3B)-IR3.7" in key: self.assertEqual(key, expected_key_channel[2]) number_of_calibrations += 1 - elif '4-IR10.8' in key: + elif "4-IR10.8" in key: self.assertEqual(key, expected_key_channel[3]) number_of_calibrations += 1 - elif '5-IR11.5' in key: + elif "5-IR11.5" in key: self.assertEqual(key, expected_key_channel[4]) number_of_calibrations += 1 - elif '6(3A)-VIS1.6' in key: + elif "6(3A)-VIS1.6" in key: self.assertEqual(key, expected_key_channel[5]) number_of_calibrations += 1 else: @@ -781,42 +781,42 @@ def test_save_dataset_with_calibration_one_dataset(self): from satpy.writers.mitiff import MITIFFWriter expected = [np.full((100, 200), 255)] - expected_key_channel = [u'Table_calibration: BT, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 45.29 ' - '44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 ' - '33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 ' - '22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 ' - '11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 ' - '-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 ' - '-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 ' - '-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 ' - '-30.78 -31.57 -32.35 
-33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 ' - '-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 ' - '-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 ' - '-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 ' - '-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 ' - '-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 ' - '-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 ' - '-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 ' - '-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 ' - '-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 ' - '-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 ' - '-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 ' - '-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 ' - '-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', ] + expected_key_channel = [u"Table_calibration: BT, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 45.29 " + "44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 " + "33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 " + "22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 " + "11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 " + "-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 " + "-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 " + "-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 " + "-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 " + "-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 " + "-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 " + "-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 " + "-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 " + "-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 " + "-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 " + "-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 " + "-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 " + "-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 " + "-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 " + "-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 " + "-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 " + "-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]", ] dataset = self._get_test_dataset_calibration_one_dataset() - w = MITIFFWriter(filename=dataset.attrs['metadata_requirements']['file_pattern'], base_dir=self.base_dir) + w = MITIFFWriter(filename=dataset.attrs["metadata_requirements"]["file_pattern"], base_dir=self.base_dir) w.save_dataset(dataset) - filename = (dataset.attrs['metadata_requirements']['file_pattern']).format( - start_time=dataset.attrs['start_time']) + 
filename = (dataset.attrs["metadata_requirements"]["file_pattern"]).format( + start_time=dataset.attrs["start_time"]) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, filename)) found_table_calibration = False number_of_calibrations = 0 for key in imgdesc: - if 'Table_calibration' in key: + if "Table_calibration" in key: found_table_calibration = True - if 'BT' in key: + if "BT" in key: self.assertEqual(key, expected_key_channel[0]) number_of_calibrations += 1 self.assertTrue(found_table_calibration, "Expected table_calibration is not found in the imagedescription.") @@ -833,8 +833,8 @@ def test_save_dataset_with_bad_value(self): dataset = self._get_test_dataset_with_bad_values() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) - filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs['name'], - dataset.attrs['start_time']) + filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs["name"], + dataset.attrs["start_time"]) self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected, test_shape=(2, 5)) def test_convert_proj4_string(self): @@ -844,32 +844,32 @@ def test_convert_proj4_string(self): from pyresample.geometry import AreaDefinition from satpy.writers.mitiff import MITIFFWriter - checks = [{'epsg': '+init=EPSG:32631', - 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 ' - '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' - '+y_0=1515.000000\n')}, - {'epsg': '+init=EPSG:32632', - 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 ' - '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' - '+y_0=1515.000000\n')}, - {'epsg': '+init=EPSG:32633', - 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=15 +k=0.9996 ' - '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' - '+y_0=1515.000000\n')}, - {'epsg': '+init=EPSG:32634', - 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 ' - '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' - '+y_0=1515.000000\n')}, - {'epsg': '+init=EPSG:32635', - 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 ' - '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' - '+y_0=1515.000000\n')}] + checks = [{"epsg": "+init=EPSG:32631", + "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 " + "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " + "+y_0=1515.000000\n")}, + {"epsg": "+init=EPSG:32632", + "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 " + "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " + "+y_0=1515.000000\n")}, + {"epsg": "+init=EPSG:32633", + "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=15 +k=0.9996 " + "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " + "+y_0=1515.000000\n")}, + {"epsg": "+init=EPSG:32634", + "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 " + "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " + "+y_0=1515.000000\n")}, + {"epsg": "+init=EPSG:32635", + "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 " + "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " + "+y_0=1515.000000\n")}] for check in checks: area_def = AreaDefinition( - 'test', - 'test', - 'test', - check['epsg'], + "test", + "test", + "test", + check["epsg"], 100, 200, (-1000., -1500., 1000., 1500.), @@ -877,13 +877,13 @@ def test_convert_proj4_string(self): ds1 = xr.DataArray( da.zeros((10, 20), chunks=20), - dims=('y', 'x'), - attrs={'area': area_def} + dims=("y", "x"), + attrs={"area": 
area_def} ) - w = MITIFFWriter(filename='dummy.tif', base_dir=self.base_dir) + w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir) proj4_string = w._add_proj4_string(ds1, ds1) - self.assertEqual(proj4_string, check['proj4']) + self.assertEqual(proj4_string, check["proj4"]) def test_save_dataset_palette(self): """Test writer operation as palette.""" @@ -918,20 +918,20 @@ def test_save_dataset_palette(self): 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] color_map = (0, 1, 2, 3, 4, 5) - pal_desc = ['test', 'test2'] + pal_desc = ["test", "test2"] unit = "Test" dataset = self._get_test_one_dataset() - palette = {'palette': True, - 'palette_color_map': color_map, - 'palette_description': pal_desc, - 'palette_unit': unit, - 'palette_channel_name': dataset.attrs['name']} + palette = {"palette": True, + "palette_color_map": color_map, + "palette_description": pal_desc, + "palette_unit": unit, + "palette_channel_name": dataset.attrs["name"]} w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset, **palette) - filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs['name'], - dataset.attrs['start_time']) + filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs["name"], + dataset.attrs["start_time"]) pillow_tif = Image.open(os.path.join(self.base_dir, filename)) # Need to check PHOTOMETRIC is 3, ie palette self.assertEqual(pillow_tif.tag_v2.get(262), 3) @@ -958,14 +958,14 @@ def test_save_dataset_palette(self): unit_name = key unit_name_found = True found_color_info = False - elif 'COLOR INFO:' in key: + elif "COLOR INFO:" in key: found_color_info = True # Check the name of the palette description self.assertEqual(name_length, 2) # Check the name and unit name of the palette - self.assertEqual(unit_name, ' Test') + self.assertEqual(unit_name, " Test") # Check the palette description of the palette - self.assertEqual(names, [' test', ' test2']) + self.assertEqual(names, [" test", " test2"]) self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_simple_write_two_bands(self): @@ -982,12 +982,12 @@ def test_get_test_dataset_three_bands_prereq(self): dataset = self._get_test_dataset_three_bands_prereq() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) - filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs['name'], - dataset.attrs['start_time']) + filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs["name"], + dataset.attrs["start_time"]) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, filename)) for element in imgdesc: - if ' Channels:' in element: - self.assertEqual(element, ' Channels: 3 In this file: 1 2 3') + if " Channels:" in element: + self.assertEqual(element, " Channels: 3 In this file: 1 2 3") def test_save_dataset_with_calibration_error_one_dataset(self): """Test saving if mitiff as dataset with only one channel with invalid calibration.""" @@ -998,9 +998,9 @@ def test_save_dataset_with_calibration_error_one_dataset(self): logger.level = logging.DEBUG dataset = self._get_test_dataset_calibration_one_dataset() - prereqs = [make_dsq(name='4', calibration='not_valid_calibration_name')] - dataset.attrs['prerequisites'] = prereqs - w = MITIFFWriter(filename=dataset.attrs['metadata_requirements']['file_pattern'], base_dir=self.base_dir) + prereqs = [make_dsq(name="4", calibration="not_valid_calibration_name")] + dataset.attrs["prerequisites"] = prereqs + w = MITIFFWriter(filename=dataset.attrs["metadata_requirements"]["file_pattern"], base_dir=self.base_dir) 
_reverse_offset = 0. _reverse_scale = 1. _decimals = 2 @@ -1024,17 +1024,17 @@ def test_save_dataset_with_missing_palette(self): logger.setLevel(logging.DEBUG) dataset = self._get_test_one_dataset() - pal_desc = ['test', 'test2'] + pal_desc = ["test", "test2"] unit = "Test" - palette = {'palette': True, - 'palette_description': pal_desc, - 'palette_unit': unit, - 'palette_channel_name': dataset.attrs['name']} + palette = {"palette": True, + "palette_description": pal_desc, + "palette_unit": unit, + "palette_channel_name": dataset.attrs["name"]} w = MITIFFWriter(base_dir=self.base_dir) tiffinfo = {} - tiffinfo[270] = "Just dummy image desc".encode('utf-8') - filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs['name'], - dataset.attrs['start_time']) + tiffinfo[270] = "Just dummy image desc".encode("utf-8") + filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs["name"], + dataset.attrs["start_time"]) try: with self.assertLogs(logger, logging.ERROR) as lc: w._save_as_palette(dataset.compute(), os.path.join(self.base_dir, filename), tiffinfo, **palette) diff --git a/satpy/tests/writer_tests/test_ninjogeotiff.py b/satpy/tests/writer_tests/test_ninjogeotiff.py index ac75b68cbf..bba3e9b44e 100644 --- a/satpy/tests/writer_tests/test_ninjogeotiff.py +++ b/satpy/tests/writer_tests/test_ninjogeotiff.py @@ -477,7 +477,7 @@ def now(cls, tz=datetime.timezone.utc): return datetime.datetime(2033, 5, 18, 3, 33, 20, tzinfo=tz) - monkeypatch.setattr(datetime, 'datetime', mydatetime) + monkeypatch.setattr(datetime, "datetime", mydatetime) def test_write_and_read_file(test_image_small_mid_atlantic_L, tmp_path): diff --git a/satpy/tests/writer_tests/test_ninjotiff.py b/satpy/tests/writer_tests/test_ninjotiff.py index 58f991e73d..f36f1028b7 100644 --- a/satpy/tests/writer_tests/test_ninjotiff.py +++ b/satpy/tests/writer_tests/test_ninjotiff.py @@ -43,58 +43,58 @@ def get_scaling_from_history(self): pyninjotiff_mock.ninjotiff = mock.Mock() -@mock.patch.dict(sys.modules, {'pyninjotiff': pyninjotiff_mock, 'pyninjotiff.ninjotiff': pyninjotiff_mock.ninjotiff}) +@mock.patch.dict(sys.modules, {"pyninjotiff": pyninjotiff_mock, "pyninjotiff.ninjotiff": pyninjotiff_mock.ninjotiff}) class TestNinjoTIFFWriter(unittest.TestCase): """The ninjo tiff writer tests.""" - @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) + @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_init(self): """Test the init.""" from satpy.writers.ninjotiff import NinjoTIFFWriter - ninjo_tags = {40000: 'NINJO'} + ninjo_tags = {40000: "NINJO"} ntw = NinjoTIFFWriter(tags=ninjo_tags) self.assertDictEqual(ntw.tags, ninjo_tags) - @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_dataset') - @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) + @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_dataset") + @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_dataset(self, iwsd): """Test saving a dataset.""" from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() - dataset = xr.DataArray([1, 2, 3], attrs={'units': 'K'}) - with mock.patch('satpy.writers.ninjotiff.convert_units') as uconv: - ntw.save_dataset(dataset, physic_unit='CELSIUS') - uconv.assert_called_once_with(dataset, 'K', 'CELSIUS') + dataset = xr.DataArray([1, 2, 3], attrs={"units": "K"}) + with mock.patch("satpy.writers.ninjotiff.convert_units") as uconv: + ntw.save_dataset(dataset, physic_unit="CELSIUS") + uconv.assert_called_once_with(dataset, "K", "CELSIUS") 
self.assertEqual(iwsd.call_count, 1) - @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_dataset') - @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) + @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_dataset") + @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_dataset_skip_unit_conversion(self, iwsd): """Test saving a dataset without unit conversion.""" from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() - dataset = xr.DataArray([1, 2, 3], attrs={'units': 'K'}) - with mock.patch('satpy.writers.ninjotiff.convert_units') as uconv: - ntw.save_dataset(dataset, physic_unit='CELSIUS', + dataset = xr.DataArray([1, 2, 3], attrs={"units": "K"}) + with mock.patch("satpy.writers.ninjotiff.convert_units") as uconv: + ntw.save_dataset(dataset, physic_unit="CELSIUS", convert_temperature_units=False) uconv.assert_not_called() self.assertEqual(iwsd.call_count, 1) - @mock.patch('satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset') - @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_image') - @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) + @mock.patch("satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset") + @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_image") + @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_image(self, iwsi, save_dataset): """Test saving an image.""" nt = pyninjotiff_mock.ninjotiff nt.reset_mock() from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() - dataset = xr.DataArray([1, 2, 3], attrs={'units': 'K'}) - img = FakeImage(dataset, 'L') - ret = ntw.save_image(img, filename='bla.tif', compute=False) + dataset = xr.DataArray([1, 2, 3], attrs={"units": "K"}) + img = FakeImage(dataset, "L") + ret = ntw.save_image(img, filename="bla.tif", compute=False) nt.save.assert_called() - assert nt.save.mock_calls[0][2]['compute'] is False - assert nt.save.mock_calls[0][2]['ch_min_measurement_unit'] < nt.save.mock_calls[0][2]['ch_max_measurement_unit'] + assert nt.save.mock_calls[0][2]["compute"] is False + assert nt.save.mock_calls[0][2]["ch_min_measurement_unit"] < nt.save.mock_calls[0][2]["ch_max_measurement_unit"] assert ret == nt.save.return_value def test_convert_units_self(self): @@ -145,9 +145,9 @@ def test_convert_units_other(self): with pytest.raises(NotImplementedError): convert_units(ds_in, "millimeter/hour", "m/s") - @mock.patch('satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset') - @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_image') - @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) + @mock.patch("satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset") + @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_image") + @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_P_image_is_uint8(self, iwsi, save_dataset): """Test that a P-mode image is converted to uint8s.""" nt = pyninjotiff_mock.ninjotiff @@ -155,6 +155,6 @@ def test_P_image_is_uint8(self, iwsi, save_dataset): from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() dataset = xr.DataArray([1, 2, 3]).astype(int) - img = FakeImage(dataset, 'P') - ntw.save_image(img, filename='bla.tif', compute=False) + img = FakeImage(dataset, "P") + ntw.save_image(img, filename="bla.tif", compute=False) assert nt.save.mock_calls[0][1][0].data.dtype == np.uint8 diff --git a/satpy/tests/writer_tests/test_simple_image.py b/satpy/tests/writer_tests/test_simple_image.py index 5ebf0dfb47..b3e92c9510 
100644
--- a/satpy/tests/writer_tests/test_simple_image.py
+++ b/satpy/tests/writer_tests/test_simple_image.py
@@ -44,9 +44,9 @@ def _get_test_datasets():
         import xarray as xr
         ds1 = xr.DataArray(
             da.zeros((100, 200), chunks=50),
-            dims=('y', 'x'),
-            attrs={'name': 'test',
-                   'start_time': datetime.utcnow()}
+            dims=("y", "x"),
+            attrs={"name": "test",
+                   "start_time": datetime.utcnow()}
         )
         return [ds1]
diff --git a/satpy/tests/writer_tests/test_utils.py b/satpy/tests/writer_tests/test_utils.py
index 10a199d8b0..a0cf88e54f 100644
--- a/satpy/tests/writer_tests/test_utils.py
+++ b/satpy/tests/writer_tests/test_utils.py
@@ -27,9 +27,9 @@ class WriterUtilsTest(unittest.TestCase):
     def test_flatten_dict(self):
         """Test dictionary flattening."""
-        d = {'a': 1, 'b': {'c': 1, 'd': {'e': 1, 'f': {'g': [1, 2]}}}}
-        expected = {'a': 1,
-                    'b_c': 1,
-                    'b_d_e': 1,
-                    'b_d_f_g': [1, 2]}
+        d = {"a": 1, "b": {"c": 1, "d": {"e": 1, "f": {"g": [1, 2]}}}}
+        expected = {"a": 1,
+                    "b_c": 1,
+                    "b_d_e": 1,
+                    "b_d_f_g": [1, 2]}
         self.assertDictEqual(wutils.flatten_dict(d), expected)
diff --git a/satpy/utils.py b/satpy/utils.py
index a9785a544a..1ef9a3524c 100644
--- a/satpy/utils.py
+++ b/satpy/utils.py
@@ -130,12 +130,12 @@ def logging_on(level=logging.WARNING):
         console = logging.StreamHandler()
         console.setFormatter(logging.Formatter("[%(levelname)s: %(asctime)s :"
                                                " %(name)s] %(message)s",
-                                               '%Y-%m-%d %H:%M:%S'))
+                                               "%Y-%m-%d %H:%M:%S"))
         console.setLevel(level)
-        logging.getLogger('').addHandler(console)
+        logging.getLogger("").addHandler(console)
         _is_logging_on = True
-    log = logging.getLogger('')
+    log = logging.getLogger("")
     log.setLevel(level)
     for h in log.handlers:
         h.setLevel(level)
@@ -143,13 +143,13 @@ def logging_off():
     """Turn logging off."""
-    logging.getLogger('').handlers = [logging.NullHandler()]
+    logging.getLogger("").handlers = [logging.NullHandler()]
 def get_logger(name):
     """Return logger with null handler added if needed."""
-    if not hasattr(logging.Logger, 'trace'):
-        logging.addLevelName(TRACE_LEVEL, 'TRACE')
+    if not hasattr(logging.Logger, "trace"):
+        logging.addLevelName(TRACE_LEVEL, "TRACE")
         def trace(self, message, *args, **kwargs):
             if self.isEnabledFor(TRACE_LEVEL):
@@ -165,7 +165,7 @@ def trace(self, message, *args, **kwargs):
 def in_ipynb():
     """Check if we are in a jupyter notebook."""
     try:
-        return 'ZMQ' in get_ipython().__class__.__name__
+        return "ZMQ" in get_ipython().__class__.__name__
     except NameError:
         return False
@@ -243,20 +243,20 @@ def proj_units_to_meters(proj_str):
     proj_parts = proj_str.split()
     new_parts = []
     for itm in proj_parts:
-        key, val = itm.split('=')
-        key = key.strip('+')
-        if key in ['a', 'b', 'h']:
+        key, val = itm.split("=")
+        key = key.strip("+")
+        if key in ["a", "b", "h"]:
             val = float(val)
             if val < 6e6:
                 val *= 1000.
- val = '%.3f' % val + val = "%.3f" % val - if key == 'units' and val == 'km': + if key == "units" and val == "km": continue - new_parts.append('+%s=%s' % (key, val)) + new_parts.append("+%s=%s" % (key, val)) - return ' '.join(new_parts) + return " ".join(new_parts) def _get_sunz_corr_li_and_shibata(cos_zen): @@ -371,9 +371,9 @@ def _get_sat_altitude(data_arr, key_prefixes): try: alt = _get_first_available_item(orb_params, alt_keys) except KeyError: - alt = orb_params['projection_altitude'] + alt = orb_params["projection_altitude"] warnings.warn( - 'Actual satellite altitude not available, using projection altitude instead.', + "Actual satellite altitude not available, using projection altitude instead.", stacklevel=3 ) return alt @@ -387,10 +387,10 @@ def _get_sat_lonlat(data_arr, key_prefixes): lon = _get_first_available_item(orb_params, lon_keys) lat = _get_first_available_item(orb_params, lat_keys) except KeyError: - lon = orb_params['projection_longitude'] - lat = orb_params['projection_latitude'] + lon = orb_params["projection_longitude"] + lat = orb_params["projection_latitude"] warnings.warn( - 'Actual satellite lon/lat not available, using projection center instead.', + "Actual satellite lon/lat not available, using projection center instead.", stacklevel=3 ) return lon, lat @@ -454,21 +454,21 @@ def _check_yaml_configs(configs, key): diagnostic = {} for i in configs: for fname in i: - msg = 'ok' + msg = "ok" res = None - with open(fname, 'r', encoding='utf-8') as stream: + with open(fname, "r", encoding="utf-8") as stream: try: res = yaml.load(stream, Loader=UnsafeLoader) except yaml.YAMLError as err: stream.seek(0) res = yaml.load(stream, Loader=BaseLoader) - if err.context == 'while constructing a Python object': + if err.context == "while constructing a Python object": msg = err.problem else: - msg = 'error' + msg = "error" finally: try: - diagnostic[res[key]['name']] = msg + diagnostic[res[key]["name"]] = msg except (KeyError, TypeError): # this object doesn't have a 'name' pass @@ -481,7 +481,7 @@ def _check_import(module_names): for module_name in module_names: try: __import__(module_name) - res = 'ok' + res = "ok" except ImportError as err: res = str(err) diagnostics[module_name] = res @@ -503,23 +503,23 @@ def check_satpy(readers=None, writers=None, extras=None): from satpy.readers import configs_for_reader from satpy.writers import configs_for_writer - print('Readers') - print('=======') - for reader, res in sorted(_check_yaml_configs(configs_for_reader(reader=readers), 'reader').items()): - print(reader + ': ', res) + print("Readers") + print("=======") + for reader, res in sorted(_check_yaml_configs(configs_for_reader(reader=readers), "reader").items()): + print(reader + ": ", res) print() - print('Writers') - print('=======') - for writer, res in sorted(_check_yaml_configs(configs_for_writer(writer=writers), 'writer').items()): - print(writer + ': ', res) + print("Writers") + print("=======") + for writer, res in sorted(_check_yaml_configs(configs_for_writer(writer=writers), "writer").items()): + print(writer + ": ", res) print() - print('Extras') - print('======') - module_names = extras if extras is not None else ('cartopy', 'geoviews') + print("Extras") + print("======") + module_names = extras if extras is not None else ("cartopy", "geoviews") for module_name, res in sorted(_check_import(module_names).items()): - print(module_name + ': ', res) + print(module_name + ": ", res) print() @@ -619,7 +619,7 @@ def get_legacy_chunk_size(): def _get_pytroll_chunk_size(): 
try: - chunk_size = int(os.environ['PYTROLL_CHUNK_SIZE']) + chunk_size = int(os.environ["PYTROLL_CHUNK_SIZE"]) warnings.warn( "The PYTROLL_CHUNK_SIZE environment variable is pending deprecation. " "You can use the dask config setting `array.chunk-size` (or the DASK_ARRAY__CHUNK_SIZE environment" @@ -668,7 +668,7 @@ def _sort_files_to_local_remote_and_fsfiles(filenames): fs_files.append(f) elif isinstance(f, pathlib.Path): local_files.append(f) - elif urlparse(f).scheme in ('', 'file') or "\\" in f: + elif urlparse(f).scheme in ("", "file") or "\\" in f: local_files.append(f) else: remote_files.append(f) @@ -709,7 +709,7 @@ def _get_storage_dictionary_options(reader_kwargs): # set base storage options if there are any storage_opt_dict[reader_name] = shared_storage_options.copy() if isinstance(rkwargs, dict) and "storage_options" in rkwargs: - storage_opt_dict.setdefault(reader_name, {}).update(rkwargs.pop('storage_options')) + storage_opt_dict.setdefault(reader_name, {}).update(rkwargs.pop("storage_options")) return storage_opt_dict diff --git a/satpy/writers/__init__.py b/satpy/writers/__init__.py index 0af433f28d..dcf482188d 100644 --- a/satpy/writers/__init__.py +++ b/satpy/writers/__init__.py @@ -44,18 +44,18 @@ def read_writer_config(config_files, loader=UnsafeLoader): """Read the writer `config_files` and return the info extracted.""" conf = {} - LOG.debug('Reading %s', str(config_files)) + LOG.debug("Reading %s", str(config_files)) for config_file in config_files: with open(config_file) as fd: conf.update(yaml.load(fd.read(), Loader=loader)) try: - writer_info = conf['writer'] + writer_info = conf["writer"] except KeyError: raise KeyError( "Malformed config file {}: missing writer 'writer'".format( config_files)) - writer_info['config_files'] = config_files + writer_info["config_files"] = config_files return writer_info @@ -63,7 +63,7 @@ def load_writer_configs(writer_configs, **writer_kwargs): """Load the writer from the provided `writer_configs`.""" try: writer_info = read_writer_config(writer_configs) - writer_class = writer_info['writer'] + writer_class = writer_info["writer"] except (ValueError, KeyError, yaml.YAMLError): raise ValueError("Invalid writer configs: " "'{}'".format(writer_configs)) @@ -78,11 +78,11 @@ def load_writer(writer, **writer_kwargs): config_fn = writer + ".yaml" if "." 
not in writer else writer config_files = config_search_paths(os.path.join("writers", config_fn)) writer_kwargs.setdefault("config_files", config_files) - if not writer_kwargs['config_files']: + if not writer_kwargs["config_files"]: raise ValueError("Unknown writer '{}'".format(writer)) try: - return load_writer_configs(writer_kwargs['config_files'], + return load_writer_configs(writer_kwargs["config_files"], **writer_kwargs) except ValueError: raise ValueError("Writer '{}' does not exist or could not be " @@ -102,15 +102,15 @@ def configs_for_writer(writer=None): if not isinstance(writer, (list, tuple)): writer = [writer] # given a config filename or writer name - config_files = [w if w.endswith('.yaml') else w + '.yaml' for w in writer] + config_files = [w if w.endswith(".yaml") else w + ".yaml" for w in writer] else: - paths = get_entry_points_config_dirs('satpy.writers') - writer_configs = glob_config(os.path.join('writers', '*.yaml'), search_dirs=paths) + paths = get_entry_points_config_dirs("satpy.writers") + writer_configs = glob_config(os.path.join("writers", "*.yaml"), search_dirs=paths) config_files = set(writer_configs) for config_file in config_files: config_basename = os.path.basename(config_file) - paths = get_entry_points_config_dirs('satpy.writers') + paths = get_entry_points_config_dirs("satpy.writers") writer_configs = config_search_paths( os.path.join("writers", config_basename), search_dirs=paths, @@ -143,7 +143,7 @@ def available_writers(as_dict=False): LOG.warning("Could not import writer config from: %s", writer_configs) LOG.debug("Error loading YAML", exc_info=True) continue - writers.append(writer_info if as_dict else writer_info['name']) + writers.append(writer_info if as_dict else writer_info["name"]) return writers @@ -231,11 +231,11 @@ def add_overlay(orig_img, area, coast_dir, color=None, width=None, resolution=No DeprecationWarning, stacklevel=2 ) - if hasattr(orig_img, 'convert'): + if hasattr(orig_img, "convert"): # image must be in RGB space to work with pycoast/pydecorate - res_mode = ('RGBA' if orig_img.final_mode(fill_value).endswith('A') else 'RGB') + res_mode = ("RGBA" if orig_img.final_mode(fill_value).endswith("A") else "RGB") orig_img = orig_img.convert(res_mode) - elif not orig_img.mode.startswith('RGB'): + elif not orig_img.mode.startswith("RGB"): raise RuntimeError("'trollimage' 1.6+ required to support adding " "overlays/decorations to non-RGB data.") @@ -244,7 +244,7 @@ def add_overlay(orig_img, area, coast_dir, color=None, width=None, resolution=No cw_ = ContourWriterAGG(coast_dir) new_image = orig_img.apply_pil(_burn_overlay, res_mode, - None, {'fill_value': fill_value}, + None, {"fill_value": fill_value}, (area, cw_, overlays), None) return new_image @@ -253,25 +253,25 @@ def _create_overlays_dict(color, width, grid, level_coast, level_borders): """Fill in the overlays dict.""" overlays = dict() # fill with sensible defaults - general_params = {'outline': color or (0, 0, 0), - 'width': width or 0.5} + general_params = {"outline": color or (0, 0, 0), + "width": width or 0.5} for key, val in general_params.items(): if val is not None: - overlays.setdefault('coasts', {}).setdefault(key, val) - overlays.setdefault('borders', {}).setdefault(key, val) + overlays.setdefault("coasts", {}).setdefault(key, val) + overlays.setdefault("borders", {}).setdefault(key, val) if level_coast is None: level_coast = 1 - overlays.setdefault('coasts', {}).setdefault('level', level_coast) + overlays.setdefault("coasts", {}).setdefault("level", level_coast) if 
level_borders is None: level_borders = 1 - overlays.setdefault('borders', {}).setdefault('level', level_borders) + overlays.setdefault("borders", {}).setdefault("level", level_borders) if grid is not None: - if 'major_lonlat' in grid and grid['major_lonlat']: - major_lonlat = grid.pop('major_lonlat') - minor_lonlat = grid.pop('minor_lonlat', major_lonlat) - grid.update({'Dlonlat': major_lonlat, 'dlonlat': minor_lonlat}) + if "major_lonlat" in grid and grid["major_lonlat"]: + major_lonlat = grid.pop("major_lonlat") + minor_lonlat = grid.pop("minor_lonlat", major_lonlat) + grid.update({"Dlonlat": major_lonlat, "dlonlat": minor_lonlat}) for key, val in grid.items(): - overlays.setdefault('grid', {}).setdefault(key, val) + overlays.setdefault("grid", {}).setdefault(key, val) return overlays @@ -288,10 +288,10 @@ def add_text(orig, dc, img, text): arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE) - new_data = xr.DataArray(arr, dims=['y', 'x', 'bands'], - coords={'y': orig.data.coords['y'], - 'x': orig.data.coords['x'], - 'bands': list(img.mode)}, + new_data = xr.DataArray(arr, dims=["y", "x", "bands"], + coords={"y": orig.data.coords["y"], + "x": orig.data.coords["x"], + "bands": list(img.mode)}, attrs=orig.data.attrs) return XRImage(new_data) @@ -309,10 +309,10 @@ def add_logo(orig, dc, img, logo): arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE) - new_data = xr.DataArray(arr, dims=['y', 'x', 'bands'], - coords={'y': orig.data.coords['y'], - 'x': orig.data.coords['x'], - 'bands': list(img.mode)}, + new_data = xr.DataArray(arr, dims=["y", "x", "bands"], + coords={"y": orig.data.coords["y"], + "x": orig.data.coords["x"], + "bands": list(img.mode)}, attrs=orig.data.attrs) return XRImage(new_data) @@ -330,10 +330,10 @@ def add_scale(orig, dc, img, scale): arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE) - new_data = xr.DataArray(arr, dims=['y', 'x', 'bands'], - coords={'y': orig.data.coords['y'], - 'x': orig.data.coords['x'], - 'bands': list(img.mode)}, + new_data = xr.DataArray(arr, dims=["y", "x", "bands"], + coords={"y": orig.data.coords["y"], + "x": orig.data.coords["x"], + "bands": list(img.mode)}, attrs=orig.data.attrs) return XRImage(new_data) @@ -373,10 +373,10 @@ def add_decorate(orig, fill_value=None, **decorate): # Need to create this here to possible keep the alignment # when adding text and/or logo with pydecorate - if hasattr(orig, 'convert'): + if hasattr(orig, "convert"): # image must be in RGB space to work with pycoast/pydecorate - orig = orig.convert('RGBA' if orig.mode.endswith('A') else 'RGB') - elif not orig.mode.startswith('RGB'): + orig = orig.convert("RGBA" if orig.mode.endswith("A") else "RGB") + elif not orig.mode.startswith("RGB"): raise RuntimeError("'trollimage' 1.6+ required to support adding " "overlays/decorations to non-RGB data.") img_orig = orig.pil_image(fill_value=fill_value) @@ -386,14 +386,14 @@ def add_decorate(orig, fill_value=None, **decorate): # decorate need to be a list to maintain the alignment # as ordered in the list img = orig - if 'decorate' in decorate: - for dec in decorate['decorate']: - if 'logo' in dec: - img = add_logo(img, dc, img_orig, logo=dec['logo']) - elif 'text' in dec: - img = add_text(img, dc, img_orig, text=dec['text']) - elif 'scale' in dec: - img = add_scale(img, dc, img_orig, scale=dec['scale']) + if "decorate" in decorate: + for dec in decorate["decorate"]: + if "logo" in dec: + img = add_logo(img, dc, img_orig, logo=dec["logo"]) + elif "text" in dec: + img = add_text(img, dc, img_orig, 
text=dec["text"]) + elif "scale" in dec: + img = add_scale(img, dc, img_orig, scale=dec["scale"]) return img @@ -445,7 +445,7 @@ def get_enhanced_image(dataset, enhance=None, overlay=None, decorate=None, enhancer.apply(img, **dataset.attrs) if overlay is not None: - img = add_overlay(img, dataset.attrs['area'], fill_value=fill_value, **overlay) + img = add_overlay(img, dataset.attrs["area"], fill_value=fill_value, **overlay) if decorate is not None: img = add_decorate(img, fill_value=fill_value, **decorate) @@ -595,7 +595,7 @@ def compute_writer_results(results): if targets: for target in targets: - if hasattr(target, 'close'): + if hasattr(target, "close"): target.close() @@ -632,23 +632,23 @@ def __init__(self, name=None, filename=None, base_dir=None, **kwargs): """ # Load the config Plugin.__init__(self, **kwargs) - self.info = self.config.get('writer', {}) + self.info = self.config.get("writer", {}) - if 'file_pattern' in self.info: + if "file_pattern" in self.info: warnings.warn( "Writer YAML config is using 'file_pattern' which " "has been deprecated, use 'filename' instead.", stacklevel=2 ) - self.info['filename'] = self.info.pop('file_pattern') + self.info["filename"] = self.info.pop("file_pattern") - if 'file_pattern' in kwargs: + if "file_pattern" in kwargs: warnings.warn( "'file_pattern' has been deprecated, use 'filename' instead.", DeprecationWarning, stacklevel=2 ) - filename = kwargs.pop('file_pattern') + filename = kwargs.pop("file_pattern") # Use options from the config file if they weren't passed as arguments self.name = self.info.get("name", None) if name is None else name @@ -679,7 +679,7 @@ def separate_init_kwargs(cls, kwargs): # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs = {} kwargs = kwargs.copy() - for kw in ['base_dir', 'filename', 'file_pattern']: + for kw in ["base_dir", "filename", "file_pattern"]: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) return init_kwargs, kwargs @@ -696,8 +696,8 @@ def create_filename_parser(self, base_dir): @staticmethod def _prepare_metadata_for_filename_formatting(attrs): - if isinstance(attrs.get('sensor'), set): - attrs['sensor'] = '-'.join(sorted(attrs['sensor'])) + if isinstance(attrs.get("sensor"), set): + attrs["sensor"] = "-".join(sorted(attrs["sensor"])) def get_filename(self, **kwargs): """Create a filename where output data will be saved. 
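For orientation, the overlay and decorate keyword arguments consumed by add_overlay() and add_decorate() above are plain nested dictionaries. Below is a minimal sketch of their expected shape, following the keys read in _create_overlays_dict() and the decorate["decorate"] loop shown in this hunk; the inner pydecorate options (logo_path, txt, height) and the file paths are illustrative placeholders, not taken from this patch:

    # Sketch only: dictionaries shaped the way add_overlay()/add_decorate() walk them.
    overlay = {
        "coast_dir": "/path/to/gshhs",   # placeholder shapefile directory for pycoast
        "color": (255, 255, 0),          # outline colour applied to coasts and borders
        "width": 0.75,                   # outline width applied to coasts and borders
        "level_coast": 1,
        "level_borders": 1,
        "grid": {"major_lonlat": (10, 10), "minor_lonlat": (2, 2)},
    }

    decorate = {
        "decorate": [                    # entries are applied in order to keep alignment
            {"logo": {"logo_path": "/path/to/logo.png", "height": 60}},  # placeholder pydecorate options
            {"text": {"txt": "Example caption"}},                        # placeholder pydecorate options
        ]
    }

    # These would then be passed through, e.g.:
    # get_enhanced_image(data_arr, overlay=overlay, decorate=decorate)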
@@ -863,7 +863,7 @@ def separate_init_kwargs(cls, kwargs): """Separate the init kwargs.""" # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs, kwargs = super(ImageWriter, cls).separate_init_kwargs(kwargs) - for kw in ['enhancement_config', 'enhance']: + for kw in ["enhancement_config", "enhance"]: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) return init_kwargs, kwargs @@ -1179,7 +1179,7 @@ def __init__(self, enhancement_config_file=None): # it wasn't specified in the config or in the kwargs, we should # provide a default config_fn = os.path.join("enhancements", "generic.yaml") - paths = get_entry_points_config_dirs('satpy.enhancements') + paths = get_entry_points_config_dirs("satpy.enhancements") self.enhancement_config_file = config_search_paths(config_fn, search_dirs=paths) if not self.enhancement_config_file: @@ -1199,7 +1199,7 @@ def get_sensor_enhancement_config(self, sensor): # one single sensor sensor = [sensor] - paths = get_entry_points_config_dirs('satpy.enhancements') + paths = get_entry_points_config_dirs("satpy.enhancements") for sensor_name in sensor: config_fn = os.path.join("enhancements", sensor_name + ".yaml") config_files = config_search_paths(config_fn, search_dirs=paths) @@ -1227,8 +1227,8 @@ def apply(self, img, **info): backup_id = f"" data_id = info.get("_satpy_id", backup_id) LOG.debug(f"Data for {data_id} will be enhanced with options:\n\t{enh_kwargs['operations']}") - for operation in enh_kwargs['operations']: - fun = operation['method'] - args = operation.get('args', []) - kwargs = operation.get('kwargs', {}) + for operation in enh_kwargs["operations"]: + fun = operation["method"] + args = operation.get("args", []) + kwargs = operation.get("kwargs", {}) fun(img, *args, **kwargs) diff --git a/satpy/writers/awips_tiled.py b/satpy/writers/awips_tiled.py index 4b7f32d1df..5f10418e8a 100644 --- a/satpy/writers/awips_tiled.py +++ b/satpy/writers/awips_tiled.py @@ -233,25 +233,25 @@ from satpy.writers import DecisionTree, Enhancer, Writer, get_enhanced_image LOG = logging.getLogger(__name__) -DEFAULT_OUTPUT_PATTERN = '{source_name}_AII_{platform_name}_{sensor}_' \ - '{name}_{sector_id}_{tile_id}_' \ - '{start_time:%Y%m%d_%H%M}.nc' +DEFAULT_OUTPUT_PATTERN = "{source_name}_AII_{platform_name}_{sensor}_" \ + "{name}_{sector_id}_{tile_id}_" \ + "{start_time:%Y%m%d_%H%M}.nc" UNIT_CONV = { - 'micron': 'microm', - 'mm h-1': 'mm/h', - '1': '*1', - 'none': '*1', - 'percent': '%', - 'Kelvin': 'kelvin', - 'K': 'kelvin', + "micron": "microm", + "mm h-1": "mm/h", + "1": "*1", + "none": "*1", + "percent": "%", + "Kelvin": "kelvin", + "K": "kelvin", } -TileInfo = namedtuple('TileInfo', ['tile_count', 'image_shape', 'tile_shape', - 'tile_row_offset', 'tile_column_offset', 'tile_id', - 'tile_number', - 'x', 'y', 'xy_factors', 'tile_slices', 'data_slices']) -XYFactors = namedtuple('XYFactors', ['mx', 'bx', 'my', 'by']) +TileInfo = namedtuple("TileInfo", ["tile_count", "image_shape", "tile_shape", + "tile_row_offset", "tile_column_offset", "tile_id", + "tile_number", + "x", "y", "xy_factors", "tile_slices", "data_slices"]) +XYFactors = namedtuple("XYFactors", ["mx", "bx", "my", "by"]) def fix_awips_file(fn): @@ -265,9 +265,9 @@ def fix_awips_file(fn): # of NetCDF LOG.info("Modifying output NetCDF file to work with AWIPS") import h5py - h = h5py.File(fn, 'a') - if '_NCProperties' in h.attrs: - del h.attrs['_NCProperties'] + h = h5py.File(fn, "a") + if "_NCProperties" in h.attrs: + del h.attrs["_NCProperties"] h.close() @@ -604,12 +604,12 @@ def 
_generate_tile_info(self): def _get_factor_offset_fill(input_data_arr, vmin, vmax, encoding): - dtype_str = encoding['dtype'] + dtype_str = encoding["dtype"] dtype = np.dtype(getattr(np, dtype_str)) file_bit_depth = dtype.itemsize * 8 - unsigned_in_signed = encoding.get('_Unsigned') == "true" - is_unsigned = dtype.kind == 'u' - bit_depth = input_data_arr.attrs.get('bit_depth', file_bit_depth) + unsigned_in_signed = encoding.get("_Unsigned") == "true" + is_unsigned = dtype.kind == "u" + bit_depth = input_data_arr.attrs.get("bit_depth", file_bit_depth) num_fills = 1 # future: possibly support more than one fill value if bit_depth is None: bit_depth = file_bit_depth @@ -666,7 +666,7 @@ def _add_valid_ranges(data_arrs): # we don't want to effect the original attrs data_arr = data_arr.copy(deep=False) # these are dask arrays, they need to get computed later - data_arr.attrs['valid_range'] = (vmin, vmax) + data_arr.attrs["valid_range"] = (vmin, vmax) yield data_arr @@ -676,7 +676,7 @@ class AWIPSTiledVariableDecisionTree(DecisionTree): def __init__(self, decision_dicts, **kwargs): """Initialize decision tree with specific keys to look for.""" # Fields used to match a product object to it's correct configuration - attrs = kwargs.pop('attrs', + attrs = kwargs.pop("attrs", ["name", "standard_name", "satellite", @@ -693,30 +693,30 @@ class NetCDFTemplate: def __init__(self, template_dict): """Parse template dictionary and prepare for rendering.""" - self.is_single_variable = template_dict.get('single_variable', False) - self.global_attributes = template_dict.get('global_attributes', {}) + self.is_single_variable = template_dict.get("single_variable", False) + self.global_attributes = template_dict.get("global_attributes", {}) default_var_config = { "default": { "encoding": {"dtype": "uint16"}, } } - self.variables = template_dict.get('variables', default_var_config) + self.variables = template_dict.get("variables", default_var_config) default_coord_config = { "default": { "encoding": {"dtype": "uint16"}, } } - self.coordinates = template_dict.get('coordinates', default_coord_config) + self.coordinates = template_dict.get("coordinates", default_coord_config) self._var_tree = AWIPSTiledVariableDecisionTree([self.variables]) self._coord_tree = AWIPSTiledVariableDecisionTree([self.coordinates]) - self._filename_format_str = template_dict.get('filename') + self._filename_format_str = template_dict.get("filename") self._str_formatter = StringFormatter() self._template_dict = template_dict - def get_filename(self, base_dir='', **kwargs): + def get_filename(self, base_dir="", **kwargs): """Generate output NetCDF file from metadata.""" # format the filename if self._filename_format_str is None: @@ -794,7 +794,7 @@ def get_attr_value(self, attr_name, input_metadata, value=None, raw_key=None, ra if func is not None: value = func(input_metadata) if value is None: - LOG.debug('no routine matching %s', meth_name) + LOG.debug("no routine matching %s", meth_name) return value def _render_attrs(self, attr_configs, input_metadata, prefix="_"): @@ -814,28 +814,28 @@ def _render_global_attributes(self, input_metadata): prefix="_global_") def _render_variable_attributes(self, var_config, input_metadata): - attr_configs = var_config['attributes'] + attr_configs = var_config["attributes"] var_attrs = self._render_attrs(attr_configs, input_metadata, prefix="_data_") return var_attrs def _render_coordinate_attributes(self, coord_config, input_metadata): - attr_configs = coord_config['attributes'] + attr_configs = 
coord_config["attributes"] coord_attrs = self._render_attrs(attr_configs, input_metadata, prefix="_coord_") return coord_attrs def _render_variable_encoding(self, var_config, input_data_arr): new_encoding = input_data_arr.encoding.copy() # determine fill value and - if 'encoding' in var_config: - new_encoding.update(var_config['encoding']) + if "encoding" in var_config: + new_encoding.update(var_config["encoding"]) if "dtype" not in new_encoding: - new_encoding['dtype'] = 'int16' - new_encoding['_Unsigned'] = 'true' + new_encoding["dtype"] = "int16" + new_encoding["_Unsigned"] = "true" return new_encoding def _render_variable(self, data_arr): var_config = self._var_tree.find_match(**data_arr.attrs) - new_var_name = var_config.get('var_name', data_arr.attrs['name']) + new_var_name = var_config.get("var_name", data_arr.attrs["name"]) new_data_arr = data_arr.copy() # remove coords which may cause issues later on new_data_arr = new_data_arr.reset_coords(drop=True) @@ -848,8 +848,8 @@ def _render_variable(self, data_arr): def _get_matchable_coordinate_metadata(self, coord_name, coord_attrs): match_kwargs = {} - if 'name' not in coord_attrs: - match_kwargs['name'] = coord_name + if "name" not in coord_attrs: + match_kwargs["name"] = coord_name match_kwargs.update(coord_attrs) return match_kwargs @@ -897,29 +897,29 @@ def __init__(self, template_dict, swap_end_time=False): def _swap_attributes_end_time(self, template_dict): """Swap every use of 'start_time' to use 'end_time' instead.""" - variable_attributes = [var_section['attributes'] for var_section in template_dict.get('variables', {}).values()] - global_attributes = template_dict.get('global_attributes', {}) + variable_attributes = [var_section["attributes"] for var_section in template_dict.get("variables", {}).values()] + global_attributes = template_dict.get("global_attributes", {}) for attr_section in variable_attributes + [global_attributes]: for attr_name in attr_section: attr_config = attr_section[attr_name] - if '{start_time' in attr_config.get('value', ''): - attr_config['value'] = attr_config['value'].replace('{start_time', '{end_time') - if attr_config.get('raw_key', '') == 'start_time': - attr_config['raw_key'] = 'end_time' + if "{start_time" in attr_config.get("value", ""): + attr_config["value"] = attr_config["value"].replace("{start_time", "{end_time") + if attr_config.get("raw_key", "") == "start_time": + attr_config["raw_key"] = "end_time" def _data_units(self, input_metadata): - units = input_metadata.get('units', '1') + units = input_metadata.get("units", "1") # we *know* AWIPS can't handle some units return UNIT_CONV.get(units, units) def _global_start_date_time(self, input_metadata): - start_time = input_metadata['start_time'] + start_time = input_metadata["start_time"] if self._swap_end_time: - start_time = input_metadata['end_time'] + start_time = input_metadata["end_time"] return start_time.strftime("%Y-%m-%dT%H:%M:%S") def _global_awips_id(self, input_metadata): - return "AWIPS_" + input_metadata['name'] + return "AWIPS_" + input_metadata["name"] def _global_physical_element(self, input_metadata): var_config = self._var_tree.find_match(**input_metadata) @@ -930,11 +930,11 @@ def _global_physical_element(self, input_metadata): def _global_production_location(self, input_metadata): """Get default global production_location attribute.""" del input_metadata - org = os.environ.get('ORGANIZATION', None) + org = os.environ.get("ORGANIZATION", None) if org is not None: prod_location = org else: - LOG.warning('environment 
ORGANIZATION not set for .production_location attribute, using hostname') + LOG.warning("environment ORGANIZATION not set for .production_location attribute, using hostname") import socket prod_location = socket.gethostname() # FUTURE: something more correct but this will do for now @@ -954,25 +954,25 @@ def _global_production_location(self, input_metadata): @staticmethod def _get_vmin_vmax(var_config, input_data_arr): - if 'valid_range' in var_config: - return var_config['valid_range'] + if "valid_range" in var_config: + return var_config["valid_range"] data_vmin, data_vmax = _get_data_vmin_vmax(input_data_arr) return data_vmin, data_vmax def _render_variable_encoding(self, var_config, input_data_arr): new_encoding = super()._render_variable_encoding(var_config, input_data_arr) vmin, vmax = self._get_vmin_vmax(var_config, input_data_arr) - has_flag_meanings = 'flag_meanings' in input_data_arr.attrs + has_flag_meanings = "flag_meanings" in input_data_arr.attrs is_int = np.issubdtype(input_data_arr.dtype, np.integer) is_cat = has_flag_meanings or is_int - has_sf = new_encoding.get('scale_factor') is not None + has_sf = new_encoding.get("scale_factor") is not None if not has_sf and is_cat: # AWIPS doesn't like Identity conversion so we can't have # a factor of 1 and an offset of 0 # new_encoding['scale_factor'] = None # new_encoding['add_offset'] = None - if '_FillValue' in input_data_arr.attrs: - new_encoding['_FillValue'] = input_data_arr.attrs['_FillValue'] + if "_FillValue" in input_data_arr.attrs: + new_encoding["_FillValue"] = input_data_arr.attrs["_FillValue"] elif not has_sf and vmin is not None and vmax is not None: # calculate scale_factor and add_offset sf, ao, fill = _get_factor_offset_fill( @@ -980,57 +980,57 @@ def _render_variable_encoding(self, var_config, input_data_arr): ) # NOTE: These could be dask arrays that will be computed later # when we go to write the files. 
- new_encoding['scale_factor'] = sf - new_encoding['add_offset'] = ao - new_encoding['_FillValue'] = fill - new_encoding['coordinates'] = ' '.join([ele for ele in input_data_arr.dims]) + new_encoding["scale_factor"] = sf + new_encoding["add_offset"] = ao + new_encoding["_FillValue"] = fill + new_encoding["coordinates"] = " ".join([ele for ele in input_data_arr.dims]) return new_encoding def _get_projection_attrs(self, area_def): """Assign projection attributes per CF standard.""" proj_attrs = area_def.crs.to_cf() proj_encoding = {"dtype": "i4"} - proj_attrs['short_name'] = area_def.area_id - gmap_name = proj_attrs['grid_mapping_name'] + proj_attrs["short_name"] = area_def.area_id + gmap_name = proj_attrs["grid_mapping_name"] preferred_names = { - 'geostationary': 'fixedgrid_projection', - 'lambert_conformal_conic': 'lambert_projection', - 'polar_stereographic': 'polar_projection', - 'mercator': 'mercator_projection', + "geostationary": "fixedgrid_projection", + "lambert_conformal_conic": "lambert_projection", + "polar_stereographic": "polar_projection", + "mercator": "mercator_projection", } if gmap_name not in preferred_names: LOG.warning("Data is in projection %s which may not be supported " "by AWIPS", gmap_name) - area_id_as_var_name = area_def.area_id.replace('-', '_').lower() + area_id_as_var_name = area_def.area_id.replace("-", "_").lower() proj_name = preferred_names.get(gmap_name, area_id_as_var_name) return proj_name, proj_attrs, proj_encoding def _set_xy_coords_attrs(self, new_ds, crs): - y_attrs = new_ds.coords['y'].attrs + y_attrs = new_ds.coords["y"].attrs if crs.is_geographic: - self._fill_units_and_standard_name(y_attrs, 'degrees_north', 'latitude') + self._fill_units_and_standard_name(y_attrs, "degrees_north", "latitude") else: - self._fill_units_and_standard_name(y_attrs, 'meters', 'projection_y_coordinate') - y_attrs['axis'] = 'Y' + self._fill_units_and_standard_name(y_attrs, "meters", "projection_y_coordinate") + y_attrs["axis"] = "Y" - x_attrs = new_ds.coords['x'].attrs + x_attrs = new_ds.coords["x"].attrs if crs.is_geographic: - self._fill_units_and_standard_name(x_attrs, 'degrees_east', 'longitude') + self._fill_units_and_standard_name(x_attrs, "degrees_east", "longitude") else: - self._fill_units_and_standard_name(x_attrs, 'meters', 'projection_x_coordinate') - x_attrs['axis'] = 'X' + self._fill_units_and_standard_name(x_attrs, "meters", "projection_x_coordinate") + x_attrs["axis"] = "X" @staticmethod def _fill_units_and_standard_name(attrs, units, standard_name): """Fill in units and standard_name if not set in `attrs`.""" - if attrs.get('units') is None: - attrs['units'] = units - if attrs['units'] in ('meter', 'metre'): + if attrs.get("units") is None: + attrs["units"] = units + if attrs["units"] in ("meter", "metre"): # AWIPS doesn't like 'meter' - attrs['units'] = 'meters' - if attrs.get('standard_name') is None: - attrs['standard_name'] = standard_name + attrs["units"] = "meters" + if attrs.get("standard_name") is None: + attrs["standard_name"] = standard_name def apply_area_def(self, new_ds, area_def): """Apply information we can gather from the AreaDefinition.""" @@ -1040,25 +1040,25 @@ def apply_area_def(self, new_ds, area_def): new_ds[gmap_name] = gmap_data_arr self._set_xy_coords_attrs(new_ds, area_def.crs) for data_arr in new_ds.data_vars.values(): - if 'y' in data_arr.dims and 'x' in data_arr.dims: - data_arr.attrs['grid_mapping'] = gmap_name + if "y" in data_arr.dims and "x" in data_arr.dims: + data_arr.attrs["grid_mapping"] = gmap_name - 
new_ds.attrs['pixel_x_size'] = area_def.pixel_size_x / 1000.0 - new_ds.attrs['pixel_y_size'] = area_def.pixel_size_y / 1000.0 + new_ds.attrs["pixel_x_size"] = area_def.pixel_size_x / 1000.0 + new_ds.attrs["pixel_y_size"] = area_def.pixel_size_y / 1000.0 return new_ds def apply_tile_coord_encoding(self, new_ds, xy_factors): """Add encoding information specific to the coordinate variables.""" - if 'x' in new_ds.coords: - new_ds.coords['x'].encoding['dtype'] = 'int16' - new_ds.coords['x'].encoding['scale_factor'] = np.float64(xy_factors.mx) - new_ds.coords['x'].encoding['add_offset'] = np.float64(xy_factors.bx) - new_ds.coords['x'].encoding['_FillValue'] = -1 - if 'y' in new_ds.coords: - new_ds.coords['y'].encoding['dtype'] = 'int16' - new_ds.coords['y'].encoding['scale_factor'] = np.float64(xy_factors.my) - new_ds.coords['y'].encoding['add_offset'] = np.float64(xy_factors.by) - new_ds.coords['y'].encoding['_FillValue'] = -1 + if "x" in new_ds.coords: + new_ds.coords["x"].encoding["dtype"] = "int16" + new_ds.coords["x"].encoding["scale_factor"] = np.float64(xy_factors.mx) + new_ds.coords["x"].encoding["add_offset"] = np.float64(xy_factors.bx) + new_ds.coords["x"].encoding["_FillValue"] = -1 + if "y" in new_ds.coords: + new_ds.coords["y"].encoding["dtype"] = "int16" + new_ds.coords["y"].encoding["scale_factor"] = np.float64(xy_factors.my) + new_ds.coords["y"].encoding["add_offset"] = np.float64(xy_factors.by) + new_ds.coords["y"].encoding["_FillValue"] = -1 return new_ds def apply_tile_info(self, new_ds, tile_info): @@ -1067,25 +1067,25 @@ def apply_tile_info(self, new_ds, tile_info): total_pixels = tile_info.image_shape tile_row = tile_info.tile_row_offset tile_column = tile_info.tile_column_offset - tile_height = new_ds.sizes['y'] - tile_width = new_ds.sizes['x'] - new_ds.attrs['tile_row_offset'] = tile_row - new_ds.attrs['tile_column_offset'] = tile_column - new_ds.attrs['product_tile_height'] = tile_height - new_ds.attrs['product_tile_width'] = tile_width - new_ds.attrs['number_product_tiles'] = total_tiles[0] * total_tiles[1] - new_ds.attrs['product_rows'] = total_pixels[0] - new_ds.attrs['product_columns'] = total_pixels[1] + tile_height = new_ds.sizes["y"] + tile_width = new_ds.sizes["x"] + new_ds.attrs["tile_row_offset"] = tile_row + new_ds.attrs["tile_column_offset"] = tile_column + new_ds.attrs["product_tile_height"] = tile_height + new_ds.attrs["product_tile_width"] = tile_width + new_ds.attrs["number_product_tiles"] = total_tiles[0] * total_tiles[1] + new_ds.attrs["product_rows"] = total_pixels[0] + new_ds.attrs["product_columns"] = total_pixels[1] return new_ds def _add_sector_id_global(self, new_ds, sector_id): - if not self._template_dict.get('add_sector_id_global'): + if not self._template_dict.get("add_sector_id_global"): return if sector_id is None: raise ValueError("Keyword 'sector_id' is required for this " "template.") - new_ds.attrs['sector_id'] = sector_id + new_ds.attrs["sector_id"] = sector_id def apply_misc_metadata(self, new_ds, sector_id=None, creator=None, creation_time=None): """Add attributes that don't fit into any other category.""" @@ -1095,9 +1095,9 @@ def apply_misc_metadata(self, new_ds, sector_id=None, creator=None, creation_tim creation_time = datetime.utcnow() self._add_sector_id_global(new_ds, sector_id) - new_ds.attrs['Conventions'] = "CF-1.7" - new_ds.attrs['creator'] = creator - new_ds.attrs['creation_time'] = creation_time.strftime('%Y-%m-%dT%H:%M:%S') + new_ds.attrs["Conventions"] = "CF-1.7" + new_ds.attrs["creator"] = creator + 
new_ds.attrs["creation_time"] = creation_time.strftime("%Y-%m-%dT%H:%M:%S") return new_ds def _render_variable_attributes(self, var_config, input_metadata): @@ -1128,7 +1128,7 @@ def render(self, dataset_or_data_arrays, area_def, def _notnull(data_arr, check_categories=True): is_int = np.issubdtype(data_arr.dtype, np.integer) - fill_value = data_arr.encoding.get('_FillValue', data_arr.attrs.get('_FillValue')) + fill_value = data_arr.encoding.get("_FillValue", data_arr.attrs.get("_FillValue")) if is_int and fill_value is not None: # some DQF datasets are always valid if check_categories: @@ -1178,7 +1178,7 @@ def _copy_to_existing(dataset_to_save, output_filename): new_data[valid_current] = var_data_arr.data[valid_current] var_data_arr.data[:] = new_data var_data_arr.encoding.update(existing_data_arr.encoding) - var_data_arr.encoding.pop('source', None) + var_data_arr.encoding.pop("source", None) return dataset_to_save @@ -1187,10 +1187,10 @@ def _extract_factors(dataset_to_save): factors = {} for data_var in dataset_to_save.data_vars.values(): enc = data_var.encoding - data_var.attrs.pop('valid_range', None) - factor_set = (enc.pop('scale_factor', None), - enc.pop('add_offset', None), - enc.pop('_FillValue', None)) + data_var.attrs.pop("valid_range", None) + factor_set = (enc.pop("scale_factor", None), + enc.pop("add_offset", None), + enc.pop("_FillValue", None)) factors[data_var.name] = factor_set return factors @@ -1199,11 +1199,11 @@ def _reapply_factors(dataset_to_save, factors): for var_name, factor_set in factors.items(): data_arr = dataset_to_save[var_name] if factor_set[0] is not None: - data_arr.encoding['scale_factor'] = factor_set[0] + data_arr.encoding["scale_factor"] = factor_set[0] if factor_set[1] is not None: - data_arr.encoding['add_offset'] = factor_set[1] + data_arr.encoding["add_offset"] = factor_set[1] if factor_set[2] is not None: - data_arr.encoding['_FillValue'] = factor_set[2] + data_arr.encoding["_FillValue"] = factor_set[2] return dataset_to_save @@ -1228,9 +1228,9 @@ def to_nonempty_netcdf(dataset_to_save: xr.Dataset, # TODO: Allow for new variables to be created if update_existing and os.path.isfile(output_filename): dataset_to_save = _copy_to_existing(dataset_to_save, output_filename) - mode = 'a' + mode = "a" else: - mode = 'w' + mode = "w" return dataset_to_save, output_filename, mode # return dataset_to_save.to_netcdf(output_filename, mode=mode) # if fix_awips: @@ -1258,9 +1258,9 @@ class AWIPSTiledWriter(Writer): def __init__(self, compress=False, fix_awips=False, **kwargs): """Initialize writer and decision trees.""" super(AWIPSTiledWriter, self).__init__(default_config_filename="writers/awips_tiled.yaml", **kwargs) - self.base_dir = kwargs.get('base_dir', '') - self.awips_sectors = self.config['sectors'] - self.templates = self.config['templates'] + self.base_dir = kwargs.get("base_dir", "") + self.awips_sectors = self.config["sectors"] + self.templates = self.config["templates"] self.compress = compress self.fix_awips = fix_awips self._fill_sector_info() @@ -1289,7 +1289,7 @@ def separate_init_kwargs(cls, kwargs): # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs, kwargs = super(AWIPSTiledWriter, cls).separate_init_kwargs( kwargs) - for kw in ['compress', 'fix_awips']: + for kw in ["compress", "fix_awips"]: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) @@ -1298,16 +1298,16 @@ def separate_init_kwargs(cls, kwargs): def _fill_sector_info(self): """Convert sector extents if needed.""" for sector_info in 
self.awips_sectors.values(): - sector_info['projection'] = CRS.from_user_input(sector_info['projection']) - p = Proj(sector_info['projection']) - if 'lower_left_xy' in sector_info: - sector_info['lower_left_lonlat'] = p(*sector_info['lower_left_xy'], inverse=True) + sector_info["projection"] = CRS.from_user_input(sector_info["projection"]) + p = Proj(sector_info["projection"]) + if "lower_left_xy" in sector_info: + sector_info["lower_left_lonlat"] = p(*sector_info["lower_left_xy"], inverse=True) else: - sector_info['lower_left_xy'] = p(*sector_info['lower_left_lonlat']) - if 'upper_right_xy' in sector_info: - sector_info['upper_right_lonlat'] = p(*sector_info['upper_right_xy'], inverse=True) + sector_info["lower_left_xy"] = p(*sector_info["lower_left_lonlat"]) + if "upper_right_xy" in sector_info: + sector_info["upper_right_lonlat"] = p(*sector_info["upper_right_xy"], inverse=True) else: - sector_info['upper_right_xy'] = p(*sector_info['upper_right_lonlat']) + sector_info["upper_right_xy"] = p(*sector_info["upper_right_lonlat"]) def _get_lettered_sector_info(self, sector_id): """Get metadata for the current sector if configured. @@ -1334,9 +1334,9 @@ def _get_tile_generator(self, area_def, lettered_grid, sector_id, sector_info = self._get_lettered_sector_info(sector_id) tile_gen = LetteredTileGenerator( area_def, - sector_info['lower_left_xy'] + sector_info['upper_right_xy'], - sector_crs=sector_info['projection'], - cell_size=sector_info['resolution'], + sector_info["lower_left_xy"] + sector_info["upper_right_xy"], + sector_crs=sector_info["projection"], + cell_size=sector_info["resolution"], num_subtiles=num_subtiles, use_sector_reference=use_sector_reference, ) @@ -1356,18 +1356,18 @@ def _area_id(area_def): # get all of the datasets stored by area area_datasets = {} for x in datasets: - area_id = _area_id(x.attrs['area']) - area, ds_list = area_datasets.setdefault(area_id, (x.attrs['area'], [])) + area_id = _area_id(x.attrs["area"]) + area, ds_list = area_datasets.setdefault(area_id, (x.attrs["area"], [])) ds_list.append(x) return area_datasets def _split_rgbs(self, ds): """Split a single RGB dataset in to multiple.""" - for component in 'RGB': + for component in "RGB": band_data = ds.sel(bands=component) - band_data.attrs['name'] += '_{}'.format(component) - band_data.attrs['valid_min'] = 0.0 - band_data.attrs['valid_max'] = 1.0 + band_data.attrs["name"] += "_{}".format(component) + band_data.attrs["valid_min"] = 0.0 + band_data.attrs["valid_max"] = 1.0 yield band_data def _enhance_and_split_rgbs(self, datasets): @@ -1377,7 +1377,7 @@ def _enhance_and_split_rgbs(self, datasets): if ds.ndim == 2: new_datasets.append(ds) continue - elif ds.ndim > 3 or ds.ndim < 1 or (ds.ndim == 3 and 'bands' not in ds.coords): + elif ds.ndim > 3 or ds.ndim < 1 or (ds.ndim == 3 and "bands" not in ds.coords): LOG.error("Can't save datasets with more or less than 2 dimensions " "that aren't RGBs to AWIPS Tiled format: %s", ds.name) else: @@ -1389,31 +1389,31 @@ def _enhance_and_split_rgbs(self, datasets): return new_datasets def _tile_filler(self, tile_info, data_arr): - fill = np.nan if np.issubdtype(data_arr.dtype, np.floating) else data_arr.attrs.get('_FillValue', 0) + fill = np.nan if np.issubdtype(data_arr.dtype, np.floating) else data_arr.attrs.get("_FillValue", 0) data_arr_data = data_arr.data[tile_info.data_slices] data_arr_data = data_arr_data.rechunk(data_arr_data.shape) new_data = da.map_blocks(tile_filler, data_arr_data, tile_info.tile_shape, tile_info.tile_slices, fill, dtype=data_arr.dtype, 
chunks=tile_info.tile_shape) - return xr.DataArray(new_data, dims=('y', 'x'), + return xr.DataArray(new_data, dims=("y", "x"), attrs=data_arr.attrs.copy()) def _slice_and_update_coords(self, tile_info, data_arrays): - new_x = xr.DataArray(tile_info.x, dims=('x',)) - if 'x' in data_arrays[0].coords: - old_x = data_arrays[0].coords['x'] + new_x = xr.DataArray(tile_info.x, dims=("x",)) + if "x" in data_arrays[0].coords: + old_x = data_arrays[0].coords["x"] new_x.attrs.update(old_x.attrs) new_x.encoding = old_x.encoding - new_y = xr.DataArray(tile_info.y, dims=('y',)) - if 'y' in data_arrays[0].coords: - old_y = data_arrays[0].coords['y'] + new_y = xr.DataArray(tile_info.y, dims=("y",)) + if "y" in data_arrays[0].coords: + old_y = data_arrays[0].coords["y"] new_y.attrs.update(old_y.attrs) new_y.encoding = old_y.encoding for data_arr in data_arrays: new_data_arr = self._tile_filler(tile_info, data_arr) - new_data_arr.coords['x'] = new_x - new_data_arr.coords['y'] = new_y + new_data_arr.coords["x"] = new_x + new_data_arr.coords["y"] = new_y yield new_data_arr def _iter_tile_info_and_datasets(self, tile_gen, data_arrays, single_variable=True): @@ -1491,9 +1491,9 @@ def _get_tile_data_info(self, data_arrs, creation_time, source_name): # use the first data array as a "representative" for the group ds_info = data_arrs[0].attrs.copy() # we want to use our own creation_time - ds_info['creation_time'] = creation_time + ds_info["creation_time"] = creation_time if source_name is not None: - ds_info['source_name'] = source_name + ds_info["source_name"] = source_name self._adjust_metadata_times(ds_info) return ds_info @@ -1503,8 +1503,8 @@ def save_datasets(self, datasets, sector_id=None, tile_count=(1, 1), tile_size=None, lettered_grid=False, num_subtiles=None, use_end_time=False, use_sector_reference=False, - template='polar', check_categories=True, - extra_global_attrs=None, environment_prefix='DR', + template="polar", check_categories=True, + extra_global_attrs=None, environment_prefix="DR", compute=True, **kwargs): """Write a series of DataArray objects to multiple NetCDF4 Tile files. 
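For context, the save_datasets() keyword arguments listed in the signature above (sector_id, tile_count, tile_size, lettered_grid, template, ...) are what a user passes through Scene.save_datasets() when selecting this writer. A rough usage sketch follows; the reader name, file glob and channel are placeholders and not part of this patch:

    from glob import glob

    from satpy import Scene

    # Placeholder reader/files; any dataset with an area definition should work in principle.
    scn = Scene(reader="abi_l1b", filenames=glob("/path/to/input/*.nc"))
    scn.load(["C13"])
    scn.save_datasets(
        writer="awips_tiled",
        source_name="SSEC",       # processing source name used in attributes and the filename
        sector_id="LCC",          # sector/region name used in attributes and the filename
        tile_count=(2, 2),        # number of tiles; tile_size gives pixels per tile instead
        lettered_grid=False,      # True to tile on the static lettered grid
        template="polar",         # template section from awips_tiled.yaml
        compute=True,
    )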
@@ -1583,7 +1583,7 @@ def save_datasets(self, datasets, sector_id=None, """ if not isinstance(template, dict): - template = self.config['templates'][template] + template = self.config["templates"][template] template = AWIPSNetCDFTemplate(template, swap_end_time=use_end_time) area_data_arrs = self._group_by_area(datasets) datasets_to_save = [] @@ -1609,9 +1609,9 @@ def save_datasets(self, datasets, sector_id=None, shared_attrs=ds_info, extra_global_attrs=extra_global_attrs) if self.compress: - new_ds.encoding['zlib'] = True + new_ds.encoding["zlib"] = True for var in new_ds.variables.values(): - var.encoding['zlib'] = True + var.encoding["zlib"] = True datasets_to_save.append(new_ds) output_filenames.append(output_filename) @@ -1669,24 +1669,24 @@ def dataset_iter(_delayed_gen): return dataset_iter -def _create_debug_array(sector_info, num_subtiles, font_path='Verdana.ttf'): +def _create_debug_array(sector_info, num_subtiles, font_path="Verdana.ttf"): from PIL import Image, ImageDraw, ImageFont from pkg_resources import resource_filename as get_resource_filename size = (1000, 1000) img = Image.new("L", size, 0) draw = ImageDraw.Draw(img) - if ':' in font_path: + if ":" in font_path: # load from a python package - font_path = get_resource_filename(*font_path.split(':')) + font_path = get_resource_filename(*font_path.split(":")) font = ImageFont.truetype(font_path, 25) - ll_extent = sector_info['lower_left_xy'] - ur_extent = sector_info['upper_right_xy'] + ll_extent = sector_info["lower_left_xy"] + ur_extent = sector_info["upper_right_xy"] total_meters_x = ur_extent[0] - ll_extent[0] total_meters_y = ur_extent[1] - ll_extent[1] - fcs_x = np.ceil(float(sector_info['resolution'][1]) / num_subtiles[1]) - fcs_y = np.ceil(float(sector_info['resolution'][0]) / num_subtiles[0]) + fcs_x = np.ceil(float(sector_info["resolution"][1]) / num_subtiles[1]) + fcs_y = np.ceil(float(sector_info["resolution"][0]) / num_subtiles[0]) total_cells_x = np.ceil(total_meters_x / fcs_x) total_cells_y = np.ceil(total_meters_y / fcs_y) total_cells_x = np.ceil(total_cells_x / num_subtiles[1]) * num_subtiles[1] @@ -1735,10 +1735,10 @@ def _create_debug_array(sector_info, num_subtiles, font_path='Verdana.ttf'): ur_extent[1], ) grid_def = AreaDefinition( - 'debug_grid', - 'debug_grid', - 'debug_grid', - sector_info['projection'], + "debug_grid", + "debug_grid", + "debug_grid", + sector_info["projection"], 1000, 1000, new_extents @@ -1756,26 +1756,26 @@ def draw_rectangle(draw, coordinates, outline=None, fill=None, width=1): def create_debug_lettered_tiles(**writer_kwargs): """Create tile files with tile identifiers "burned" in to the image data for debugging.""" - writer_kwargs['lettered_grid'] = True - writer_kwargs['num_subtiles'] = (2, 2) # default, don't use command line argument + writer_kwargs["lettered_grid"] = True + writer_kwargs["num_subtiles"] = (2, 2) # default, don't use command line argument init_kwargs, save_kwargs = AWIPSTiledWriter.separate_init_kwargs(**writer_kwargs) writer = AWIPSTiledWriter(**init_kwargs) - sector_id = save_kwargs['sector_id'] + sector_id = save_kwargs["sector_id"] sector_info = writer.awips_sectors[sector_id] - area_def, arr = _create_debug_array(sector_info, save_kwargs['num_subtiles']) + area_def, arr = _create_debug_array(sector_info, save_kwargs["num_subtiles"]) now = datetime.utcnow() - product = xr.DataArray(da.from_array(arr, chunks='auto'), attrs=dict( - name='debug_{}'.format(sector_id), - platform_name='DEBUG', - sensor='TILES', + product = xr.DataArray(da.from_array(arr, 
chunks="auto"), attrs=dict( + name="debug_{}".format(sector_id), + platform_name="DEBUG", + sensor="TILES", start_time=now, end_time=now, area=area_def, standard_name="toa_bidirectional_reflectance", - units='1', + units="1", valid_min=0, valid_max=255, )) @@ -1790,12 +1790,12 @@ def main(): """Command line interface mimicing CSPP Polar2Grid.""" import argparse parser = argparse.ArgumentParser(description="Create AWIPS compatible NetCDF tile files") - parser.add_argument("--create-debug", action='store_true', - help='Create debug NetCDF files to show tile locations in AWIPS') - parser.add_argument('-v', '--verbose', dest='verbosity', action="count", default=0, - help='each occurrence increases verbosity 1 level through ' - 'ERROR-WARNING-INFO-DEBUG (default INFO)') - parser.add_argument('-l', '--log', dest="log_fn", default=None, + parser.add_argument("--create-debug", action="store_true", + help="Create debug NetCDF files to show tile locations in AWIPS") + parser.add_argument("-v", "--verbose", dest="verbosity", action="count", default=0, + help="each occurrence increases verbosity 1 level through " + "ERROR-WARNING-INFO-DEBUG (default INFO)") + parser.add_argument("-l", "--log", dest="log_fn", default=None, help="specify the log filename") group_1 = parser.add_argument_group(title="Writer Initialization") @@ -1812,17 +1812,17 @@ def main(): help="Specify how many pixels are in each tile (overrides '--tiles')") # group.add_argument('--tile-offset', nargs=2, default=(0, 0), # help="Start counting tiles from this offset ('row_offset col_offset')") - group_2.add_argument("--letters", dest="lettered_grid", action='store_true', + group_2.add_argument("--letters", dest="lettered_grid", action="store_true", help="Create tiles from a static letter-based grid based on the product projection") group_2.add_argument("--letter-subtiles", nargs=2, type=int, default=(2, 2), help="Specify number of subtiles in each lettered tile: \'row col\'") group_2.add_argument("--output-pattern", default=DEFAULT_OUTPUT_PATTERN, help="output filenaming pattern") - group_2.add_argument("--source-name", default='SSEC', + group_2.add_argument("--source-name", default="SSEC", help="specify processing source name used in attributes and filename (default 'SSEC')") group_2.add_argument("--sector-id", required=True, help="specify name for sector/region used in attributes and filename (example 'LCC')") - group_2.add_argument("--template", default='polar', + group_2.add_argument("--template", default="polar", help="specify the template name to use (default: polar)") args = parser.parse_args() @@ -1838,5 +1838,5 @@ def main(): raise NotImplementedError("Command line interface not implemented yet for AWIPS tiled writer") -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main()) diff --git a/satpy/writers/cf/coords_attrs.py b/satpy/writers/cf/coords_attrs.py index c7e559adc2..6ae80da468 100644 --- a/satpy/writers/cf/coords_attrs.py +++ b/satpy/writers/cf/coords_attrs.py @@ -19,28 +19,28 @@ def add_xy_coords_attrs(dataarray): dataarray = _add_xy_projected_coords_attrs(dataarray) else: dataarray = _add_xy_geographic_coords_attrs(dataarray) - if 'crs' in dataarray.coords: - dataarray = dataarray.drop_vars('crs') + if "crs" in dataarray.coords: + dataarray = dataarray.drop_vars("crs") return dataarray -def _add_xy_projected_coords_attrs(dataarray, x='x', y='y'): +def _add_xy_projected_coords_attrs(dataarray, x="x", y="y"): """Add relevant attributes to x, y coordinates of a projected CRS.""" if x in dataarray.coords: - 
dataarray[x].attrs['standard_name'] = 'projection_x_coordinate' - dataarray[x].attrs['units'] = 'm' + dataarray[x].attrs["standard_name"] = "projection_x_coordinate" + dataarray[x].attrs["units"] = "m" if y in dataarray.coords: - dataarray[y].attrs['standard_name'] = 'projection_y_coordinate' - dataarray[y].attrs['units'] = 'm' + dataarray[y].attrs["standard_name"] = "projection_y_coordinate" + dataarray[y].attrs["units"] = "m" return dataarray -def _add_xy_geographic_coords_attrs(dataarray, x='x', y='y'): +def _add_xy_geographic_coords_attrs(dataarray, x="x", y="y"): """Add relevant attributes to x, y coordinates of a geographic CRS.""" if x in dataarray.coords: - dataarray[x].attrs['standard_name'] = 'longitude' - dataarray[x].attrs['units'] = 'degrees_east' + dataarray[x].attrs["standard_name"] = "longitude" + dataarray[x].attrs["units"] = "degrees_east" if y in dataarray.coords: - dataarray[y].attrs['standard_name'] = 'latitude' - dataarray[y].attrs['units'] = 'degrees_north' + dataarray[y].attrs["standard_name"] = "latitude" + dataarray[y].attrs["units"] = "degrees_north" return dataarray diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index b9a24b9292..6446e53bc3 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -190,41 +190,41 @@ # Ensure that either netCDF4 or h5netcdf is available to avoid silent failure if netCDF4 is None and h5netcdf is None: - raise ImportError('Ensure that the netCDF4 or h5netcdf package is installed.') + raise ImportError("Ensure that the netCDF4 or h5netcdf package is installed.") # Numpy datatypes compatible with all netCDF4 backends. ``np.unicode_`` is # excluded because h5py (and thus h5netcdf) has problems with unicode, see # https://github.com/h5py/h5py/issues/624.""" -NC4_DTYPES = [np.dtype('int8'), np.dtype('uint8'), - np.dtype('int16'), np.dtype('uint16'), - np.dtype('int32'), np.dtype('uint32'), - np.dtype('int64'), np.dtype('uint64'), - np.dtype('float32'), np.dtype('float64'), +NC4_DTYPES = [np.dtype("int8"), np.dtype("uint8"), + np.dtype("int16"), np.dtype("uint16"), + np.dtype("int32"), np.dtype("uint32"), + np.dtype("int64"), np.dtype("uint64"), + np.dtype("float32"), np.dtype("float64"), np.string_] # Unsigned and int64 isn't CF 1.7 compatible # Note: Unsigned and int64 are CF 1.9 compatible -CF_DTYPES = [np.dtype('int8'), - np.dtype('int16'), - np.dtype('int32'), - np.dtype('float32'), - np.dtype('float64'), +CF_DTYPES = [np.dtype("int8"), + np.dtype("int16"), + np.dtype("int32"), + np.dtype("float32"), + np.dtype("float64"), np.string_] -CF_VERSION = 'CF-1.7' +CF_VERSION = "CF-1.7" def get_extra_ds(dataarray, keys=None): """Get the ancillary_variables DataArrays associated to a dataset.""" ds_collection = {} # Retrieve ancillary variable datarrays - for ancillary_dataarray in dataarray.attrs.get('ancillary_variables', []): + for ancillary_dataarray in dataarray.attrs.get("ancillary_variables", []): ancillary_variable = ancillary_dataarray.name if keys and ancillary_variable not in keys: keys.append(ancillary_variable) ds_collection.update(get_extra_ds(ancillary_dataarray, keys=keys)) # Add input dataarray - ds_collection[dataarray.attrs['name']] = dataarray + ds_collection[dataarray.attrs["name"]] = dataarray return ds_collection @@ -235,20 +235,20 @@ def get_extra_ds(dataarray, keys=None): def add_lonlat_coords(dataarray): """Add 'longitude' and 'latitude' coordinates to DataArray.""" dataarray = dataarray.copy() - area = dataarray.attrs['area'] - ignore_dims = {dim: 0 for dim in dataarray.dims 
if dim not in ['x', 'y']} - chunks = getattr(dataarray.isel(**ignore_dims), 'chunks', None) + area = dataarray.attrs["area"] + ignore_dims = {dim: 0 for dim in dataarray.dims if dim not in ["x", "y"]} + chunks = getattr(dataarray.isel(**ignore_dims), "chunks", None) lons, lats = area.get_lonlats(chunks=chunks) - dataarray['longitude'] = xr.DataArray(lons, dims=['y', 'x'], - attrs={'name': "longitude", - 'standard_name': "longitude", - 'units': 'degrees_east'}, - name='longitude') - dataarray['latitude'] = xr.DataArray(lats, dims=['y', 'x'], - attrs={'name': "latitude", - 'standard_name': "latitude", - 'units': 'degrees_north'}, - name='latitude') + dataarray["longitude"] = xr.DataArray(lons, dims=["y", "x"], + attrs={"name": "longitude", + "standard_name": "longitude", + "units": "degrees_east"}, + name="longitude") + dataarray["latitude"] = xr.DataArray(lats, dims=["y", "x"], + attrs={"name": "latitude", + "standard_name": "latitude", + "units": "degrees_north"}, + name="latitude") return dataarray @@ -256,7 +256,7 @@ def _create_grid_mapping(area): """Create the grid mapping instance for `area`.""" import pyproj - if Version(pyproj.__version__) < Version('2.4.1'): + if Version(pyproj.__version__) < Version("2.4.1"): # technically 2.2, but important bug fixes in 2.4.1 raise ImportError("'cf' writer requires pyproj 2.4.1 or greater") # let pyproj do the heavily lifting (pyproj 2.0+ required) @@ -267,18 +267,18 @@ def _create_grid_mapping(area): def _add_grid_mapping(dataarray): """Convert an area to at CF grid mapping.""" dataarray = dataarray.copy() - area = dataarray.attrs['area'] + area = dataarray.attrs["area"] gmapping_var_name, attrs = _create_grid_mapping(area) - dataarray.attrs['grid_mapping'] = gmapping_var_name + dataarray.attrs["grid_mapping"] = gmapping_var_name return dataarray, xr.DataArray(0, attrs=attrs, name=gmapping_var_name) def area2cf(dataarray, include_lonlats=False, got_lonlats=False): """Convert an area to at CF grid mapping or lon and lats.""" res = [] - if not got_lonlats and (isinstance(dataarray.attrs['area'], SwathDefinition) or include_lonlats): + if not got_lonlats and (isinstance(dataarray.attrs["area"], SwathDefinition) or include_lonlats): dataarray = add_lonlat_coords(dataarray) - if isinstance(dataarray.attrs['area'], AreaDefinition): + if isinstance(dataarray.attrs["area"], AreaDefinition): dataarray, gmapping = _add_grid_mapping(dataarray) res.append(gmapping) res.append(dataarray) @@ -287,7 +287,7 @@ def area2cf(dataarray, include_lonlats=False, got_lonlats=False): def is_lon_or_lat_dataarray(dataarray): """Check if the DataArray represents the latitude or longitude coordinate.""" - if 'standard_name' in dataarray.attrs and dataarray.attrs['standard_name'] in ['longitude', 'latitude']: + if "standard_name" in dataarray.attrs and dataarray.attrs["standard_name"] in ["longitude", "latitude"]: return True return False @@ -339,12 +339,12 @@ def make_alt_coords_unique(datas, pretty=False): if pretty: warnings.warn( 'Cannot pretty-format "{}" coordinates because they are ' - 'not identical among the given datasets'.format(coord_name), + "not identical among the given datasets".format(coord_name), stacklevel=2 ) for ds_name, dataset in datas.items(): if coord_name in dataset.coords: - rename = {coord_name: '{}_{}'.format(ds_name, coord_name)} + rename = {coord_name: "{}_{}".format(ds_name, coord_name)} new_datas[ds_name] = new_datas[ds_name].rename(rename) return new_datas @@ -355,15 +355,15 @@ def assert_xy_unique(datas): unique_x = set() unique_y = set() 
for dataset in datas.values(): - if 'y' in dataset.dims: - token_y = tokenize(dataset['y'].data) + if "y" in dataset.dims: + token_y = tokenize(dataset["y"].data) unique_y.add(token_y) - if 'x' in dataset.dims: - token_x = tokenize(dataset['x'].data) + if "x" in dataset.dims: + token_x = tokenize(dataset["x"].data) unique_x.add(token_x) if len(unique_x) > 1 or len(unique_y) > 1: - raise ValueError('Datasets to be saved in one file (or one group) must have identical projection coordinates. ' - 'Please group them by area or save them in separate files.') + raise ValueError("Datasets to be saved in one file (or one group) must have identical projection coordinates. " + "Please group them by area or save them in separate files.") def link_coords(datas): @@ -376,9 +376,9 @@ def link_coords(datas): """ for da_name, data in datas.items(): - declared_coordinates = data.attrs.get('coordinates', []) + declared_coordinates = data.attrs.get("coordinates", []) if isinstance(declared_coordinates, str): - declared_coordinates = declared_coordinates.split(' ') + declared_coordinates = declared_coordinates.split(" ") for coord in declared_coordinates: if coord not in data.coords: try: @@ -387,13 +387,13 @@ def link_coords(datas): except KeyError: warnings.warn( 'Coordinate "{}" referenced by dataarray {} does not ' - 'exist, dropping reference.'.format(coord, da_name), + "exist, dropping reference.".format(coord, da_name), stacklevel=2 ) continue # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() - data.attrs.pop('coordinates', None) + data.attrs.pop("coordinates", None) # ###--------------------------------------------------------------------------. @@ -410,11 +410,11 @@ def add_time_bounds_dimension(ds, time="time"): if start_time is not None) end_time = min(end_time for end_time in end_times if end_time is not None) - ds['time_bnds'] = xr.DataArray([[np.datetime64(start_time), + ds["time_bnds"] = xr.DataArray([[np.datetime64(start_time), np.datetime64(end_time)]], - dims=['time', 'bnds_1d']) - ds[time].attrs['bounds'] = "time_bnds" - ds[time].attrs['standard_name'] = "time" + dims=["time", "bnds_1d"]) + ds[time].attrs["bounds"] = "time_bnds" + ds[time].attrs["standard_name"] = "time" return ds @@ -429,13 +429,13 @@ def _process_time_coord(dataarray, epoch): - the time coordinate has size 1 """ - if 'time' in dataarray.coords: - dataarray['time'].encoding['units'] = epoch - dataarray['time'].attrs['standard_name'] = 'time' - dataarray['time'].attrs.pop('bounds', None) + if "time" in dataarray.coords: + dataarray["time"].encoding["units"] = epoch + dataarray["time"].attrs["standard_name"] = "time" + dataarray["time"].attrs.pop("bounds", None) - if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape: - dataarray = dataarray.expand_dims('time') + if "time" not in dataarray.dims and dataarray["time"].size not in dataarray.shape: + dataarray = dataarray.expand_dims("time") return dataarray @@ -503,7 +503,7 @@ def _encode_nc(obj): return [s.lower() for s in obj.astype(str)] return obj.tolist() - raise ValueError('Unable to encode') + raise ValueError("Unable to encode") def encode_nc(obj): @@ -552,10 +552,10 @@ def encode_attrs_nc(attrs): def _add_ancillary_variables_attrs(dataarray): """Replace ancillary_variables DataArray with a list of their name.""" - list_ancillary_variable_names = [da_ancillary.attrs['name'] - for da_ancillary in dataarray.attrs.get('ancillary_variables', [])] + list_ancillary_variable_names = [da_ancillary.attrs["name"] + 
for da_ancillary in dataarray.attrs.get("ancillary_variables", [])] if list_ancillary_variable_names: - dataarray.attrs['ancillary_variables'] = ' '.join(list_ancillary_variable_names) + dataarray.attrs["ancillary_variables"] = " ".join(list_ancillary_variable_names) else: dataarray.attrs.pop("ancillary_variables", None) return dataarray @@ -572,17 +572,17 @@ def _drop_exclude_attrs(dataarray, exclude_attrs): def _remove_satpy_attrs(new_data): """Remove _satpy attribute.""" - satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')] + satpy_attrs = [key for key in new_data.attrs if key.startswith("_satpy")] for satpy_attr in satpy_attrs: new_data.attrs.pop(satpy_attr) - new_data.attrs.pop('_last_resampler', None) + new_data.attrs.pop("_last_resampler", None) return new_data def _format_prerequisites_attrs(dataarray): """Reformat prerequisites attribute value to string.""" - if 'prerequisites' in dataarray.attrs: - dataarray.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in dataarray.attrs['prerequisites']] + if "prerequisites" in dataarray.attrs: + dataarray.attrs["prerequisites"] = [np.string_(str(prereq)) for prereq in dataarray.attrs["prerequisites"]] return dataarray @@ -603,8 +603,8 @@ def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs): dataarray = _remove_none_attrs(dataarray) _ = dataarray.attrs.pop("area", None) - if 'long_name' not in dataarray.attrs and 'standard_name' not in dataarray.attrs: - dataarray.attrs['long_name'] = dataarray.name + if "long_name" not in dataarray.attrs and "standard_name" not in dataarray.attrs: + dataarray.attrs["long_name"] = dataarray.name if flatten_attrs: dataarray.attrs = flatten_dict(dataarray.attrs) @@ -642,7 +642,7 @@ def _set_default_chunks(encoding, dataset): variable.shape]).min(axis=0) ) # Chunksize may not exceed shape encoding.setdefault(var_name, {}) - encoding[var_name].setdefault('chunksizes', chunks) + encoding[var_name].setdefault("chunksizes", chunks) return encoding @@ -657,7 +657,7 @@ def _set_default_fill_value(encoding, dataset): coord_vars.extend(set(data_array.dims).intersection(data_array.coords)) for coord_var in coord_vars: encoding.setdefault(coord_var, {}) - encoding[coord_var].update({'_FillValue': None}) + encoding[coord_var].update({"_FillValue": None}) return encoding @@ -668,20 +668,20 @@ def _set_default_time_encoding(encoding, dataset): Default is xarray's CF datetime encoding, which can be overridden by user-defined encoding. 
""" - if 'time' in dataset: + if "time" in dataset: try: - dtnp64 = dataset['time'].data[0] + dtnp64 = dataset["time"].data[0] except IndexError: - dtnp64 = dataset['time'].data + dtnp64 = dataset["time"].data default = CFDatetimeCoder().encode(xr.DataArray(dtnp64)) - time_enc = {'units': default.attrs['units'], 'calendar': default.attrs['calendar']} - time_enc.update(encoding.get('time', {})) - bounds_enc = {'units': time_enc['units'], - 'calendar': time_enc['calendar'], - '_FillValue': None} - encoding['time'] = time_enc - encoding['time_bnds'] = bounds_enc # FUTURE: Not required anymore with xarray-0.14+ + time_enc = {"units": default.attrs["units"], "calendar": default.attrs["calendar"]} + time_enc.update(encoding.get("time", {})) + bounds_enc = {"units": time_enc["units"], + "calendar": time_enc["calendar"], + "_FillValue": None} + encoding["time"] = time_enc + encoding["time_bnds"] = bounds_enc # FUTURE: Not required anymore with xarray-0.14+ return encoding @@ -697,20 +697,20 @@ def _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix): for var_name in list(dataset.variables): if not numeric_name_prefix or not var_name.startswith(numeric_name_prefix): continue - orig_var_name = var_name.replace(numeric_name_prefix, '') + orig_var_name = var_name.replace(numeric_name_prefix, "") if orig_var_name in encoding: encoding[var_name] = encoding.pop(orig_var_name) return encoding -def update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix='CHANNEL_'): +def update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix="CHANNEL_"): """Update encoding. Preserve dask chunks, avoid fill values in coordinate variables and make sure that time & time bounds have the same units. """ other_to_netcdf_kwargs = to_netcdf_kwargs.copy() - encoding = other_to_netcdf_kwargs.pop('encoding', {}).copy() + encoding = other_to_netcdf_kwargs.pop("encoding", {}).copy() encoding = _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix) encoding = _set_default_chunks(encoding, dataset) encoding = _set_default_fill_value(encoding, dataset) @@ -728,7 +728,7 @@ def _handle_dataarray_name(original_name, numeric_name_prefix): new_name = numeric_name_prefix + original_name else: warnings.warn( - f'Invalid NetCDF dataset name: {original_name} starts with a digit.', + f"Invalid NetCDF dataset name: {original_name} starts with a digit.", stacklevel=5 ) new_name = original_name # occurs when numeric_name_prefix = '', None or False @@ -741,26 +741,26 @@ def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name """Change the DataArray name by prepending numeric_name_prefix if the name is a digit.""" original_name = None dataarray = dataarray.copy() - if 'name' in dataarray.attrs: - original_name = dataarray.attrs.pop('name') + if "name" in dataarray.attrs: + original_name = dataarray.attrs.pop("name") original_name, new_name = _handle_dataarray_name(original_name, numeric_name_prefix) dataarray = dataarray.rename(new_name) if include_orig_name and numeric_name_prefix and original_name and original_name != new_name: - dataarray.attrs['original_name'] = original_name + dataarray.attrs["original_name"] = original_name return dataarray def _add_history(attrs): """Add 'history' attribute to dictionary.""" - _history_create = 'Created by pytroll/satpy on {}'.format(datetime.utcnow()) - if 'history' in attrs: - if isinstance(attrs['history'], list): - attrs['history'] = ''.join(attrs['history']) - attrs['history'] += '\n' + _history_create + _history_create = "Created by 
pytroll/satpy on {}".format(datetime.utcnow()) + if "history" in attrs: + if isinstance(attrs["history"], list): + attrs["history"] = "".join(attrs["history"]) + attrs["history"] += "\n" + _history_create else: - attrs['history'] = _history_create + attrs["history"] = _history_create return attrs @@ -776,7 +776,7 @@ def _get_groups(groups, list_datarrays): grouped_dataarrays = defaultdict(list) for datarray in list_datarrays: for group_name, group_members in groups.items(): - if datarray.attrs['name'] in group_members: + if datarray.attrs["name"] in group_members: grouped_dataarrays[group_name].append(datarray) break return grouped_dataarrays @@ -787,7 +787,7 @@ def make_cf_dataarray(dataarray, flatten_attrs=False, exclude_attrs=None, include_orig_name=True, - numeric_name_prefix='CHANNEL_'): + numeric_name_prefix="CHANNEL_"): """Make the xr.DataArray CF-compliant. Parameters @@ -833,7 +833,7 @@ def _collect_cf_dataset(list_dataarrays, include_lonlats=True, pretty=False, include_orig_name=True, - numeric_name_prefix='CHANNEL_'): + numeric_name_prefix="CHANNEL_"): """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Dataset. Parameters @@ -881,7 +881,7 @@ def _collect_cf_dataset(list_dataarrays, dataarray_type = dataarray.dtype if dataarray_type not in CF_DTYPES: warnings.warn( - f'dtype {dataarray_type} not compatible with {CF_VERSION}.', + f"dtype {dataarray_type} not compatible with {CF_VERSION}.", stacklevel=3 ) # Deep copy the datarray since adding/modifying attributes and coordinates @@ -938,7 +938,7 @@ def collect_cf_datasets(list_dataarrays, include_lonlats=True, epoch=EPOCH, include_orig_name=True, - numeric_name_prefix='CHANNEL_', + numeric_name_prefix="CHANNEL_", groups=None): """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Datasets. @@ -1003,7 +1003,7 @@ def collect_cf_datasets(list_dataarrays, # If not grouped, add CF conventions. # - If 'Conventions' key already present, do not overwrite ! 
if "Conventions" not in header_attrs and not is_grouped: - header_attrs['Conventions'] = CF_VERSION + header_attrs["Conventions"] = CF_VERSION # Create dictionary of group xr.Datasets # --> If no groups (groups=None) --> group_name=None @@ -1022,7 +1022,7 @@ def collect_cf_datasets(list_dataarrays, if not is_grouped: ds.attrs = header_attrs - if 'time' in ds: + if "time" in ds: ds = add_time_bounds_dimension(ds, time="time") grouped_datasets[group_name] = ds @@ -1032,7 +1032,7 @@ def collect_cf_datasets(list_dataarrays, def _sanitize_writer_kwargs(writer_kwargs): """Remove satpy-specific kwargs.""" writer_kwargs = copy.deepcopy(writer_kwargs) - satpy_kwargs = ['overlay', 'decorate', 'config_files'] + satpy_kwargs = ["overlay", "decorate", "config_files"] for kwarg in satpy_kwargs: writer_kwargs.pop(kwarg, None) return writer_kwargs @@ -1042,9 +1042,9 @@ def _initialize_root_netcdf(filename, engine, header_attrs, to_netcdf_kwargs): """Initialize root empty netCDF.""" root = xr.Dataset({}, attrs=header_attrs) init_nc_kwargs = to_netcdf_kwargs.copy() - init_nc_kwargs.pop('encoding', None) # No variables to be encoded at this point - init_nc_kwargs.pop('unlimited_dims', None) - written = [root.to_netcdf(filename, engine=engine, mode='w', **init_nc_kwargs)] + init_nc_kwargs.pop("encoding", None) # No variables to be encoded at this point + init_nc_kwargs.pop("unlimited_dims", None) + written = [root.to_netcdf(filename, engine=engine, mode="w", **init_nc_kwargs)] return written @@ -1053,7 +1053,7 @@ class CFWriter(Writer): @staticmethod def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, - include_orig_name=True, numeric_name_prefix='CHANNEL_'): + include_orig_name=True, numeric_name_prefix="CHANNEL_"): """Convert the dataarray to something cf-compatible. Args: @@ -1070,8 +1070,8 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, numeric_name_prefix (str): Prepend dataset name with this if starting with a digit """ - warnings.warn('CFWriter.da2cf is deprecated.' - 'Use satpy.writers.cf_writer.make_cf_dataarray instead.', + warnings.warn("CFWriter.da2cf is deprecated." + "Use satpy.writers.cf_writer.make_cf_dataarray instead.", DeprecationWarning, stacklevel=3) return make_cf_dataarray(dataarray=dataarray, epoch=epoch, @@ -1083,8 +1083,8 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, @staticmethod def update_encoding(dataset, to_netcdf_kwargs): """Update encoding info (deprecated).""" - warnings.warn('CFWriter.update_encoding is deprecated. ' - 'Use satpy.writers.cf_writer.update_encoding instead.', + warnings.warn("CFWriter.update_encoding is deprecated. " + "Use satpy.writers.cf_writer.update_encoding instead.", DeprecationWarning, stacklevel=3) return update_encoding(dataset, to_netcdf_kwargs) @@ -1094,7 +1094,7 @@ def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs): def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False, - include_orig_name=True, numeric_name_prefix='CHANNEL_', **to_netcdf_kwargs): + include_orig_name=True, numeric_name_prefix="CHANNEL_", **to_netcdf_kwargs): """Save the given datasets in one netCDF file. Note that all datasets (if grouping: in one group) must have the same projection coordinates. 
@@ -1130,7 +1130,7 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. """ - logger.info('Saving datasets to NetCDF4/CF.') + logger.info("Saving datasets to NetCDF4/CF.") _check_backend_versions() # Define netCDF filename if not provided diff --git a/satpy/writers/geotiff.py b/satpy/writers/geotiff.py index ba3cad7d6a..1a522ecd68 100644 --- a/satpy/writers/geotiff.py +++ b/satpy/writers/geotiff.py @@ -131,7 +131,7 @@ def separate_init_kwargs(cls, kwargs): # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs, kwargs = super(GeoTIFFWriter, cls).separate_init_kwargs( kwargs) - for kw in ['dtype', 'tags']: + for kw in ["dtype", "tags"]: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) @@ -246,7 +246,7 @@ def save_image( gdal_options = self._get_gdal_options(kwargs) if fill_value is None: # fall back to fill_value from configuration file - fill_value = self.info.get('fill_value') + fill_value = self.info.get("fill_value") dtype = dtype if dtype is not None else self.dtype if dtype is None and self.enhancer is not False: @@ -268,14 +268,14 @@ def save_image( fill_value = np.nan if keep_palette and cmap is None and img.palette is not None: from satpy.enhancements import create_colormap - cmap = create_colormap({'colors': img.palette}) + cmap = create_colormap({"colors": img.palette}) cmap.set_range(0, len(img.palette) - 1) if tags is None: tags = {} tags.update(self.tags) - return img.save(filename, fformat='tif', driver=driver, + return img.save(filename, fformat="tif", driver=driver, fill_value=fill_value, dtype=dtype, compute=compute, keep_palette=keep_palette, cmap=cmap, diff --git a/satpy/writers/mitiff.py b/satpy/writers/mitiff.py index 11f847c114..950fce8b21 100644 --- a/satpy/writers/mitiff.py +++ b/satpy/writers/mitiff.py @@ -35,19 +35,19 @@ def _adjust_kwargs(dataset, kwargs): - if 'platform_name' not in kwargs: - kwargs['platform_name'] = dataset.attrs['platform_name'] - if 'name' not in kwargs: - kwargs['name'] = dataset.attrs['name'] - if 'start_time' not in kwargs: - kwargs['start_time'] = dataset.attrs['start_time'] - if 'sensor' not in kwargs: - kwargs['sensor'] = dataset.attrs['sensor'] + if "platform_name" not in kwargs: + kwargs["platform_name"] = dataset.attrs["platform_name"] + if "name" not in kwargs: + kwargs["name"] = dataset.attrs["name"] + if "start_time" not in kwargs: + kwargs["start_time"] = dataset.attrs["start_time"] + if "sensor" not in kwargs: + kwargs["sensor"] = dataset.attrs["sensor"] # Sensor attrs could be set. MITIFFs needing to handle sensor can only have one sensor # Assume the first value of set as the sensor. 
- if isinstance(kwargs['sensor'], set): - LOG.warning('Sensor is set, will use the first value: %s', kwargs['sensor']) - kwargs['sensor'] = (list(kwargs['sensor']))[0] + if isinstance(kwargs["sensor"], set): + LOG.warning("Sensor is set, will use the first value: %s", kwargs["sensor"]) + kwargs["sensor"] = (list(kwargs["sensor"]))[0] class MITIFFWriter(ImageWriter): @@ -80,22 +80,22 @@ def save_dataset(self, dataset, filename=None, fill_value=None, def _delayed_create(dataset): try: - if 'palette' in kwargs: - self.palette = kwargs['palette'] + if "palette" in kwargs: + self.palette = kwargs["palette"] _adjust_kwargs(dataset, kwargs) try: - self.mitiff_config[kwargs['sensor']] = dataset.attrs['metadata_requirements']['config'] - self.channel_order[kwargs['sensor']] = dataset.attrs['metadata_requirements']['order'] - self.file_pattern = dataset.attrs['metadata_requirements']['file_pattern'] + self.mitiff_config[kwargs["sensor"]] = dataset.attrs["metadata_requirements"]["config"] + self.channel_order[kwargs["sensor"]] = dataset.attrs["metadata_requirements"]["order"] + self.file_pattern = dataset.attrs["metadata_requirements"]["file_pattern"] except KeyError: # For some mitiff products this info is needed, for others not. # If needed you should know how to fix this pass try: - self.translate_channel_name[kwargs['sensor']] = \ - dataset.attrs['metadata_requirements']['translate'] + self.translate_channel_name[kwargs["sensor"]] = \ + dataset.attrs["metadata_requirements"]["translate"] except KeyError: # For some mitiff products this info is needed, for others not. # If needed you should know how to fix this @@ -127,11 +127,11 @@ def _delayed_create(datasets): _adjust_kwargs(dataset, kwargs) try: - self.mitiff_config[kwargs['sensor']] = dataset.attrs['metadata_requirements']['config'] - translate = dataset.attrs['metadata_requirements']['translate'] - self.translate_channel_name[kwargs['sensor']] = translate - self.channel_order[kwargs['sensor']] = dataset.attrs['metadata_requirements']['order'] - self.file_pattern = dataset.attrs['metadata_requirements']['file_pattern'] + self.mitiff_config[kwargs["sensor"]] = dataset.attrs["metadata_requirements"]["config"] + translate = dataset.attrs["metadata_requirements"]["translate"] + self.translate_channel_name[kwargs["sensor"]] = translate + self.channel_order[kwargs["sensor"]] = dataset.attrs["metadata_requirements"]["order"] + self.file_pattern = dataset.attrs["metadata_requirements"]["file_pattern"] except KeyError: # For some mitiff products this info is needed, for others not. 
# If needed you should know how to fix this @@ -140,9 +140,9 @@ def _delayed_create(datasets): image_description = self._make_image_description(datasets, **kwargs) LOG.debug("File pattern %s", self.file_pattern) if isinstance(datasets, list): - kwargs['start_time'] = dataset.attrs['start_time'] + kwargs["start_time"] = dataset.attrs["start_time"] else: - kwargs['start_time'] = datasets.attrs['start_time'] + kwargs["start_time"] = datasets.attrs["start_time"] gen_filename = filename or self.get_filename(**kwargs) LOG.info("Saving mitiff to: %s ...", gen_filename) self._save_datasets_as_mitiff(datasets, image_description, gen_filename, **kwargs) @@ -161,8 +161,8 @@ def _make_channel_list(self, datasets, **kwargs): if self.channel_order: channels = self._reorder_channels(datasets, **kwargs) elif self.palette: - if 'palette_channel_name' in kwargs: - channels.append(kwargs['palette_channel_name'].upper()) + if "palette_channel_name" in kwargs: + channels.append(kwargs["palette_channel_name"].upper()) else: LOG.error("Is palette but can not find palette_channel_name to name the dataset") else: @@ -175,17 +175,17 @@ def _make_channel_list(self, datasets, **kwargs): def _reorder_channels(self, datasets, **kwargs): channels = [] - for cn in self.channel_order[kwargs['sensor']]: + for cn in self.channel_order[kwargs["sensor"]]: for ch, ds in enumerate(datasets): - if isinstance(ds.attrs['prerequisites'][ch], (DataQuery, DataID)): - if ds.attrs['prerequisites'][ch]['name'] == cn: + if isinstance(ds.attrs["prerequisites"][ch], (DataQuery, DataID)): + if ds.attrs["prerequisites"][ch]["name"] == cn: channels.append( - ds.attrs['prerequisites'][ch]['name']) + ds.attrs["prerequisites"][ch]["name"]) break else: - if ds.attrs['prerequisites'][ch] == cn: + if ds.attrs["prerequisites"][ch] == cn: channels.append( - ds.attrs['prerequisites'][ch]) + ds.attrs["prerequisites"][ch]) break return channels @@ -194,29 +194,29 @@ def _channel_names(self, channels, cns, **kwargs): for ch in channels: try: _image_description += str( - self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['alias']) + self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)]["alias"]) except KeyError: _image_description += str(ch) - _image_description += ' ' + _image_description += " " # Replace last char(space) with \n _image_description = _image_description[:-1] - _image_description += '\n' + _image_description += "\n" return _image_description def _add_sizes(self, datasets, first_dataset): - _image_description = ' Xsize: ' + _image_description = " Xsize: " if isinstance(datasets, list): - _image_description += str(first_dataset.sizes['x']) + '\n' + _image_description += str(first_dataset.sizes["x"]) + "\n" else: - _image_description += str(datasets.sizes['x']) + '\n' + _image_description += str(datasets.sizes["x"]) + "\n" - _image_description += ' Ysize: ' + _image_description += " Ysize: " if isinstance(datasets, list): - _image_description += str(first_dataset.sizes['y']) + '\n' + _image_description += str(first_dataset.sizes["y"]) + "\n" else: - _image_description += str(datasets.sizes['y']) + '\n' + _image_description += str(datasets.sizes["y"]) + "\n" return _image_description @@ -224,12 +224,12 @@ def _add_proj4_string(self, datasets, first_dataset): proj4_string = " Proj string: " if isinstance(datasets, list): - area = first_dataset.attrs['area'] + area = first_dataset.attrs["area"] else: - area = datasets.attrs['area'] + area = datasets.attrs["area"] # Use pyproj's CRS object to get a valid EPSG code if possible # only in newer 
pyresample versions with pyproj 2.0+ installed - if hasattr(area, 'crs') and area.crs.to_epsg() is not None: + if hasattr(area, "crs") and area.crs.to_epsg() is not None: proj4_string += "+init=EPSG:{}".format(area.crs.to_epsg()) else: proj4_string += area.proj_str @@ -239,23 +239,23 @@ def _add_proj4_string(self, datasets, first_dataset): # FUTURE: Use pyproj 2.0+ to convert EPSG to PROJ4 if possible proj4_string, x_0 = self._convert_epsg_to_proj(proj4_string, x_0) - if 'geos' in proj4_string: + if "geos" in proj4_string: proj4_string = proj4_string.replace("+sweep=x ", "") - if '+a=6378137.0 +b=6356752.31414' in proj4_string: + if "+a=6378137.0 +b=6356752.31414" in proj4_string: proj4_string = proj4_string.replace("+a=6378137.0 +b=6356752.31414", "+ellps=WGS84") - if '+units=m' in proj4_string: + if "+units=m" in proj4_string: proj4_string = proj4_string.replace("+units=m", "+units=km") - if not any(datum in proj4_string for datum in ['datum', 'towgs84']): - proj4_string += ' +towgs84=0,0,0' + if not any(datum in proj4_string for datum in ["datum", "towgs84"]): + proj4_string += " +towgs84=0,0,0" - if 'units' not in proj4_string: - proj4_string += ' +units=km' + if "units" not in proj4_string: + proj4_string += " +units=km" proj4_string = self._append_projection_center(proj4_string, datasets, first_dataset, x_0, y_0) LOG.debug("proj4_string: %s", proj4_string) - proj4_string += '\n' + proj4_string += "\n" return proj4_string @@ -264,59 +264,59 @@ def _append_projection_center(self, proj4_string, datasets, first_dataset, x_0, dataset = first_dataset else: dataset = datasets - if 'x_0' not in proj4_string: - proj4_string += ' +x_0=%.6f' % ( - (-dataset.attrs['area'].area_extent[0] + - dataset.attrs['area'].pixel_size_x) + x_0) - proj4_string += ' +y_0=%.6f' % ( - (-dataset.attrs['area'].area_extent[1] + - dataset.attrs['area'].pixel_size_y) + y_0) - elif '+x_0=0' in proj4_string and '+y_0=0' in proj4_string: - proj4_string = proj4_string.replace("+x_0=0", '+x_0=%.6f' % ( - (-dataset.attrs['area'].area_extent[0] + - dataset.attrs['area'].pixel_size_x) + x_0)) - proj4_string = proj4_string.replace("+y_0=0", '+y_0=%.6f' % ( - (-dataset.attrs['area'].area_extent[1] + - dataset.attrs['area'].pixel_size_y) + y_0)) + if "x_0" not in proj4_string: + proj4_string += " +x_0=%.6f" % ( + (-dataset.attrs["area"].area_extent[0] + + dataset.attrs["area"].pixel_size_x) + x_0) + proj4_string += " +y_0=%.6f" % ( + (-dataset.attrs["area"].area_extent[1] + + dataset.attrs["area"].pixel_size_y) + y_0) + elif "+x_0=0" in proj4_string and "+y_0=0" in proj4_string: + proj4_string = proj4_string.replace("+x_0=0", "+x_0=%.6f" % ( + (-dataset.attrs["area"].area_extent[0] + + dataset.attrs["area"].pixel_size_x) + x_0)) + proj4_string = proj4_string.replace("+y_0=0", "+y_0=%.6f" % ( + (-dataset.attrs["area"].area_extent[1] + + dataset.attrs["area"].pixel_size_y) + y_0)) return proj4_string def _convert_epsg_to_proj(self, proj4_string, x_0): - if 'EPSG:32631' in proj4_string: + if "EPSG:32631" in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32631", "+proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 - elif 'EPSG:32632' in proj4_string: + elif "EPSG:32632" in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32632", "+proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 - elif 'EPSG:32633' in proj4_string: + elif "EPSG:32633" in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32633", "+proj=etmerc +lat_0=0 +lon_0=15 
+k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 - elif 'EPSG:32634' in proj4_string: + elif "EPSG:32634" in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32634", "+proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 - elif 'EPSG:32635' in proj4_string: + elif "EPSG:32635" in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32635", "+proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 - elif 'EPSG' in proj4_string: + elif "EPSG" in proj4_string: LOG.warning("EPSG used in proj string but not converted. Please add this in code") return proj4_string, x_0 def _add_pixel_sizes(self, datasets, first_dataset): _image_description = "" if isinstance(datasets, list): - _image_description += ' Ax: %.6f' % ( - first_dataset.attrs['area'].pixel_size_x / 1000.) - _image_description += ' Ay: %.6f' % ( - first_dataset.attrs['area'].pixel_size_y / 1000.) + _image_description += " Ax: %.6f" % ( + first_dataset.attrs["area"].pixel_size_x / 1000.) + _image_description += " Ay: %.6f" % ( + first_dataset.attrs["area"].pixel_size_y / 1000.) else: - _image_description += ' Ax: %.6f' % ( - datasets.attrs['area'].pixel_size_x / 1000.) - _image_description += ' Ay: %.6f' % ( - datasets.attrs['area'].pixel_size_y / 1000.) + _image_description += " Ax: %.6f" % ( + datasets.attrs["area"].pixel_size_x / 1000.) + _image_description += " Ay: %.6f" % ( + datasets.attrs["area"].pixel_size_y / 1000.) return _image_description @@ -326,21 +326,21 @@ def _add_corners(self, datasets, first_dataset): # Therefor use the center of the upper left pixel. _image_description = "" if isinstance(datasets, list): - _image_description += ' Bx: %.6f' % ( - first_dataset.attrs['area'].area_extent[0] / 1000. + - first_dataset.attrs['area'].pixel_size_x / 1000. / 2.) # LL_x - _image_description += ' By: %.6f' % ( - first_dataset.attrs['area'].area_extent[3] / 1000. - - first_dataset.attrs['area'].pixel_size_y / 1000. / 2.) # UR_y + _image_description += " Bx: %.6f" % ( + first_dataset.attrs["area"].area_extent[0] / 1000. + + first_dataset.attrs["area"].pixel_size_x / 1000. / 2.) # LL_x + _image_description += " By: %.6f" % ( + first_dataset.attrs["area"].area_extent[3] / 1000. - + first_dataset.attrs["area"].pixel_size_y / 1000. / 2.) # UR_y else: - _image_description += ' Bx: %.6f' % ( - datasets.attrs['area'].area_extent[0] / 1000. + - datasets.attrs['area'].pixel_size_x / 1000. / 2.) # LL_x - _image_description += ' By: %.6f' % ( - datasets.attrs['area'].area_extent[3] / 1000. - - datasets.attrs['area'].pixel_size_y / 1000. / 2.) # UR_y - - _image_description += '\n' + _image_description += " Bx: %.6f" % ( + datasets.attrs["area"].area_extent[0] / 1000. + + datasets.attrs["area"].pixel_size_x / 1000. / 2.) # LL_x + _image_description += " By: %.6f" % ( + datasets.attrs["area"].area_extent[3] / 1000. - + datasets.attrs["area"].pixel_size_y / 1000. / 2.) 
# UR_y + + _image_description += "\n" return _image_description def _add_calibration_datasets(self, ch, datasets, reverse_offset, reverse_scale, decimals): @@ -351,34 +351,34 @@ def _add_calibration_datasets(self, ch, datasets, reverse_offset, reverse_scale, found_calibration = False skip_calibration = False ds_list = datasets - if not isinstance(datasets, list) and 'bands' not in datasets.sizes: + if not isinstance(datasets, list) and "bands" not in datasets.sizes: ds_list = [datasets] for i, ds in enumerate(ds_list): - if ('prerequisites' in ds.attrs and - isinstance(ds.attrs['prerequisites'], list) and - len(ds.attrs['prerequisites']) >= i + 1 and - isinstance(ds.attrs['prerequisites'][i], (DataQuery, DataID))): - if ds.attrs['prerequisites'][i].get('name') == str(ch): - if ds.attrs['prerequisites'][i].get('calibration') == 'RADIANCE': + if ("prerequisites" in ds.attrs and + isinstance(ds.attrs["prerequisites"], list) and + len(ds.attrs["prerequisites"]) >= i + 1 and + isinstance(ds.attrs["prerequisites"][i], (DataQuery, DataID))): + if ds.attrs["prerequisites"][i].get("name") == str(ch): + if ds.attrs["prerequisites"][i].get("calibration") == "RADIANCE": raise NotImplementedError( "Mitiff radiance calibration not implemented.") # _table_calibration += ', Radiance, ' # _table_calibration += '[W/m²/µm/sr]' # _decimals = 8 - elif ds.attrs['prerequisites'][i].get('calibration') == 'brightness_temperature': + elif ds.attrs["prerequisites"][i].get("calibration") == "brightness_temperature": found_calibration = True - _table_calibration += ', BT, ' + _table_calibration += ", BT, " _table_calibration += "\N{DEGREE SIGN}" - _table_calibration += u'[C]' + _table_calibration += u"[C]" _reverse_offset = 255. _reverse_scale = -1. _decimals = 2 - elif ds.attrs['prerequisites'][i].get('calibration') == 'reflectance': + elif ds.attrs["prerequisites"][i].get("calibration") == "reflectance": found_calibration = True - _table_calibration += ', Reflectance(Albedo), ' - _table_calibration += '[%]' + _table_calibration += ", Reflectance(Albedo), " + _table_calibration += "[%]" _decimals = 2 else: LOG.warning("Unknown calib type. 
Must be Radiance, Reflectance or BT.") @@ -399,13 +399,13 @@ def _add_calibration_datasets(self, ch, datasets, reverse_offset, reverse_scale, def _add_palette_info(self, datasets, palette_unit, palette_description, **kwargs): # mitiff key word for palette interpretion - _palette = '\n COLOR INFO:\n' + _palette = "\n COLOR INFO:\n" # mitiff info for the unit of the interpretion - _palette += ' {}\n'.format(palette_unit) + _palette += " {}\n".format(palette_unit) # The length of the palette description as needed by mitiff in DIANA - _palette += ' {}\n'.format(len(palette_description)) + _palette += " {}\n".format(len(palette_description)) for desc in palette_description: - _palette += ' {}\n'.format(desc) + _palette += " {}\n".format(desc) return _palette def _add_calibration(self, channels, cns, datasets, **kwargs): @@ -419,10 +419,10 @@ def _add_calibration(self, channels, cns, datasets, **kwargs): if palette: raise NotImplementedError("Mitiff palette saving is not implemented.") else: - _table_calibration += 'Table_calibration: ' + _table_calibration += "Table_calibration: " try: _table_calibration += str( - self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['alias']) + self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)]["alias"]) except KeyError: _table_calibration += str(ch) @@ -435,18 +435,18 @@ def _add_calibration(self, channels, cns, datasets, **kwargs): _table_calibration += __table_calibration if not skip_calibration: - _table_calibration += ', 8, [ ' + _table_calibration += ", 8, [ " for val in range(0, 256): # Comma separated list of values - _table_calibration += '{0:.{1}f} '.format((float(self.mitiff_config[ - kwargs['sensor']][cns.get(ch, ch)]['min-val']) + + _table_calibration += "{0:.{1}f} ".format((float(self.mitiff_config[ + kwargs["sensor"]][cns.get(ch, ch)]["min-val"]) + ((_reverse_offset + _reverse_scale * val) * - (float(self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['max-val']) - - float(self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['min-val']))) / 255.), + (float(self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)]["max-val"]) - + float(self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)]["min-val"]))) / 255.), _decimals) # _table_calibration += '0.00000000 ' - _table_calibration += ']\n\n' + _table_calibration += "]\n\n" else: _table_calibration = "" @@ -498,14 +498,14 @@ def _make_image_description(self, datasets, **kwargs): []\n\n """ - translate_platform_name = {'metop01': 'Metop-B', - 'metop02': 'Metop-A', - 'metop03': 'Metop-C', - 'noaa15': 'NOAA-15', - 'noaa16': 'NOAA-16', - 'noaa17': 'NOAA-17', - 'noaa18': 'NOAA-18', - 'noaa19': 'NOAA-19'} + translate_platform_name = {"metop01": "Metop-B", + "metop02": "Metop-A", + "metop03": "Metop-C", + "noaa15": "NOAA-15", + "noaa16": "NOAA-16", + "noaa17": "NOAA-17", + "noaa18": "NOAA-18", + "noaa19": "NOAA-19"} first_dataset = datasets if isinstance(datasets, list): @@ -514,40 +514,40 @@ def _make_image_description(self, datasets, **kwargs): _platform_name = self._get_platform_name(first_dataset, translate_platform_name, kwargs) - _image_description = '' - _image_description.encode('utf-8') + _image_description = "" + _image_description.encode("utf-8") - _image_description += ' Satellite: ' + _image_description += " Satellite: " if _platform_name is not None: _image_description += _platform_name - _image_description += '\n' + _image_description += "\n" - _image_description += ' Date and Time: ' + _image_description += " Date and Time: " # Select earliest start_time first = True earliest = 0 for 
dataset in datasets: if first: - earliest = dataset.attrs['start_time'] + earliest = dataset.attrs["start_time"] else: - if dataset.attrs['start_time'] < earliest: - earliest = dataset.attrs['start_time'] + if dataset.attrs["start_time"] < earliest: + earliest = dataset.attrs["start_time"] first = False LOG.debug("earliest start_time: %s", earliest) _image_description += earliest.strftime("%H:%M %d/%m-%Y\n") - _image_description += ' SatDir: 0\n' + _image_description += " SatDir: 0\n" - _image_description += ' Channels: ' + _image_description += " Channels: " _image_description += self._get_dataset_len(datasets) - _image_description += ' In this file: ' + _image_description += " In this file: " channels = self._make_channel_list(datasets, **kwargs) try: - cns = self.translate_channel_name.get(kwargs['sensor'], {}) + cns = self.translate_channel_name.get(kwargs["sensor"], {}) except KeyError: pass @@ -555,25 +555,25 @@ def _make_image_description(self, datasets, **kwargs): _image_description += self._add_sizes(datasets, first_dataset) - _image_description += ' Map projection: Stereographic\n' + _image_description += " Map projection: Stereographic\n" _image_description += self._add_proj4_string(datasets, first_dataset) - _image_description += ' TrueLat: 60N\n' - _image_description += ' GridRot: 0\n' + _image_description += " TrueLat: 60N\n" + _image_description += " GridRot: 0\n" - _image_description += ' Xunit:1000 m Yunit: 1000 m\n' + _image_description += " Xunit:1000 m Yunit: 1000 m\n" - _image_description += ' NPX: %.6f' % (0) - _image_description += ' NPY: %.6f' % (0) + '\n' + _image_description += " NPX: %.6f" % (0) + _image_description += " NPY: %.6f" % (0) + "\n" _image_description += self._add_pixel_sizes(datasets, first_dataset) _image_description += self._add_corners(datasets, first_dataset) if isinstance(datasets, list): - LOG.debug("Area extent: %s", first_dataset.attrs['area'].area_extent) + LOG.debug("Area extent: %s", first_dataset.attrs["area"].area_extent) else: - LOG.debug("Area extent: %s", datasets.attrs['area'].area_extent) + LOG.debug("Area extent: %s", datasets.attrs["area"].area_extent) if self.palette: LOG.debug("Doing palette image") @@ -587,24 +587,24 @@ def _get_dataset_len(self, datasets): if isinstance(datasets, list): LOG.debug("len datasets: %s", len(datasets)) dataset_len = str(len(datasets)) - elif 'bands' in datasets.sizes: - LOG.debug("len datasets: %s", datasets.sizes['bands']) - dataset_len = str(datasets.sizes['bands']) + elif "bands" in datasets.sizes: + LOG.debug("len datasets: %s", datasets.sizes["bands"]) + dataset_len = str(datasets.sizes["bands"]) elif len(datasets.sizes) == 2: LOG.debug("len datasets: 1") - dataset_len = '1' + dataset_len = "1" else: dataset_len = "" return dataset_len def _get_platform_name(self, first_dataset, translate_platform_name, kwargs): - if 'platform_name' in first_dataset.attrs: + if "platform_name" in first_dataset.attrs: _platform_name = translate_platform_name.get( - first_dataset.attrs['platform_name'], - first_dataset.attrs['platform_name']) - elif 'platform_name' in kwargs: + first_dataset.attrs["platform_name"], + first_dataset.attrs["platform_name"]) + elif "platform_name" in kwargs: _platform_name = translate_platform_name.get( - kwargs['platform_name'], kwargs['platform_name']) + kwargs["platform_name"], kwargs["platform_name"]) else: _platform_name = None return _platform_name @@ -612,7 +612,7 @@ def _get_platform_name(self, first_dataset, translate_platform_name, kwargs): def _calibrate_data(self, 
dataset, calibration, min_val, max_val): reverse_offset = 0. reverse_scale = 1. - if calibration == 'brightness_temperature': + if calibration == "brightness_temperature": # If data is brightness temperature, the data must be inverted. reverse_offset = 255. reverse_scale = -1. @@ -631,44 +631,44 @@ def _save_as_palette(self, datasets, tmp_gen_filename, tiffinfo, **kwargs): # The value of the component is used as an index into the red, green and blue curves # in the ColorMap field to retrieve an RGB triplet that defines the color. When # PhotometricInterpretation=3 is used, ColorMap must be present and SamplesPerPixel must be 1. - tiffinfo[270] = tiffinfo[270].decode('utf-8') + tiffinfo[270] = tiffinfo[270].decode("utf-8") - img = Image.fromarray(datasets.data.astype(np.uint8), mode='P') - if 'palette_color_map' in kwargs: - img.putpalette(ImagePalette.ImagePalette('RGB', kwargs['palette_color_map'])) + img = Image.fromarray(datasets.data.astype(np.uint8), mode="P") + if "palette_color_map" in kwargs: + img.putpalette(ImagePalette.ImagePalette("RGB", kwargs["palette_color_map"])) else: LOG.error("In a mitiff palette image a color map must be provided: palette_color_map is missing.") return - img.save(tmp_gen_filename, compression='raw', compress_level=9, tiffinfo=tiffinfo) + img.save(tmp_gen_filename, compression="raw", compress_level=9, tiffinfo=tiffinfo) def _save_as_enhanced(self, datasets, tmp_gen_filename, **kwargs): """Save datasets as an enhanced RGB image.""" img = get_enhanced_image(datasets.squeeze(), enhance=self.enhancer) tiffinfo = {} - if 'bands' in img.data.sizes and 'bands' not in datasets.sizes: + if "bands" in img.data.sizes and "bands" not in datasets.sizes: LOG.debug("Datasets without 'bands' become image with 'bands' due to enhancement.") LOG.debug("Needs to regenerate mitiff image description") image_description = self._make_image_description(img.data, **kwargs) - tiffinfo[IMAGEDESCRIPTION] = (image_description).encode('utf-8') + tiffinfo[IMAGEDESCRIPTION] = (image_description).encode("utf-8") mitiff_frames = [] - for band in img.data['bands']: + for band in img.data["bands"]: chn = img.data.sel(bands=band) data = chn.values.clip(0, 1) * 254. + 1 data = data.clip(0, 255) - mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode='L')) + mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode="L")) mitiff_frames[0].save(tmp_gen_filename, save_all=True, append_images=mitiff_frames[1:], - compression='raw', compress_level=9, tiffinfo=tiffinfo) + compression="raw", compress_level=9, tiffinfo=tiffinfo) def _generate_intermediate_filename(self, gen_filename): """Replace mitiff ext because pillow doesn't recognise the file type.""" bs, ex = os.path.splitext(gen_filename) tmp_gen_filename = gen_filename - if ex.endswith('mitiff'): + if ex.endswith("mitiff"): bd = os.path.dirname(bs) bn = os.path.basename(bs) - tmp_gen_filename = os.path.join(bd, '.' + bn + '.tif') + tmp_gen_filename = os.path.join(bd, "." 
+ bn + ".tif") return tmp_gen_filename def _save_datasets_as_mitiff(self, datasets, image_description, @@ -680,25 +680,25 @@ def _save_datasets_as_mitiff(self, datasets, image_description, """ tmp_gen_filename = self._generate_intermediate_filename(gen_filename) tiffinfo = {} - tiffinfo[IMAGEDESCRIPTION] = (image_description).encode('latin-1') + tiffinfo[IMAGEDESCRIPTION] = (image_description).encode("latin-1") - cns = self.translate_channel_name.get(kwargs['sensor'], {}) + cns = self.translate_channel_name.get(kwargs["sensor"], {}) if isinstance(datasets, list): LOG.debug("Saving datasets as list") mitiff_frames = [] - for _cn in self.channel_order[kwargs['sensor']]: + for _cn in self.channel_order[kwargs["sensor"]]: for dataset in datasets: - if dataset.attrs['name'] == _cn: + if dataset.attrs["name"] == _cn: # Need to possible translate channels names from satpy to mitiff - cn = cns.get(dataset.attrs['name'], dataset.attrs['name']) - data = self._calibrate_data(dataset, dataset.attrs['calibration'], - self.mitiff_config[kwargs['sensor']][cn]['min-val'], - self.mitiff_config[kwargs['sensor']][cn]['max-val']) - mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode='L')) + cn = cns.get(dataset.attrs["name"], dataset.attrs["name"]) + data = self._calibrate_data(dataset, dataset.attrs["calibration"], + self.mitiff_config[kwargs["sensor"]][cn]["min-val"], + self.mitiff_config[kwargs["sensor"]][cn]["max-val"]) + mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode="L")) break mitiff_frames[0].save(tmp_gen_filename, save_all=True, append_images=mitiff_frames[1:], - compression='raw', compress_level=9, tiffinfo=tiffinfo) - elif 'dataset' in datasets.attrs['name']: + compression="raw", compress_level=9, tiffinfo=tiffinfo) + elif "dataset" in datasets.attrs["name"]: LOG.debug("Saving dataset as single dataset.") self._save_single_dataset(datasets, cns, tmp_gen_filename, tiffinfo, kwargs) elif self.palette: @@ -710,35 +710,35 @@ def _save_datasets_as_mitiff(self, datasets, image_description, os.rename(tmp_gen_filename, gen_filename) def _save_single_dataset(self, datasets, cns, tmp_gen_filename, tiffinfo, kwargs): - LOG.debug("Saving %s as a dataset.", datasets.attrs['name']) - if len(datasets.dims) == 2 and (all('bands' not in i for i in datasets.dims)): + LOG.debug("Saving %s as a dataset.", datasets.attrs["name"]) + if len(datasets.dims) == 2 and (all("bands" not in i for i in datasets.dims)): # Special case with only one channel ie. no bands # Need to possible translate channels names from satpy to mitiff # Note the last index is a tuple index. 
- cn = cns.get(datasets.attrs['prerequisites'][0]['name'], - datasets.attrs['prerequisites'][0]['name']) - data = self._calibrate_data(datasets, datasets.attrs['prerequisites'][0].get('calibration'), - self.mitiff_config[kwargs['sensor']][cn]['min-val'], - self.mitiff_config[kwargs['sensor']][cn]['max-val']) + cn = cns.get(datasets.attrs["prerequisites"][0]["name"], + datasets.attrs["prerequisites"][0]["name"]) + data = self._calibrate_data(datasets, datasets.attrs["prerequisites"][0].get("calibration"), + self.mitiff_config[kwargs["sensor"]][cn]["min-val"], + self.mitiff_config[kwargs["sensor"]][cn]["max-val"]) Image.fromarray(data.astype(np.uint8)).save(tmp_gen_filename, compression="raw", compress_level=9, tiffinfo=tiffinfo) else: mitiff_frames = [] - for _cn_i, _cn in enumerate(self.channel_order[kwargs['sensor']]): - for band in datasets['bands']: + for _cn_i, _cn in enumerate(self.channel_order[kwargs["sensor"]]): + for band in datasets["bands"]: if band == _cn: chn = datasets.sel(bands=band) # Need to possible translate channels names from satpy to mitiff # Note the last index is a tuple index. - cn = cns.get(chn.attrs['prerequisites'][_cn_i]['name'], - chn.attrs['prerequisites'][_cn_i]['name']) - data = self._calibrate_data(chn, chn.attrs['prerequisites'][_cn_i].get('calibration'), - self.mitiff_config[kwargs['sensor']][cn]['min-val'], - self.mitiff_config[kwargs['sensor']][cn]['max-val']) + cn = cns.get(chn.attrs["prerequisites"][_cn_i]["name"], + chn.attrs["prerequisites"][_cn_i]["name"]) + data = self._calibrate_data(chn, chn.attrs["prerequisites"][_cn_i].get("calibration"), + self.mitiff_config[kwargs["sensor"]][cn]["min-val"], + self.mitiff_config[kwargs["sensor"]][cn]["max-val"]) - mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode='L')) + mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode="L")) break mitiff_frames[0].save(tmp_gen_filename, save_all=True, append_images=mitiff_frames[1:], - compression='raw', compress_level=9, tiffinfo=tiffinfo) + compression="raw", compress_level=9, tiffinfo=tiffinfo) diff --git a/satpy/writers/utils.py b/satpy/writers/utils.py index 3308115ff9..fe9ff00625 100644 --- a/satpy/writers/utils.py +++ b/satpy/writers/utils.py @@ -18,7 +18,7 @@ """Writer utilities.""" -def flatten_dict(d, parent_key='', sep='_'): +def flatten_dict(d, parent_key="", sep="_"): """Flatten a nested dictionary. 
Based on https://stackoverflow.com/a/6027615/5703449 diff --git a/setup.py b/setup.py index 612db4fa05..76c2919200 100644 --- a/setup.py +++ b/setup.py @@ -22,68 +22,68 @@ from setuptools import find_packages, setup -requires = ['numpy >=1.13', 'pillow', 'pyresample >=1.24.0', 'trollsift', - 'trollimage >=1.20', 'pykdtree', 'pyyaml >=5.1', 'xarray >=0.10.1, !=0.13.0', - 'dask[array] >=0.17.1', 'pyproj>=2.2', 'zarr', 'donfig', 'appdirs', - 'packaging', 'pooch', 'pyorbital'] +requires = ["numpy >=1.13", "pillow", "pyresample >=1.24.0", "trollsift", + "trollimage >=1.20", "pykdtree", "pyyaml >=5.1", "xarray >=0.10.1, !=0.13.0", + "dask[array] >=0.17.1", "pyproj>=2.2", "zarr", "donfig", "appdirs", + "packaging", "pooch", "pyorbital"] -test_requires = ['behave', 'h5py', 'netCDF4', 'pyhdf', 'imageio', - 'rasterio', 'geoviews', 'trollimage', 'fsspec', 'bottleneck', - 'rioxarray', 'pytest', 'pytest-lazy-fixture', 'defusedxml', - 's3fs', 'eccodes', 'h5netcdf', 'xarray-datatree', - 'skyfield', 'ephem', 'pint-xarray', 'astropy', 'dask-image'] +test_requires = ["behave", "h5py", "netCDF4", "pyhdf", "imageio", + "rasterio", "geoviews", "trollimage", "fsspec", "bottleneck", + "rioxarray", "pytest", "pytest-lazy-fixture", "defusedxml", + "s3fs", "eccodes", "h5netcdf", "xarray-datatree", + "skyfield", "ephem", "pint-xarray", "astropy", "dask-image"] extras_require = { # Readers: - 'avhrr_l1b_gaclac': ['pygac >= 1.3.0'], - 'modis_l1b': ['pyhdf', 'python-geotiepoints >= 1.1.7'], - 'geocat': ['pyhdf'], - 'acspo': ['netCDF4 >= 1.1.8'], - 'clavrx': ['netCDF4 >= 1.1.8'], - 'viirs_l1b': ['netCDF4 >= 1.1.8'], - 'viirs_sdr': ['h5py >= 2.7.0'], - 'viirs_compact': ['h5py >= 2.7.0'], - 'omps_edr': ['h5py >= 2.7.0'], - 'amsr2_l1b': ['h5py >= 2.7.0'], - 'hrpt': ['pyorbital >= 1.3.1', 'pygac', 'python-geotiepoints >= 1.1.7'], - 'hrit_msg': ['pytroll-schedule'], - 'msi_safe': ['rioxarray', "bottleneck", "python-geotiepoints"], - 'nc_nwcsaf_msg': ['netCDF4 >= 1.1.8'], - 'sar_c': ['python-geotiepoints >= 1.1.7', 'rasterio', 'rioxarray', 'defusedxml'], - 'abi_l1b': ['h5netcdf'], - 'seviri_l1b_hrit': ['pyorbital >= 1.3.1'], - 'seviri_l1b_native': ['pyorbital >= 1.3.1'], - 'seviri_l1b_nc': ['pyorbital >= 1.3.1', 'netCDF4 >= 1.1.8'], - 'seviri_l2_bufr': ['eccodes'], - 'seviri_l2_grib': ['eccodes'], - 'hsaf_grib': ['pygrib'], - 'remote_reading': ['fsspec'], - 'insat_3d': ['xarray-datatree'], - 'gms5-vissr_l1b': ["numba"], + "avhrr_l1b_gaclac": ["pygac >= 1.3.0"], + "modis_l1b": ["pyhdf", "python-geotiepoints >= 1.1.7"], + "geocat": ["pyhdf"], + "acspo": ["netCDF4 >= 1.1.8"], + "clavrx": ["netCDF4 >= 1.1.8"], + "viirs_l1b": ["netCDF4 >= 1.1.8"], + "viirs_sdr": ["h5py >= 2.7.0"], + "viirs_compact": ["h5py >= 2.7.0"], + "omps_edr": ["h5py >= 2.7.0"], + "amsr2_l1b": ["h5py >= 2.7.0"], + "hrpt": ["pyorbital >= 1.3.1", "pygac", "python-geotiepoints >= 1.1.7"], + "hrit_msg": ["pytroll-schedule"], + "msi_safe": ["rioxarray", "bottleneck", "python-geotiepoints"], + "nc_nwcsaf_msg": ["netCDF4 >= 1.1.8"], + "sar_c": ["python-geotiepoints >= 1.1.7", "rasterio", "rioxarray", "defusedxml"], + "abi_l1b": ["h5netcdf"], + "seviri_l1b_hrit": ["pyorbital >= 1.3.1"], + "seviri_l1b_native": ["pyorbital >= 1.3.1"], + "seviri_l1b_nc": ["pyorbital >= 1.3.1", "netCDF4 >= 1.1.8"], + "seviri_l2_bufr": ["eccodes"], + "seviri_l2_grib": ["eccodes"], + "hsaf_grib": ["pygrib"], + "remote_reading": ["fsspec"], + "insat_3d": ["xarray-datatree"], + "gms5-vissr_l1b": ["numba"], # Writers: - 'cf': ['h5netcdf >= 0.7.3'], - 'awips_tiled': ['netCDF4 >= 1.1.8'], - 
'geotiff': ['rasterio', 'trollimage[geotiff]'], - 'ninjo': ['pyninjotiff', 'pint'], + "cf": ["h5netcdf >= 0.7.3"], + "awips_tiled": ["netCDF4 >= 1.1.8"], + "geotiff": ["rasterio", "trollimage[geotiff]"], + "ninjo": ["pyninjotiff", "pint"], "units": ["pint-xarray"], # Composites/Modifiers: - 'rayleigh': ['pyspectral >= 0.10.1'], - 'angles': ['pyorbital >= 1.3.1'], - 'filters': ['dask-image'], + "rayleigh": ["pyspectral >= 0.10.1"], + "angles": ["pyorbital >= 1.3.1"], + "filters": ["dask-image"], # MultiScene: - 'animations': ['imageio'], + "animations": ["imageio"], # Documentation: - 'doc': ['sphinx', 'sphinx_rtd_theme', 'sphinxcontrib-apidoc'], + "doc": ["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"], # Other - 'geoviews': ['geoviews'], - 'overlays': ['pycoast', 'pydecorate'], - 'satpos_from_tle': ['skyfield', 'astropy'], - 'tests': test_requires, + "geoviews": ["geoviews"], + "overlays": ["pycoast", "pydecorate"], + "satpos_from_tle": ["skyfield", "astropy"], + "tests": test_requires, } all_extras = [] for extra_deps in extras_require.values(): all_extras.extend(extra_deps) -extras_require['all'] = list(set(all_extras)) +extras_require["all"] = list(set(all_extras)) def _config_data_files(base_dirs, extensions=(".cfg", )): @@ -110,21 +110,21 @@ def _config_data_files(base_dirs, extensions=(".cfg", )): entry_points = { - 'console_scripts': [ - 'satpy_retrieve_all_aux_data=satpy.aux_download:retrieve_all_cmd', + "console_scripts": [ + "satpy_retrieve_all_aux_data=satpy.aux_download:retrieve_all_cmd", ], } -NAME = 'satpy' -with open('README.rst', 'r') as readme: +NAME = "satpy" +with open("README.rst", "r") as readme: README = readme.read() setup(name=NAME, - description='Python package for earth-observing satellite data processing', + description="Python package for earth-observing satellite data processing", long_description=README, - author='The Pytroll Team', - author_email='pytroll@googlegroups.com', + author="The Pytroll Team", + author_email="pytroll@googlegroups.com", classifiers=["Development Status :: 5 - Production/Stable", "Intended Audience :: Science/Research", "License :: OSI Approved :: GNU General Public License v3 " + @@ -147,23 +147,23 @@ def _config_data_files(base_dirs, extensions=(".cfg", )): packages=find_packages(), # Always use forward '/', even on Windows # See https://setuptools.readthedocs.io/en/latest/userguide/datafiles.html#data-files-support - package_data={'satpy': ['etc/geo_image.cfg', - 'etc/areas.yaml', - 'etc/satpy.cfg', - 'etc/himawari-8.cfg', - 'etc/eps_avhrrl1b_6.5.xml', - 'etc/readers/*.yaml', - 'etc/writers/*.yaml', - 'etc/composites/*.yaml', - 'etc/enhancements/*.cfg', - 'etc/enhancements/*.yaml', - 'tests/etc/readers/*.yaml', - 'tests/etc/composites/*.yaml', - 'tests/etc/writers/*.yaml', + package_data={"satpy": ["etc/geo_image.cfg", + "etc/areas.yaml", + "etc/satpy.cfg", + "etc/himawari-8.cfg", + "etc/eps_avhrrl1b_6.5.xml", + "etc/readers/*.yaml", + "etc/writers/*.yaml", + "etc/composites/*.yaml", + "etc/enhancements/*.cfg", + "etc/enhancements/*.yaml", + "tests/etc/readers/*.yaml", + "tests/etc/composites/*.yaml", + "tests/etc/writers/*.yaml", ]}, zip_safe=False, install_requires=requires, - python_requires='>=3.9', + python_requires=">=3.9", extras_require=extras_require, entry_points=entry_points, ) diff --git a/utils/convert_to_ninjotiff.py b/utils/convert_to_ninjotiff.py index e457ee35e3..2189a11dec 100644 --- a/utils/convert_to_ninjotiff.py +++ b/utils/convert_to_ninjotiff.py @@ -38,20 +38,20 @@ debug_on() -parser = 
argparse.ArgumentParser(description='Turn an image into a NinjoTiff.') -parser.add_argument('--cfg', dest='cfg', action="store", +parser = argparse.ArgumentParser(description="Turn an image into a NinjoTiff.") +parser.add_argument("--cfg", dest="cfg", action="store", help="YAML configuration as an alternative to the command line input for NinJo metadata.") -parser.add_argument('--input_dir', dest='input_dir', action="store", +parser.add_argument("--input_dir", dest="input_dir", action="store", help="Directory with input data, that must contain a timestamp in the filename.") -parser.add_argument('--chan_id', dest='chan_id', action="store", help="Channel ID", default="9999") -parser.add_argument('--sat_id', dest='sat_id', action="store", help="Satellite ID", default="8888") -parser.add_argument('--data_cat', dest='data_cat', action="store", +parser.add_argument("--chan_id", dest="chan_id", action="store", help="Channel ID", default="9999") +parser.add_argument("--sat_id", dest="sat_id", action="store", help="Satellite ID", default="8888") +parser.add_argument("--data_cat", dest="data_cat", action="store", help="Category of data (one of GORN, GPRN, PORN)", default="GORN") -parser.add_argument('--area', dest='areadef', action="store", +parser.add_argument("--area", dest="areadef", action="store", help="Area name, the definition must exist in your areas configuration file", default="nrEURO1km_NPOL_COALeqc") -parser.add_argument('--ph_unit', dest='ph_unit', action="store", help="Physical unit", default="CELSIUS") -parser.add_argument('--data_src', dest='data_src', action="store", help="Data source", default="EUMETCAST") +parser.add_argument("--ph_unit", dest="ph_unit", action="store", help="Physical unit", default="CELSIUS") +parser.add_argument("--data_src", dest="data_src", action="store", help="Data source", default="EUMETCAST") args = parser.parse_args() if (args.input_dir is not None): @@ -59,21 +59,21 @@ cfg = vars(args) if (args.cfg is not None): - with open(args.cfg, 'r') as ymlfile: + with open(args.cfg, "r") as ymlfile: cfg = yaml.load(ymlfile, Loader=UnsafeLoader) narea = get_area_def(args.areadef) global_data = Scene(reader="generic_image") -global_data.load(['image']) +global_data.load(["image"]) -global_data['image'].info['area'] = narea -fname = global_data['image'].info['filename'] +global_data["image"].info["area"] = narea +fname = global_data["image"].info["filename"] ofname = fname[:-3] + "tif" # global_data.save_dataset('image', filename="out.png", writer="simple_image") -global_data.save_dataset('image', filename=ofname, writer="ninjotiff", - sat_id=cfg['sat_id'], - chan_id=cfg['chan_id'], - data_cat=cfg['data_cat'], - data_source=cfg['data_src'], - physic_unit=cfg['ph_unit']) +global_data.save_dataset("image", filename=ofname, writer="ninjotiff", + sat_id=cfg["sat_id"], + chan_id=cfg["chan_id"], + data_cat=cfg["data_cat"], + data_source=cfg["data_src"], + physic_unit=cfg["ph_unit"]) diff --git a/utils/coord2area_def.py b/utils/coord2area_def.py index e3727b9aba..8b6aa0478b 100644 --- a/utils/coord2area_def.py +++ b/utils/coord2area_def.py @@ -66,7 +66,7 @@ from pyproj import Proj -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("name", @@ -126,7 +126,7 @@ " +".join(("proj=" + proj + ",lat_0=" + str(lat_0) + ",lon_0=" + str(lon_0) + ",ellps=WGS84").split(",")) - print('### ' + proj4_string) + print("### " + proj4_string) print() print(name + ":") print(" description: " + name) @@ -146,14 +146,14 @@ sys.exit(0) 
from PIL import Image from pycoast import ContourWriterAGG - img = Image.new('RGB', (xsize, ysize)) + img = Image.new("RGB", (xsize, ysize)) area_def = (proj4_string, area_extent) cw = ContourWriterAGG(args.shapes) cw.add_coastlines(img, (proj4_string, area_extent), - resolution='l', width=0.5) + resolution="l", width=0.5) - cw.add_grid(img, area_def, (10.0, 10.0), (2.0, 2.0), write_text=False, outline='white', outline_opacity=175, - width=1.0, minor_outline='white', minor_outline_opacity=175, minor_width=0.2, minor_is_tick=False) + cw.add_grid(img, area_def, (10.0, 10.0), (2.0, 2.0), write_text=False, outline="white", outline_opacity=175, + width=1.0, minor_outline="white", minor_outline_opacity=175, minor_width=0.2, minor_is_tick=False) img.show() From f29762f05303ff5717a503c436cf73cf9b96464a Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 24 Oct 2023 13:49:14 +0200 Subject: [PATCH 327/702] Fix too long line --- pyproject.toml | 2 +- satpy/readers/fci_l1c_nc.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 01b0272e89..c6f9770ef1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ # select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"] -select = ["Q"] +select = ["E", "Q"] ignore = ["B905"] # only available from python 3.10 line-length = 120 diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index a405c86201..e42975b3a4 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -68,8 +68,8 @@ The reading routine supports channel data in counts, radiances, and (depending -on channel) brightness temperatures or reflectances. The brightness temperature and reflectance calculation is based on the formulas indicated in -`PUG`_. +on channel) brightness temperatures or reflectances. The brightness temperature and reflectance calculation is based on +the formulas indicated in `PUG`_. Radiance datasets are returned in units of radiance per unit wavenumber (mW m-2 sr-1 (cm-1)-1). Radiances can be converted to units of radiance per unit wavelength (W m-2 um-1 sr-1) by multiplying with the `radiance_unit_conversion_coefficient` dataset attribute. From 84d38493615397e247c4078b1269d764e6907f8d Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 24 Oct 2023 13:52:36 +0200 Subject: [PATCH 328/702] Add exception for conftest importing unused modules --- pyproject.toml | 2 +- satpy/tests/reader_tests/modis_tests/conftest.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c6f9770ef1..cc7d219274 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ # select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"] -select = ["E", "Q"] +select = ["E", "W", "F", "Q"] ignore = ["B905"] # only available from python 3.10 line-length = 120 diff --git a/satpy/tests/reader_tests/modis_tests/conftest.py b/satpy/tests/reader_tests/modis_tests/conftest.py index 09f98049db..5d7e8adfef 100644 --- a/satpy/tests/reader_tests/modis_tests/conftest.py +++ b/satpy/tests/reader_tests/modis_tests/conftest.py @@ -17,7 +17,7 @@ # satpy. If not, see .
"""Setup and configuration for all reader tests.""" -from ._modis_fixtures import ( +from ._modis_fixtures import ( # noqa: F401 modis_l1b_imapp_1000m_file, modis_l1b_imapp_geo_file, modis_l1b_nasa_1km_mod03_files, From 0e1efa3f549aaebff6570d377968aa6651e4032e Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 24 Oct 2023 13:56:02 +0200 Subject: [PATCH 329/702] Fix imports --- pyproject.toml | 2 +- satpy/demo/__init__.py | 2 +- satpy/enhancements/__init__.py | 3 +- satpy/modifiers/__init__.py | 2 +- .../enhancement_tests/test_enhancements.py | 3 +- satpy/tests/modifier_tests/test_crefl.py | 2 +- satpy/tests/modifier_tests/test_parallax.py | 59 +++++++++---------- .../reader_tests/modis_tests/conftest.py | 2 +- satpy/tests/reader_tests/test_cmsaf_claas.py | 2 +- satpy/tests/writer_tests/test_cf.py | 2 +- satpy/tests/writer_tests/test_ninjotiff.py | 9 +-- 11 files changed, 41 insertions(+), 47 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index cc7d219274..547cfd146c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ # select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"] -select = ["E", "W", "F", "Q"] +select = ["E", "W", "F", "I", "TID", "C90", "Q"] ignore = ["B905"] # only available from python 3.10 line-length = 120 diff --git a/satpy/demo/__init__.py b/satpy/demo/__init__.py index b12c9e285b..e6ad87eb88 100644 --- a/satpy/demo/__init__.py +++ b/satpy/demo/__init__.py @@ -50,7 +50,7 @@ """ -from .abi_l1b import get_hurricane_florence_abi # noqa: F401 +from .abi_l1b import get_hurricane_florence_abi # noqa: F401, I001 from .abi_l1b import get_us_midlatitude_cyclone_abi # noqa: F401 from .ahi_hsd import download_typhoon_surigae_ahi # noqa: F401 from .fci import download_fci_test_data # noqa: F401 diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py index b74cc2c8bd..e2dda9cf63 100644 --- a/satpy/enhancements/__init__.py +++ b/satpy/enhancements/__init__.py @@ -31,8 +31,7 @@ from satpy._compat import ArrayLike from satpy._config import get_config_path - -from ..utils import find_in_ancillary +from satpy.utils import find_in_ancillary LOG = logging.getLogger(__name__) diff --git a/satpy/modifiers/__init__.py b/satpy/modifiers/__init__.py index a0888167b3..d77a5ff58e 100644 --- a/satpy/modifiers/__init__.py +++ b/satpy/modifiers/__init__.py @@ -20,7 +20,7 @@ # file deepcode ignore W0611: Ignore unused imports in init module from .base import ModifierBase # noqa: F401, isort: skip -from .atmosphere import CO2Corrector # noqa: F401 +from .atmosphere import CO2Corrector # noqa: F401, I001 from .atmosphere import PSPAtmosphericalCorrection # noqa: F401 from .atmosphere import PSPRayleighReflectance # noqa: F401 from .geometry import EffectiveSolarPathLengthCorrector # noqa: F401 diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index b518cc3f39..964e634ba4 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -571,9 +571,8 @@ def fake_area(): ) def test_nwcsaf_comps(fake_area, tmp_path, data): """Test loading NWCSAF composites.""" + from satpy import Scene from satpy.writers import get_enhanced_image - - from ... 
import Scene (flavour, dvname, altname, palettename, statusname, comp, filelabel, dtp) = _nwcsaf_geo_props[data] rng = (0, 100) if dtp == "uint8" else (-100, 1000) if flavour == "geo": diff --git a/satpy/tests/modifier_tests/test_crefl.py b/satpy/tests/modifier_tests/test_crefl.py index e43d7bc3fa..ab42f85155 100644 --- a/satpy/tests/modifier_tests/test_crefl.py +++ b/satpy/tests/modifier_tests/test_crefl.py @@ -23,7 +23,7 @@ from dask import array as da from pyresample.geometry import AreaDefinition -from ..utils import assert_maximum_dask_computes +from satpy.tests.utils import assert_maximum_dask_computes # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: diff --git a/satpy/tests/modifier_tests/test_parallax.py b/satpy/tests/modifier_tests/test_parallax.py index 04af43981f..8fa358ec35 100644 --- a/satpy/tests/modifier_tests/test_parallax.py +++ b/satpy/tests/modifier_tests/test_parallax.py @@ -30,8 +30,7 @@ from pyresample import create_area_def import satpy.resample - -from ...writers import get_enhanced_image +from satpy.writers import get_enhanced_image # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -88,7 +87,7 @@ class TestForwardParallax: def test_get_parallax_corrected_lonlats_ssp(self): """Test that at SSP, parallax correction does nothing.""" - from ...modifiers.parallax import get_parallax_corrected_lonlats + from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lat = sat_lon = lon = lat = 0. height = 5000. # m sat_alt = 30_000_000. # m @@ -98,7 +97,7 @@ def test_get_parallax_corrected_lonlats_ssp(self): def test_get_parallax_corrected_lonlats_clearsky(self): """Test parallax correction for clearsky case (returns NaN).""" - from ...modifiers.parallax import get_parallax_corrected_lonlats + from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lat = sat_lon = 0 lat = np.linspace(-20, 20, 25).reshape(5, 5) lon = np.linspace(-20, 20, 25).reshape(5, 5).T @@ -114,7 +113,7 @@ def test_get_parallax_corrected_lonlats_clearsky(self): @pytest.mark.parametrize("resolution", [0.01, 0.5, 10]) def test_get_parallax_corrected_lonlats_cloudy_ssp(self, lat, lon, resolution): """Test parallax correction for fully cloudy scene at SSP.""" - from ...modifiers.parallax import get_parallax_corrected_lonlats + from satpy.modifiers.parallax import get_parallax_corrected_lonlats N = 5 lats = np.linspace(lat-N*resolution, lat+N*resolution, 25).reshape(N, N) @@ -145,7 +144,7 @@ def test_get_parallax_corrected_lonlats_cloudy_ssp(self, lat, lon, resolution): def test_get_parallax_corrected_lonlats_cloudy_slant(self): """Test parallax correction for fully cloudy scene (not SSP).""" - from ...modifiers.parallax import get_parallax_corrected_lonlats + from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lat = sat_lon = 0 lat = np.linspace(-20, 20, 25).reshape(5, 5) lon = np.linspace(-20, 20, 25).reshape(5, 5).T @@ -161,7 +160,7 @@ def test_get_parallax_corrected_lonlats_cloudy_slant(self): def test_get_parallax_corrected_lonlats_mixed(self): """Test parallax correction for mixed cloudy case.""" - from ...modifiers.parallax import get_parallax_corrected_lonlats + from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lon = sat_lat = 0 sat_alt = 35_785_831.0 # m @@ -189,7 +188,7 @@ def test_get_parallax_corrected_lonlats_horizon(self): Test the rather unlikely case of a satellite elevation of exactly 0 """ - from ...modifiers.parallax 
import get_parallax_corrected_lonlats + from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lat = sat_lon = lon = lat = 0. height = 5000. sat_alt = 30_000_000. @@ -200,7 +199,7 @@ def test_get_parallax_corrected_lonlats_horizon(self): def test_get_surface_parallax_displacement(self): """Test surface parallax displacement.""" - from ...modifiers.parallax import get_surface_parallax_displacement + from satpy.modifiers.parallax import get_surface_parallax_displacement val = get_surface_parallax_displacement( 0, 0, 36_000_000, 0, 10, 10_000) @@ -215,7 +214,7 @@ class TestParallaxCorrectionClass: @pytest.mark.parametrize("resolution", [0.05, 1, 10]) def test_init_parallaxcorrection(self, center, sizes, resolution): """Test that ParallaxCorrection class can be instantiated.""" - from ...modifiers.parallax import ParallaxCorrection + from satpy.modifiers.parallax import ParallaxCorrection fake_area = _get_fake_areas(center, sizes, resolution)[0] pc = ParallaxCorrection(fake_area) assert pc.base_area == fake_area @@ -225,8 +224,8 @@ def test_init_parallaxcorrection(self, center, sizes, resolution): @pytest.mark.parametrize("resolution", [0.01, 0.5, 10]) def test_correct_area_clearsky(self, sat_pos, ar_pos, resolution, caplog): """Test that ParallaxCorrection doesn't change clearsky geolocation.""" - from ...modifiers.parallax import ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import ParallaxCorrection + from satpy.tests.utils import make_fake_scene (sat_lat, sat_lon) = sat_pos (ar_lat, ar_lon) = ar_pos small = 5 @@ -254,8 +253,8 @@ def test_correct_area_clearsky(self, sat_pos, ar_pos, resolution, caplog): @pytest.mark.parametrize("resolution", [0.01, 0.5, 10]) def test_correct_area_ssp(self, lat, lon, resolution): """Test that ParallaxCorrection doesn't touch SSP.""" - from ...modifiers.parallax import ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import ParallaxCorrection + from satpy.tests.utils import make_fake_scene codes = { (0, 0): 4326, (0, 40): 4326, @@ -298,8 +297,8 @@ def test_correct_area_ssp(self, lat, lon, resolution): @pytest.mark.parametrize("daskify", [False, True]) def test_correct_area_partlycloudy(self, daskify): """Test ParallaxCorrection for partly cloudy situation.""" - from ...modifiers.parallax import ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import ParallaxCorrection + from satpy.tests.utils import make_fake_scene small = 5 large = 9 (fake_area_small, fake_area_large) = _get_fake_areas( @@ -349,8 +348,8 @@ def test_correct_area_partlycloudy(self, daskify): @pytest.mark.parametrize("res1,res2", [(0.08, 0.3), (0.3, 0.08)]) def test_correct_area_clearsky_different_resolutions(self, res1, res2): """Test clearsky correction when areas have different resolutions.""" - from ...modifiers.parallax import ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import ParallaxCorrection + from satpy.tests.utils import make_fake_scene # areas with different resolutions, but same coverage @@ -385,8 +384,8 @@ def test_correct_area_clearsky_different_resolutions(self, res1, res2): @pytest.mark.xfail(reason="awaiting pyresample fixes") def test_correct_area_cloudy_no_overlap(self, ): """Test cloudy correction when areas have no overlap.""" - from ...modifiers.parallax import MissingHeightError, ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import 
MissingHeightError, ParallaxCorrection + from satpy.tests.utils import make_fake_scene areas_00 = _get_fake_areas((0, 40), [5, 9], 0.1) areas_shift = _get_fake_areas((90, 20), [5, 9], 0.1) fake_area_small = areas_00[0] @@ -405,8 +404,8 @@ def test_correct_area_cloudy_no_overlap(self, ): @pytest.mark.xfail(reason="awaiting pyresample fixes") def test_correct_area_cloudy_partly_shifted(self, ): """Test cloudy correction when areas overlap only partly.""" - from ...modifiers.parallax import IncompleteHeightWarning, ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import IncompleteHeightWarning, ParallaxCorrection + from satpy.tests.utils import make_fake_scene areas_00 = _get_fake_areas((0, 40), [5, 9], 0.1) areas_shift = _get_fake_areas((0.5, 40), [5, 9], 0.1) fake_area_small = areas_00[0] @@ -426,8 +425,8 @@ def test_correct_area_cloudy_partly_shifted(self, ): def test_correct_area_cloudy_same_area(self, ): """Test cloudy correction when areas are the same.""" - from ...modifiers.parallax import ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import ParallaxCorrection + from satpy.tests.utils import make_fake_scene area = _get_fake_areas((0, 0), [9], 0.1)[0] sc = make_fake_scene( @@ -446,8 +445,8 @@ def test_correct_area_no_orbital_parameters(self, caplog, fake_tle): on satellite location directly. Rather, they include platform name, sensor, start time, and end time, that we have to use instead. """ - from ...modifiers.parallax import ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import ParallaxCorrection + from satpy.tests.utils import make_fake_scene small = 5 large = 9 (fake_area_small, fake_area_large) = _get_fake_areas( @@ -478,7 +477,7 @@ class TestParallaxCorrectionModifier: def test_parallax_modifier_interface(self): """Test the modifier interface.""" - from ...modifiers.parallax import ParallaxCorrectionModifier + from satpy.modifiers.parallax import ParallaxCorrectionModifier (area_small, area_large) = _get_fake_areas((0, 0), [5, 9], 0.1) fake_bt = xr.DataArray( np.linspace(220, 230, 25).reshape(5, 5), @@ -512,7 +511,7 @@ def test_parallax_modifier_interface_with_cloud(self): BT corresponding to full disk SEVIRI, and test that no strange speckles occur. """ - from ...modifiers.parallax import ParallaxCorrectionModifier + from satpy.modifiers.parallax import ParallaxCorrectionModifier w_cth = 25 h_cth = 15 @@ -623,7 +622,7 @@ def _get_fake_cloud_datasets(self, test_area, cth, use_dask): @pytest.mark.parametrize("test_area", ["foroyar", "ouagadougou"], indirect=["test_area"]) def test_modifier_interface_fog_no_shift(self, test_area): """Test that fog isn't masked or shifted.""" - from ...modifiers.parallax import ParallaxCorrectionModifier + from satpy.modifiers.parallax import ParallaxCorrectionModifier (fake_bt, fake_cth, _) = self._get_fake_cloud_datasets(test_area, 50, use_dask=False) @@ -647,7 +646,7 @@ def test_modifier_interface_cloud_moves_to_observer(self, cth, use_dask, test_ar With the modifier interface, use a high resolution area and test that pixels are moved in the direction of the observer and not away from it. 
""" - from ...modifiers.parallax import ParallaxCorrectionModifier + from satpy.modifiers.parallax import ParallaxCorrectionModifier (fake_bt, fake_cth, cma) = self._get_fake_cloud_datasets(test_area, cth, use_dask=use_dask) diff --git a/satpy/tests/reader_tests/modis_tests/conftest.py b/satpy/tests/reader_tests/modis_tests/conftest.py index 5d7e8adfef..e6a8432653 100644 --- a/satpy/tests/reader_tests/modis_tests/conftest.py +++ b/satpy/tests/reader_tests/modis_tests/conftest.py @@ -17,7 +17,7 @@ # satpy. If not, see . """Setup and configuration for all reader tests.""" -from ._modis_fixtures import ( # noqa: F401 +from ._modis_fixtures import ( # noqa: F401, I001 modis_l1b_imapp_1000m_file, modis_l1b_imapp_geo_file, modis_l1b_nasa_1km_mod03_files, diff --git a/satpy/tests/reader_tests/test_cmsaf_claas.py b/satpy/tests/reader_tests/test_cmsaf_claas.py index 7f5b728ba8..db2117d264 100644 --- a/satpy/tests/reader_tests/test_cmsaf_claas.py +++ b/satpy/tests/reader_tests/test_cmsaf_claas.py @@ -17,7 +17,7 @@ # satpy. If not, see . """Tests for the 'cmsaf-claas2_l2_nc' reader.""" -import datetime +import datetime # noqa: I001 import os import numpy as np diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 6a51a71b36..0c9ca9f234 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -70,7 +70,7 @@ def __exit__(self, *args): def test_lonlat_storage(tmp_path): """Test correct storage for area with lon/lat units.""" - from ..utils import make_fake_scene + from satpy.tests.utils import make_fake_scene scn = make_fake_scene( {"ketolysis": np.arange(25).reshape(5, 5)}, daskify=True, diff --git a/satpy/tests/writer_tests/test_ninjotiff.py b/satpy/tests/writer_tests/test_ninjotiff.py index f36f1028b7..b8c311f9ed 100644 --- a/satpy/tests/writer_tests/test_ninjotiff.py +++ b/satpy/tests/writer_tests/test_ninjotiff.py @@ -99,10 +99,9 @@ def test_image(self, iwsi, save_dataset): def test_convert_units_self(self): """Test that unit conversion to themselves do nothing.""" + from satpy.tests.utils import make_fake_scene from satpy.writers.ninjotiff import convert_units - from ..utils import make_fake_scene - # ensure that converting from % to itself does not change the data sc = make_fake_scene( {"VIS006": np.arange(25, dtype="f4").reshape(5, 5)}, @@ -115,9 +114,8 @@ def test_convert_units_self(self): def test_convert_units_temp(self): """Test that temperature unit conversions works as expected.""" # test converting between °C and K + from satpy.tests.utils import make_fake_scene from satpy.writers.ninjotiff import convert_units - - from ..utils import make_fake_scene sc = make_fake_scene( {"IR108": np.arange(25, dtype="f4").reshape(5, 5)}, common_attrs={"units": "K"}) @@ -134,9 +132,8 @@ def test_convert_units_temp(self): def test_convert_units_other(self): """Test that other unit conversions are not implemented.""" # test arbitrary different conversion + from satpy.tests.utils import make_fake_scene from satpy.writers.ninjotiff import convert_units - - from ..utils import make_fake_scene sc = make_fake_scene( {"rain_rate": np.arange(25, dtype="f8").reshape(5, 5)}, common_attrs={"units": "millimeter/hour"}) From f189402adf5dfc4b2ad93ce7b7a5dfa45415b1ed Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 25 Oct 2023 08:43:32 +0200 Subject: [PATCH 330/702] Rename zarr_format to zarr_file_pattern --- satpy/modifiers/angles.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git 
a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index f4146b60d5..87a6b433e0 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -151,18 +151,18 @@ def __call__(self, *args, cache_dir: Optional[str] = None) -> Any: def _cache_and_read(self, args, cache_dir): sanitized_args = self._sanitize_args_func(*args) if self._sanitize_args_func is not None else args - zarr_format = self._get_zarr_format(sanitized_args, cache_dir) - zarr_paths = glob(zarr_format.format("*")) + zarr_file_pattern = self._get_zarr_file_pattern(sanitized_args, cache_dir) + zarr_paths = glob(zarr_file_pattern.format("*")) if not zarr_paths: # use sanitized arguments self._warn_if_irregular_input_chunks(args, sanitized_args) res_to_cache = self._func(*(sanitized_args)) - self._cache_results(res_to_cache, zarr_format) + self._cache_results(res_to_cache, zarr_file_pattern) # if we did any caching, let's load from the zarr files, so that future calls have the same name # re-calculate the cached paths - zarr_paths = sorted(glob(zarr_format.format("*"))) + zarr_paths = sorted(glob(zarr_file_pattern.format("*"))) if not zarr_paths: raise RuntimeError("Data was cached to disk but no files were found") @@ -170,12 +170,11 @@ def _cache_and_read(self, args, cache_dir): res = tuple(da.from_zarr(zarr_path, chunks=new_chunks) for zarr_path in zarr_paths) return res - def _get_zarr_format(self, sanitized_args, cache_dir): + def _get_zarr_file_pattern(self, sanitized_args, cache_dir): arg_hash = _hash_args(*sanitized_args, unhashable_types=self._uncacheable_arg_types) zarr_filename = self._zarr_pattern(arg_hash) cache_dir = self._get_cache_dir_from_config(cache_dir) - zarr_format = os.path.join(cache_dir, zarr_filename) - return zarr_format + return os.path.join(cache_dir, zarr_filename) @staticmethod def _get_cache_dir_from_config(cache_dir: Optional[str]) -> str: @@ -198,14 +197,14 @@ def _warn_if_irregular_input_chunks(args, modified_args): stacklevel=3 ) - def _cache_results(self, res, zarr_format): - os.makedirs(os.path.dirname(zarr_format), exist_ok=True) + def _cache_results(self, res, zarr_file_pattern): + os.makedirs(os.path.dirname(zarr_file_pattern), exist_ok=True) new_res = [] for idx, sub_res in enumerate(res): if not isinstance(sub_res, da.Array): raise ValueError("Zarr caching currently only supports dask " f"arrays. 
Got {type(sub_res)}") - zarr_path = zarr_format.format(idx) + zarr_path = zarr_file_pattern.format(idx) # See https://github.com/dask/dask/issues/8380 with dask.config.set({"optimization.fuse.active": False}): new_sub_res = sub_res.to_zarr(zarr_path, compute=False) From 95c880cc818e02ef8c1ac9026428facfa84e94d3 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 25 Oct 2023 15:01:18 +0300 Subject: [PATCH 331/702] Replace da.where() usage with arr.where() in DayNightCompositor --- satpy/composites/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index da4d1a9e5c..7ae2e99aeb 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -754,8 +754,8 @@ def _get_data_for_single_side_product(self, foreground_data, weights): def _mask_weights(self, weights): if "day" in self.day_night: - return da.where(weights != 0, weights, np.nan) - return da.where(weights != 1, weights, np.nan) + return weights.where(weights == 0, np.nan) + return weights.where(weights == 1, np.nan) def _get_day_night_data_for_single_side_product(self, foreground_data): if "day" in self.day_night: @@ -786,12 +786,12 @@ def _mask_weights_with_data(self, weights, day_data, night_data): else: mask = _get_weight_mask_for_daynight_product(weights, data_a, data_b) - return da.where(mask, weights, np.nan) + return weights.where(mask, np.nan) def _weight_data(self, day_data, night_data, weights, attrs): if not self.include_alpha: fill = 1 if self.day_night == "night_only" else 0 - weights = da.where(np.isnan(weights), fill, weights) + weights = weights.where(~np.isnan(weights), fill) data = [] for b in _get_band_names(day_data, night_data): From 7070cfbee5ad5a8dc1fc11ca8aecdf6f7ebb1c79 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 25 Oct 2023 16:01:30 +0300 Subject: [PATCH 332/702] Fix reversed arr.where() logic --- satpy/composites/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 7ae2e99aeb..8ed200df4c 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -754,8 +754,8 @@ def _get_data_for_single_side_product(self, foreground_data, weights): def _mask_weights(self, weights): if "day" in self.day_night: - return weights.where(weights == 0, np.nan) - return weights.where(weights == 1, np.nan) + return weights.where(weights != 0, np.nan) + return weights.where(weights != 1, np.nan) def _get_day_night_data_for_single_side_product(self, foreground_data): if "day" in self.day_night: From c5805510d1dc24b1afdd98813e7eede0654f4409 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 25 Oct 2023 16:07:45 +0300 Subject: [PATCH 333/702] Make sure there are no additional compute()s in DNC tests --- satpy/tests/test_composites.py | 89 +++++++++++++++++++++------------- 1 file changed, 56 insertions(+), 33 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index f056d2fa93..e390fba7c6 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -30,6 +30,7 @@ from pyresample import AreaDefinition import satpy +from satpy.tests.utils import CustomScheduler # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -431,18 +432,22 @@ def setUp(self): def test_daynight_sza(self): """Test compositor with both day and night portions when SZA data is included.""" from satpy.composites import 
DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_night") - res = comp((self.data_a, self.data_b, self.sza)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="day_night") + res = comp((self.data_a, self.data_b, self.sza)) + res = res.compute() expected = np.array([[0., 0.22122352], [0.5, 1.]]) np.testing.assert_allclose(res.values[0], expected) def test_daynight_area(self): """Test compositor both day and night portions when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_night") - res = comp((self.data_a, self.data_b)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="day_night") + res = comp((self.data_a, self.data_b)) + res = res.compute() expected_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) for i in range(3): np.testing.assert_allclose(res.values[i], expected_channel) @@ -450,9 +455,11 @@ def test_daynight_area(self): def test_night_only_sza_with_alpha(self): """Test compositor with night portion with alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=True) - res = comp((self.data_b, self.sza)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=True) + res = comp((self.data_b, self.sza)) + res = res.compute() expected_red_channel = np.array([[np.nan, 0.], [0.5, 1.]]) expected_alpha = np.array([[0., 0.33296056], [1., 1.]]) np.testing.assert_allclose(res.values[0], expected_red_channel) @@ -461,9 +468,11 @@ def test_night_only_sza_with_alpha(self): def test_night_only_sza_without_alpha(self): """Test compositor with night portion without alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=False) - res = comp((self.data_a, self.sza)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=False) + res = comp((self.data_a, self.sza)) + res = res.compute() expected = np.array([[0., 0.11042631], [0.66835017, 1.]]) np.testing.assert_allclose(res.values[0], expected) assert 'A' not in res.bands @@ -471,9 +480,11 @@ def test_night_only_sza_without_alpha(self): def test_night_only_area_with_alpha(self): """Test compositor with night portion with alpha band when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=True) - res = comp((self.data_b,)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=True) + res = comp((self.data_b,)) + res = res.compute() expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]]) expected_alpha = np.array([[np.nan, 0.], [0., 0.]]) np.testing.assert_allclose(res.values[0], expected_l_channel) @@ -482,9 +493,11 @@ def test_night_only_area_with_alpha(self): def test_night_only_area_without_alpha(self): """Test compositor with night portion without alpha band 
when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=False) - res = comp((self.data_b,)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=False) + res = comp((self.data_b,)) + res = res.compute() expected = np.array([[np.nan, 0.], [0., 0.]]) np.testing.assert_allclose(res.values[0], expected) assert 'A' not in res.bands @@ -492,9 +505,11 @@ def test_night_only_area_without_alpha(self): def test_day_only_sza_with_alpha(self): """Test compositor with day portion with alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) - res = comp((self.data_a, self.sza)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) + res = comp((self.data_a, self.sza)) + res = res.compute() expected_red_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) expected_alpha = np.array([[1., 0.66703944], [0., 0.]]) np.testing.assert_allclose(res.values[0], expected_red_channel) @@ -503,9 +518,11 @@ def test_day_only_sza_with_alpha(self): def test_day_only_sza_without_alpha(self): """Test compositor with day portion without alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=False) - res = comp((self.data_a, self.sza)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=False) + res = comp((self.data_a, self.sza)) + res = res.compute() expected_channel_data = np.array([[0., 0.22122352], [0., 0.]]) for i in range(3): np.testing.assert_allclose(res.values[i], expected_channel_data) @@ -514,9 +531,11 @@ def test_day_only_sza_without_alpha(self): def test_day_only_area_with_alpha(self): """Test compositor with day portion with alpha_band when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) - res = comp((self.data_a,)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) + res = comp((self.data_a,)) + res = res.compute() expected_l_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) expected_alpha = np.array([[1., 1.], [1., 1.]]) np.testing.assert_allclose(res.values[0], expected_l_channel) @@ -525,9 +544,11 @@ def test_day_only_area_with_alpha(self): def test_day_only_area_with_alpha_and_missing_data(self): """Test compositor with day portion with alpha_band when SZA data is not provided and there is missing data.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) - res = comp((self.data_b,)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) + res = comp((self.data_b,)) + res = res.compute() expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]]) expected_alpha = 
np.array([[np.nan, 1.], [1., 1.]]) np.testing.assert_allclose(res.values[0], expected_l_channel) @@ -536,9 +557,11 @@ def test_day_only_area_with_alpha_and_missing_data(self): def test_day_only_area_without_alpha(self): """Test compositor with day portion without alpha_band when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=False) - res = comp((self.data_a,)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=False) + res = comp((self.data_a,)) + res = res.compute() expected = np.array([[0., 0.33164983], [0.66835017, 1.]]) np.testing.assert_allclose(res.values[0], expected) assert 'A' not in res.bands From cdb1920393dff5c7e25669bd66f9eeb6131eb280 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 26 Oct 2023 10:56:20 +0300 Subject: [PATCH 334/702] Do not use DataArray as weight --- satpy/composites/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 8ed200df4c..62ac07aad8 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -792,7 +792,8 @@ def _weight_data(self, day_data, night_data, weights, attrs): if not self.include_alpha: fill = 1 if self.day_night == "night_only" else 0 weights = weights.where(~np.isnan(weights), fill) - + if isinstance(weights, xr.DataArray): + weights = weights.data data = [] for b in _get_band_names(day_data, night_data): # if self.day_night == "night_only" and self.include_alpha is False: From 67ae9e401a5c0e6b9952596043f0f619786eaf4e Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 26 Oct 2023 11:06:16 +0300 Subject: [PATCH 335/702] Remove old commented debug code --- satpy/composites/__init__.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 62ac07aad8..c6883f9ab9 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -796,8 +796,6 @@ def _weight_data(self, day_data, night_data, weights, attrs): weights = weights.data data = [] for b in _get_band_names(day_data, night_data): - # if self.day_night == "night_only" and self.include_alpha is False: - # import ipdb; ipdb.set_trace() day_band = _get_single_band_data(day_data, b) night_band = _get_single_band_data(night_data, b) # For day-only and night-only products only the alpha channel is weighted From c88c887023d4fca719faad5020810cabf8a32098 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 12:24:56 +0200 Subject: [PATCH 336/702] Fix pytest style --- pyproject.toml | 2 +- satpy/tests/compositor_tests/test_abi.py | 9 +- satpy/tests/compositor_tests/test_agri.py | 9 +- satpy/tests/compositor_tests/test_sar.py | 18 +- satpy/tests/compositor_tests/test_spectral.py | 2 +- satpy/tests/compositor_tests/test_viirs.py | 10 +- satpy/tests/conftest.py | 6 +- .../enhancement_tests/test_enhancements.py | 8 +- satpy/tests/modifier_tests/test_angles.py | 2 +- satpy/tests/modifier_tests/test_crefl.py | 10 +- satpy/tests/modifier_tests/test_parallax.py | 18 +- satpy/tests/multiscene_tests/test_blend.py | 18 +- satpy/tests/multiscene_tests/test_misc.py | 24 +- .../multiscene_tests/test_save_animation.py | 42 ++-- .../reader_tests/gms/test_gms5_vissr_l1b.py | 70 +++--- .../gms/test_gms5_vissr_navigation.py | 36 +-- .../modis_tests/_modis_fixtures.py | 2 +- 
.../modis_tests/test_modis_l1b.py | 36 +-- .../reader_tests/modis_tests/test_modis_l2.py | 22 +- satpy/tests/reader_tests/test_aapp_l1b.py | 8 +- satpy/tests/reader_tests/test_abi_l1b.py | 73 +++--- satpy/tests/reader_tests/test_acspo.py | 6 +- satpy/tests/reader_tests/test_ahi_hrit.py | 46 ++-- satpy/tests/reader_tests/test_ahi_hsd.py | 44 ++-- .../reader_tests/test_ahi_l1b_gridded_bin.py | 24 +- satpy/tests/reader_tests/test_ami_l1b.py | 37 ++- satpy/tests/reader_tests/test_amsr2_l1b.py | 40 ++- satpy/tests/reader_tests/test_amsr2_l2.py | 20 +- .../test_ascat_l2_soilmoisture_bufr.py | 12 +- satpy/tests/reader_tests/test_atms_l1b_nc.py | 24 +- .../tests/reader_tests/test_atms_sdr_hdf5.py | 2 +- .../reader_tests/test_avhrr_l1b_gaclac.py | 42 ++-- satpy/tests/reader_tests/test_clavrx.py | 102 ++++---- satpy/tests/reader_tests/test_cmsaf_claas.py | 28 +-- .../tests/reader_tests/test_electrol_hrit.py | 16 +- satpy/tests/reader_tests/test_eps_l1b.py | 4 +- satpy/tests/reader_tests/test_eum_base.py | 48 ++-- satpy/tests/reader_tests/test_fci_l1c_nc.py | 38 +-- satpy/tests/reader_tests/test_fci_l2_nc.py | 58 ++--- satpy/tests/reader_tests/test_fy4_base.py | 4 +- .../tests/reader_tests/test_generic_image.py | 102 ++++---- satpy/tests/reader_tests/test_geocat.py | 22 +- satpy/tests/reader_tests/test_geos_area.py | 27 +- satpy/tests/reader_tests/test_glm_l2.py | 20 +- .../reader_tests/test_goes_imager_hrit.py | 20 +- .../reader_tests/test_goes_imager_nc_eum.py | 13 +- .../reader_tests/test_goes_imager_nc_noaa.py | 85 +++---- satpy/tests/reader_tests/test_gpm_imerg.py | 24 +- satpy/tests/reader_tests/test_hdf4_utils.py | 34 +-- satpy/tests/reader_tests/test_hdf5_utils.py | 50 ++-- satpy/tests/reader_tests/test_hdfeos_base.py | 6 +- satpy/tests/reader_tests/test_hrit_base.py | 14 +- satpy/tests/reader_tests/test_hsaf_grib.py | 15 +- .../reader_tests/test_hy2_scat_l2b_h5.py | 44 ++-- satpy/tests/reader_tests/test_iasi_l2.py | 60 ++--- .../reader_tests/test_iasi_l2_so2_bufr.py | 7 +- satpy/tests/reader_tests/test_ici_l1b_nc.py | 19 +- .../reader_tests/test_insat3d_img_l1b_h5.py | 6 +- satpy/tests/reader_tests/test_li_l2_nc.py | 2 +- satpy/tests/reader_tests/test_meris_nc.py | 4 +- .../reader_tests/test_mimic_TPW2_lowres.py | 44 ++-- .../tests/reader_tests/test_mimic_TPW2_nc.py | 16 +- satpy/tests/reader_tests/test_msi_safe.py | 2 +- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 2 +- satpy/tests/reader_tests/test_mws_l1b_nc.py | 17 +- satpy/tests/reader_tests/test_netcdf_utils.py | 53 ++-- satpy/tests/reader_tests/test_nucaps.py | 126 +++++----- satpy/tests/reader_tests/test_nwcsaf_msg.py | 27 +- satpy/tests/reader_tests/test_nwcsaf_nc.py | 56 ++--- .../reader_tests/test_oceancolorcci_l3_nc.py | 6 +- satpy/tests/reader_tests/test_olci_nc.py | 6 +- satpy/tests/reader_tests/test_omps_edr.py | 42 ++-- .../reader_tests/test_safe_sar_l2_ocn.py | 12 +- satpy/tests/reader_tests/test_satpy_cf_nc.py | 226 ++++++++--------- satpy/tests/reader_tests/test_scmi.py | 64 +++-- satpy/tests/reader_tests/test_seviri_base.py | 23 +- .../test_seviri_l1b_calibration.py | 12 +- .../reader_tests/test_seviri_l1b_hrit.py | 82 +++--- .../reader_tests/test_seviri_l1b_icare.py | 26 +- .../reader_tests/test_seviri_l1b_native.py | 44 ++-- .../tests/reader_tests/test_seviri_l1b_nc.py | 2 +- .../tests/reader_tests/test_seviri_l2_grib.py | 30 ++- satpy/tests/reader_tests/test_slstr_l1b.py | 22 +- satpy/tests/reader_tests/test_smos_l2_wind.py | 48 ++-- satpy/tests/reader_tests/test_tropomi_l2.py | 62 +++-- 
satpy/tests/reader_tests/test_utils.py | 54 ++-- satpy/tests/reader_tests/test_vii_base_nc.py | 66 ++--- satpy/tests/reader_tests/test_vii_l1b_nc.py | 12 +- satpy/tests/reader_tests/test_vii_l2_nc.py | 4 +- satpy/tests/reader_tests/test_vii_utils.py | 10 +- satpy/tests/reader_tests/test_vii_wv_nc.py | 4 +- .../reader_tests/test_viirs_atms_utils.py | 5 +- .../tests/reader_tests/test_viirs_compact.py | 6 +- satpy/tests/reader_tests/test_viirs_edr.py | 2 +- .../test_viirs_edr_active_fires.py | 92 +++---- .../reader_tests/test_viirs_edr_flood.py | 12 +- satpy/tests/reader_tests/test_viirs_sdr.py | 172 +++++++------ .../reader_tests/test_viirs_vgac_l1c_nc.py | 8 +- satpy/tests/reader_tests/test_virr_l1b.py | 58 +++-- satpy/tests/scene_tests/test_conversions.py | 6 +- satpy/tests/scene_tests/test_data_access.py | 5 +- satpy/tests/scene_tests/test_init.py | 6 +- satpy/tests/scene_tests/test_resampling.py | 2 +- satpy/tests/scene_tests/test_saving.py | 3 +- satpy/tests/test_composites.py | 191 +++++++------- satpy/tests/test_config.py | 12 +- satpy/tests/test_crefl_utils.py | 8 +- satpy/tests/test_data_download.py | 2 +- satpy/tests/test_dataset.py | 18 +- satpy/tests/test_demo.py | 40 +-- satpy/tests/test_dependency_tree.py | 14 +- satpy/tests/test_file_handlers.py | 26 +- satpy/tests/test_modifiers.py | 14 +- satpy/tests/test_readers.py | 196 +++++++-------- satpy/tests/test_resample.py | 238 +++++++++--------- satpy/tests/test_utils.py | 190 +++++++------- satpy/tests/test_writers.py | 52 ++-- satpy/tests/test_yaml_reader.py | 159 ++++++------ satpy/tests/utils.py | 4 +- satpy/tests/writer_tests/test_cf.py | 22 +- satpy/tests/writer_tests/test_mitiff.py | 47 ++-- satpy/tests/writer_tests/test_ninjogeotiff.py | 23 +- satpy/tests/writer_tests/test_ninjotiff.py | 6 +- satpy/tests/writer_tests/test_simple_image.py | 2 +- satpy/tests/writer_tests/test_utils.py | 2 +- 125 files changed, 2154 insertions(+), 2283 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 547cfd146c..64c036b07f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ # select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"] -select = ["E", "W", "F", "I", "TID", "C90", "Q"] +select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q"] ignore = ["B905"] # only available from python 3.10 line-length = 120 diff --git a/satpy/tests/compositor_tests/test_abi.py b/satpy/tests/compositor_tests/test_abi.py index 79c5ae99ed..7c29a12c63 100644 --- a/satpy/tests/compositor_tests/test_abi.py +++ b/satpy/tests/compositor_tests/test_abi.py @@ -57,10 +57,9 @@ def test_simulated_green(self): dims=("y", "x"), attrs={"name": "C03", "area": area}) res = comp((c01, c02, c03)) - self.assertIsInstance(res, xr.DataArray) - self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs["name"], "green") - self.assertEqual(res.attrs["standard_name"], - "toa_bidirectional_reflectance") + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs["name"] == "green" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.28025) diff --git a/satpy/tests/compositor_tests/test_agri.py b/satpy/tests/compositor_tests/test_agri.py index 27a566a82c..b477dc53cb 100644 --- a/satpy/tests/compositor_tests/test_agri.py +++ b/satpy/tests/compositor_tests/test_agri.py @@ -54,10 +54,9 @@ def test_simulated_red(self): dims=("y", "x"), 
attrs={"name": "C02", "area": area}) res = comp((c01, c02)) - self.assertIsInstance(res, xr.DataArray) - self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs["name"], "red") - self.assertEqual(res.attrs["standard_name"], - "toa_bidirectional_reflectance") + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs["name"] == "red" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.24252874) diff --git a/satpy/tests/compositor_tests/test_sar.py b/satpy/tests/compositor_tests/test_sar.py index d7cd2a9c80..30c342ce44 100644 --- a/satpy/tests/compositor_tests/test_sar.py +++ b/satpy/tests/compositor_tests/test_sar.py @@ -43,11 +43,10 @@ def test_sar_ice(self): attrs={"name": "hv"}) res = comp((hh, hv)) - self.assertIsInstance(res, xr.DataArray) - self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs["name"], "sar_ice") - self.assertEqual(res.attrs["standard_name"], - "sar-ice") + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs["name"] == "sar_ice" + assert res.attrs["standard_name"] == "sar-ice" data = res.compute() np.testing.assert_allclose(data.sel(bands="R"), 31.58280822) np.testing.assert_allclose(data.sel(bands="G"), 159869.56789876) @@ -73,11 +72,10 @@ def test_sar_ice_log(self): attrs={"name": "hv"}) res = comp((hh, hv)) - self.assertIsInstance(res, xr.DataArray) - self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs["name"], "sar_ice_log") - self.assertEqual(res.attrs["standard_name"], - "sar-ice-log") + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs["name"] == "sar_ice_log" + assert res.attrs["standard_name"] == "sar-ice-log" data = res.compute() np.testing.assert_allclose(data.sel(bands="R"), -20) np.testing.assert_allclose(data.sel(bands="G"), -4.6) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 4800f12a7b..80f4dd8d93 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -39,7 +39,7 @@ def test_bad_lengths(self): """Test that error is raised if the amount of channels to blend does not match the number of weights.""" comp = SpectralBlender("blended_channel", fractions=(0.3, 0.7), prerequisites=(0.51, 0.85), standard_name="toa_bidirectional_reflectance") - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="fractions and projectables must have the same length."): comp((self.c01, self.c02, self.c03)) def test_spectral_blender(self): diff --git a/satpy/tests/compositor_tests/test_viirs.py b/satpy/tests/compositor_tests/test_viirs.py index 969f4579ef..1641e4248b 100644 --- a/satpy/tests/compositor_tests/test_viirs.py +++ b/satpy/tests/compositor_tests/test_viirs.py @@ -29,7 +29,7 @@ class TestVIIRSComposites: """Test various VIIRS-specific composites.""" - @pytest.fixture + @pytest.fixture() def area(self): """Return fake area for use with DNB tests.""" rows = 5 @@ -42,7 +42,7 @@ def area(self): (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) return area - @pytest.fixture + @pytest.fixture() def dnb(self, area): """Return fake channel 1 data for DNB tests.""" dnb = np.zeros(area.shape) + 0.25 @@ -55,7 +55,7 @@ def dnb(self, area): "start_time": datetime(2020, 1, 1, 12, 0, 0)}) return c01 - @pytest.fixture + @pytest.fixture() def sza(self, area): """Return fake sza dataset for 
DNB tests.""" # data changes by row, sza changes by col for testing @@ -69,7 +69,7 @@ def sza(self, area): "start_time": datetime(2020, 1, 1, 12, 0, 0)}) return c02 - @pytest.fixture + @pytest.fixture() def lza(self, area): """Return fake lunal zenith angle dataset for DNB tests.""" lza = np.zeros(area.shape) + 70.0 @@ -141,7 +141,7 @@ def test_hncc_dnb(self, area, dnb, sza, lza): 9.50784532e-03, 1.42617433e-02, 1.50001560e+03, 3.00001560e+03, 4.50001560e+03]) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Expected .*, got 2"): comp((dnb, sza)) def test_hncc_dnb_nomoonpha(self, area, dnb, sza, lza): diff --git a/satpy/tests/conftest.py b/satpy/tests/conftest.py index 842dade04e..754b11ffcd 100644 --- a/satpy/tests/conftest.py +++ b/satpy/tests/conftest.py @@ -30,7 +30,7 @@ @pytest.fixture(autouse=True) -def reset_satpy_config(tmpdir): +def _reset_satpy_config(tmpdir): """Set satpy config to logical defaults for tests.""" test_config = { "cache_dir": str(tmpdir / "cache"), @@ -44,13 +44,13 @@ def reset_satpy_config(tmpdir): @pytest.fixture(autouse=True) -def clear_function_caches(): +def _clear_function_caches(): """Clear out global function-level caches that may cause conflicts between tests.""" from satpy.composites.config_loader import load_compositor_configs_for_sensor load_compositor_configs_for_sensor.cache_clear() -@pytest.fixture +@pytest.fixture() def include_test_etc(): """Tell Satpy to use the config 'etc' directory from the tests directory.""" with satpy.config.set(config_path=[TEST_ETC_DIR]): diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index 964e634ba4..ca0d56f11f 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -365,7 +365,7 @@ def test_cmap_bad_mode(self, real_mode, forced_mode, filename_suffix): cmap_data = _generate_cmap_test_data(None, real_mode) _write_cmap_to_file(cmap_filename, cmap_data) # Force colormap_mode VRGBA to RGBA and we should see an exception - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unexpected colormap shape for mode .*"): create_colormap({"filename": cmap_filename, "colormap_mode": forced_mode}) def test_cmap_from_file_bad_shape(self): @@ -381,7 +381,7 @@ def test_cmap_from_file_bad_shape(self): [255], ])) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unexpected colormap shape for mode 'None'"): create_colormap({"filename": cmap_filename}) def test_cmap_from_config_path(self, tmp_path): @@ -415,7 +415,7 @@ def test_cmap_from_trollimage(self): def test_cmap_no_colormap(self): """Test that being unable to create a colormap raises an error.""" from satpy.enhancements import create_colormap - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unknown colormap format: .*"): create_colormap({}) def test_cmap_list(self): @@ -484,7 +484,7 @@ def func(dask_array): assert res.shape == arr.shape -@pytest.fixture +@pytest.fixture() def fake_area(): """Return a fake 2×2 area.""" from pyresample.geometry import create_area_def diff --git a/satpy/tests/modifier_tests/test_angles.py b/satpy/tests/modifier_tests/test_angles.py index 7bea78b7d1..2ebebacbc7 100644 --- a/satpy/tests/modifier_tests/test_angles.py +++ b/satpy/tests/modifier_tests/test_angles.py @@ -318,7 +318,7 @@ def test_cached_result_numpy_fails(self, tmp_path): def _fake_func(shape, chunks): return np.zeros(shape) - with pytest.raises(ValueError), \ + with 
pytest.raises(ValueError, match="Zarr caching currently only supports dask arrays. Got .*"), \ satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)): _fake_func((5, 5), ((5,), (5,))) diff --git a/satpy/tests/modifier_tests/test_crefl.py b/satpy/tests/modifier_tests/test_crefl.py index ab42f85155..dc9f4a232a 100644 --- a/satpy/tests/modifier_tests/test_crefl.py +++ b/satpy/tests/modifier_tests/test_crefl.py @@ -194,7 +194,7 @@ def test_reflectance_corrector_abi(self, name, wavelength, resolution, exp_mean, np.testing.assert_allclose(unique, exp_unique, rtol=1e-5) @pytest.mark.parametrize( - "url,dem_mock_cm,dem_sds", + ("url", "dem_mock_cm", "dem_sds"), [ (None, mock_cmgdem, "average elevation"), ("CMGDEM.hdf", mock_cmgdem, "averaged elevation"), @@ -341,12 +341,12 @@ def test_reflectance_corrector_bad_prereqs(self): """Test ReflectanceCorrector modifier with wrong number of inputs.""" from satpy.modifiers._crefl import ReflectanceCorrector ref_cor = ReflectanceCorrector("test") - pytest.raises(ValueError, ref_cor, [1], [2, 3, 4]) - pytest.raises(ValueError, ref_cor, [1, 2, 3, 4], []) - pytest.raises(ValueError, ref_cor, [], [1, 2, 3, 4]) + pytest.raises(ValueError, ref_cor, [1], [2, 3, 4], match="Not sure how to handle provided dependencies..*") + pytest.raises(ValueError, ref_cor, [1, 2, 3, 4], [], match="Not sure how to handle provided dependencies..*") + pytest.raises(ValueError, ref_cor, [], [1, 2, 3, 4], match="Not sure how to handle provided dependencies..*") @pytest.mark.parametrize( - "url,dem_mock_cm,dem_sds", + ("url", "dem_mock_cm", "dem_sds"), [ (None, mock_cmgdem, "average elevation"), ("CMGDEM.hdf", mock_cmgdem, "averaged elevation"), diff --git a/satpy/tests/modifier_tests/test_parallax.py b/satpy/tests/modifier_tests/test_parallax.py index 8fa358ec35..f1385e9b18 100644 --- a/satpy/tests/modifier_tests/test_parallax.py +++ b/satpy/tests/modifier_tests/test_parallax.py @@ -39,7 +39,7 @@ # - request -@pytest.fixture +@pytest.fixture() def fake_tle(): """Produce fake Two Line Element (TLE) object from pyorbital.""" return pyorbital.tlefile.Tle( @@ -109,7 +109,7 @@ def test_get_parallax_corrected_lonlats_clearsky(self): assert np.isnan(corr_lon).all() assert np.isnan(corr_lat).all() - @pytest.mark.parametrize("lat,lon", [(0, 0), (0, 40), (0, 179.9)]) + @pytest.mark.parametrize(("lat", "lon"), [(0, 0), (0, 40), (0, 179.9)]) @pytest.mark.parametrize("resolution", [0.01, 0.5, 10]) def test_get_parallax_corrected_lonlats_cloudy_ssp(self, lat, lon, resolution): """Test parallax correction for fully cloudy scene at SSP.""" @@ -219,7 +219,7 @@ def test_init_parallaxcorrection(self, center, sizes, resolution): pc = ParallaxCorrection(fake_area) assert pc.base_area == fake_area - @pytest.mark.parametrize("sat_pos,ar_pos", + @pytest.mark.parametrize(("sat_pos", "ar_pos"), [((0, 0), (0, 0)), ((0, 0), (40, 0))]) @pytest.mark.parametrize("resolution", [0.01, 0.5, 10]) def test_correct_area_clearsky(self, sat_pos, ar_pos, resolution, caplog): @@ -247,7 +247,7 @@ def test_correct_area_clearsky(self, sat_pos, ar_pos, resolution, caplog): new_area.get_lonlats(), fake_area_small.get_lonlats()) - @pytest.mark.parametrize("lat,lon", + @pytest.mark.parametrize(("lat", "lon"), [(0, 0), (0, 40), (0, 180), (90, 0)]) # relevant for Арктика satellites @pytest.mark.parametrize("resolution", [0.01, 0.5, 10]) @@ -345,7 +345,7 @@ def test_correct_area_partlycloudy(self, daskify): [49.86860622, 49.9097198, 49.90971976, 49.9097198, 49.88231496]]), rtol=1e-6) - @pytest.mark.parametrize("res1,res2", 
[(0.08, 0.3), (0.3, 0.08)]) + @pytest.mark.parametrize(("res1", "res2"), [(0.08, 0.3), (0.3, 0.08)]) def test_correct_area_clearsky_different_resolutions(self, res1, res2): """Test clearsky correction when areas have different resolutions.""" from satpy.modifiers.parallax import ParallaxCorrection @@ -564,7 +564,7 @@ def test_parallax_modifier_interface_with_cloud(self): # do so after parallax correction assert not (res.diff("x") < 0).any() - @pytest.fixture + @pytest.fixture() def test_area(self, request): """Produce test area for parallax correction unit tests. @@ -711,12 +711,12 @@ def test_modifier_interface_cloud_moves_to_observer(self, cth, use_dask, test_ar class TestParallaxCorrectionSceneLoad: """Test that scene load interface works as expected.""" - @pytest.fixture + @pytest.fixture() def yaml_code(self): """Return YAML code for parallax_corrected_VIS006.""" return _test_yaml_code - @pytest.fixture + @pytest.fixture() def conf_file(self, yaml_code, tmp_path): """Produce a fake configuration file.""" conf_file = tmp_path / "test.yaml" @@ -724,7 +724,7 @@ def conf_file(self, yaml_code, tmp_path): fp.write(yaml_code) return conf_file - @pytest.fixture + @pytest.fixture() def fake_scene(self, yaml_code): """Produce fake scene and prepare fake composite config.""" from satpy import Scene diff --git a/satpy/tests/multiscene_tests/test_blend.py b/satpy/tests/multiscene_tests/test_blend.py index c6e65d4615..6bd32ebc98 100644 --- a/satpy/tests/multiscene_tests/test_blend.py +++ b/satpy/tests/multiscene_tests/test_blend.py @@ -59,7 +59,7 @@ def _get_expected_stack_blend(scene1: Scene, scene2: Scene) -> xr.DataArray: return expected -@pytest.fixture +@pytest.fixture() def test_area(): """Get area definition used by test DataArrays.""" return _create_test_area() @@ -77,7 +77,7 @@ def image_mode(request): return request.param -@pytest.fixture +@pytest.fixture() def cloud_type_data_array1(test_area, data_type, image_mode): """Get DataArray for cloud type in the first test Scene.""" dsid1 = make_dataid( @@ -107,7 +107,7 @@ def cloud_type_data_array1(test_area, data_type, image_mode): return data_arr -@pytest.fixture +@pytest.fixture() def cloud_type_data_array2(test_area, data_type, image_mode): """Get DataArray for cloud type in the second test Scene.""" dsid1 = make_dataid( @@ -133,7 +133,7 @@ def cloud_type_data_array2(test_area, data_type, image_mode): return data_arr -@pytest.fixture +@pytest.fixture() def scene1_with_weights(cloud_type_data_array1, test_area): """Create first test scene with a dataset of weights.""" from satpy import Scene @@ -160,7 +160,7 @@ def scene1_with_weights(cloud_type_data_array1, test_area): return scene, [wgt1, wgt2] -@pytest.fixture +@pytest.fixture() def scene2_with_weights(cloud_type_data_array2, test_area): """Create second test scene.""" from satpy import Scene @@ -183,7 +183,7 @@ def scene2_with_weights(cloud_type_data_array2, test_area): return scene, [wgt1, wgt2] -@pytest.fixture +@pytest.fixture() def multi_scene_and_weights(scene1_with_weights, scene2_with_weights): """Create small multi-scene for testing.""" from satpy import MultiScene @@ -193,7 +193,7 @@ def multi_scene_and_weights(scene1_with_weights, scene2_with_weights): return MultiScene([scene1, scene2]), [weights1, weights2] -@pytest.fixture +@pytest.fixture() def groups(): """Get group definitions for the MultiScene.""" return { @@ -237,7 +237,7 @@ def test_blend_two_scenes_bad_blend_type(self, multi_scene_and_weights, groups): weights = [weights[0][0], weights[1][0]] stack_func = 
partial(stack, weights=weights, blend_type="i_dont_exist") - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unknown weighted blending type: .*.Expected one of: .*"): multi_scene.blend(blend_function=stack_func) @pytest.mark.parametrize( @@ -283,7 +283,7 @@ def test_blend_two_scenes_using_stack_weighted(self, multi_scene_and_weights, gr assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 11, 7, 250000) assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 20, 11, 950000) - @pytest.fixture + @pytest.fixture() def datasets_and_weights(self): """X-Array datasets with area definition plus weights for input to tests.""" shape = (8, 12) diff --git a/satpy/tests/multiscene_tests/test_misc.py b/satpy/tests/multiscene_tests/test_misc.py index 190045dad0..9f6e400e31 100644 --- a/satpy/tests/multiscene_tests/test_misc.py +++ b/satpy/tests/multiscene_tests/test_misc.py @@ -58,19 +58,17 @@ def test_properties(self): scenes[1]["ds3"] = _create_test_dataset("ds3") mscn = MultiScene(scenes) - self.assertSetEqual(mscn.loaded_dataset_ids, - {ds1_id, ds2_id, ds3_id}) - self.assertSetEqual(mscn.shared_dataset_ids, {ds1_id, ds2_id}) - self.assertTrue(mscn.all_same_area) + assert mscn.loaded_dataset_ids == {ds1_id, ds2_id, ds3_id} + assert mscn.shared_dataset_ids == {ds1_id, ds2_id} + assert mscn.all_same_area bigger_area = _create_test_area(shape=(20, 40)) scenes[0]["ds4"] = _create_test_dataset("ds4", shape=(20, 40), area=bigger_area) - self.assertSetEqual(mscn.loaded_dataset_ids, - {ds1_id, ds2_id, ds3_id, ds4_id}) - self.assertSetEqual(mscn.shared_dataset_ids, {ds1_id, ds2_id}) - self.assertFalse(mscn.all_same_area) + assert mscn.loaded_dataset_ids == {ds1_id, ds2_id, ds3_id, ds4_id} + assert mscn.shared_dataset_ids == {ds1_id, ds2_id} + assert not mscn.all_same_area def test_from_files(self): """Test creating a multiscene from multiple files.""" @@ -133,7 +131,7 @@ def test_from_files(self): class TestMultiSceneGrouping: """Test dataset grouping in MultiScene.""" - @pytest.fixture + @pytest.fixture() def scene1(self): """Create first test scene.""" from satpy import Scene @@ -154,7 +152,7 @@ def scene1(self): scene[dsid2] = _create_test_dataset(name="ds2") return scene - @pytest.fixture + @pytest.fixture() def scene2(self): """Create second test scene.""" from satpy import Scene @@ -175,13 +173,13 @@ def scene2(self): scene[dsid2] = _create_test_dataset(name="ds4") return scene - @pytest.fixture + @pytest.fixture() def multi_scene(self, scene1, scene2): """Create small multi scene for testing.""" from satpy import MultiScene return MultiScene([scene1, scene2]) - @pytest.fixture + @pytest.fixture() def groups(self): """Get group definitions for the MultiScene.""" return { @@ -201,5 +199,5 @@ def test_fails_to_add_multiple_datasets_from_the_same_scene_to_a_group(self, mul """Test that multiple datasets from the same scene in one group fails.""" groups = {DataQuery(name="mygroup"): ["ds1", "ds2"]} multi_scene.group(groups) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Cannot add multiple datasets from a scene to the same group"): next(multi_scene.scenes) diff --git a/satpy/tests/multiscene_tests/test_save_animation.py b/satpy/tests/multiscene_tests/test_save_animation.py index 2ea41f18f4..6807446bbb 100644 --- a/satpy/tests/multiscene_tests/test_save_animation.py +++ b/satpy/tests/multiscene_tests/test_save_animation.py @@ -83,11 +83,11 @@ def test_save_mp4_distributed(self): # 2 saves for the first scene + 1 black frame # 3 for the second 
scene - self.assertEqual(writer_mock.append_data.call_count, 3 + 3) + assert writer_mock.append_data.call_count == 3 + 3 filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] - self.assertEqual(filenames[0], "test_save_mp4_ds1_20180101_00_20180102_12.mp4") - self.assertEqual(filenames[1], "test_save_mp4_ds2_20180101_00_20180102_12.mp4") - self.assertEqual(filenames[2], "test_save_mp4_ds3_20180102_00_20180102_12.mp4") + assert filenames[0] == "test_save_mp4_ds1_20180101_00_20180102_12.mp4" + assert filenames[1] == "test_save_mp4_ds2_20180101_00_20180102_12.mp4" + assert filenames[2] == "test_save_mp4_ds3_20180102_00_20180102_12.mp4" # Test no distributed client found mscn = MultiScene(scenes) @@ -106,11 +106,11 @@ def test_save_mp4_distributed(self): # 2 saves for the first scene + 1 black frame # 3 for the second scene - self.assertEqual(writer_mock.append_data.call_count, 3 + 3) + assert writer_mock.append_data.call_count == 3 + 3 filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] - self.assertEqual(filenames[0], "test_save_mp4_ds1_20180101_00_20180102_12.mp4") - self.assertEqual(filenames[1], "test_save_mp4_ds2_20180101_00_20180102_12.mp4") - self.assertEqual(filenames[2], "test_save_mp4_ds3_20180102_00_20180102_12.mp4") + assert filenames[0] == "test_save_mp4_ds1_20180101_00_20180102_12.mp4" + assert filenames[1] == "test_save_mp4_ds2_20180101_00_20180102_12.mp4" + assert filenames[2] == "test_save_mp4_ds3_20180102_00_20180102_12.mp4" @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_mp4_no_distributed(self): @@ -146,11 +146,11 @@ def test_save_mp4_no_distributed(self): # 2 saves for the first scene + 1 black frame # 3 for the second scene - self.assertEqual(writer_mock.append_data.call_count, 3 + 3) + assert writer_mock.append_data.call_count == 3 + 3 filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] - self.assertEqual(filenames[0], "test_save_mp4_ds1_20180101_00_20180102_12.mp4") - self.assertEqual(filenames[1], "test_save_mp4_ds2_20180101_00_20180102_12.mp4") - self.assertEqual(filenames[2], "test_save_mp4_ds3_20180102_00_20180102_12.mp4") + assert filenames[0] == "test_save_mp4_ds1_20180101_00_20180102_12.mp4" + assert filenames[1] == "test_save_mp4_ds2_20180101_00_20180102_12.mp4" + assert filenames[2] == "test_save_mp4_ds3_20180102_00_20180102_12.mp4" @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_datasets_simple(self): @@ -181,7 +181,7 @@ def test_save_datasets_simple(self): writer="simple_image") # 2 for each scene - self.assertEqual(save_datasets.call_count, 2) + assert save_datasets.call_count == 2 @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_datasets_distributed_delayed(self): @@ -216,7 +216,7 @@ def test_save_datasets_distributed_delayed(self): writer="simple_image") # 2 for each scene - self.assertEqual(save_datasets.call_count, 2) + assert save_datasets.call_count == 2 @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_datasets_distributed_source_target(self): @@ -290,13 +290,13 @@ def test_crop(self): # by lon/lat bbox new_mscn = mscn.crop(ll_bbox=(-20., -5., 0, 0)) new_scn1 = list(new_mscn.scenes)[0] - self.assertIn("1", new_scn1) - self.assertIn("2", new_scn1) - self.assertIn("3", new_scn1) - self.assertTupleEqual(new_scn1["1"].shape, (y_size, x_size)) - 
self.assertTupleEqual(new_scn1["2"].shape, (y_size, x_size))
-        self.assertTupleEqual(new_scn1["3"].shape, (184, 714))
-        self.assertTupleEqual(new_scn1["4"].shape, (92, 357))
+        assert "1" in new_scn1
+        assert "2" in new_scn1
+        assert "3" in new_scn1
+        assert new_scn1["1"].shape == (y_size, x_size)
+        assert new_scn1["2"].shape == (y_size, x_size)
+        assert new_scn1["3"].shape == (184, 714)
+        assert new_scn1["4"].shape == (92, 357)
 
     @mock.patch("satpy.multiscene._multiscene.get_enhanced_image")
diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py
index 486eba370b..4638bcfca3 100644
--- a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py
+++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py
@@ -25,7 +25,7 @@
 
 
 @pytest.fixture(params=[False, True], autouse=True)
-def disable_jit(request, monkeypatch):
+def _disable_jit(request, monkeypatch):
     """Run tests with jit enabled and disabled.
 
     Reason: Coverage report is only accurate with jit disabled.
@@ -58,7 +58,7 @@ class TestFileHandler:
     """Test VISSR file handler."""
 
     @pytest.fixture(autouse=True)
-    def patch_number_of_pixels_per_scanline(self, monkeypatch):
+    def _patch_number_of_pixels_per_scanline(self, monkeypatch):
         """Patch data types so that each scanline has two pixels."""
         num_pixels = 2
         IMAGE_DATA_BLOCK_IR = np.dtype(
@@ -116,12 +116,12 @@ def with_compression(self, request):
         """Enable compression."""
         return request.param
 
-    @pytest.fixture
+    @pytest.fixture()
     def open_function(self, with_compression):
         """Get open function for writing test files."""
         return gzip.open if with_compression else open
 
-    @pytest.fixture
+    @pytest.fixture()
     def vissr_file(self, dataset_id, file_contents, open_function, tmp_path):
         """Get test VISSR file."""
         filename = tmp_path / "vissr_file"
@@ -130,7 +130,7 @@ def vissr_file(self, dataset_id, file_contents, open_function, tmp_path):
         writer.write(filename, file_contents)
         return filename
 
-    @pytest.fixture
+    @pytest.fixture()
     def file_contents(self, control_block, image_parameters, image_data):
         """Get VISSR file contents."""
         return {
@@ -139,7 +139,7 @@ def file_contents(self, control_block, image_parameters, image_data):
             "image_data": image_data,
         }
 
-    @pytest.fixture
+    @pytest.fixture()
     def control_block(self, dataset_id):
         """Get VISSR control block."""
         block_size = {"IR1": 16, "VIS": 4}
@@ -148,7 +148,7 @@ def control_block(self, dataset_id):
         ctrl_block["available_block_size_of_image_data"] = 2
         return ctrl_block
 
-    @pytest.fixture
+    @pytest.fixture()
     def image_parameters(self, mode_block, cal_params, nav_params):
         """Get VISSR image parameters."""
         image_params = {"mode": mode_block}
@@ -156,7 +156,7 @@ def image_parameters(self, mode_block, cal_params, nav_params):
         image_params.update(nav_params)
         return image_params
 
-    @pytest.fixture
+    @pytest.fixture()
     def nav_params(
         self,
         coordinate_conversion,
@@ -170,7 +170,7 @@ def nav_params(
         nav_params.update(coordinate_conversion)
         return nav_params
 
-    @pytest.fixture
+    @pytest.fixture()
     def cal_params(
         self,
         vis_calibration,
@@ -186,7 +186,7 @@ def cal_params(
             "wv_calibration": wv_calibration,
         }
 
-    @pytest.fixture
+    @pytest.fixture()
     def mode_block(self):
         """Get VISSR mode block."""
         mode = np.zeros(1, dtype=fmt.MODE_BLOCK)
@@ -201,7 +201,7 @@ def mode_block(self):
         mode["vis_frame_parameters"]["number_of_pixels"] = 2
         return mode
 
-    @pytest.fixture
+    @pytest.fixture()
     def coordinate_conversion(self, coord_conv, simple_coord_conv_table):
         """Get all coordinate conversion parameters."""
         return {
@@ -209,7 +209,7 @@ def coordinate_conversion(self, coord_conv, simple_coord_conv_table):
             "simple_coordinate_conversion_table": simple_coord_conv_table
         }
 
-    @pytest.fixture
+    @pytest.fixture()
     def coord_conv(self):
         """Get parameters for coordinate conversions.
 
@@ -255,14 +255,14 @@ def coord_conv(self):
         conv["orbital_parameters"]["latitude_of_ssp"] = 1.0
         return conv
 
-    @pytest.fixture
+    @pytest.fixture()
     def attitude_prediction(self):
         """Get attitude prediction."""
         att_pred = np.zeros(1, dtype=fmt.ATTITUDE_PREDICTION)
         att_pred["data"] = real_world.ATTITUDE_PREDICTION
         return {"attitude_prediction": att_pred}
 
-    @pytest.fixture
+    @pytest.fixture()
     def orbit_prediction(self, orbit_prediction_1, orbit_prediction_2):
         """Get predictions of orbital parameters."""
         return {
@@ -270,21 +270,21 @@ def orbit_prediction(self, orbit_prediction_1, orbit_prediction_2):
             "orbit_prediction_2": orbit_prediction_2
         }
 
-    @pytest.fixture
+    @pytest.fixture()
     def orbit_prediction_1(self):
         """Get first block of orbit prediction data."""
         orb_pred = np.zeros(1, dtype=fmt.ORBIT_PREDICTION)
         orb_pred["data"] = real_world.ORBIT_PREDICTION_1
         return orb_pred
 
-    @pytest.fixture
+    @pytest.fixture()
     def orbit_prediction_2(self):
         """Get second block of orbit prediction data."""
         orb_pred = np.zeros(1, dtype=fmt.ORBIT_PREDICTION)
         orb_pred["data"] = real_world.ORBIT_PREDICTION_2
         return orb_pred
 
-    @pytest.fixture
+    @pytest.fixture()
     def vis_calibration(self):
         """Get VIS calibration block."""
         vis_cal = np.zeros(1, dtype=fmt.VIS_CALIBRATION)
@@ -292,7 +292,7 @@ def vis_calibration(self):
         table[0, 0:4] = np.array([0, 0.25, 0.5, 1])
         return vis_cal
 
-    @pytest.fixture
+    @pytest.fixture()
     def ir1_calibration(self):
         """Get IR1 calibration block."""
         cal = np.zeros(1, dtype=fmt.IR_CALIBRATION)
@@ -300,32 +300,32 @@ def ir1_calibration(self):
         table[0, 0:4] = np.array([0, 100, 200, 300])
         return cal
 
-    @pytest.fixture
+    @pytest.fixture()
     def ir2_calibration(self):
         """Get IR2 calibration block."""
         cal = np.zeros(1, dtype=fmt.IR_CALIBRATION)
         return cal
 
-    @pytest.fixture
+    @pytest.fixture()
     def wv_calibration(self):
         """Get WV calibration block."""
         cal = np.zeros(1, dtype=fmt.IR_CALIBRATION)
         return cal
 
-    @pytest.fixture
+    @pytest.fixture()
     def simple_coord_conv_table(self):
         """Get simple coordinate conversion table."""
         table = np.zeros(1, dtype=fmt.SIMPLE_COORDINATE_CONVERSION_TABLE)
         table["satellite_height"] = 123457.0
         return table
 
-    @pytest.fixture
+    @pytest.fixture()
     def image_data(self, dataset_id, image_data_ir1, image_data_vis):
         """Get VISSR image data."""
         data = {"IR1": image_data_ir1, "VIS": image_data_vis}
         return data[dataset_id["name"]]
 
-    @pytest.fixture
+    @pytest.fixture()
     def image_data_ir1(self):
         """Get IR1 image data."""
         image_data = np.zeros(2, fmt.IMAGE_DATA_BLOCK_IR)
@@ -336,7 +336,7 @@ def image_data_ir1(self):
         image_data["image_data"] = [[0, 1], [2, 3]]
         return image_data
 
-    @pytest.fixture
+    @pytest.fixture()
     def image_data_vis(self):
         """Get VIS image data."""
         image_data = np.zeros(2, fmt.IMAGE_DATA_BLOCK_VIS)
@@ -347,7 +347,7 @@ def image_data_vis(self):
         image_data["image_data"] = [[0, 1], [2, 3]]
         return image_data
 
-    @pytest.fixture
+    @pytest.fixture()
     def vissr_file_like(self, vissr_file, with_compression):
         """Get file-like object for VISSR test file."""
         if with_compression:
@@ -355,14 +355,14 @@ def vissr_file_like(self, vissr_file, with_compression):
             return FSFile(open_file)
         return vissr_file
 
-    @pytest.fixture
+    @pytest.fixture()
     def file_handler(self, vissr_file_like, mask_space):
         """Get file handler to be tested."""
         return vissr.GMS5VISSRFileHandler(
            vissr_file_like, {}, {}, mask_space=mask_space
         )
 
-    @pytest.fixture
+    @pytest.fixture()
     def vis_refl_exp(self, mask_space, lons_lats_exp):
         """Get expected VIS reflectance."""
         lons, lats = lons_lats_exp
@@ -384,7 +384,7 @@ def vis_refl_exp(self, mask_space, lons_lats_exp):
             },
         )
 
-    @pytest.fixture
+    @pytest.fixture()
     def ir1_counts_exp(self, lons_lats_exp):
         """Get expected IR1 counts."""
         lons, lats = lons_lats_exp
@@ -402,7 +402,7 @@ def ir1_counts_exp(self, lons_lats_exp):
             },
         )
 
-    @pytest.fixture
+    @pytest.fixture()
     def ir1_bt_exp(self, lons_lats_exp):
         """Get expected IR1 brightness temperature."""
         lons, lats = lons_lats_exp
@@ -420,7 +420,7 @@ def ir1_bt_exp(self, lons_lats_exp):
             },
         )
 
-    @pytest.fixture
+    @pytest.fixture()
     def lons_lats_exp(self, dataset_id):
         """Get expected lon/lat coordinates.
 
@@ -456,7 +456,7 @@ def lons_lats_exp(self, dataset_id):
         lats = xr.DataArray(exp["lats"], dims=("y", "x"))
         return lons, lats
 
-    @pytest.fixture
+    @pytest.fixture()
    def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp):
         """Get expected dataset."""
         ir1_counts_id = make_dataid(name="IR1", calibration="counts", resolution=5000)
@@ -473,7 +473,7 @@ def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp):
         }
         return expectations[dataset_id]
 
-    @pytest.fixture
+    @pytest.fixture()
     def area_def_exp(self, dataset_id):
         """Get expected area definition."""
         if dataset_id["name"] == "IR1":
@@ -507,7 +507,7 @@ def area_def_exp(self, dataset_id):
             height=size,
         )
 
-    @pytest.fixture
+    @pytest.fixture()
     def attrs_exp(self, area_def_exp):
         """Get expected dataset attributes."""
         return {
@@ -546,7 +546,7 @@ def test_time_attributes(self, file_handler, attrs_exp):
 class TestCorruptFile:
     """Test reading corrupt files."""
 
-    @pytest.fixture
+    @pytest.fixture()
     def file_contents(self):
         """Get corrupt file contents (all zero)."""
         control_block = np.zeros(1, dtype=fmt.CONTROL_BLOCK)
@@ -557,7 +557,7 @@ def file_contents(self):
             "image_data": image_data,
         }
 
-    @pytest.fixture
+    @pytest.fixture()
     def corrupt_file(self, file_contents, tmp_path):
         """Write corrupt VISSR file to disk."""
         filename = tmp_path / "my_vissr_file"
diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py
index 5b3c6117d4..2a1a1cade9 100644
--- a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py
+++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py
@@ -219,7 +219,7 @@
 
 
 @pytest.fixture(params=[False, True], autouse=True)
-def disable_jit(request, monkeypatch):
+def _disable_jit(request, monkeypatch):
     """Run tests with jit enabled and disabled.
 
     Reason: Coverage report is only accurate with jit disabled.
@@ -234,7 +234,7 @@ class TestSinglePixelNavigation:
     """Test navigation of a single pixel."""
 
     @pytest.mark.parametrize(
-        "point,nav_params,expected",
+        ("point", "nav_params", "expected"),
         [
             (ref["pixel"], ref["nav_params"], (ref["lon"], ref["lat"]))
             for ref in NAVIGATION_REFERENCE
@@ -297,7 +297,7 @@ def test_intersect_view_vector_with_earth(self):
         np.testing.assert_allclose(point, exp)
 
     @pytest.mark.parametrize(
-        "point_earth_fixed,point_geodetic_exp",
+        ("point_earth_fixed", "point_geodetic_exp"),
         [
             ([0, 0, 1], [0, 90]),
             ([0, 0, -1], [0, -90]),
@@ -328,7 +328,7 @@ def test_normalize_vector(self):
 class TestImageNavigation:
     """Test navigation of an entire image."""
 
-    @pytest.fixture
+    @pytest.fixture()
     def expected(self):
         """Get expected coordinates."""
         exp = {
@@ -356,7 +356,7 @@ class TestPredictionInterpolation:
     """Test interpolation of orbit and attitude predictions."""
 
     @pytest.mark.parametrize(
-        "obs_time,expected", [(-1, np.nan), (1.5, 2.5), (5, np.nan)]
+        ("obs_time", "expected"), [(-1, np.nan), (1.5, 2.5), (5, np.nan)]
     )
     def test_interpolate_continuous(self, obs_time, expected):
         """Test interpolation of continuous variables."""
@@ -366,7 +366,7 @@ def test_interpolate_continuous(self, obs_time, expected):
         np.testing.assert_allclose(res, expected)
 
     @pytest.mark.parametrize(
-        "obs_time,expected",
+        ("obs_time", "expected"),
         [
             (-1, np.nan),
             (1.5, 0.75 * np.pi),
@@ -385,7 +385,7 @@ def test_interpolate_angles(self, obs_time, expected):
         np.testing.assert_allclose(res, expected)
 
     @pytest.mark.parametrize(
-        "obs_time,expected",
+        ("obs_time", "expected"),
         [
             (-1, np.nan * np.ones((2, 2))),
             (1.5, [[1, 0], [0, 2]]),
@@ -417,12 +417,12 @@ def test_interpolate_attitude_prediction(
         attitude = nav.interpolate_attitude_prediction(attitude_prediction, obs_time)
         _assert_namedtuple_close(attitude, attitude_expected)
 
-    @pytest.fixture
+    @pytest.fixture()
     def obs_time(self):
         """Get observation time."""
         return 2.5
 
-    @pytest.fixture
+    @pytest.fixture()
     def orbit_expected(self):
         """Get expected orbit."""
         return nav.Orbit(
@@ -439,7 +439,7 @@ def orbit_expected(self):
             nutation_precession=1.6 * np.identity(3),
         )
 
-    @pytest.fixture
+    @pytest.fixture()
     def attitude_expected(self):
         """Get expected attitude."""
         return nav.Attitude(
@@ -449,13 +449,13 @@ def attitude_expected(self):
         )
 
 
-@pytest.fixture
+@pytest.fixture()
 def sampling_angle():
     """Get sampling angle."""
     return 0.000095719995443
 
 
-@pytest.fixture
+@pytest.fixture()
 def scan_params(sampling_angle):
     """Get scanning parameters."""
     return nav.ScanningParameters(
@@ -466,7 +466,7 @@ def scan_params(sampling_angle):
     )
 
 
-@pytest.fixture
+@pytest.fixture()
 def attitude_prediction():
     """Get attitude prediction."""
     return nav.AttitudePrediction(
@@ -479,7 +479,7 @@ def attitude_prediction():
     )
 
 
-@pytest.fixture
+@pytest.fixture()
 def orbit_prediction():
     """Get orbit prediction."""
     return nav.OrbitPrediction(
@@ -505,7 +505,7 @@ def orbit_prediction():
     )
 
 
-@pytest.fixture
+@pytest.fixture()
 def proj_params(sampling_angle):
     """Get projection parameters."""
     return nav.ProjectionParameters(
@@ -525,19 +525,19 @@ def proj_params(sampling_angle):
     )
 
 
-@pytest.fixture
+@pytest.fixture()
 def static_nav_params(proj_params, scan_params):
     """Get static navigation parameters."""
     return nav.StaticNavigationParameters(proj_params, scan_params)
 
 
-@pytest.fixture
+@pytest.fixture()
 def predicted_nav_params(attitude_prediction, orbit_prediction):
     """Get predicted navigation parameters."""
     return nav.PredictedNavigationParameters(attitude_prediction, orbit_prediction)
-@pytest.fixture +@pytest.fixture() def navigation_params(static_nav_params, predicted_nav_params): """Get image navigation parameters.""" return nav.ImageNavigationParameters(static_nav_params, predicted_nav_params) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index efecd1aa53..3af38328f8 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -370,7 +370,7 @@ def modis_l1b_nasa_mod02hkm_file(tmpdir_factory) -> list[str]: return [full_path] -@pytest.fixture +@pytest.fixture() def modis_l1b_nasa_mod02qkm_file(tmpdir_factory) -> list[str]: """Create a single MOD02QKM file following standard NASA file scheme.""" filename = generate_nasa_l1b_filename("MOD02Qkm") diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py index 85048de0af..a00eae8a3f 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py @@ -84,16 +84,16 @@ def test_available_reader(self): @pytest.mark.parametrize( ("input_files", "expected_names", "expected_data_res", "expected_geo_res"), [ - [lazy_fixture("modis_l1b_nasa_mod021km_file"), + (lazy_fixture("modis_l1b_nasa_mod021km_file"), AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, - [1000], [5000, 1000]], - [lazy_fixture("modis_l1b_imapp_1000m_file"), + [1000], [5000, 1000]), + (lazy_fixture("modis_l1b_imapp_1000m_file"), AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, - [1000], [5000, 1000]], - [lazy_fixture("modis_l1b_nasa_mod02hkm_file"), - AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [500], [1000, 500, 250]], - [lazy_fixture("modis_l1b_nasa_mod02qkm_file"), - AVAILABLE_QKM_PRODUCT_NAMES, [250], [1000, 500, 250]], + [1000], [5000, 1000]), + (lazy_fixture("modis_l1b_nasa_mod02hkm_file"), + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [500], [1000, 500, 250]), + (lazy_fixture("modis_l1b_nasa_mod02qkm_file"), + AVAILABLE_QKM_PRODUCT_NAMES, [250], [1000, 500, 250]), ] ) def test_scene_available_datasets(self, input_files, expected_names, expected_data_res, expected_geo_res): @@ -128,16 +128,16 @@ def test_scene_available_datasets(self, input_files, expected_names, expected_da @pytest.mark.parametrize( ("input_files", "has_5km", "has_500", "has_250", "default_res"), [ - [lazy_fixture("modis_l1b_nasa_mod021km_file"), - True, False, False, 1000], - [lazy_fixture("modis_l1b_imapp_1000m_file"), - True, False, False, 1000], - [lazy_fixture("modis_l1b_nasa_mod02hkm_file"), - False, True, True, 250], - [lazy_fixture("modis_l1b_nasa_mod02qkm_file"), - False, True, True, 250], - [lazy_fixture("modis_l1b_nasa_1km_mod03_files"), - True, True, True, 250], + (lazy_fixture("modis_l1b_nasa_mod021km_file"), + True, False, False, 1000), + (lazy_fixture("modis_l1b_imapp_1000m_file"), + True, False, False, 1000), + (lazy_fixture("modis_l1b_nasa_mod02hkm_file"), + False, True, True, 250), + (lazy_fixture("modis_l1b_nasa_mod02qkm_file"), + False, True, True, 250), + (lazy_fixture("modis_l1b_nasa_1km_mod03_files"), + True, True, True, 250), ] ) def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res): diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py index 99c0890d30..8876decb59 100644 --- 
a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py @@ -72,8 +72,8 @@ def test_scene_available_datasets(self, modis_l2_nasa_mod35_file): @pytest.mark.parametrize( ("input_files", "has_5km", "has_500", "has_250", "default_res"), [ - [lazy_fixture("modis_l2_nasa_mod35_file"), - True, False, False, 1000], + (lazy_fixture("modis_l2_nasa_mod35_file"), + True, False, False, 1000), ] ) def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res): @@ -108,12 +108,12 @@ def test_load_quality_assurance(self, modis_l2_nasa_mod35_file): @pytest.mark.parametrize( ("input_files", "loadables", "request_resolution", "exp_resolution", "exp_area"), [ - [lazy_fixture("modis_l2_nasa_mod35_mod03_files"), + (lazy_fixture("modis_l2_nasa_mod35_mod03_files"), ["cloud_mask"], - 1000, 1000, True], - [lazy_fixture("modis_l2_imapp_mask_byte1_geo_files"), + 1000, 1000, True), + (lazy_fixture("modis_l2_imapp_mask_byte1_geo_files"), ["cloud_mask", "land_sea_mask", "snow_ice_mask"], - None, 1000, True], + None, 1000, True), ] ) def test_load_category_dataset(self, input_files, loadables, request_resolution, exp_resolution, exp_area): @@ -138,8 +138,8 @@ def test_load_category_dataset(self, input_files, loadables, request_resolution, @pytest.mark.parametrize( ("input_files", "exp_area"), [ - [lazy_fixture("modis_l2_nasa_mod35_file"), False], - [lazy_fixture("modis_l2_nasa_mod35_mod03_files"), True], + (lazy_fixture("modis_l2_nasa_mod35_file"), False), + (lazy_fixture("modis_l2_nasa_mod35_mod03_files"), True), ] ) def test_load_250m_cloud_mask_dataset(self, input_files, exp_area): @@ -162,10 +162,10 @@ def test_load_250m_cloud_mask_dataset(self, input_files, exp_area): @pytest.mark.parametrize( ("input_files", "loadables", "exp_resolution", "exp_area", "exp_value"), [ - [lazy_fixture("modis_l2_nasa_mod06_file"), ["surface_pressure"], 5000, True, 4.0], + (lazy_fixture("modis_l2_nasa_mod06_file"), ["surface_pressure"], 5000, True, 4.0), # snow mask is considered a category product, factor/offset ignored - [lazy_fixture("modis_l2_imapp_snowmask_file"), ["snow_mask"], 1000, False, 1.0], - [lazy_fixture("modis_l2_imapp_snowmask_geo_files"), ["snow_mask"], 1000, True, 1.0], + (lazy_fixture("modis_l2_imapp_snowmask_file"), ["snow_mask"], 1000, False, 1.0), + (lazy_fixture("modis_l2_imapp_snowmask_geo_files"), ["snow_mask"], 1000, True, 1.0), ] ) def test_load_l2_dataset(self, input_files, loadables, exp_resolution, exp_area, exp_value): diff --git a/satpy/tests/reader_tests/test_aapp_l1b.py b/satpy/tests/reader_tests/test_aapp_l1b.py index e9414ee521..a9997f7a7e 100644 --- a/satpy/tests/reader_tests/test_aapp_l1b.py +++ b/satpy/tests/reader_tests/test_aapp_l1b.py @@ -218,9 +218,9 @@ def test_interpolation(self): fh._get_coordinates_in_degrees.return_value = (lons40km, lats40km) (lons, lats) = fh._get_all_interpolated_coordinates() lon_data = lons.compute() - self.assertTrue(np.max(lon_data) <= 180) + assert (np.max(lon_data) <= 180) # Not longitdes between -110, 110 in indata - self.assertTrue(np.all(np.abs(lon_data) > 110)) + assert np.all(np.abs(lon_data) > 110) def test_interpolation_angles(self): """Test reading the lon and lats.""" @@ -276,8 +276,8 @@ def test_interpolation_angles(self): fh._get_tiepoint_angles_in_degrees = mock.MagicMock() fh._get_tiepoint_angles_in_degrees.return_value = (sunz40km, satz40km, azidiff40km) (sunz, satz, azidiff) = fh._get_all_interpolated_angles() - self.assertTrue(np.max(sunz) <= 123) - 
self.assertTrue(np.max(satz) <= 70) + assert (np.max(sunz) <= 123) + assert (np.max(satz) <= 70) class TestAAPPL1BChannel3AMissing(unittest.TestCase): diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 1b54b58249..ab2b1eec54 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -131,7 +131,7 @@ def setUp(self, xr_, rad=None, clip_negative_radiances=False): class TestABIYAML: """Tests for the ABI L1b reader's YAML configuration.""" - @pytest.mark.parametrize(["channel", "suffix"], + @pytest.mark.parametrize(("channel", "suffix"), [("C{:02d}".format(num), suffix) for num in range(1, 17) for suffix in ("", "_test_suffix")]) @@ -157,10 +157,8 @@ class Test_NC_ABI_L1B(Test_NC_ABI_L1B_Base): def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import datetime - self.assertEqual(self.reader.start_time, - datetime(2017, 9, 20, 17, 30, 40, 800000)) - self.assertEqual(self.reader.end_time, - datetime(2017, 9, 20, 17, 41, 17, 500000)) + assert self.reader.start_time == datetime(2017, 9, 20, 17, 30, 40, 800000) + assert self.reader.end_time == datetime(2017, 9, 20, 17, 41, 17, 500000) def test_get_dataset(self): """Test the get_dataset method.""" @@ -190,24 +188,24 @@ def test_get_dataset(self): "suffix": "custom", "units": "W m-2 um-1 sr-1"} - self.assertDictEqual(res.attrs, exp) + assert res.attrs == exp # we remove any time dimension information - self.assertNotIn("t", res.coords) - self.assertNotIn("t", res.dims) - self.assertNotIn("time", res.coords) - self.assertNotIn("time", res.dims) + assert "t" not in res.coords + assert "t" not in res.dims + assert "time" not in res.coords + assert "time" not in res.dims @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def(self, adef): """Test the area generation.""" self.reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - self.assertDictEqual(call_args[3], {"a": 1.0, "b": 1.0, "h": 1.0, "lon_0": -90.0, "proj": "geos", - "sweep": "x", "units": "m"}) - self.assertEqual(call_args[4], self.reader.ncols) - self.assertEqual(call_args[5], self.reader.nlines) + assert call_args[3] == {"a": 1.0, "b": 1.0, "h": 1.0, + "lon_0": -90.0, "proj": "geos", "sweep": "x", "units": "m"} + assert call_args[4] == self.reader.ncols + assert call_args[5] == self.reader.nlines np.testing.assert_allclose(call_args[6], (-2, -2, 8, 2)) @@ -236,11 +234,10 @@ def test_ir_calibration_attrs(self): make_dataid(name="C05", calibration="brightness_temperature"), {}) # make sure the attributes from the file are in the data array - self.assertNotIn("scale_factor", res.attrs) - self.assertNotIn("_FillValue", res.attrs) - self.assertEqual(res.attrs["standard_name"], - "toa_brightness_temperature") - self.assertEqual(res.attrs["long_name"], "Brightness Temperature") + assert "scale_factor" not in res.attrs + assert "_FillValue" not in res.attrs + assert res.attrs["standard_name"] == "toa_brightness_temperature" + assert res.attrs["long_name"] == "Brightness Temperature" def test_clip_negative_radiances_attribute(self): """Assert that clip_negative_radiances is set to False.""" @@ -331,13 +328,11 @@ def test_vis_calibrate(self): expected = np.array([[0.15265617, 0.30531234, 0.45796851, 0.61062468, 0.76328085], [0.91593702, 1.06859319, 1.22124936, np.nan, 1.52656171]]) - self.assertTrue(np.allclose(res.data, expected, equal_nan=True)) - 
self.assertNotIn("scale_factor", res.attrs) - self.assertNotIn("_FillValue", res.attrs) - self.assertEqual(res.attrs["standard_name"], - "toa_bidirectional_reflectance") - self.assertEqual(res.attrs["long_name"], - "Bidirectional Reflectance") + assert np.allclose(res.data, expected, equal_nan=True) + assert "scale_factor" not in res.attrs + assert "_FillValue" not in res.attrs + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" + assert res.attrs["long_name"] == "Bidirectional Reflectance" class Test_NC_ABI_L1B_raw_cal(Test_NC_ABI_L1B_Base): @@ -366,22 +361,20 @@ def test_raw_calibrate(self): # We expect the raw data to be unchanged expected = res.data - self.assertTrue(np.allclose(res.data, expected, equal_nan=True)) + assert np.allclose(res.data, expected, equal_nan=True) # check for the presence of typical attributes - self.assertIn("scale_factor", res.attrs) - self.assertIn("add_offset", res.attrs) - self.assertIn("_FillValue", res.attrs) - self.assertIn("orbital_parameters", res.attrs) - self.assertIn("platform_shortname", res.attrs) - self.assertIn("scene_id", res.attrs) + assert "scale_factor" in res.attrs + assert "add_offset" in res.attrs + assert "_FillValue" in res.attrs + assert "orbital_parameters" in res.attrs + assert "platform_shortname" in res.attrs + assert "scene_id" in res.attrs # determine if things match their expected values/types. - self.assertEqual(res.data.dtype, np.int16, "int16 data type expected") - self.assertEqual(res.attrs["standard_name"], - "counts") - self.assertEqual(res.attrs["long_name"], - "Raw Counts") + assert res.data.dtype == np.int16 + assert res.attrs["standard_name"] == "counts" + assert res.attrs["long_name"] == "Raw Counts" class Test_NC_ABI_L1B_invalid_cal(Test_NC_ABI_L1B_Base): @@ -405,7 +398,7 @@ class Test_NC_ABI_File(unittest.TestCase): """Test file opening.""" @mock.patch("satpy.readers.abi_base.xr") - def test_open_dataset(self, _): + def test_open_dataset(self, _): # noqa: PT019 """Test openning a dataset.""" from satpy.readers.abi_l1b import NC_ABI_L1B diff --git a/satpy/tests/reader_tests/test_acspo.py b/satpy/tests/reader_tests/test_acspo.py index 166c77227d..723d1dbecd 100644 --- a/satpy/tests/reader_tests/test_acspo.py +++ b/satpy/tests/reader_tests/test_acspo.py @@ -119,10 +119,10 @@ def teardown_method(self): self.p.stop() @pytest.mark.parametrize( - ("filename",), + "filename", [ - ["20170401174600-STAR-L2P_GHRSST-SSTskin-VIIRS_NPP-ACSPO_V2.40-v02.0-fv01.0.nc"], - ["20210916161708-STAR-L2P_GHRSST-SSTsubskin-VIIRS_N20-ACSPO_V2.80-v02.0-fv01.0.nc"], + ("20170401174600-STAR-L2P_GHRSST-SSTskin-VIIRS_NPP-ACSPO_V2.40-v02.0-fv01.0.nc"), + ("20210916161708-STAR-L2P_GHRSST-SSTsubskin-VIIRS_N20-ACSPO_V2.80-v02.0-fv01.0.nc"), ] ) def test_init(self, filename): diff --git a/satpy/tests/reader_tests/test_ahi_hrit.py b/satpy/tests/reader_tests/test_ahi_hrit.py index 5746661cac..1dbf36c66b 100644 --- a/satpy/tests/reader_tests/test_ahi_hrit.py +++ b/satpy/tests/reader_tests/test_ahi_hrit.py @@ -106,29 +106,29 @@ def test_init(self): "unit": "ALBEDO(%)"}) mda_expected["projection_parameters"]["SSP_longitude"] = 140.7 reader = self._get_reader(mda=mda) - self.assertEqual(reader.mda, mda_expected) + assert reader.mda == mda_expected # Check projection name - self.assertEqual(reader.projection_name, "GEOS(140.70)") + assert reader.projection_name == "GEOS(140.70)" # Check calibration table cal_expected = np.array([[0, -0.1], [1023, 100], [65535, 100]]) - self.assertTrue(np.all(reader.calibration_table == cal_expected)) + 
assert np.all(reader.calibration_table == cal_expected) # Check if scanline timestamps are there (dedicated test below) - self.assertIsInstance(reader.acq_time, np.ndarray) + assert isinstance(reader.acq_time, np.ndarray) # Check platform - self.assertEqual(reader.platform, HIMAWARI8) + assert reader.platform == HIMAWARI8 # Check is_segmented attribute expected = {0: False, 1: True, 8: True} for segno, is_segmented in expected.items(): mda = self._get_mda(segno=segno) reader = self._get_reader(mda=mda) - self.assertEqual(reader.is_segmented, is_segmented) + assert reader.is_segmented == is_segmented # Check area IDs expected = [ @@ -139,7 +139,7 @@ def test_init(self): mda = self._get_mda() for filename_info, area_id in expected: reader = self._get_reader(mda=mda, filename_info=filename_info) - self.assertEqual(reader.area_id, area_id) + assert reader.area_id == area_id @mock.patch("satpy.readers.hrit_jma.HRITJMAFileHandler.__init__") def test_get_platform(self, mocked_init): @@ -151,11 +151,11 @@ def test_get_platform(self, mocked_init): for proj_name, platform in PLATFORMS.items(): reader.projection_name = proj_name - self.assertEqual(reader._get_platform(), platform) + assert reader._get_platform() == platform with mock.patch("logging.Logger.error") as mocked_log: reader.projection_name = "invalid" - self.assertEqual(reader._get_platform(), UNKNOWN_PLATFORM) + assert reader._get_platform() == UNKNOWN_PLATFORM mocked_log.assert_called() def test_get_area_def(self): @@ -206,8 +206,8 @@ def test_get_area_def(self): reader = self._get_reader(mda=mda, filename_info={"area": case["area"]}) area = reader.get_area_def("some_id") - self.assertTupleEqual(area.area_extent, case["extent"]) - self.assertEqual(area.description, AREA_NAMES[case["area"]]["long"]) + assert area.area_extent == case["extent"] + assert area.description == AREA_NAMES[case["area"]]["long"] def test_calibrate(self): """Test calibration.""" @@ -238,7 +238,7 @@ def test_calibrate(self): # 1. Counts res = reader.calibrate(data=counts, calibration="counts") - self.assertTrue(np.all(counts.values == res.values)) + assert np.all(counts.values == res.values) # 2. 
Reflectance res = reader.calibrate(data=counts, calibration="reflectance") @@ -263,7 +263,7 @@ def test_mask_space(self): # First line of the segment should be space, in the middle of the # last line there should be some valid pixels np.testing.assert_allclose(masked.values[0, :], np.nan) - self.assertTrue(np.all(masked.values[-1, 588:788] == 1)) + assert np.all(masked.values[-1, 588:788] == 1) @mock.patch("satpy.readers.hrit_jma.HRITFileHandler.get_dataset") def test_get_dataset(self, base_get_dataset): @@ -281,15 +281,15 @@ def test_get_dataset(self, base_get_dataset): # Check attributes res = reader.get_dataset(key, {"units": "%", "sensor": "ahi"}) - self.assertEqual(res.attrs["units"], "%") - self.assertEqual(res.attrs["sensor"], "ahi") - self.assertEqual(res.attrs["platform_name"], HIMAWARI8) - self.assertDictEqual(res.attrs["orbital_parameters"], {"projection_longitude": 140.7, - "projection_latitude": 0., - "projection_altitude": 35785831.0}) + assert res.attrs["units"] == "%" + assert res.attrs["sensor"] == "ahi" + assert res.attrs["platform_name"] == HIMAWARI8 + assert res.attrs["orbital_parameters"] == {"projection_longitude": 140.7, + "projection_latitude": 0.0, + "projection_altitude": 35785831.0} # Check if acquisition time is a coordinate - self.assertIn("acq_time", res.coords) + assert "acq_time" in res.coords # Check called methods with mock.patch.object(reader, "_mask_space") as mask_space: @@ -305,10 +305,8 @@ def test_get_dataset(self, base_get_dataset): def test_mjd2datetime64(self): """Test conversion from modified julian day to datetime64.""" from satpy.readers.hrit_jma import mjd2datetime64 - self.assertEqual(mjd2datetime64(np.array([0])), - np.datetime64("1858-11-17", "us")) - self.assertEqual(mjd2datetime64(np.array([40587.5])), - np.datetime64("1970-01-01 12:00", "us")) + assert mjd2datetime64(np.array([0])) == np.datetime64("1858-11-17", "us") + assert mjd2datetime64(np.array([40587.5])) == np.datetime64("1970-01-01 12:00", "us") def test_get_acq_time(self): """Test computation of scanline acquisition times.""" diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 3d2375f5ec..6b4efca8c1 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -142,12 +142,12 @@ def test_region(self, fromfile, np2str): area_def = fh.get_area_def(None) proj_dict = area_def.proj_dict a, b = proj4_radius_parameters(proj_dict) - self.assertEqual(a, 6378137.0) - self.assertEqual(b, 6356752.3) - self.assertEqual(proj_dict["h"], 35785863.0) - self.assertEqual(proj_dict["lon_0"], 140.7) - self.assertEqual(proj_dict["proj"], "geos") - self.assertEqual(proj_dict["units"], "m") + assert a == 6378137.0 + assert b == 6356752.3 + assert proj_dict["h"] == 35785863.0 + assert proj_dict["lon_0"] == 140.7 + assert proj_dict["proj"] == "geos" + assert proj_dict["units"] == "m" np.testing.assert_allclose(area_def.area_extent, (592000.0038256242, 4132000.0267018233, 1592000.0102878273, 5132000.033164027)) @@ -190,17 +190,17 @@ def test_segment(self, fromfile, np2str): area_def = fh.get_area_def(None) proj_dict = area_def.proj_dict a, b = proj4_radius_parameters(proj_dict) - self.assertEqual(a, 6378137.0) - self.assertEqual(b, 6356752.3) - self.assertEqual(proj_dict["h"], 35785863.0) - self.assertEqual(proj_dict["lon_0"], 140.7) - self.assertEqual(proj_dict["proj"], "geos") - self.assertEqual(proj_dict["units"], "m") + assert a == 6378137.0 + assert b == 6356752.3 + assert proj_dict["h"] == 35785863.0 + 
assert proj_dict["lon_0"] == 140.7 + assert proj_dict["proj"] == "geos" + assert proj_dict["units"] == "m" np.testing.assert_allclose(area_def.area_extent, (-5500000.035542117, -3300000.021325271, 5500000.035542117, -2200000.0142168473)) -@pytest.fixture +@pytest.fixture() def hsd_file_jp01(tmp_path): """Create a jp01 hsd file.""" from satpy.readers.ahi_hsd import ( # _IRCAL_INFO_TYPE, @@ -279,7 +279,7 @@ class TestAHIHSDFileHandler: def test_bad_calibration(self): """Test that a bad calibration mode causes an exception.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Invalid calibration mode: BAD_MODE. Choose one of (.*)"): with _fake_hsd_handler(fh_kwargs={"calib_mode": "BAD_MODE"}): pass @@ -503,16 +503,14 @@ def test_default_calibrate(self, *mocks): """Test default in-file calibration modes.""" self.setUp() # Counts - self.assertEqual(self.fh.calibrate(data=123, - calibration="counts"), - 123) + assert self.fh.calibrate(data=123, calibration="counts") == 123 # Radiance rad_exp = np.array([[15.2, 11.5], [7.8, -3.3]]) rad = self.fh.calibrate(data=self.counts, calibration="radiance") - self.assertTrue(np.allclose(rad, rad_exp)) + assert np.allclose(rad, rad_exp) # Brightness Temperature bt_exp = np.array([[330.978979, 310.524688], @@ -526,7 +524,7 @@ def test_default_calibrate(self, *mocks): [1.50189, 0.]]) refl = self.fh.calibrate(data=self.counts, calibration="reflectance") - self.assertTrue(np.allclose(refl, refl_exp)) + assert np.allclose(refl, refl_exp) def test_updated_calibrate(self): """Test updated in-file calibration modes.""" @@ -535,7 +533,7 @@ def test_updated_calibrate(self): rad_exp = np.array([[30.4, 23.0], [15.6, -6.6]]) rad = self.fh.calibrate(data=self.counts, calibration="radiance") - self.assertTrue(np.allclose(rad, rad_exp)) + assert np.allclose(rad, rad_exp) # Case for no updated calibration available (older data) self.fh._header = { @@ -556,7 +554,7 @@ def test_updated_calibrate(self): rad = self.fh.calibrate(data=self.counts, calibration="radiance") rad_exp = np.array([[15.2, 11.5], [7.8, -3.3]]) - self.assertTrue(np.allclose(rad, rad_exp)) + assert np.allclose(rad, rad_exp) def test_user_calibration(self): """Test user-defined calibration modes.""" @@ -567,7 +565,7 @@ def test_user_calibration(self): rad = self.fh.calibrate(data=self.counts, calibration="radiance").compute() rad_exp = np.array([[16.10526316, 12.21052632], [8.31578947, -3.36842105]]) - self.assertTrue(np.allclose(rad, rad_exp)) + assert np.allclose(rad, rad_exp) # This is for DN calibration self.fh.user_calibration = {"B13": {"slope": -0.0032, @@ -577,7 +575,7 @@ def test_user_calibration(self): rad = self.fh.calibrate(data=self.counts, calibration="radiance").compute() rad_exp = np.array([[15.2, 12.], [8.8, -0.8]]) - self.assertTrue(np.allclose(rad, rad_exp)) + assert np.allclose(rad, rad_exp) @contextlib.contextmanager diff --git a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py index 9d1302ef41..73206e7ffd 100644 --- a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py +++ b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py @@ -57,20 +57,20 @@ def make_fh(filetype, area="fld"): def test_low_res(self): """Check size of the low resolution (2km) grid.""" tmp_fh = self.make_fh("tir.01") - self.assertEqual(self.FULLDISK_SIZES[0.02]["x_size"], tmp_fh.ncols) - self.assertEqual(self.FULLDISK_SIZES[0.02]["y_size"], tmp_fh.nlines) + assert self.FULLDISK_SIZES[0.02]["x_size"] == tmp_fh.ncols + assert 
self.FULLDISK_SIZES[0.02]["y_size"] == tmp_fh.nlines def test_med_res(self): """Check size of the low resolution (1km) grid.""" tmp_fh = self.make_fh("vis.02") - self.assertEqual(self.FULLDISK_SIZES[0.01]["x_size"], tmp_fh.ncols) - self.assertEqual(self.FULLDISK_SIZES[0.01]["y_size"], tmp_fh.nlines) + assert self.FULLDISK_SIZES[0.01]["x_size"] == tmp_fh.ncols + assert self.FULLDISK_SIZES[0.01]["y_size"] == tmp_fh.nlines def test_hi_res(self): """Check size of the low resolution (0.5km) grid.""" tmp_fh = self.make_fh("ext.01") - self.assertEqual(self.FULLDISK_SIZES[0.005]["x_size"], tmp_fh.ncols) - self.assertEqual(self.FULLDISK_SIZES[0.005]["y_size"], tmp_fh.nlines) + assert self.FULLDISK_SIZES[0.005]["x_size"] == tmp_fh.ncols + assert self.FULLDISK_SIZES[0.005]["y_size"] == tmp_fh.nlines def test_area_def(self): """Check that a valid full disk area is produced.""" @@ -84,7 +84,7 @@ def test_area_def(self): tmp_fh = self.make_fh("vis.01") tmp_fh.get_area_def(None) - self.assertEqual(tmp_fh.area, good_area) + assert tmp_fh.area == good_area def test_bad_area(self): """Ensure an error is raised for an usupported area.""" @@ -170,7 +170,7 @@ def setUp(self): filetype_info={"file_type": "tir.01"}) # Check that the filename is altered for bz2 format files - self.assertNotEqual(in_fname, fh.filename) + assert in_fname != fh.filename self.fh = fh key = {"calibration": "counts", @@ -206,8 +206,8 @@ def test_get_dataset(self, mocked_read): # Check output data is correct np.testing.assert_allclose(res.values, out_data) # Also check a couple of attributes - self.assertEqual(res.attrs["name"], self.key["name"]) - self.assertEqual(res.attrs["wavelength"], self.info["wavelength"]) + assert res.attrs["name"] == self.key["name"] + assert res.attrs["wavelength"] == self.info["wavelength"] @mock.patch("os.path.exists", return_value=True) @mock.patch("os.remove") @@ -269,9 +269,9 @@ def test_get_luts(self): tempdir = tempfile.gettempdir() print(self.fh.lut_dir) self.fh._get_luts() - self.assertFalse(os.path.exists(os.path.join(tempdir, "count2tbb_v102/"))) + assert not os.path.exists(os.path.join(tempdir, "count2tbb_v102/")) for lut_name in AHI_LUT_NAMES: - self.assertTrue(os.path.isfile(os.path.join(self.fh.lut_dir, lut_name))) + assert os.path.isfile(os.path.join(self.fh.lut_dir, lut_name)) @mock.patch("urllib.request.urlopen") @mock.patch("shutil.copyfileobj") diff --git a/satpy/tests/reader_tests/test_ami_l1b.py b/satpy/tests/reader_tests/test_ami_l1b.py index 58a3612b49..cdbc4468c9 100644 --- a/satpy/tests/reader_tests/test_ami_l1b.py +++ b/satpy/tests/reader_tests/test_ami_l1b.py @@ -23,6 +23,7 @@ import dask.array as da import numpy as np import xarray as xr +from pytest import approx # noqa: PT013 class FakeDataset(object): @@ -144,7 +145,7 @@ def _check_orbital_parameters(self, orb_params): "satellite_actual_longitude": 128.2707, } for key, val in exp_params.items(): - self.assertAlmostEqual(val, orb_params[key], places=3) + assert val == approx(orb_params[key], abs=1e-3) def test_filename_grouping(self): """Test that filenames are grouped properly.""" @@ -167,16 +168,14 @@ def test_filename_grouping(self): "gk2a_ami_le1b_wv069_fd020ge_201909300300.nc", "gk2a_ami_le1b_wv073_fd020ge_201909300300.nc"] groups = group_files(filenames, reader="ami_l1b") - self.assertEqual(len(groups), 1) - self.assertEqual(len(groups[0]["ami_l1b"]), 16) + assert len(groups) == 1 + assert len(groups[0]["ami_l1b"]) == 16 def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import 
datetime - self.assertEqual(self.reader.start_time, - datetime(2019, 9, 30, 3, 0, 31, 957882)) - self.assertEqual(self.reader.end_time, - datetime(2019, 9, 30, 3, 9, 35, 606133)) + assert self.reader.start_time == datetime(2019, 9, 30, 3, 0, 31, 957882) + assert self.reader.end_time == datetime(2019, 9, 30, 3, 9, 35, 606133) def test_get_dataset(self): """Test gettting radiance data.""" @@ -193,7 +192,7 @@ def test_get_dataset(self): "sensor": "ami", "units": "W m-2 um-1 sr-1"} for key, val in exp.items(): - self.assertEqual(val, res.attrs[key]) + assert val == res.attrs[key] self._check_orbital_parameters(res.attrs["orbital_parameters"]) def test_bad_calibration(self): @@ -212,15 +211,15 @@ def test_get_area_def(self, adef): """Test the area generation.""" self.reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] exp = {"a": 6378137.0, "b": 6356752.3, "h": 35785863.0, "lon_0": 128.2, "proj": "geos", "units": "m"} for key, val in exp.items(): - self.assertIn(key, call_args[3]) - self.assertAlmostEqual(val, call_args[3][key]) - self.assertEqual(call_args[4], self.reader.nc.attrs["number_of_columns"]) - self.assertEqual(call_args[5], self.reader.nc.attrs["number_of_lines"]) + assert key in call_args[3] + assert val == approx(call_args[3][key]) + assert call_args[4] == self.reader.nc.attrs["number_of_columns"] + assert call_args[5] == self.reader.nc.attrs["number_of_lines"] np.testing.assert_allclose(call_args[6], [-5511022.902, -5511022.902, 5511022.902, 5511022.902]) @@ -239,7 +238,7 @@ def test_get_dataset_vis(self): "sensor": "ami", "units": "%"} for key, val in exp.items(): - self.assertEqual(val, res.attrs[key]) + assert val == res.attrs[key] self._check_orbital_parameters(res.attrs["orbital_parameters"]) def test_get_dataset_counts(self): @@ -257,7 +256,7 @@ def test_get_dataset_counts(self): "sensor": "ami", "units": "1"} for key, val in exp.items(): - self.assertEqual(val, res.attrs[key]) + assert val == res.attrs[key] self._check_orbital_parameters(res.attrs["orbital_parameters"]) @@ -310,7 +309,7 @@ def test_default_calibrate(self): [238.1965875, 238.16707956, 238.13755317, 238.10800829, 238.07844489]]) np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs["standard_name"], "toa_brightness_temperature") + assert res.attrs["standard_name"] == "toa_brightness_temperature" def test_infile_calibrate(self): """Test IR calibration using in-file coefficients.""" @@ -324,7 +323,7 @@ def test_infile_calibrate(self): # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.04) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs["standard_name"], "toa_brightness_temperature") + assert res.attrs["standard_name"] == "toa_brightness_temperature" def test_gsics_radiance_corr(self): """Test IR radiance adjustment using in-file GSICS coefs.""" @@ -338,7 +337,7 @@ def test_gsics_radiance_corr(self): # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.01) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs["standard_name"], "toa_brightness_temperature") + assert res.attrs["standard_name"] == "toa_brightness_temperature" def test_user_radiance_corr(self): """Test IR 
radiance adjustment using user-supplied coefs.""" @@ -354,4 +353,4 @@ def test_user_radiance_corr(self): # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.01) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs["standard_name"], "toa_brightness_temperature") + assert res.attrs["standard_name"] == "toa_brightness_temperature" diff --git a/satpy/tests/reader_tests/test_amsr2_l1b.py b/satpy/tests/reader_tests/test_amsr2_l1b.py index b8e51b845b..b627a53a0b 100644 --- a/satpy/tests/reader_tests/test_amsr2_l1b.py +++ b/satpy/tests/reader_tests/test_amsr2_l1b.py @@ -124,10 +124,10 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_basic(self): """Test loading of basic channels.""" @@ -136,7 +136,7 @@ def test_load_basic(self): loadables = r.select_files_from_pathnames([ "GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) ds = r.load([ "btemp_10.7v", @@ -152,16 +152,14 @@ def test_load_basic(self): "btemp_36.5v", "btemp_36.5h", ]) - self.assertEqual(len(ds), 12) + assert len(ds) == 12 for d in ds.values(): - self.assertEqual(d.attrs["calibration"], "brightness_temperature") - self.assertTupleEqual(d.shape, (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1] // 2))) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) - self.assertTupleEqual(d.attrs["area"].lons.shape, - (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2)) - self.assertTupleEqual(d.attrs["area"].lats.shape, - (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2)) + assert d.attrs["calibration"] == "brightness_temperature" + assert d.shape == (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1] // 2)) + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert d.attrs["area"].lons.shape == (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2) + assert d.attrs["area"].lats.shape == (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2) assert d.attrs["sensor"] == "amsr2" assert d.attrs["platform_name"] == "GCOM-W1" @@ -172,7 +170,7 @@ def test_load_89ghz(self): loadables = r.select_files_from_pathnames([ "GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) ds = r.load([ "btemp_89.0av", @@ -180,13 +178,11 @@ def test_load_89ghz(self): "btemp_89.0bv", "btemp_89.0bh", ]) - self.assertEqual(len(ds), 4) + assert len(ds) == 4 for d in ds.values(): - self.assertEqual(d.attrs["calibration"], "brightness_temperature") - self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) - self.assertTupleEqual(d.attrs["area"].lons.shape, - DEFAULT_FILE_SHAPE) - self.assertTupleEqual(d.attrs["area"].lats.shape, - DEFAULT_FILE_SHAPE) + assert d.attrs["calibration"] == "brightness_temperature" + assert d.shape == DEFAULT_FILE_SHAPE + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert d.attrs["area"].lons.shape == DEFAULT_FILE_SHAPE + assert d.attrs["area"].lats.shape == DEFAULT_FILE_SHAPE diff --git a/satpy/tests/reader_tests/test_amsr2_l2.py 
b/satpy/tests/reader_tests/test_amsr2_l2.py index 106f558919..7199a619bc 100644 --- a/satpy/tests/reader_tests/test_amsr2_l2.py +++ b/satpy/tests/reader_tests/test_amsr2_l2.py @@ -96,10 +96,10 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_basic(self): """Test loading of basic channels.""" @@ -108,15 +108,13 @@ def test_load_basic(self): loadables = r.select_files_from_pathnames([ "GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) ds = r.load(["ssw"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): - self.assertTupleEqual(d.shape, (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1]))) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) - self.assertTupleEqual(d.attrs["area"].lons.shape, - (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1])) - self.assertTupleEqual(d.attrs["area"].lats.shape, - (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1])) + assert d.shape == (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1])) + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert d.attrs["area"].lons.shape == (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) + assert d.attrs["area"].lats.shape == (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) diff --git a/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py b/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py index 17ac9f62de..07ed218e72 100644 --- a/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py +++ b/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py @@ -151,9 +151,9 @@ def test_scene(self): from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname]) - self.assertTrue("scatterometer" in scn.sensor_names) - self.assertTrue(datetime(2020, 12, 21, 9, 33, 0) == scn.start_time) - self.assertTrue(datetime(2020, 12, 21, 9, 33, 59) == scn.end_time) + assert "scatterometer" in scn.sensor_names + assert datetime(2020, 12, 21, 9, 33, 0) == scn.start_time + assert datetime(2020, 12, 21, 9, 33, 59) == scn.end_time @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_load_available_datasets(self): @@ -161,10 +161,10 @@ def test_scene_load_available_datasets(self): from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname]) - self.assertTrue("surface_soil_moisture" in scn.available_dataset_names()) + assert "surface_soil_moisture" in scn.available_dataset_names() scn.load(scn.available_dataset_names()) loaded = [dataset.name for dataset in scn] - self.assertTrue(sorted(loaded) == sorted(scn.available_dataset_names())) + assert sorted(loaded) == sorted(scn.available_dataset_names()) @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_dataset_values(self): @@ -184,4 +184,4 @@ def test_scene_dataset_values(self): # this makes each assertion below a separate test from unittest's point of view # (note: if all subtests pass, they will count as one test) with self.subTest(msg="Test failed for dataset: "+name): - self.assertTrue(np.allclose(original_values, 
loaded_values_nan_filled)) + assert np.allclose(original_values, loaded_values_nan_filled) diff --git a/satpy/tests/reader_tests/test_atms_l1b_nc.py b/satpy/tests/reader_tests/test_atms_l1b_nc.py index eca5454307..6b27081ed9 100644 --- a/satpy/tests/reader_tests/test_atms_l1b_nc.py +++ b/satpy/tests/reader_tests/test_atms_l1b_nc.py @@ -27,7 +27,7 @@ # - tmp_path -@pytest.fixture +@pytest.fixture() def reader(l1b_file): """Return reader of ATMS level1b data.""" return AtmsL1bNCFileHandler( @@ -37,15 +37,15 @@ def reader(l1b_file): ) -@pytest.fixture +@pytest.fixture() def l1b_file(tmp_path, atms_fake_dataset): """Return file path to level1b file.""" l1b_file_path = tmp_path / "test_file_atms_l1b.nc" atms_fake_dataset.to_netcdf(l1b_file_path) - yield l1b_file_path + return l1b_file_path -@pytest.fixture +@pytest.fixture() def atms_fake_dataset(): """Return fake ATMS dataset.""" atrack = 2 @@ -99,20 +99,20 @@ def test_antenna_temperature(self, reader, atms_fake_dataset): atms_fake_dataset.antenna_temp.values, ) - @pytest.mark.parametrize("param,expect", ( + @pytest.mark.parametrize(("param", "expect"), [ ("start_time", datetime(2000, 1, 2, 3, 4, 5)), ("end_time", datetime(2000, 1, 2, 4, 5, 6)), ("platform_name", "JPSS-1"), ("sensor", "ATMS"), - )) + ]) def test_attrs(self, reader, param, expect): """Test attributes.""" assert reader.attrs[param] == expect - @pytest.mark.parametrize("dims", ( + @pytest.mark.parametrize("dims", [ ("xtrack", "atrack"), ("x", "y"), - )) + ]) def test_standardize_dims(self, reader, dims): """Test standardize dims.""" data = xr.DataArray( @@ -134,7 +134,7 @@ def test_drop_coords(self, reader): data = reader._drop_coords(data) assert coords not in data.coords - @pytest.mark.parametrize("param,expect", ( + @pytest.mark.parametrize(("param", "expect"), [ ("start_time", datetime(2000, 1, 2, 3, 4, 5)), ("end_time", datetime(2000, 1, 2, 4, 5, 6)), ("platform_name", "JPSS-1"), @@ -142,7 +142,7 @@ def test_drop_coords(self, reader): ("creation_time", datetime(2020, 1, 2, 3, 4, 5)), ("type", "test_data"), ("name", "test"), - )) + ]) def test_merge_attributes(self, reader, param, expect): """Test merge attributes.""" data = xr.DataArray( @@ -154,10 +154,10 @@ def test_merge_attributes(self, reader, param, expect): data = reader._merge_attributes(data, dataset_info) assert data.attrs[param] == expect - @pytest.mark.parametrize("param,expect", ( + @pytest.mark.parametrize(("param", "expect"), [ ("1", 100.), ("sat_azi", 3.), - )) + ]) def test_select_dataset(self, reader, param, expect): """Test select dataset.""" np.testing.assert_array_equal( diff --git a/satpy/tests/reader_tests/test_atms_sdr_hdf5.py b/satpy/tests/reader_tests/test_atms_sdr_hdf5.py index aa11e66d09..8971c2d933 100644 --- a/satpy/tests/reader_tests/test_atms_sdr_hdf5.py +++ b/satpy/tests/reader_tests/test_atms_sdr_hdf5.py @@ -299,7 +299,7 @@ def test_init_start_end_time(self): # make sure we have some files assert r.file_handlers - @pytest.mark.parametrize("files, expected", + @pytest.mark.parametrize(("files", "expected"), [(["SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", "GATMO_j01_d20221220_t0910240_e0921356_b26361_c20221220100456680030_cspp_dev.h5"], True), diff --git a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py index 076f89b0f2..2272a950bf 100644 --- a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py +++ b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py @@ -158,10 +158,8 @@ def test_init(self): [GACPODReader, 
GACKLMReader, LACPODReader, LACKLMReader]): for filename in filenames: fh = self._get_fh(filename, **kwargs) - self.assertLess(fh.start_time, fh.end_time, - "Start time must precede end time.") - self.assertIs(fh.reader_class, reader_cls, - "Wrong reader class assigned to {}".format(filename)) + assert fh.start_time < fh.end_time + assert fh.reader_class is reader_cls def test_read_raw_data(self): """Test raw data reading.""" @@ -261,7 +259,7 @@ def test_get_dataset_latlon(self, *mocks): key = make_dataid(name=name) info = {"name": name, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) - self.assertTupleEqual(res.dims, ("y", "x_every_eighth")) + assert res.dims == ("y", "x_every_eighth") @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_angle") @@ -298,7 +296,7 @@ def test_get_dataset_angles(self, get_angle, *mocks): key = make_dataid(name=angle) info = {"name": angle, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) - self.assertTupleEqual(res.dims, ("y", "x_every_eighth")) + assert res.dims == ("y", "x_every_eighth") @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") def test_get_dataset_qual_flags(self, *mocks): @@ -348,21 +346,19 @@ def test_get_channel(self): key = make_dataid(name="1", calibration="counts") # Counts res = fh._get_channel(key=key) - np.testing.assert_array_equal(res, [[1, 2, 3], - [4, 5, 6]]) + np.testing.assert_array_equal(res, [[1, 2, 3], [4, 5, 6]]) np.testing.assert_array_equal(fh.counts, counts) # Reflectance and Brightness Temperature for calib in ["reflectance", "brightness_temperature"]: key = make_dataid(name="1", calibration=calib) res = fh._get_channel(key=key) - np.testing.assert_array_equal(res, [[2, 4, 6], - [8, 10, 12]]) + np.testing.assert_array_equal(res, [[2, 4, 6], [8, 10, 12]]) np.testing.assert_array_equal(fh.calib_channels, calib_channels) # Invalid - with pytest.raises(ValueError): - key = make_dataid(name="7", calibration="coffee") + with pytest.raises(ValueError, match="coffee invalid value for "): + _ = make_dataid(name="7", calibration="coffee") # Buffering reader.get_counts.reset_mock() @@ -387,12 +383,10 @@ def test_get_angle(self): # Test angle readout key = make_dataid(name="sensor_zenith_angle") res = fh._get_angle(key) - self.assertEqual(res, 2) - self.assertDictEqual(fh.angles, {"sensor_zenith_angle": 2, - "sensor_azimuth_angle": 1, - "solar_zenith_angle": 4, - "solar_azimuth_angle": 3, - "sun_sensor_azimuth_difference_angle": 5}) + assert res == 2 + assert fh.angles == {"sensor_zenith_angle": 2, "sensor_azimuth_angle": 1, + "solar_zenith_angle": 4, "solar_azimuth_angle": 3, + "sun_sensor_azimuth_difference_angle": 5} # Test buffering key = make_dataid(name="sensor_azimuth_angle") @@ -410,14 +404,14 @@ def test_strip_invalid_lat(self): # Test stripping pygac.utils.strip_invalid_lat.return_value = 1, 2 start, end = fh._strip_invalid_lat() - self.assertTupleEqual((start, end), (1, 2)) + assert (start, end) == (1, 2) # Test buffering fh._strip_invalid_lat() pygac.utils.strip_invalid_lat.assert_called_once() @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._slice") - def test_slice(self, _slice): + def test_slice(self, _slice): # noqa: PT019 """Test slicing.""" def _slice_patched(data): @@ -431,8 +425,8 @@ def _slice_patched(data): data_slc, times_slc = fh.slice(data, times) np.testing.assert_array_equal(data_slc, data[1:3]) np.testing.assert_array_equal(times_slc, times[1:3]) 
- self.assertEqual(fh.start_time, datetime(1970, 1, 1, 0, 0, 0, 2)) - self.assertEqual(fh.end_time, datetime(1970, 1, 1, 0, 0, 0, 3)) + assert fh.start_time == datetime(1970, 1, 1, 0, 0, 0, 2) + assert fh.end_time == datetime(1970, 1, 1, 0, 0, 0, 3) @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_qual_flags") @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._strip_invalid_lat") @@ -449,7 +443,7 @@ def test__slice(self, strip_invalid_lat, get_qual_flags): # a) Only start/end line given fh = _get_fh_mocked(start_line=5, end_line=6, strip_invalid_coords=False) data_slc = fh._slice(data) - self.assertEqual(data_slc, "sliced") + assert data_slc == "sliced" pygac.utils.check_user_scanlines.assert_called_with( start_line=5, end_line=6, first_valid_lat=None, last_valid_lat=None, along_track=2) @@ -474,7 +468,7 @@ def test__slice(self, strip_invalid_lat, get_qual_flags): # Test slicing with older pygac versions pygac.utils.slice_channel.return_value = ("sliced", "foo", "bar") data_slc = fh._slice(data) - self.assertEqual(data_slc, "sliced") + assert data_slc == "sliced" class TestGetDataset(GACLACFilePatcher): diff --git a/satpy/tests/reader_tests/test_clavrx.py b/satpy/tests/reader_tests/test_clavrx.py index 66758d44dc..bc5e968b08 100644 --- a/satpy/tests/reader_tests/test_clavrx.py +++ b/satpy/tests/reader_tests/test_clavrx.py @@ -133,10 +133,10 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_available_datasets(self): """Test available_datasets with fake variables from YAML.""" @@ -145,10 +145,10 @@ def test_available_datasets(self): loadables = r.select_files_from_pathnames([ "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers # mimic the YAML file being configured for more datasets fake_dataset_info = [ @@ -162,47 +162,47 @@ def test_available_datasets(self): ] new_ds_infos = list(r.file_handlers["clavrx_hdf4"][0].available_datasets( fake_dataset_info)) - self.assertEqual(len(new_ds_infos), 9) + assert len(new_ds_infos) == 9 # we have this and can provide the resolution - self.assertTrue(new_ds_infos[0][0]) - self.assertEqual(new_ds_infos[0][1]["resolution"], 742) # hardcoded + assert new_ds_infos[0][0] + assert new_ds_infos[0][1]["resolution"] == 742 # hardcoded # we have this, but previous file handler said it knew about it # and it is producing the same resolution as what we have - self.assertTrue(new_ds_infos[1][0]) - self.assertEqual(new_ds_infos[1][1]["resolution"], 742) + assert new_ds_infos[1][0] + assert new_ds_infos[1][1]["resolution"] == 742 # we have this, but don't want to change the resolution # because a previous handler said it has it - self.assertTrue(new_ds_infos[2][0]) - self.assertEqual(new_ds_infos[2][1]["resolution"], 1) + assert new_ds_infos[2][0] + assert new_ds_infos[2][1]["resolution"] == 1 # even though the previous one was known we can still # produce it at our new resolution - self.assertTrue(new_ds_infos[3][0]) - self.assertEqual(new_ds_infos[3][1]["resolution"], 742) + assert new_ds_infos[3][0] + assert new_ds_infos[3][1]["resolution"] == 742 # we have this and can 
update the resolution since # no one else has claimed it - self.assertTrue(new_ds_infos[4][0]) - self.assertEqual(new_ds_infos[4][1]["resolution"], 742) + assert new_ds_infos[4][0] + assert new_ds_infos[4][1]["resolution"] == 742 # we don't have this variable, don't change it - self.assertFalse(new_ds_infos[5][0]) - self.assertIsNone(new_ds_infos[5][1].get("resolution")) + assert not new_ds_infos[5][0] + assert new_ds_infos[5][1].get("resolution") is None # we have this, but it isn't supposed to come from our file type - self.assertIsNone(new_ds_infos[6][0]) - self.assertIsNone(new_ds_infos[6][1].get("resolution")) + assert new_ds_infos[6][0] is None + assert new_ds_infos[6][1].get("resolution") is None # we could have loaded this but some other file handler said it has this - self.assertTrue(new_ds_infos[7][0]) - self.assertIsNone(new_ds_infos[7][1].get("resolution")) + assert new_ds_infos[7][0] + assert new_ds_infos[7][1].get("resolution") is None # we can add resolution to the previous dataset, so we do - self.assertTrue(new_ds_infos[8][0]) - self.assertEqual(new_ds_infos[8][1]["resolution"], 742) + assert new_ds_infos[8][0] + assert new_ds_infos[8][1]["resolution"] == 742 def test_load_all(self): """Test loading all test datasets.""" @@ -218,15 +218,15 @@ def test_load_all(self): var_list = ["variable1", "variable2", "variable3"] datasets = r.load(var_list) - self.assertEqual(len(datasets), len(var_list)) + assert len(datasets) == len(var_list) for v in datasets.values(): - self.assertEqual(v.attrs["units"], "1") - self.assertEqual(v.attrs["platform_name"], "npp") - self.assertEqual(v.attrs["sensor"], "viirs") - self.assertIsInstance(v.attrs["area"], SwathDefinition) - self.assertEqual(v.attrs["area"].lons.attrs["rows_per_scan"], 16) - self.assertEqual(v.attrs["area"].lats.attrs["rows_per_scan"], 16) - self.assertIsInstance(datasets["variable3"].attrs.get("flag_meanings"), list) + assert v.attrs["units"] == "1" + assert v.attrs["platform_name"] == "npp" + assert v.attrs["sensor"] == "viirs" + assert isinstance(v.attrs["area"], SwathDefinition) + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 16 + assert v.attrs["area"].lats.attrs["rows_per_scan"] == 16 + assert isinstance(datasets["variable3"].attrs.get("flag_meanings"), list) class FakeHDF4FileHandlerGeo(FakeHDF4FileHandler): @@ -331,10 +331,10 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "clavrx_H08_20180806_1800.level2.hdf", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_no_nav_donor(self): """Test exception raised when no donor file is available.""" @@ -376,22 +376,22 @@ def test_load_all_old_donor(self): ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] datasets = r.load(["variable1", "variable2", "variable3"]) - self.assertEqual(len(datasets), 3) + assert len(datasets) == 3 for v in datasets.values(): - self.assertNotIn("calibration", v.attrs) - self.assertEqual(v.attrs["units"], "1") - self.assertIsInstance(v.attrs["area"], AreaDefinition) + assert "calibration" not in v.attrs + assert v.attrs["units"] == "1" + assert isinstance(v.attrs["area"], AreaDefinition) if v.attrs.get("flag_values"): - self.assertIn("_FillValue", v.attrs) + assert "_FillValue" in v.attrs else: - self.assertNotIn("_FillValue", v.attrs) + assert "_FillValue" not in v.attrs if v.attrs["name"] == "variable1": - 
self.assertIsInstance(v.attrs["valid_range"], list) + assert isinstance(v.attrs["valid_range"], list) else: - self.assertNotIn("valid_range", v.attrs) + assert "valid_range" not in v.attrs if "flag_values" in v.attrs: - self.assertTrue(np.issubdtype(v.dtype, np.integer)) - self.assertIsNotNone(v.attrs.get("flag_meanings")) + assert np.issubdtype(v.dtype, np.integer) + assert v.attrs.get("flag_meanings") is not None def test_load_all_new_donor(self): """Test loading all test datasets with new donor.""" @@ -420,12 +420,12 @@ def test_load_all_new_donor(self): ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] datasets = r.load(["variable1", "variable2", "variable3"]) - self.assertEqual(len(datasets), 3) + assert len(datasets) == 3 for v in datasets.values(): - self.assertNotIn("calibration", v.attrs) - self.assertEqual(v.attrs["units"], "1") - self.assertIsInstance(v.attrs["area"], AreaDefinition) - self.assertTrue(v.attrs["area"].is_geostationary) - self.assertEqual(v.attrs["platform_name"], "himawari8") - self.assertEqual(v.attrs["sensor"], "ahi") - self.assertIsNotNone(datasets["variable3"].attrs.get("flag_meanings")) + assert "calibration" not in v.attrs + assert v.attrs["units"] == "1" + assert isinstance(v.attrs["area"], AreaDefinition) + assert v.attrs["area"].is_geostationary + assert v.attrs["platform_name"] == "himawari8" + assert v.attrs["sensor"] == "ahi" + assert datasets["variable3"].attrs.get("flag_meanings") is not None diff --git a/satpy/tests/reader_tests/test_cmsaf_claas.py b/satpy/tests/reader_tests/test_cmsaf_claas.py index db2117d264..4615662b32 100644 --- a/satpy/tests/reader_tests/test_cmsaf_claas.py +++ b/satpy/tests/reader_tests/test_cmsaf_claas.py @@ -41,7 +41,7 @@ def start_time(request): return request.param -@pytest.fixture +@pytest.fixture() def start_time_str(start_time): """Get string representation of the start time.""" return start_time.strftime("%Y-%m-%dT%H:%M:%SZ") @@ -81,7 +81,7 @@ def fake_dataset(start_time_str): ) -@pytest.fixture +@pytest.fixture() def encoding(): """Dataset encoding.""" return { @@ -89,15 +89,15 @@ def encoding(): } -@pytest.fixture +@pytest.fixture() def fake_file(fake_dataset, encoding, tmp_path): """Write a fake dataset to file.""" filename = tmp_path / "CPPin20140101001500305SVMSG01MD.nc" fake_dataset.to_netcdf(filename, encoding=encoding) - yield filename + return filename -@pytest.fixture +@pytest.fixture() def fake_files(fake_dataset, encoding, tmp_path): """Write the same fake dataset into two different files.""" filenames = [ @@ -106,10 +106,10 @@ def fake_files(fake_dataset, encoding, tmp_path): ] for filename in filenames: fake_dataset.to_netcdf(filename, encoding=encoding) - yield filenames + return filenames -@pytest.fixture +@pytest.fixture() def reader(): """Return reader for CMSAF CLAAS-2.""" from satpy._config import config_search_paths @@ -137,14 +137,14 @@ def test_file_pattern(reader): class TestCLAAS2MultiFile: """Test reading multiple CLAAS-2 files.""" - @pytest.fixture + @pytest.fixture() def multi_file_reader(self, reader, fake_files): """Create a multi-file reader.""" loadables = reader.select_files_from_pathnames(fake_files) reader.create_filehandlers(loadables) return reader - @pytest.fixture + @pytest.fixture() def multi_file_dataset(self, multi_file_reader): """Load datasets from multiple files.""" ds_ids = [make_dataid(name=name) for name in ["cph", "ctt"]] @@ -157,7 +157,7 @@ def test_combine_timestamps(self, multi_file_reader, start_time): assert multi_file_reader.end_time == 
datetime.datetime(2085, 8, 13, 13, 15) @pytest.mark.parametrize( - "ds_name,expected", + ("ds_name", "expected"), [ ("cph", [[0, 1], [2, 0], [0, 1], [2, 0]]), ("ctt", [[280, 290], [300, 310], [280, 290], [300, 310]]), @@ -177,20 +177,20 @@ def test_number_of_datasets(self, multi_file_dataset): class TestCLAAS2SingleFile: """Test reading a single CLAAS2 file.""" - @pytest.fixture + @pytest.fixture() def file_handler(self, fake_file): """Return a CLAAS-2 file handler.""" from satpy.readers.cmsaf_claas2 import CLAAS2 return CLAAS2(fake_file, {}, {}) - @pytest.fixture + @pytest.fixture() def area_extent_exp(self, start_time): """Get expected area extent.""" if start_time < datetime.datetime(2017, 12, 6): return (-5454733.160460291, -5454733.160460292, 5454733.160460292, 5454733.160460291) return (-5456233.362099582, -5453232.958821001, 5453232.958821001, 5456233.362099582) - @pytest.fixture + @pytest.fixture() def area_exp(self, area_extent_exp): """Get expected area definition.""" proj_dict = { @@ -217,7 +217,7 @@ def test_get_area_def(self, file_handler, area_exp): assert area == area_exp @pytest.mark.parametrize( - "ds_name,expected", + ("ds_name", "expected"), [ ("ctt", xr.DataArray([[280, 290], [300, 310]], dims=("y", "x"))), ("cph", xr.DataArray([[0, 1], [2, 0]], dims=("y", "x"))), diff --git a/satpy/tests/reader_tests/test_electrol_hrit.py b/satpy/tests/reader_tests/test_electrol_hrit.py index c555f377b1..b3e14c24d1 100644 --- a/satpy/tests/reader_tests/test_electrol_hrit.py +++ b/satpy/tests/reader_tests/test_electrol_hrit.py @@ -57,7 +57,7 @@ def test_fun(self): expected = {"test_sec": {"test_str": np.array([b"Testing"], dtype="= global_attrs.keys() -@pytest.mark.parametrize("calibration,expected_values", +@pytest.mark.parametrize(("calibration", "expected_values"), [("counts", values_1km), ("radiance", mask_array(values_1km * 2)), ("reflectance", mask_array(values_1km * 3))]) diff --git a/satpy/tests/reader_tests/test_li_l2_nc.py b/satpy/tests/reader_tests/test_li_l2_nc.py index 62eff6d18f..5e9d0ff563 100644 --- a/satpy/tests/reader_tests/test_li_l2_nc.py +++ b/satpy/tests/reader_tests/test_li_l2_nc.py @@ -45,7 +45,7 @@ def std_filetype_infos(): # get the li_l2 filetype: ftypes = cfg["file_types"] - yield ftypes + return ftypes # Note: the helper class below has some missing abstract class implementation, diff --git a/satpy/tests/reader_tests/test_meris_nc.py b/satpy/tests/reader_tests/test_meris_nc.py index 0ab28b1fef..b4a2cda809 100644 --- a/satpy/tests/reader_tests/test_meris_nc.py +++ b/satpy/tests/reader_tests/test_meris_nc.py @@ -79,7 +79,7 @@ def test_get_dataset(self, mocked_dataset): filename_info = {"mission_id": "ENV", "dataset_name": "mask", "start_time": 0, "end_time": 0} test = NCMERIS2("somedir/somefile.nc", filename_info, "c") res = test.get_dataset(ds_id, {"nc_key": "mask"}) - self.assertEqual(res.dtype, np.dtype("bool")) + assert res.dtype == np.dtype("bool") @mock.patch("xarray.open_dataset") def test_meris_angles(self, mocked_dataset): @@ -180,4 +180,4 @@ def test_bitflags(self): expected = np.array([True, True, True, False, False, True, True, True, False, True, True, True, True, True, True, True, True, True]) - self.assertTrue(all(mask == expected)) + assert all(mask == expected) diff --git a/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py b/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py index 77344e6856..4083f7de00 100644 --- a/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py +++ b/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py @@ -132,10 +132,10 @@ 
def test_init(self): loadables = r.select_files_from_pathnames([ "comp20190619.130000.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_mimic_float(self): """Load TPW mimic float data.""" @@ -147,13 +147,13 @@ def test_load_mimic_float(self): ]) r.create_filehandlers(loadables) ds = r.load(float_variables) - self.assertEqual(len(ds), len(float_variables)) + assert len(ds) == len(float_variables) for d in ds.values(): - self.assertEqual(d.attrs["platform_shortname"], "aggregated microwave") - self.assertEqual(d.attrs["sensor"], "mimic") - self.assertEqual(d.attrs["units"], "mm") - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) + assert d.attrs["platform_shortname"] == "aggregated microwave" + assert d.attrs["sensor"] == "mimic" + assert d.attrs["units"] == "mm" + assert "area" in d.attrs + assert d.attrs["area"] is not None def test_load_mimic_timedelta(self): """Load TPW mimic timedelta data (data latency variables).""" @@ -165,14 +165,14 @@ def test_load_mimic_timedelta(self): ]) r.create_filehandlers(loadables) ds = r.load(date_variables) - self.assertEqual(len(ds), len(date_variables)) + assert len(ds) == len(date_variables) for d in ds.values(): - self.assertEqual(d.attrs["platform_shortname"], "aggregated microwave") - self.assertEqual(d.attrs["sensor"], "mimic") - self.assertEqual(d.attrs["units"], "minutes") - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) - self.assertEqual(d.dtype, DEFAULT_FILE_DTYPE) + assert d.attrs["platform_shortname"] == "aggregated microwave" + assert d.attrs["sensor"] == "mimic" + assert d.attrs["units"] == "minutes" + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert d.dtype == DEFAULT_FILE_DTYPE def test_load_mimic_ubyte(self): """Load TPW mimic sensor grids.""" @@ -184,11 +184,11 @@ def test_load_mimic_ubyte(self): ]) r.create_filehandlers(loadables) ds = r.load(ubyte_variables) - self.assertEqual(len(ds), len(ubyte_variables)) + assert len(ds) == len(ubyte_variables) for d in ds.values(): - self.assertEqual(d.attrs["platform_shortname"], "aggregated microwave") - self.assertEqual(d.attrs["sensor"], "mimic") - self.assertIn("source_key", d.attrs) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) - self.assertEqual(d.dtype, np.uint8) + assert d.attrs["platform_shortname"] == "aggregated microwave" + assert d.attrs["sensor"] == "mimic" + assert "source_key" in d.attrs + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert d.dtype == np.uint8 diff --git a/satpy/tests/reader_tests/test_mimic_TPW2_nc.py b/satpy/tests/reader_tests/test_mimic_TPW2_nc.py index 9c6c24b5a7..63214b0477 100644 --- a/satpy/tests/reader_tests/test_mimic_TPW2_nc.py +++ b/satpy/tests/reader_tests/test_mimic_TPW2_nc.py @@ -110,10 +110,10 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "comp20190619.130000.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_mimic(self): """Load Mimic data.""" @@ -125,10 +125,10 @@ def test_load_mimic(self): ]) r.create_filehandlers(loadables) ds = r.load(["tpwGrid"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): - self.assertEqual(d.attrs["platform_shortname"], "aggregated 
microwave") - self.assertEqual(d.attrs["sensor"], "mimic") - self.assertIn("area", d.attrs) - self.assertIn("units", d.attrs) - self.assertIsNotNone(d.attrs["area"]) + assert d.attrs["platform_shortname"] == "aggregated microwave" + assert d.attrs["sensor"] == "mimic" + assert "area" in d.attrs + assert "units" in d.attrs + assert d.attrs["area"] is not None diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 5f8490151f..bcee32ddbb 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -976,7 +976,7 @@ def setup_method(self): self.tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_tile_xml), self.filename_info, mock.MagicMock()) - @pytest.mark.parametrize("mask_saturated,calibration,expected", + @pytest.mark.parametrize(("mask_saturated", "calibration", "expected"), [(True, "reflectance", [[np.nan, 0.01 - 10], [645.34, np.inf]]), (False, "reflectance", [[np.nan, 0.01 - 10], [645.34, 645.35]]), (True, "radiance", [[np.nan, -251.58426503], [16251.99095011, np.inf]])]) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 8a57507141..b03336c230 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -400,7 +400,7 @@ def test_get_dataset(self, file_handler, name, calibration, resolution, is_refl = calibration == "reflectance" if is_easy and is_vis and not is_refl: # VIS counts/radiance not available in easy FCDR - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Cannot calibrate to .*. Easy FCDR provides reflectance only."): file_handler.get_dataset(dataset_id, dataset_info) else: ds = file_handler.get_dataset(dataset_id, dataset_info) diff --git a/satpy/tests/reader_tests/test_mws_l1b_nc.py b/satpy/tests/reader_tests/test_mws_l1b_nc.py index 89a6eb4700..2d227822a4 100644 --- a/satpy/tests/reader_tests/test_mws_l1b_nc.py +++ b/satpy/tests/reader_tests/test_mws_l1b_nc.py @@ -43,7 +43,7 @@ N_PRTS = 6 -@pytest.fixture +@pytest.fixture() def reader(fake_file): """Return reader of mws level-1b data.""" return MWSL1BFile( @@ -70,13 +70,13 @@ def reader(fake_file): ) -@pytest.fixture +@pytest.fixture() def fake_file(tmp_path): """Return file path to level-1b file.""" file_path = tmp_path / "test_file_mws_l1b.nc" writer = MWSL1BFakeFileWriter(file_path) writer.write() - yield file_path + return file_path class MWSL1BFakeFileWriter: @@ -325,10 +325,10 @@ def test_get_dataset_aux_data_expected_data_missing(self, caplog, reader): " no valid Dataset created") assert log_output in caplog.text - @pytest.mark.parametrize("dims", ( + @pytest.mark.parametrize("dims", [ ("n_scans", "n_fovs"), ("x", "y"), - )) + ]) def test_standardize_dims(self, reader, dims): """Test standardize dims.""" variable = xr.DataArray( @@ -389,7 +389,7 @@ def test_manage_attributes(self, mock, reader): } -@pytest.mark.parametrize("name, index", [("1", 0), ("2", 1), ("24", 23)]) +@pytest.mark.parametrize(("name", "index"), [("1", 0), ("2", 1), ("24", 23)]) def test_get_channel_index_from_name(name, index): """Test getting the MWS channel index from the channel name.""" ch_idx = get_channel_index_from_name(name) @@ -398,8 +398,5 @@ def test_get_channel_index_from_name(name, index): def test_get_channel_index_from_name_throw_exception(): """Test that an excpetion is thrown when getting the MWS channel index from an unsupported name.""" - with 
pytest.raises(Exception) as excinfo: + with pytest.raises(AttributeError, match="Channel name 'channel 1' not supported"): _ = get_channel_index_from_name("channel 1") - - assert str(excinfo.value) == "Channel name 'channel 1' not supported" - assert excinfo.type == AttributeError diff --git a/satpy/tests/reader_tests/test_netcdf_utils.py b/satpy/tests/reader_tests/test_netcdf_utils.py index 16dfc57a83..ea104ed086 100644 --- a/satpy/tests/reader_tests/test_netcdf_utils.py +++ b/satpy/tests/reader_tests/test_netcdf_utils.py @@ -127,25 +127,25 @@ def test_all_basic(self): from satpy.readers.netcdf_utils import NetCDF4FileHandler file_handler = NetCDF4FileHandler("test.nc", {}, {}) - self.assertEqual(file_handler["/dimension/rows"], 10) - self.assertEqual(file_handler["/dimension/cols"], 100) + assert file_handler["/dimension/rows"] == 10 + assert file_handler["/dimension/cols"] == 100 for ds in ("test_group/ds1_f", "test_group/ds1_i", "ds2_f", "ds2_i"): - self.assertEqual(file_handler[ds].dtype, np.float32 if ds.endswith("f") else np.int32) - self.assertTupleEqual(file_handler[ds + "/shape"], (10, 100)) - self.assertEqual(file_handler[ds + "/dimensions"], ("rows", "cols")) - self.assertEqual(file_handler[ds + "/attr/test_attr_str"], "test_string") - self.assertEqual(file_handler[ds + "/attr/test_attr_int"], 0) - self.assertEqual(file_handler[ds + "/attr/test_attr_float"], 1.2) + assert file_handler[ds].dtype == (np.float32 if ds.endswith("f") else np.int32) + assert file_handler[ds + "/shape"] == (10, 100) + assert file_handler[ds + "/dimensions"] == ("rows", "cols") + assert file_handler[ds + "/attr/test_attr_str"] == "test_string" + assert file_handler[ds + "/attr/test_attr_int"] == 0 + assert file_handler[ds + "/attr/test_attr_float"] == 1.2 test_group = file_handler["test_group"] - self.assertTupleEqual(test_group["ds1_i"].shape, (10, 100)) - self.assertTupleEqual(test_group["ds1_i"].dims, ("rows", "cols")) + assert test_group["ds1_i"].shape == (10, 100) + assert test_group["ds1_i"].dims == ("rows", "cols") - self.assertEqual(file_handler["/attr/test_attr_str"], "test_string") - self.assertEqual(file_handler["/attr/test_attr_str_arr"], "test_string2") - self.assertEqual(file_handler["/attr/test_attr_int"], 0) - self.assertEqual(file_handler["/attr/test_attr_float"], 1.2) + assert file_handler["/attr/test_attr_str"] == "test_string" + assert file_handler["/attr/test_attr_str_arr"] == "test_string2" + assert file_handler["/attr/test_attr_int"] == 0 + assert file_handler["/attr/test_attr_float"] == 1.2 global_attrs = { "test_attr_str": "test_string", @@ -153,16 +153,16 @@ def test_all_basic(self): "test_attr_int": 0, "test_attr_float": 1.2 } - self.assertEqual(file_handler["/attrs"], global_attrs) + assert file_handler["/attrs"] == global_attrs - self.assertIsInstance(file_handler.get("ds2_f")[:], xr.DataArray) - self.assertIsNone(file_handler.get("fake_ds")) - self.assertEqual(file_handler.get("fake_ds", "test"), "test") + assert isinstance(file_handler.get("ds2_f")[:], xr.DataArray) + assert file_handler.get("fake_ds") is None + assert file_handler.get("fake_ds", "test") == "test" - self.assertTrue("ds2_f" in file_handler) - self.assertFalse("fake_ds" in file_handler) - self.assertIsNone(file_handler.file_handle) - self.assertEqual(file_handler["ds2_sc"], 42) + assert ("ds2_f" in file_handler) is True + assert ("fake_ds" in file_handler) is False + assert file_handler.file_handle is None + assert file_handler["ds2_sc"] == 42 def test_listed_variables(self): """Test that only listed 
variables/attributes area collected.""" @@ -212,11 +212,10 @@ def test_caching(self): from satpy.readers.netcdf_utils import NetCDF4FileHandler h = NetCDF4FileHandler("test.nc", {}, {}, cache_var_size=1000, cache_handle=True) - self.assertIsNotNone(h.file_handle) - self.assertTrue(h.file_handle.isopen()) + assert h.file_handle is not None + assert h.file_handle.isopen() - self.assertEqual(sorted(h.cached_file_content.keys()), - ["ds2_s", "ds2_sc"]) + assert sorted(h.cached_file_content.keys()) == ["ds2_s", "ds2_sc"] # with caching, these tests access different lines than without np.testing.assert_array_equal(h["ds2_s"], np.arange(10)) np.testing.assert_array_equal(h["test_group/ds1_i"], @@ -227,7 +226,7 @@ def test_caching(self): h["ds2_f"], np.arange(10. * 100).reshape((10, 100))) h.__del__() - self.assertFalse(h.file_handle.isopen()) + assert not h.file_handle.isopen() def test_filenotfound(self): """Test that error is raised when file not found.""" diff --git a/satpy/tests/reader_tests/test_nucaps.py b/satpy/tests/reader_tests/test_nucaps.py index 5b1c061798..a1f5736bdb 100644 --- a/satpy/tests/reader_tests/test_nucaps.py +++ b/satpy/tests/reader_tests/test_nucaps.py @@ -177,10 +177,10 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_init_with_kwargs(self): """Test basic init with extra parameters.""" @@ -189,10 +189,10 @@ def test_init_with_kwargs(self): loadables = r.select_files_from_pathnames([ "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables, fh_kwargs={"mask_surface": False}) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_nonpressure_based(self): """Test loading all channels that aren't based on pressure.""" @@ -209,14 +209,14 @@ def test_load_nonpressure_based(self): "Skin_Temperature", "Quality_Flag", ]) - self.assertEqual(len(datasets), 6) + assert len(datasets) == 6 for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) # self.assertEqual(v.info['units'], 'degrees') - self.assertEqual(v.ndim, 1) - self.assertEqual(v.attrs["sensor"], set(["cris", "atms", "viirs"])) - self.assertEqual(type(v.attrs["start_time"]), datetime.datetime) - self.assertEqual(type(v.attrs["end_time"]), datetime.datetime) + assert v.ndim == 1 + assert v.attrs["sensor"] == set(["cris", "atms", "viirs"]) + assert type(v.attrs["start_time"]) == datetime.datetime + assert type(v.attrs["end_time"]) == datetime.datetime def test_load_pressure_based(self): """Test loading all channels based on pressure.""" @@ -246,10 +246,10 @@ def test_load_pressure_based(self): "SO2", "SO2_MR", ]) - self.assertEqual(len(datasets), 19) + assert len(datasets) == 19 for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) - self.assertEqual(v.ndim, 2) + assert v.ndim == 2 if np.issubdtype(v.dtype, np.floating): assert "_FillValue" not in v.attrs @@ -263,9 +263,9 @@ def test_load_multiple_files_pressure(self): ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=True) - self.assertEqual(len(datasets), 100) + assert len(datasets) == 100 for v in 
datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_individual_pressure_levels_true(self): """Test loading Temperature with individual pressure datasets.""" @@ -276,9 +276,9 @@ def test_load_individual_pressure_levels_true(self): ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=True) - self.assertEqual(len(datasets), 100) + assert len(datasets) == 100 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_individual_pressure_levels_min_max(self): """Test loading individual Temperature with min/max level specified.""" @@ -289,9 +289,9 @@ def test_load_individual_pressure_levels_min_max(self): ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(100., 150.)) - self.assertEqual(len(datasets), 6) + assert len(datasets) == 6 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_individual_pressure_levels_single(self): """Test loading individual Temperature with specific levels.""" @@ -302,9 +302,9 @@ def test_load_individual_pressure_levels_single(self): ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(103.017,)) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_pressure_levels_true(self): """Test loading Temperature with all pressure levels.""" @@ -315,10 +315,10 @@ def test_load_pressure_levels_true(self): ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature"], pressure_levels=True) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 2) - self.assertTupleEqual(v.shape, DEFAULT_PRES_FILE_SHAPE) + assert v.ndim == 2 + assert v.shape == DEFAULT_PRES_FILE_SHAPE def test_load_pressure_levels_min_max(self): """Test loading Temperature with min/max level specified.""" @@ -329,11 +329,10 @@ def test_load_pressure_levels_min_max(self): ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature"], pressure_levels=(100., 150.)) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 2) - self.assertTupleEqual(v.shape, - (DEFAULT_PRES_FILE_SHAPE[0], 6)) + assert v.ndim == 2 + assert v.shape == (DEFAULT_PRES_FILE_SHAPE[0], 6) def test_load_pressure_levels_single(self): """Test loading a specific Temperature level.""" @@ -344,11 +343,10 @@ def test_load_pressure_levels_single(self): ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature"], pressure_levels=(103.017,)) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 2) - self.assertTupleEqual(v.shape, - (DEFAULT_PRES_FILE_SHAPE[0], 1)) + assert v.ndim == 2 + assert v.shape == (DEFAULT_PRES_FILE_SHAPE[0], 1) def test_load_pressure_levels_single_and_pressure_levels(self): """Test loading a specific Temperature level and pressure levels.""" @@ -359,13 +357,12 @@ def test_load_pressure_levels_single_and_pressure_levels(self): ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature", "Pressure_Levels"], pressure_levels=(103.017,)) - self.assertEqual(len(datasets), 2) + assert len(datasets) == 2 t_ds = datasets["Temperature"] - self.assertEqual(t_ds.ndim, 2) - self.assertTupleEqual(t_ds.shape, - 
(DEFAULT_PRES_FILE_SHAPE[0], 1)) + assert t_ds.ndim == 2 + assert t_ds.shape == (DEFAULT_PRES_FILE_SHAPE[0], 1) pl_ds = datasets["Pressure_Levels"] - self.assertTupleEqual(pl_ds.shape, (1,)) + assert pl_ds.shape == (1,) class TestNUCAPSScienceEDRReader(unittest.TestCase): @@ -394,10 +391,10 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_nonpressure_based(self): """Test loading all channels that aren't based on pressure.""" @@ -413,12 +410,12 @@ def test_load_nonpressure_based(self): "Skin_Temperature", "Quality_Flag", ]) - self.assertEqual(len(datasets), 5) + assert len(datasets) == 5 for v in datasets.values(): - self.assertEqual(v.ndim, 1) - self.assertEqual(v.attrs["sensor"], set(["cris", "atms", "viirs"])) - self.assertEqual(type(v.attrs["start_time"]), datetime.datetime) - self.assertEqual(type(v.attrs["end_time"]), datetime.datetime) + assert v.ndim == 1 + assert v.attrs["sensor"] == set(["cris", "atms", "viirs"]) + assert type(v.attrs["start_time"]) == datetime.datetime + assert type(v.attrs["end_time"]) == datetime.datetime def test_load_pressure_based(self): """Test loading all channels based on pressure.""" @@ -445,10 +442,10 @@ def test_load_pressure_based(self): "SO2", "SO2_MR", ]) - self.assertEqual(len(datasets), 16) + assert len(datasets) == 16 for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) - self.assertEqual(v.ndim, 2) + assert v.ndim == 2 def test_load_individual_pressure_levels_true(self): """Test loading Temperature with individual pressure datasets.""" @@ -459,9 +456,9 @@ def test_load_individual_pressure_levels_true(self): ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=True) - self.assertEqual(len(datasets), 100) + assert len(datasets) == 100 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_individual_pressure_levels_min_max(self): """Test loading individual Temperature with min/max level specified.""" @@ -472,9 +469,9 @@ def test_load_individual_pressure_levels_min_max(self): ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(100., 150.)) - self.assertEqual(len(datasets), 6) + assert len(datasets) == 6 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_individual_pressure_levels_single(self): """Test loading individual Temperature with specific levels.""" @@ -485,9 +482,9 @@ def test_load_individual_pressure_levels_single(self): ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(103.017,)) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_pressure_levels_true(self): """Test loading Temperature with all pressure levels.""" @@ -498,10 +495,10 @@ def test_load_pressure_levels_true(self): ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature"], pressure_levels=True) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 2) - self.assertTupleEqual(v.shape, DEFAULT_PRES_FILE_SHAPE) + assert v.ndim == 2 + 
assert v.shape == DEFAULT_PRES_FILE_SHAPE def test_load_pressure_levels_min_max(self): """Test loading Temperature with min/max level specified.""" @@ -512,11 +509,10 @@ def test_load_pressure_levels_min_max(self): ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature"], pressure_levels=(100., 150.)) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 2) - self.assertTupleEqual(v.shape, - (DEFAULT_PRES_FILE_SHAPE[0], 6)) + assert v.ndim == 2 + assert v.shape == (DEFAULT_PRES_FILE_SHAPE[0], 6) def test_load_pressure_levels_single(self): """Test loading a specific Temperature level.""" @@ -527,11 +523,10 @@ def test_load_pressure_levels_single(self): ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature"], pressure_levels=(103.017,)) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 2) - self.assertTupleEqual(v.shape, - (DEFAULT_PRES_FILE_SHAPE[0], 1)) + assert v.ndim == 2 + assert v.shape == (DEFAULT_PRES_FILE_SHAPE[0], 1) def test_load_pressure_levels_single_and_pressure_levels(self): """Test loading a specific Temperature level and pressure levels.""" @@ -542,10 +537,9 @@ def test_load_pressure_levels_single_and_pressure_levels(self): ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature", "Pressure_Levels"], pressure_levels=(103.017,)) - self.assertEqual(len(datasets), 2) + assert len(datasets) == 2 t_ds = datasets["Temperature"] - self.assertEqual(t_ds.ndim, 2) - self.assertTupleEqual(t_ds.shape, - (DEFAULT_PRES_FILE_SHAPE[0], 1)) + assert t_ds.ndim == 2 + assert t_ds.shape == (DEFAULT_PRES_FILE_SHAPE[0], 1) pl_ds = datasets["Pressure_Levels"] - self.assertTupleEqual(pl_ds.shape, (1,)) + assert pl_ds.shape == (1,) diff --git a/satpy/tests/reader_tests/test_nwcsaf_msg.py b/satpy/tests/reader_tests/test_nwcsaf_msg.py index e323baeb20..6d4dbfe53f 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_msg.py +++ b/satpy/tests/reader_tests/test_nwcsaf_msg.py @@ -23,6 +23,7 @@ import h5py import numpy as np +import pytest from satpy.tests.reader_tests.utils import fill_h5 @@ -483,18 +484,18 @@ def test_get_area_def(self): aext_res = AREA_DEF_DICT["area_extent"] for i in range(4): - self.assertAlmostEqual(area_def.area_extent[i], aext_res[i], 4) + assert area_def.area_extent[i] == pytest.approx(aext_res[i], abs=1e-4) proj_dict = AREA_DEF_DICT["proj_dict"] - self.assertEqual(proj_dict["proj"], area_def.proj_dict["proj"]) + assert proj_dict["proj"] == area_def.proj_dict["proj"] # Not all elements passed on Appveyor, so skip testing every single element of the proj-dict: # for key in proj_dict: # self.assertEqual(proj_dict[key], area_def.proj_dict[key]) - self.assertEqual(AREA_DEF_DICT["x_size"], area_def.width) - self.assertEqual(AREA_DEF_DICT["y_size"], area_def.height) + assert AREA_DEF_DICT["x_size"] == area_def.width + assert AREA_DEF_DICT["y_size"] == area_def.height - self.assertEqual(AREA_DEF_DICT["area_id"], area_def.area_id) + assert AREA_DEF_DICT["area_id"] == area_def.area_id def test_get_dataset(self): """Retrieve datasets from a NWCSAF msgv2013 hdf5 file.""" @@ -506,8 +507,8 @@ def test_get_dataset(self): dsid = make_dataid(name="ct") test = Hdf5NWCSAF(self.filename_ct, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CT"}) - self.assertEqual(ds.shape, (1856, 3712)) - self.assertEqual(ds.dtype, np.uint8) + assert ds.shape == (1856, 3712) + assert ds.dtype == np.uint8 
np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTYPE_TEST_FRAME) filename_info = {} @@ -515,8 +516,8 @@ def test_get_dataset(self): dsid = make_dataid(name="ctth_alti") test = Hdf5NWCSAF(self.filename_ctth, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CTTH_HEIGHT"}) - self.assertEqual(ds.shape, (1856, 3712)) - self.assertEqual(ds.dtype, np.float32) + assert ds.shape == (1856, 3712) + assert ds.dtype == np.float32 np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTTH_HEIGHT_TEST_FRAME_RES) filename_info = {} @@ -524,8 +525,8 @@ def test_get_dataset(self): dsid = make_dataid(name="ctth_pres") test = Hdf5NWCSAF(self.filename_ctth, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CTTH_PRESS"}) - self.assertEqual(ds.shape, (1856, 3712)) - self.assertEqual(ds.dtype, np.float32) + assert ds.shape == (1856, 3712) + assert ds.dtype == np.float32 np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTTH_PRESSURE_TEST_FRAME_RES) filename_info = {} @@ -533,8 +534,8 @@ def test_get_dataset(self): dsid = make_dataid(name="ctth_tempe") test = Hdf5NWCSAF(self.filename_ctth, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CTTH_TEMPER"}) - self.assertEqual(ds.shape, (1856, 3712)) - self.assertEqual(ds.dtype, np.float32) + assert ds.shape == (1856, 3712) + assert ds.dtype == np.float32 np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTTH_TEMPERATURE_TEST_FRAME_RES) def tearDown(self): diff --git a/satpy/tests/reader_tests/test_nwcsaf_nc.py b/satpy/tests/reader_tests/test_nwcsaf_nc.py index fb7187af1f..2070e5187c 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_nc.py +++ b/satpy/tests/reader_tests/test_nwcsaf_nc.py @@ -105,7 +105,7 @@ def create_nwcsaf_geo_ct_file(directory, attrs=global_attrs): return filename -@pytest.fixture +@pytest.fixture() def nwcsaf_geo_ct_filehandler(nwcsaf_geo_ct_filename): """Create a CT filehandler.""" return NcNWCSAF(nwcsaf_geo_ct_filename, {}, {}) @@ -156,13 +156,13 @@ def create_ctth_file(path, attrs=global_attrs): return filename -@pytest.fixture +@pytest.fixture() def nwcsaf_pps_cmic_filehandler(nwcsaf_pps_cmic_filename): """Create a CMIC filehandler.""" return NcNWCSAF(nwcsaf_pps_cmic_filename, {}, {"file_key_prefix": "cmic_"}) -@pytest.fixture +@pytest.fixture() def nwcsaf_pps_ctth_filehandler(nwcsaf_pps_ctth_filename): """Create a CMIC filehandler.""" return NcNWCSAF(nwcsaf_pps_ctth_filename, {}, {}) @@ -218,7 +218,7 @@ def create_ctth_alti_pal_variable_with_fill_value_color(nc_file, var_name): var.attrs["_FillValue"] = 65535 -@pytest.fixture +@pytest.fixture() def nwcsaf_pps_cpp_filehandler(nwcsaf_pps_cpp_filename): """Create a CPP filehandler.""" return NcNWCSAF(nwcsaf_pps_cpp_filename, {}, {"file_key_prefix": "cpp_"}) @@ -233,7 +233,7 @@ def nwcsaf_old_geo_ct_filename(tmp_path_factory): return create_nwcsaf_geo_ct_file(tmp_path_factory.mktemp("data-old"), attrs=attrs) -@pytest.fixture +@pytest.fixture() def nwcsaf_old_geo_ct_filehandler(nwcsaf_old_geo_ct_filename): """Create a CT filehandler.""" return NcNWCSAF(nwcsaf_old_geo_ct_filename, {}, {}) @@ -242,19 +242,19 @@ def nwcsaf_old_geo_ct_filehandler(nwcsaf_old_geo_ct_filename): class TestNcNWCSAFGeo: """Test the NcNWCSAF reader for Geo products.""" - @pytest.mark.parametrize("platform, instrument", [("Metop-B", "avhrr-3"), - ("NOAA-20", "viirs"), - ("Himawari-8", "ahi"), - ("GOES-17", "abi"), - ("Meteosat-11", "seviri")]) + @pytest.mark.parametrize(("platform", 
"instrument"), [("Metop-B", "avhrr-3"), + ("NOAA-20", "viirs"), + ("Himawari-8", "ahi"), + ("GOES-17", "abi"), + ("Meteosat-11", "seviri")]) def test_sensor_name_platform(self, nwcsaf_geo_ct_filehandler, platform, instrument): """Test that the correct sensor name is being set.""" nwcsaf_geo_ct_filehandler.set_platform_and_sensor(platform_name=platform) assert nwcsaf_geo_ct_filehandler.sensor == set([instrument]) assert nwcsaf_geo_ct_filehandler.sensor_names == set([instrument]) - @pytest.mark.parametrize("platform, instrument", [("GOES16", "abi"), - ("MSG4", "seviri")]) + @pytest.mark.parametrize(("platform", "instrument"), [("GOES16", "abi"), + ("MSG4", "seviri")]) def test_sensor_name_sat_id(self, nwcsaf_geo_ct_filehandler, platform, instrument): """Test that the correct sensor name is being set.""" nwcsaf_geo_ct_filehandler.set_platform_and_sensor(sat_id=platform) @@ -286,22 +286,22 @@ def test_scale_dataset_attr_removal(self, nwcsaf_geo_ct_filehandler): assert "scale_factor" not in var.attrs assert "add_offset" not in var.attrs - @pytest.mark.parametrize("attrs, expected", [({"scale_factor": np.array(1.5), - "add_offset": np.array(2.5), - "_FillValue": 1}, - [np.nan, 5.5, 7]), - ({"scale_factor": np.array(1.5), - "add_offset": np.array(2.5), - "valid_min": 1.1}, - [np.nan, 5.5, 7]), - ({"scale_factor": np.array(1.5), - "add_offset": np.array(2.5), - "valid_max": 2.1}, - [4, 5.5, np.nan]), - ({"scale_factor": np.array(1.5), - "add_offset": np.array(2.5), - "valid_range": (1.1, 2.1)}, - [np.nan, 5.5, np.nan])]) + @pytest.mark.parametrize(("attrs", "expected"), [({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "_FillValue": 1}, + [np.nan, 5.5, 7]), + ({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "valid_min": 1.1}, + [np.nan, 5.5, 7]), + ({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "valid_max": 2.1}, + [4, 5.5, np.nan]), + ({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "valid_range": (1.1, 2.1)}, + [np.nan, 5.5, np.nan])]) def test_scale_dataset_floating(self, nwcsaf_geo_ct_filehandler, attrs, expected): """Test the scaling of the dataset with floating point values.""" var = xr.DataArray([1, 2, 3], attrs=attrs) diff --git a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py index f0ed47f4f8..bdb0edfb03 100644 --- a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py +++ b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py @@ -94,7 +94,7 @@ def fake_dataset(): ds_list_kd = ["kd_490", "water_class10", "seawifs_nobs_sum"] -@pytest.fixture +@pytest.fixture() def fake_file_dict(fake_dataset, tmp_path): """Write a fake dataset to file.""" fdict = {} @@ -126,7 +126,7 @@ def fake_file_dict(fake_dataset, tmp_path): fake_dataset.to_netcdf(filename) fdict["k490_1d"] = filename - yield fdict + return fdict class TestOCCCIReader: @@ -149,7 +149,7 @@ def _create_reader_for_resolutions(self, filename): assert reader.file_handlers return reader - @pytest.fixture + @pytest.fixture() def area_exp(self): """Get expected area definition.""" proj_dict = {"datum": "WGS84", "no_defs": "None", "proj": "longlat", "type": "crs"} diff --git a/satpy/tests/reader_tests/test_olci_nc.py b/satpy/tests/reader_tests/test_olci_nc.py index b6f5863a25..2f37fb2098 100644 --- a/satpy/tests/reader_tests/test_olci_nc.py +++ b/satpy/tests/reader_tests/test_olci_nc.py @@ -109,7 +109,7 @@ def test_get_mask(self, mocked_dataset): filename_info = {"mission_id": "S3A", "dataset_name": "mask", 
"start_time": 0, "end_time": 0} test = NCOLCI2("somedir/somefile.nc", filename_info, "c") res = test.get_dataset(ds_id, {"nc_key": "mask"}) - self.assertEqual(res.dtype, np.dtype("bool")) + assert res.dtype == np.dtype("bool") expected = np.array([[True, False, True, True, True, True], [False, False, True, True, False, False], [False, False, False, False, False, True], @@ -133,7 +133,7 @@ def test_get_mask_with_alternative_items(self, mocked_dataset): filename_info = {"mission_id": "S3A", "dataset_name": "mask", "start_time": 0, "end_time": 0} test = NCOLCI2("somedir/somefile.nc", filename_info, "c", mask_items=["INVALID"]) res = test.get_dataset(ds_id, {"nc_key": "mask"}) - self.assertEqual(res.dtype, np.dtype("bool")) + assert res.dtype == np.dtype("bool") expected = np.array([True] + [False] * 29).reshape(5, 6) np.testing.assert_array_equal(res.values, expected) @@ -273,4 +273,4 @@ def test_bitflags(self): False, False, False, True, False, True, False, False, False, True, True, False, False, True, False]) - self.assertTrue(all(mask == expected)) + assert all(mask == expected) diff --git a/satpy/tests/reader_tests/test_omps_edr.py b/satpy/tests/reader_tests/test_omps_edr.py index f89e41f5d0..9aa227a200 100644 --- a/satpy/tests/reader_tests/test_omps_edr.py +++ b/satpy/tests/reader_tests/test_omps_edr.py @@ -195,10 +195,10 @@ def test_init(self): "OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5", "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) - self.assertEqual(len(loadables), 3) + assert len(loadables) == 3 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_basic_load_so2(self): """Test basic load of so2 datasets.""" @@ -209,32 +209,32 @@ def test_basic_load_so2(self): "OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5", "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) - self.assertEqual(len(loadables), 3) + assert len(loadables) == 3 r.create_filehandlers(loadables) ds = r.load(["so2_trm"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): - self.assertEqual(d.attrs["resolution"], 50000) - self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) + assert d.attrs["resolution"] == 50000 + assert d.shape == DEFAULT_FILE_SHAPE + assert "area" in d.attrs + assert d.attrs["area"] is not None ds = r.load(["tcso2_trm_sampo"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): - self.assertEqual(d.attrs["resolution"], 50000) - self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) + assert d.attrs["resolution"] == 50000 + assert d.shape == DEFAULT_FILE_SHAPE ds = r.load(["tcso2_stl_sampo"]) - self.assertEqual(len(ds), 0) + assert len(ds) == 0 # Dataset without _FillValue ds = r.load(["tcso2_tru_sampo"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 # Dataset without unit ds = r.load(["tcso2_pbl_sampo"]) - self.assertEqual(len(ds), 0) + assert len(ds) == 0 def test_basic_load_to3(self): """Test basic load of to3 datasets.""" @@ -245,15 +245,15 @@ def test_basic_load_to3(self): "OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5", "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) - self.assertEqual(len(loadables), 3) + assert len(loadables) == 3 r.create_filehandlers(loadables) ds = r.load(["reflectivity_331", "uvaerosol_index"]) - self.assertEqual(len(ds), 2) 
+ assert len(ds) == 2 for d in ds.values(): - self.assertEqual(d.attrs["resolution"], 50000) - self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) + assert d.attrs["resolution"] == 50000 + assert d.shape == DEFAULT_FILE_SHAPE + assert "area" in d.attrs + assert d.attrs["area"] is not None @mock.patch("satpy.readers.hdf5_utils.HDF5FileHandler._get_reference") @mock.patch("h5py.File") @@ -269,4 +269,4 @@ def test_load_so2_DIMENSION_LIST(self, mock_h5py_file, mock_hdf5_utils_get_refer r.create_filehandlers(loadables) ds = r.load(["tcso2_trl_sampo"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 diff --git a/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py b/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py index a529ae9f50..661900e650 100644 --- a/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py +++ b/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py @@ -66,10 +66,10 @@ def setUp(self, xr_): def test_init(self): """Test reader initialization.""" - self.assertEqual(self.reader.start_time, 0) - self.assertEqual(self.reader.end_time, 0) - self.assertEqual(self.reader.fstart_time, 0) - self.assertEqual(self.reader.fend_time, 0) + assert self.reader.start_time == 0 + assert self.reader.end_time == 0 + assert self.reader.fstart_time == 0 + assert self.reader.fend_time == 0 def test_get_dataset(self): """Test getting a dataset.""" @@ -77,6 +77,4 @@ def test_get_dataset(self): dt = self.reader.get_dataset( key=make_dataid(name=ch), info={}) # ... this only compares the valid (unmasked) elements - self.assertTrue(np.all(self.nc[ch] == dt.to_masked_array()), - msg="get_dataset() returns invalid data for " - "dataset {}".format(ch)) + assert np.all(self.nc[ch] == dt.to_masked_array()), f"get_dataset() returns invalid data for dataset {ch}" diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index b335fd09c8..e71534fbd2 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -88,7 +88,7 @@ def _create_test_netcdf(filename, resolution=742): @pytest.fixture(scope="session") -def _cf_scene(): +def cf_scene(): tstart = datetime(2019, 4, 1, 12, 0) tend = datetime(2019, 4, 1, 12, 15) data_visir = np.array([[1, 2], [3, 4]]) @@ -212,15 +212,15 @@ def _cf_scene(): return scene -@pytest.fixture -def _nc_filename(tmp_path): +@pytest.fixture() +def nc_filename(tmp_path): now = datetime.utcnow() filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-mband-20201007075915-20201007080744.nc" return str(tmp_path / filename) -@pytest.fixture -def _nc_filename_i(tmp_path): +@pytest.fixture() +def nc_filename_i(tmp_path): now = datetime.utcnow() filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-iband-20201007075915-20201007080744.nc" return str(tmp_path / filename) @@ -229,21 +229,21 @@ def _nc_filename_i(tmp_path): class TestCFReader: """Test case for CF reader.""" - def test_write_and_read_with_area_definition(self, _cf_scene, _nc_filename): + def test_write_and_read_with_area_definition(self, cf_scene, nc_filename): """Save a dataset with an area definition to file with cf_writer and read the data again.""" - _cf_scene.save_datasets(writer="cf", - filename=_nc_filename, - engine="h5netcdf", - flatten_attrs=True, - pretty=True) + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="h5netcdf", + flatten_attrs=True, + pretty=True) scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename]) + filenames=[nc_filename]) 
scn_.load(["image0", "image1", "lat"]) - np.testing.assert_array_equal(scn_["image0"].data, _cf_scene["image0"].data) - np.testing.assert_array_equal(scn_["lat"].data, _cf_scene["lat"].data) # lat loaded as dataset - np.testing.assert_array_equal(scn_["image0"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord + np.testing.assert_array_equal(scn_["image0"].data, cf_scene["image0"].data) + np.testing.assert_array_equal(scn_["lat"].data, cf_scene["lat"].data) # lat loaded as dataset + np.testing.assert_array_equal(scn_["image0"].coords["lon"], cf_scene["lon"].data) # lon loded as coord assert isinstance(scn_["image0"].attrs["wavelength"], WavelengthRange) - expected_area = _cf_scene["image0"].attrs["area"] + expected_area = cf_scene["image0"].attrs["area"] actual_area = scn_["image0"].attrs["area"] assert pytest.approx(expected_area.area_extent, 0.000001) == actual_area.area_extent assert expected_area.proj_dict == actual_area.proj_dict @@ -252,18 +252,18 @@ def test_write_and_read_with_area_definition(self, _cf_scene, _nc_filename): assert expected_area.description == actual_area.description assert expected_area.proj_dict == actual_area.proj_dict - def test_write_and_read_with_swath_definition(self, _cf_scene, _nc_filename): + def test_write_and_read_with_swath_definition(self, cf_scene, nc_filename): """Save a dataset with a swath definition to file with cf_writer and read the data again.""" - _cf_scene.save_datasets(writer="cf", - filename=_nc_filename, - engine="h5netcdf", - flatten_attrs=True, - pretty=True, - datasets=["swath_data"]) + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="h5netcdf", + flatten_attrs=True, + pretty=True, + datasets=["swath_data"]) scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename]) + filenames=[nc_filename]) scn_.load(["swath_data"]) - expected_area = _cf_scene["swath_data"].attrs["area"] + expected_area = cf_scene["swath_data"].attrs["area"] actual_area = scn_["swath_data"].attrs["area"] assert expected_area.shape == actual_area.shape np.testing.assert_array_equal(expected_area.lons.data, actual_area.lons.data) @@ -278,162 +278,162 @@ def test_fix_modifier_attr(self): reader.fix_modifier_attr(ds_info) assert ds_info["modifiers"] == () - def test_read_prefixed_channels(self, _cf_scene, _nc_filename): + def test_read_prefixed_channels(self, cf_scene, nc_filename): """Check channels starting with digit is prefixed and read back correctly.""" - _cf_scene.save_datasets(writer="cf", - filename=_nc_filename, - engine="netcdf4", - flatten_attrs=True, - pretty=True) + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="netcdf4", + flatten_attrs=True, + pretty=True) scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename]) + filenames=[nc_filename]) scn_.load(["1"]) - np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) - np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loaded as coord + np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loaded as coord scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename], reader_kwargs={}) + filenames=[nc_filename], reader_kwargs={}) scn_.load(["1"]) - np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) - np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loaded as coord + np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) + 
np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loaded as coord # Check that variables starting with a digit is written to filename variable prefixed - with xr.open_dataset(_nc_filename) as ds_disk: - np.testing.assert_array_equal(ds_disk["CHANNEL_1"].data, _cf_scene["1"].data) + with xr.open_dataset(nc_filename) as ds_disk: + np.testing.assert_array_equal(ds_disk["CHANNEL_1"].data, cf_scene["1"].data) - def test_read_prefixed_channels_include_orig_name(self, _cf_scene, _nc_filename): + def test_read_prefixed_channels_include_orig_name(self, cf_scene, nc_filename): """Check channels starting with digit and includeed orig name is prefixed and read back correctly.""" - _cf_scene.save_datasets(writer="cf", - filename=_nc_filename, - engine="netcdf4", - flatten_attrs=True, - pretty=True, - include_orig_name=True) + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="netcdf4", + flatten_attrs=True, + pretty=True, + include_orig_name=True) scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename]) + filenames=[nc_filename]) scn_.load(["1"]) - np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) - np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loaded as coord + np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loaded as coord assert scn_["1"].attrs["original_name"] == "1" # Check that variables starting with a digit is written to filename variable prefixed - with xr.open_dataset(_nc_filename) as ds_disk: - np.testing.assert_array_equal(ds_disk["CHANNEL_1"].data, _cf_scene["1"].data) + with xr.open_dataset(nc_filename) as ds_disk: + np.testing.assert_array_equal(ds_disk["CHANNEL_1"].data, cf_scene["1"].data) - def test_read_prefixed_channels_by_user(self, _cf_scene, _nc_filename): + def test_read_prefixed_channels_by_user(self, cf_scene, nc_filename): """Check channels starting with digit is prefixed by user and read back correctly.""" - _cf_scene.save_datasets(writer="cf", - filename=_nc_filename, - engine="netcdf4", - flatten_attrs=True, - pretty=True, - numeric_name_prefix="USER") + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="netcdf4", + flatten_attrs=True, + pretty=True, + numeric_name_prefix="USER") scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename], reader_kwargs={"numeric_name_prefix": "USER"}) + filenames=[nc_filename], reader_kwargs={"numeric_name_prefix": "USER"}) scn_.load(["1"]) - np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) - np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord + np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loded as coord # Check that variables starting with a digit is written to filename variable prefixed - with xr.open_dataset(_nc_filename) as ds_disk: - np.testing.assert_array_equal(ds_disk["USER1"].data, _cf_scene["1"].data) + with xr.open_dataset(nc_filename) as ds_disk: + np.testing.assert_array_equal(ds_disk["USER1"].data, cf_scene["1"].data) - def test_read_prefixed_channels_by_user2(self, _cf_scene, _nc_filename): + def test_read_prefixed_channels_by_user2(self, cf_scene, nc_filename): """Check channels starting with digit is prefixed by user when saving and read back correctly without prefix.""" - _cf_scene.save_datasets(writer="cf", - 
filename=_nc_filename, - engine="netcdf4", - flatten_attrs=True, - pretty=True, - include_orig_name=False, - numeric_name_prefix="USER") + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="netcdf4", + flatten_attrs=True, + pretty=True, + include_orig_name=False, + numeric_name_prefix="USER") scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename]) + filenames=[nc_filename]) scn_.load(["USER1"]) - np.testing.assert_array_equal(scn_["USER1"].data, _cf_scene["1"].data) - np.testing.assert_array_equal(scn_["USER1"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord + np.testing.assert_array_equal(scn_["USER1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["USER1"].coords["lon"], cf_scene["lon"].data) # lon loded as coord - def test_read_prefixed_channels_by_user_include_prefix(self, _cf_scene, _nc_filename): + def test_read_prefixed_channels_by_user_include_prefix(self, cf_scene, nc_filename): """Check channels starting with digit is prefixed by user and include original name when saving.""" - _cf_scene.save_datasets(writer="cf", - filename=_nc_filename, - engine="netcdf4", - flatten_attrs=True, - pretty=True, - include_orig_name=True, - numeric_name_prefix="USER") + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="netcdf4", + flatten_attrs=True, + pretty=True, + include_orig_name=True, + numeric_name_prefix="USER") scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename]) + filenames=[nc_filename]) scn_.load(["1"]) - np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) - np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord + np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loded as coord - def test_read_prefixed_channels_by_user_no_prefix(self, _cf_scene, _nc_filename): + def test_read_prefixed_channels_by_user_no_prefix(self, cf_scene, nc_filename): """Check channels starting with digit is not prefixed by user.""" with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=UserWarning, message=".*starts with a digit.*") - _cf_scene.save_datasets(writer="cf", - filename=_nc_filename, - engine="netcdf4", - flatten_attrs=True, - pretty=True, - numeric_name_prefix="") + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="netcdf4", + flatten_attrs=True, + pretty=True, + numeric_name_prefix="") scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename]) + filenames=[nc_filename]) scn_.load(["1"]) - np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) - np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord + np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loded as coord - def test_orbital_parameters(self, _cf_scene, _nc_filename): + def test_orbital_parameters(self, cf_scene, nc_filename): """Test that the orbital parameters in attributes are handled correctly.""" - _cf_scene.save_datasets(writer="cf", - filename=_nc_filename) + cf_scene.save_datasets(writer="cf", + filename=nc_filename) scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename]) + filenames=[nc_filename]) scn_.load(["image0"]) - orig_attrs = _cf_scene["image0"].attrs["orbital_parameters"] + orig_attrs = cf_scene["image0"].attrs["orbital_parameters"] new_attrs = 
scn_["image0"].attrs["orbital_parameters"] assert isinstance(new_attrs, dict) for key in orig_attrs: assert orig_attrs[key] == new_attrs[key] - def test_write_and_read_from_two_files(self, _nc_filename, _nc_filename_i): + def test_write_and_read_from_two_files(self, nc_filename, nc_filename_i): """Save two datasets with different resolution and read the solar_zenith_angle again.""" - _create_test_netcdf(_nc_filename, resolution=742) - _create_test_netcdf(_nc_filename_i, resolution=371) + _create_test_netcdf(nc_filename, resolution=742) + _create_test_netcdf(nc_filename_i, resolution=371) scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename, _nc_filename_i]) + filenames=[nc_filename, nc_filename_i]) scn_.load(["solar_zenith_angle"], resolution=742) assert scn_["solar_zenith_angle"].attrs["resolution"] == 742 scn_.unload() scn_.load(["solar_zenith_angle"], resolution=371) assert scn_["solar_zenith_angle"].attrs["resolution"] == 371 - def test_dataid_attrs_equal_matching_dataset(self, _cf_scene, _nc_filename): + def test_dataid_attrs_equal_matching_dataset(self, cf_scene, nc_filename): """Check that get_dataset returns valid dataset when keys matches.""" from satpy.dataset.dataid import DataID, default_id_keys_config - _create_test_netcdf(_nc_filename, resolution=742) - reader = SatpyCFFileHandler(_nc_filename, {}, {"filetype": "info"}) + _create_test_netcdf(nc_filename, resolution=742) + reader = SatpyCFFileHandler(nc_filename, {}, {"filetype": "info"}) ds_id = DataID(default_id_keys_config, name="solar_zenith_angle", resolution=742, modifiers=()) res = reader.get_dataset(ds_id, {}) assert res.attrs["resolution"] == 742 - def test_dataid_attrs_equal_not_matching_dataset(self, _cf_scene, _nc_filename): + def test_dataid_attrs_equal_not_matching_dataset(self, cf_scene, nc_filename): """Check that get_dataset returns None when key(s) are not matching.""" from satpy.dataset.dataid import DataID, default_id_keys_config - _create_test_netcdf(_nc_filename, resolution=742) - reader = SatpyCFFileHandler(_nc_filename, {}, {"filetype": "info"}) + _create_test_netcdf(nc_filename, resolution=742) + reader = SatpyCFFileHandler(nc_filename, {}, {"filetype": "info"}) not_existing_resolution = 9999999 ds_id = DataID(default_id_keys_config, name="solar_zenith_angle", resolution=not_existing_resolution, modifiers=()) assert reader.get_dataset(ds_id, {}) is None - def test_dataid_attrs_equal_contains_not_matching_key(self, _cf_scene, _nc_filename): + def test_dataid_attrs_equal_contains_not_matching_key(self, cf_scene, nc_filename): """Check that get_dataset returns valid dataset when dataid have key(s) not existing in data.""" from satpy.dataset.dataid import DataID, default_id_keys_config - _create_test_netcdf(_nc_filename, resolution=742) - reader = SatpyCFFileHandler(_nc_filename, {}, {"filetype": "info"}) + _create_test_netcdf(nc_filename, resolution=742) + reader = SatpyCFFileHandler(nc_filename, {}, {"filetype": "info"}) ds_id = DataID(default_id_keys_config, name="solar_zenith_angle", resolution=742, modifiers=(), calibration="counts") res = reader.get_dataset(ds_id, {}) diff --git a/satpy/tests/reader_tests/test_scmi.py b/satpy/tests/reader_tests/test_scmi.py index 45fcc9caee..89eda0479a 100644 --- a/satpy/tests/reader_tests/test_scmi.py +++ b/satpy/tests/reader_tests/test_scmi.py @@ -105,12 +105,9 @@ def test_basic_attributes(self): from datetime import datetime from satpy.tests.utils import make_dataid - self.assertEqual(self.reader.start_time, - datetime(2017, 7, 29, 12, 0, 0, 0)) - 
self.assertEqual(self.reader.end_time, - datetime(2017, 7, 29, 12, 0, 0, 0)) - self.assertEqual(self.reader.get_shape(make_dataid(name="C05"), {}), - (2, 5)) + assert self.reader.start_time == datetime(2017, 7, 29, 12, 0, 0, 0) + assert self.reader.end_time == datetime(2017, 7, 29, 12, 0, 0, 0) + assert self.reader.get_shape(make_dataid(name="C05"), {}) == (2, 5) def test_data_load(self): """Test data loading.""" @@ -119,10 +116,9 @@ def test_data_load(self): make_dataid(name="C05", calibration="reflectance"), {}) np.testing.assert_allclose(res.data, self.expected_rad, equal_nan=True) - self.assertNotIn("scale_factor", res.attrs) - self.assertNotIn("_FillValue", res.attrs) - self.assertEqual(res.attrs["standard_name"], - "toa_bidirectional_reflectance") + assert "scale_factor" not in res.attrs + assert "_FillValue" not in res.attrs + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" assert "orbital_parameters" in res.attrs orb_params = res.attrs["orbital_parameters"] assert orb_params["projection_longitude"] == -90.0 @@ -181,13 +177,13 @@ def test_get_area_def_geos(self, adef): ) reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - self.assertDictEqual(call_args[3], { - "a": 1.0, "b": 1.0, "h": 1.0, "lon_0": -90.0, "lat_0": 0.0, - "proj": "geos", "sweep": "x", "units": "m"}) - self.assertEqual(call_args[4], reader.ncols) - self.assertEqual(call_args[5], reader.nlines) + assert call_args[3] == {"a": 1.0, "b": 1.0, "h": 1.0, + "lon_0": -90.0, "lat_0": 0.0, + "proj": "geos", "sweep": "x", "units": "m"} + assert call_args[4] == reader.ncols + assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") @@ -206,13 +202,13 @@ def test_get_area_def_lcc(self, adef): ) reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - self.assertDictEqual(call_args[3], { - "a": 1.0, "b": 1.0, "lon_0": -90.0, "lat_0": 25.0, "lat_1": 25.0, - "proj": "lcc", "units": "m"}) - self.assertEqual(call_args[4], reader.ncols) - self.assertEqual(call_args[5], reader.nlines) + assert call_args[3] == {"a": 1.0, "b": 1.0, + "lon_0": -90.0, "lat_0": 25.0, "lat_1": 25.0, + "proj": "lcc", "units": "m"} + assert call_args[4] == reader.ncols + assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") @@ -231,13 +227,13 @@ def test_get_area_def_stere(self, adef): ) reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - self.assertDictEqual(call_args[3], { - "a": 1.0, "b": 1.0, "lon_0": -90.0, "lat_0": 90.0, "lat_ts": 60.0, - "proj": "stere", "units": "m"}) - self.assertEqual(call_args[4], reader.ncols) - self.assertEqual(call_args[5], reader.nlines) + assert call_args[3] == {"a": 1.0, "b": 1.0, + "lon_0": -90.0, "lat_0": 90.0, "lat_ts": 60.0, + "proj": "stere", "units": "m"} + assert call_args[4] == reader.ncols + assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") @@ -255,13 +251,13 @@ def test_get_area_def_merc(self, adef): ) reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - 
self.assertDictEqual(call_args[3], { - "a": 1.0, "b": 1.0, "lon_0": -90.0, "lat_0": 0.0, "lat_ts": 0.0, - "proj": "merc", "units": "m"}) - self.assertEqual(call_args[4], reader.ncols) - self.assertEqual(call_args[5], reader.nlines) + assert call_args[3] == {"a": 1.0, "b": 1.0, + "lon_0": -90.0, "lat_0": 0.0, "lat_ts": 0.0, + "proj": "merc", "units": "m"} + assert call_args[4] == reader.ncols + assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index b6540d8623..73a2eac8db 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -74,8 +74,7 @@ def test_chebyshev(self): def test_get_cds_time(self): """Test the get_cds_time function.""" # Scalar - self.assertEqual(get_cds_time(days=21246, msecs=12*3600*1000), - np.datetime64("2016-03-03 12:00")) + assert get_cds_time(days=21246, msecs=12 * 3600 * 1000) == np.datetime64("2016-03-03 12:00") # Array days = np.array([21246, 21247, 21248]) @@ -118,18 +117,10 @@ def observation_end_time(self): def test_round_nom_time(self): """Test the rouding of start/end_time.""" - self.assertEqual(round_nom_time( - dt=self.observation_start_time(), - time_delta=timedelta(minutes=15) - ), - datetime(2023, 3, 20, 15, 0) - ) - self.assertEqual(round_nom_time( - dt=self.observation_end_time(), - time_delta=timedelta(minutes=15) - ), - datetime(2023, 3, 20, 15, 15) - ) + assert round_nom_time(dt=self.observation_start_time(), + time_delta=timedelta(minutes=15)) == datetime(2023, 3, 20, 15, 0) + assert round_nom_time(dt=self.observation_end_time(), + time_delta=timedelta(minutes=15)) == datetime(2023, 3, 20, 15, 15) @staticmethod def test_pad_data_horizontally(): @@ -251,7 +242,7 @@ def test_get_padding_area_int(): class TestSatellitePosition: """Test locating the satellite.""" - @pytest.fixture + @pytest.fixture() def orbit_polynomial(self): """Get an orbit polynomial for testing.""" return OrbitPolynomial( @@ -270,7 +261,7 @@ def orbit_polynomial(self): ) ) - @pytest.fixture + @pytest.fixture() def time(self): """Get scan timestamp for testing.""" return datetime(2006, 1, 1, 12, 15, 9, 304888) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py index cc1107cc6c..d46af5abd2 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py @@ -117,14 +117,14 @@ def test_convert_to_radiance(self): """Test the conversion from counts to radiances.""" result = self.algo.convert_to_radiance(COUNTS_INPUT, GAIN, OFFSET) xr.testing.assert_allclose(result, RADIANCES_OUTPUT) - self.assertEqual(result.dtype, np.float32) + assert result.dtype == np.float32 def test_ir_calibrate(self): """Test conversion from radiance to brightness temperature.""" result = self.algo.ir_calibrate(RADIANCES_OUTPUT, CHANNEL_NAME, CAL_TYPE1) xr.testing.assert_allclose(result, TBS_OUTPUT1, rtol=1E-5) - self.assertEqual(result.dtype, np.float32) + assert result.dtype == np.float32 result = self.algo.ir_calibrate(RADIANCES_OUTPUT, CHANNEL_NAME, CAL_TYPE2) @@ -138,8 +138,8 @@ def test_vis_calibrate(self): result = self.algo.vis_calibrate(VIS008_RADIANCE, VIS008_SOLAR_IRRADIANCE) xr.testing.assert_allclose(result, VIS008_REFLECTANCE) - self.assertTrue(result.sun_earth_distance_correction_applied) - 
self.assertEqual(result.dtype, np.float32) + assert result.sun_earth_distance_correction_applied + assert result.dtype == np.float32 class TestSeviriCalibrationHandler: @@ -147,7 +147,7 @@ class TestSeviriCalibrationHandler: def test_init(self): """Test initialization of the calibration handler.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Invalid calibration mode: INVALID. Choose one of (.*)"): SEVIRICalibrationHandler( platform_id=None, channel_name=None, @@ -182,7 +182,7 @@ def _get_calibration_handler(self, calib_mode="NOMINAL", ext_coefs=None): def test_calibrate_exceptions(self): """Test exceptions raised by the calibration handler.""" calib = self._get_calibration_handler() - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Invalid calibration invalid for channel IR_108"): calib.calibrate(None, "invalid") @pytest.mark.parametrize( diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py index ae042999e3..0ce40d8dfc 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py @@ -67,7 +67,7 @@ def test_read_hrv_band(self, memmap): size=int((464 * 5568 * nbits) / 8), dtype=np.uint8) res = self.reader.read_band("HRV", None) - self.assertEqual(res.shape, (464, 5568)) + assert res.shape == (464, 5568) @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") @@ -79,7 +79,7 @@ def test_get_dataset(self, calibrate, parent_get_dataset): parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=("y", "x")) res = self.reader.get_dataset(key, info) - self.assertEqual(res.shape, (464, 11136)) + assert res.shape == (464, 11136) # Test method calls parent_get_dataset.assert_called_with(key, info) @@ -102,7 +102,7 @@ def test_get_dataset_non_fill(self, calibrate, parent_get_dataset): parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=("y", "x")) res = self.reader.get_dataset(key, info) - self.assertEqual(res.shape, (464, 5568)) + assert res.shape == (464, 5568) # Test method calls parent_get_dataset.assert_called_with(key, info) @@ -118,16 +118,15 @@ def test_get_area_def(self): """Test getting the area def.""" from pyresample.utils import proj4_radius_parameters area = self.reader.get_area_def(make_dataid(name="HRV", resolution=1000)) - self.assertEqual(area.area_extent, - (-45561979844414.07, -3720765401003.719, 45602912357076.38, 77771774058.38356)) + assert area.area_extent == (-45561979844414.07, -3720765401003.719, 45602912357076.38, 77771774058.38356) proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) - self.assertEqual(a, 6378169.0) - self.assertAlmostEqual(b, 6356583.8) - self.assertEqual(proj_dict["h"], 35785831.0) - self.assertEqual(proj_dict["lon_0"], 0.0) - self.assertEqual(proj_dict["proj"], "geos") - self.assertEqual(proj_dict["units"], "m") + assert a == 6378169.0 + assert b == pytest.approx(6356583.8) + assert proj_dict["h"] == 35785831.0 + assert proj_dict["lon_0"] == 0.0 + assert proj_dict["proj"] == "geos" + assert proj_dict["units"] == "m" self.reader.fill_hrv = False area = self.reader.get_area_def(make_dataid(name="HRV", resolution=1000)) npt.assert_allclose(area.defs[0].area_extent, @@ -135,8 +134,8 @@ def test_get_area_def(self): 
npt.assert_allclose(area.defs[1].area_extent, (-30793529275853.656, -3720765401003.719, 14788916824891.568, -2926674655354.9604)) - self.assertEqual(area.defs[0].area_id, "msg_seviri_fes_1km") - self.assertEqual(area.defs[1].area_id, "msg_seviri_fes_1km") + assert area.defs[0].area_id == "msg_seviri_fes_1km" + assert area.defs[1].area_id == "msg_seviri_fes_1km" class TestHRITMSGFileHandler(TestHRITMSGBase): @@ -171,24 +170,20 @@ def test_get_area_def(self): area = self.reader.get_area_def(make_dataid(name="VIS006", resolution=3000)) proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) - self.assertEqual(a, 6378169.0) - self.assertAlmostEqual(b, 6356583.8) - self.assertEqual(proj_dict["h"], 35785831.0) - self.assertEqual(proj_dict["lon_0"], self.projection_longitude) - self.assertEqual(proj_dict["proj"], "geos") - self.assertEqual(proj_dict["units"], "m") - self.assertEqual(area.area_extent, - (-77771774058.38356, -3720765401003.719, - 30310525626438.438, 77771774058.38356)) + assert a == 6378169.0 + assert b == pytest.approx(6356583.8) + assert proj_dict["h"] == 35785831.0 + assert proj_dict["lon_0"] == self.projection_longitude + assert proj_dict["proj"] == "geos" + assert proj_dict["units"] == "m" + assert area.area_extent == (-77771774058.38356, -3720765401003.719, 30310525626438.438, 77771774058.38356) # Data shifted by 1.5km to N-W self.reader.mda["offset_corrected"] = False area = self.reader.get_area_def(make_dataid(name="VIS006", resolution=3000)) - self.assertEqual(area.area_extent, - (-77771772558.38356, -3720765402503.719, - 30310525627938.438, 77771772558.38356)) + assert area.area_extent == (-77771772558.38356, -3720765402503.719, 30310525627938.438, 77771772558.38356) - self.assertEqual(area.area_id, "msg_seviri_rss_3km") + assert area.area_id == "msg_seviri_rss_3km" @mock.patch("satpy.readers.hrit_base.np.memmap") def test_read_band(self, memmap): @@ -198,7 +193,7 @@ def test_read_band(self, memmap): size=int((464 * 3712 * nbits) / 8), dtype=np.uint8) res = self.reader.read_band("VIS006", None) - self.assertEqual(res.shape, (464, 3712)) + assert res.shape == (464, 3712) @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") @@ -227,18 +222,18 @@ def test_get_dataset(self, calibrate, parent_get_dataset): setup.get_attrs_exp(self.projection_longitude) ) # testing start/end time - self.assertEqual(datetime(2006, 1, 1, 12, 15, 9, 304888), self.reader.observation_start_time) - self.assertEqual(datetime(2006, 1, 1, 12, 15,), self.reader.start_time) - self.assertEqual(self.reader.start_time, self.reader.nominal_start_time) + assert datetime(2006, 1, 1, 12, 15, 9, 304888) == self.reader.observation_start_time + assert datetime(2006, 1, 1, 12, 15) == self.reader.start_time + assert self.reader.start_time == self.reader.nominal_start_time - self.assertEqual(datetime(2006, 1, 1, 12, 27, 39), self.reader.observation_end_time) - self.assertEqual(self.reader.end_time, self.reader.nominal_end_time) - self.assertEqual(datetime(2006, 1, 1, 12, 30,), self.reader.end_time) + assert datetime(2006, 1, 1, 12, 27, 39) == self.reader.observation_end_time + assert self.reader.end_time == self.reader.nominal_end_time + assert datetime(2006, 1, 1, 12, 30) == self.reader.end_time # test repeat cycle duration - self.assertEqual(15, self.reader._repeat_cycle_duration) + assert 15 == self.reader._repeat_cycle_duration # Change the reducescan scenario to test the repeat cycle duration handling 
self.reader.epilogue["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] = 1 - self.assertEqual(5, self.reader._repeat_cycle_duration) + assert 5 == self.reader._repeat_cycle_duration @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") @@ -282,10 +277,10 @@ def test_get_raw_mda(self): self.reader.prologue_.reduce = lambda max_size: {"prologue": 1} self.reader.epilogue_.reduce = lambda max_size: {"epilogue": 1} expected = {"prologue": 1, "epilogue": 1, "segment": 1} - self.assertDictEqual(self.reader._get_raw_mda(), expected) + assert self.reader._get_raw_mda() == expected # Make sure _get_raw_mda() doesn't modify the original dictionary - self.assertIn("loff", self.reader.mda) + assert "loff" in self.reader.mda def test_satpos_no_valid_orbit_polynomial(self): """Test satellite position if there is no valid orbit polynomial.""" @@ -296,10 +291,7 @@ def test_satpos_no_valid_orbit_polynomial(self): projection_longitude=self.projection_longitude, orbit_polynomials=ORBIT_POLYNOMIALS_INVALID ) - self.assertNotIn( - "satellite_actual_longitude", - reader.mda["orbital_parameters"] - ) + assert "satellite_actual_longitude" not in reader.mda["orbital_parameters"] class TestHRITMSGPrologueFileHandler(unittest.TestCase): @@ -337,10 +329,10 @@ def test_reduce(self, reduce_mda): reduce_mda.return_value = "reduced" # Set buffer - self.assertEqual(self.reader.reduce(123), "reduced") + assert self.reader.reduce(123) == "reduced" # Read buffer - self.assertEqual(self.reader.reduce(123), "reduced") + assert self.reader.reduce(123) == "reduced" reduce_mda.assert_called_once() @@ -385,13 +377,13 @@ def test_reduce(self, reduce_mda): reduce_mda.return_value = "reduced" # Set buffer - self.assertEqual(self.reader.reduce(123), "reduced") + assert self.reader.reduce(123) == "reduced" reduce_mda.assert_called() # Read buffer reduce_mda.reset_mock() self.reader._reduced = "red" - self.assertEqual(self.reader.reduce(123), "red") + assert self.reader.reduce(123) == "red" reduce_mda.assert_not_called() diff --git a/satpy/tests/reader_tests/test_seviri_l1b_icare.py b/satpy/tests/reader_tests/test_seviri_l1b_icare.py index 81d385bc89..372611c87d 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_icare.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_icare.py @@ -104,9 +104,9 @@ def compare_areas(self, v): -5570248.6866857, -5537244.2506213, -4670127.7031114)} - self.assertEqual(v.attrs["area"].area_id, test_area["area_id"]) - self.assertEqual(v.attrs["area"].width, test_area["width"]) - self.assertEqual(v.attrs["area"].height, test_area["height"]) + assert v.attrs["area"].area_id == test_area["area_id"] + assert v.attrs["area"].width == test_area["width"] + assert v.attrs["area"].height == test_area["height"] np.testing.assert_almost_equal(v.attrs["area"].area_extent, test_area["area_extent"]) @@ -117,9 +117,9 @@ def test_init(self): "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf", "GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf" ]) - self.assertEqual(len(loadables), 2) + assert len(loadables) == 2 r.create_filehandlers(loadables) - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_dataset_vis(self): """Test loading all datasets from a full swath file.""" @@ -130,11 +130,11 @@ def test_load_dataset_vis(self): ]) r.create_filehandlers(loadables) datasets = r.load(["VIS008"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): dt = 
datetime(2004, 12, 29, 12, 27, 44) - self.assertEqual(v.attrs["end_time"], dt) - self.assertEqual(v.attrs["calibration"], "reflectance") + assert v.attrs["end_time"] == dt + assert v.attrs["calibration"] == "reflectance" def test_load_dataset_ir(self): """Test loading all datasets from a full swath file.""" @@ -144,9 +144,9 @@ def test_load_dataset_ir(self): ]) r.create_filehandlers(loadables) datasets = r.load(["IR_108"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["calibration"], "brightness_temperature") + assert v.attrs["calibration"] == "brightness_temperature" def test_area_def_lores(self): """Test loading all datasets from an area of interest file.""" @@ -157,7 +157,7 @@ def test_area_def_lores(self): r.create_filehandlers(loadables) ds = r.load(["VIS008"]) self.compare_areas(ds["VIS008"]) - self.assertEqual(ds["VIS008"].attrs["area"].proj_id, "msg_lowres") + assert ds["VIS008"].attrs["area"].proj_id == "msg_lowres" def test_area_def_hires(self): """Test loading all datasets from an area of interest file.""" @@ -168,7 +168,7 @@ def test_area_def_hires(self): r.create_filehandlers(loadables) ds = r.load(["HRV"]) self.compare_areas(ds["HRV"]) - self.assertEqual(ds["HRV"].attrs["area"].proj_id, "msg_hires") + assert ds["HRV"].attrs["area"].proj_id == "msg_hires" def test_sensor_names(self): """Check satellite name conversion is correct, including error case.""" @@ -191,7 +191,7 @@ def _run_target(): for sat in sensor_list: file_data["/attr/Sensors"] = sensor_list[sat] plat, sens = _run_target() - self.assertEqual(plat, sat) + assert plat == sat with self.assertRaises(NameError): file_data["/attr/Sensors"] = "BADSAT/NOSENSE" diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index 0130740246..ba7cf63447 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -654,8 +654,8 @@ def prepare_area_definitions(test_dict): @pytest.mark.parametrize( - "actual, expected", - ( + ("actual", "expected"), + [ (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK_FILL)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN)), @@ -676,7 +676,7 @@ def prepare_area_definitions(test_dict): (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI_FILL)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI_FILL)), - ) + ] ) def test_area_definitions(actual, expected): """Test area definitions with only one area.""" @@ -688,11 +688,11 @@ def test_area_definitions(actual, expected): @pytest.mark.parametrize( - "actual, expected", - ( + ("actual", "expected"), + [ (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK)), - ) + ] ) def test_stacked_area_definitions(actual, expected): """Test area definitions with stacked areas.""" @@ -736,12 +736,12 @@ def prepare_is_roi(test_dict): @pytest.mark.parametrize( - "actual, expected", - ( + ("actual", "expected"), + [ (prepare_is_roi(TEST_IS_ROI_FULLDISK)), (prepare_is_roi(TEST_IS_ROI_RAPIDSCAN)), (prepare_is_roi(TEST_IS_ROI_ROI)), - ) + ] ) def test_is_roi(actual, expected): """Test if given area is of area-of-interest.""" @@ -757,21 +757,21 @@ def test_get_available_channels(self): trues = 
("WV_062", "WV_073", "IR_108", "VIS006", "VIS008", "IR_120") for bandname in AVAILABLE_CHANNELS: if bandname in trues: - self.assertTrue(available_chs[bandname]) + assert available_chs[bandname] else: - self.assertFalse(available_chs[bandname]) + assert not available_chs[bandname] available_chs = get_available_channels(TEST2_HEADER_CHNLIST) trues = ("VIS006", "VIS008", "IR_039", "WV_062", "WV_073", "IR_087", "HRV") for bandname in AVAILABLE_CHANNELS: if bandname in trues: - self.assertTrue(available_chs[bandname]) + assert available_chs[bandname] else: - self.assertFalse(available_chs[bandname]) + assert not available_chs[bandname] available_chs = get_available_channels(TEST3_HEADER_CHNLIST) for bandname in AVAILABLE_CHANNELS: - self.assertTrue(available_chs[bandname]) + assert available_chs[bandname] TEST_HEADER_CALIB = { @@ -829,7 +829,7 @@ def file_handler(self): @pytest.mark.parametrize( ("channel", "calibration", "calib_mode", "use_ext_coefs"), - ( + [ # VIS channel, internal coefficients ("VIS006", "counts", "NOMINAL", False), ("VIS006", "radiance", "NOMINAL", False), @@ -855,7 +855,7 @@ def file_handler(self): # HRV channel, external coefficients (mode should have no effect) ("HRV", "radiance", "GSICS", True), ("HRV", "reflectance", "NOMINAL", True), - ) + ] ) def test_calibrate( self, file_handler, counts, channel, calibration, calib_mode, @@ -882,7 +882,7 @@ def test_calibrate( class TestNativeMSGDataset: """Tests for getting the dataset.""" - @pytest.fixture + @pytest.fixture() def file_handler(self): """Create a file handler for testing.""" trailer = { @@ -1123,7 +1123,7 @@ def test_padder_fes_hrv(self): class TestNativeMSGFilenames: """Test identification of Native format filenames.""" - @pytest.fixture + @pytest.fixture() def reader(self): """Return reader for SEVIRI Native format.""" from satpy._config import config_search_paths @@ -1150,11 +1150,11 @@ def test_file_pattern(self, reader): @pytest.mark.parametrize( - "file_content,exp_header_size", - ( + ("file_content", "exp_header_size"), + [ (ASCII_STARTSWITH, 450400), # with ascii header (b"foobar", 445286), # without ascii header - ) + ] ) def test_header_type(file_content, exp_header_size): """Test identification of the file header type.""" @@ -1225,7 +1225,7 @@ def test_header_warning(): @pytest.mark.parametrize( - "starts_with, expected", + ("starts_with", "expected"), [ (ASCII_STARTSWITH, True), (b"this_shall_fail", False) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_nc.py b/satpy/tests/reader_tests/test_seviri_l1b_nc.py index 3f7b1a6296..f6a54aa60e 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_nc.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_nc.py @@ -192,7 +192,7 @@ def _get_fake_dataset(self, counts, h5netcdf): return ds - @pytest.fixture + @pytest.fixture() def h5netcdf(self): """Fixture for xr backend choice.""" return False diff --git a/satpy/tests/reader_tests/test_seviri_l2_grib.py b/satpy/tests/reader_tests/test_seviri_l2_grib.py index a8b5310a78..d57fda4e79 100644 --- a/satpy/tests/reader_tests/test_seviri_l2_grib.py +++ b/satpy/tests/reader_tests/test_seviri_l2_grib.py @@ -95,11 +95,10 @@ def test_data_reading(self, da_, xr_): # Checks the correct file open call mock_file.assert_called_with("test.grib", "rb") # Checks that the dataset has been created as a DataArray object - self.assertEqual(valid_dataset._extract_mock_name(), "xr.DataArray()") + assert valid_dataset._extract_mock_name() == "xr.DataArray()" # Checks that codes_release has been called after each codes_grib_new_from_file 
call
         # (except after the last one which has returned a None)
-        self.assertEqual(self.ec_.codes_grib_new_from_file.call_count,
-                         self.ec_.codes_release.call_count + 1)
+        assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1
         # Restarts the id generator and clears the call history
         fake_gid_generator = (i for i in FAKE_GID)
@@ -110,14 +109,13 @@ def test_data_reading(self, da_, xr_):
         # Checks the correct execution of the get_dataset function with an invalid parameter_number
         invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50})
         # Checks that the function returns None
-        self.assertEqual(invalid_dataset, None)
+        assert invalid_dataset is None
         # Checks that codes_release has been called after each codes_grib_new_from_file call
         # (except after the last one which has returned a None)
-        self.assertEqual(self.ec_.codes_grib_new_from_file.call_count,
-                         self.ec_.codes_release.call_count + 1)
+        assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1
         # Checks the basic data reading
-        self.assertEqual(REPEAT_CYCLE_DURATION, 15)
+        assert REPEAT_CYCLE_DURATION == 15
         # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions
         attributes = self.reader._get_attributes()
@@ -128,19 +126,19 @@ def test_data_reading(self, da_, xr_):
             "sensor": "seviri",
             "platform_name": "Meteosat-11"
         }
-        self.assertEqual(attributes, expected_attributes)
+        assert attributes == expected_attributes
         # Checks the reading of an array from the message
         self.reader._get_xarray_from_msg(0)
         # Checks that dask.array has been called with the correct arguments
         name, args, kwargs = da_.mock_calls[0]
-        self.assertTrue(np.all(args[0] == np.ones((1200, 1000))))
-        self.assertEqual(args[1], CHUNK_SIZE)
+        assert np.all(args[0] == np.ones((1200, 1000)))
+        assert args[1] == CHUNK_SIZE
         # Checks that xarray.DataArray has been called with the correct arguments
         name, args, kwargs = xr_.mock_calls[0]
-        self.assertEqual(kwargs["dims"], ("y", "x"))
+        assert kwargs["dims"] == ("y", "x")
         # Checks the correct execution of the _get_proj_area function
         pdict, area_dict = self.reader._get_proj_area(0)
@@ -156,7 +154,7 @@ def test_data_reading(self, da_, xr_):
             "a_desc": "MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution",
             "p_id": "",
         }
-        self.assertEqual(pdict, expected_pdict)
+        assert pdict == expected_pdict
         expected_area_dict = {
             "center_point": 500,
             "north": 1200,
@@ -164,7 +162,7 @@ def test_data_reading(self, da_, xr_):
             "west": 1000,
             "south": 1,
         }
-        self.assertEqual(area_dict, expected_area_dict)
+        assert area_dict == expected_area_dict
         # Checks the correct execution of the get_area_def function
         with mock.patch("satpy.readers.seviri_l2_grib.calculate_area_extent",
@@ -176,9 +174,9 @@ def test_data_reading(self, da_, xr_):
             expected_args = ({"center_point": 500, "east": 1, "west": 1000, "south": 1, "north": 1200,
                               "column_step": 400., "line_step": 400.},)
             name, args, kwargs = cae.mock_calls[0]
-            self.assertEqual(args, expected_args)
+            assert args == expected_args
             # Asserts that get_area_definition has been called with the correct arguments
             name, args, kwargs = gad.mock_calls[0]
-            self.assertEqual(args[0], expected_pdict)
+            assert args[0] == expected_pdict
             # The second argument must be the return result of calculate_area_extent
-            self.assertEqual(args[1]._extract_mock_name(), "calculate_area_extent()")
+            assert args[1]._extract_mock_name() == "calculate_area_extent()"
diff --git
a/satpy/tests/reader_tests/test_slstr_l1b.py b/satpy/tests/reader_tests/test_slstr_l1b.py index cc0764685f..63a43c9c79 100644 --- a/satpy/tests/reader_tests/test_slstr_l1b.py +++ b/satpy/tests/reader_tests/test_slstr_l1b.py @@ -152,8 +152,8 @@ def test_instantiate(self, bvs_, xr_): assert test.view == "nadir" assert test.stripe == "a" test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) - self.assertEqual(test.start_time, good_start) - self.assertEqual(test.end_time, good_end) + assert test.start_time == good_start + assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() @@ -164,8 +164,8 @@ def test_instantiate(self, bvs_, xr_): assert test.view == "oblique" assert test.stripe == "c" test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) - self.assertEqual(test.start_time, good_start) - self.assertEqual(test.end_time, good_end) + assert test.start_time == good_start + assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() @@ -174,8 +174,8 @@ def test_instantiate(self, bvs_, xr_): "stripe": "a", "view": "n"} test = NCSLSTRGeo("somedir/geometry_an.nc", filename_info, "c") test.get_dataset(ds_id, dict(filename_info, **{"file_key": "latitude_{stripe:1s}{view:1s}"})) - self.assertEqual(test.start_time, good_start) - self.assertEqual(test.end_time, good_end) + assert test.start_time == good_start + assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() @@ -183,15 +183,15 @@ def test_instantiate(self, bvs_, xr_): test.get_dataset(ds_id, dict(filename_info, **{"file_key": "flags_{stripe:1s}{view:1s}"})) assert test.view == "nadir" assert test.stripe == "a" - self.assertEqual(test.start_time, good_start) - self.assertEqual(test.end_time, good_end) + assert test.start_time == good_start + assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() test = NCSLSTRAngles("somedir/S1_radiance_an.nc", filename_info, "c") test.get_dataset(ds_id, dict(filename_info, **{"file_key": "geometry_t{view:1s}"})) - self.assertEqual(test.start_time, good_start) - self.assertEqual(test.end_time, good_end) + assert test.start_time == good_start + assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() test.get_dataset(ds_id_500, dict(filename_info, **{"file_key": "geometry_t{view:1s}"})) @@ -244,7 +244,7 @@ def test_reflectance_calibration(self, da_, xr_): ds_id = make_dataid(name="S5", calibration="reflectance", stripe="a", view="nadir") test = NCSLSTR1B("somedir/S1_radiance_an.nc", filename_info, "c") data = test.get_dataset(ds_id, dict(filename_info, **{"file_key": "S5"})) - self.assertEqual(data.units, "%") + assert data.units == "%" np.testing.assert_allclose(data.values, self.rad * np.pi) def test_cal_rad(self): diff --git a/satpy/tests/reader_tests/test_smos_l2_wind.py b/satpy/tests/reader_tests/test_smos_l2_wind.py index 3303abff17..519030447b 100644 --- a/satpy/tests/reader_tests/test_smos_l2_wind.py +++ b/satpy/tests/reader_tests/test_smos_l2_wind.py @@ -101,10 +101,10 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_wind_speed(self): """Load wind_speed dataset.""" @@ -116,17 +116,17 @@ def 
test_load_wind_speed(self): ]) r.create_filehandlers(loadables) ds = r.load(["wind_speed"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): - self.assertEqual(d.attrs["platform_shortname"], "SM") - self.assertEqual(d.attrs["sensor"], "MIRAS") - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) - self.assertIn("y", d.dims) - self.assertIn("x", d.dims) - self.assertEqual(d.shape, (719, 1440)) - self.assertEqual(d.y[0].data, -89.75) - self.assertEqual(d.y[d.shape[0] - 1].data, 89.75) + assert d.attrs["platform_shortname"] == "SM" + assert d.attrs["sensor"] == "MIRAS" + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert "y" in d.dims + assert "x" in d.dims + assert d.shape == (719, 1440) + assert d.y[0].data == -89.75 + assert d.y[d.shape[0] - 1].data == 89.75 def test_load_lat(self): """Load lat dataset.""" @@ -138,12 +138,12 @@ def test_load_lat(self): ]) r.create_filehandlers(loadables) ds = r.load(["lat"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): - self.assertIn("y", d.dims) - self.assertEqual(d.shape, (719,)) - self.assertEqual(d.data[0], -89.75) - self.assertEqual(d.data[d.shape[0] - 1], 89.75) + assert "y" in d.dims + assert d.shape == (719,) + assert d.data[0] == -89.75 + assert d.data[d.shape[0] - 1] == 89.75 def test_load_lon(self): """Load lon dataset.""" @@ -155,12 +155,12 @@ def test_load_lon(self): ]) r.create_filehandlers(loadables) ds = r.load(["lon"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): - self.assertIn("x", d.dims) - self.assertEqual(d.shape, (1440,)) - self.assertEqual(d.data[0], -180.0) - self.assertEqual(d.data[d.shape[0] - 1], 179.75) + assert "x" in d.dims + assert d.shape == (1440,) + assert d.data[0] == -180.0 + assert d.data[d.shape[0] - 1] == 179.75 def test_adjust_lon(self): """Load adjust longitude dataset.""" @@ -174,7 +174,7 @@ def test_adjust_lon(self): expected = DataArray(np.concatenate((np.arange(0, 180., 0.25), np.arange(-180.0, 0, 0.25))), dims=("lon")) - self.assertEqual(adjusted.data.tolist(), expected.data.tolist()) + assert adjusted.data.tolist() == expected.data.tolist() def test_roll_dataset(self): """Load roll of dataset along the lon coordinate.""" @@ -187,4 +187,4 @@ def test_roll_dataset(self): data = smos_l2_wind_fh._adjust_lon_coord(data) adjusted = smos_l2_wind_fh._roll_dataset_lon_coord(data) expected = np.arange(-180., 180., 0.25) - self.assertEqual(adjusted.data.tolist(), expected.tolist()) + assert adjusted.data.tolist() == expected.tolist() diff --git a/satpy/tests/reader_tests/test_tropomi_l2.py b/satpy/tests/reader_tests/test_tropomi_l2.py index f2b3660089..05d0717538 100644 --- a/satpy/tests/reader_tests/test_tropomi_l2.py +++ b/satpy/tests/reader_tests/test_tropomi_l2.py @@ -122,10 +122,10 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_no2(self): """Load NO2 dataset.""" @@ -137,16 +137,16 @@ def test_load_no2(self): ]) r.create_filehandlers(loadables) ds = r.load(["nitrogen_dioxide_total_column"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): - self.assertEqual(d.attrs["platform_shortname"], "S5P") - self.assertEqual(d.attrs["sensor"], "tropomi") - 
self.assertEqual(d.attrs["time_coverage_start"], datetime(2018, 7, 9, 17, 25, 34))
-            self.assertEqual(d.attrs["time_coverage_end"], datetime(2018, 7, 9, 18, 23, 4))
-            self.assertIn("area", d.attrs)
-            self.assertIsNotNone(d.attrs["area"])
-            self.assertIn("y", d.dims)
-            self.assertIn("x", d.dims)
+            assert d.attrs["platform_shortname"] == "S5P"
+            assert d.attrs["sensor"] == "tropomi"
+            assert d.attrs["time_coverage_start"] == datetime(2018, 7, 9, 17, 25, 34)
+            assert d.attrs["time_coverage_end"] == datetime(2018, 7, 9, 18, 23, 4)
+            assert "area" in d.attrs
+            assert d.attrs["area"] is not None
+            assert "y" in d.dims
+            assert "x" in d.dims

     def test_load_so2(self):
         """Load SO2 dataset."""
@@ -158,13 +158,13 @@ def test_load_so2(self):
         ])
         r.create_filehandlers(loadables)
         ds = r.load(["sulfurdioxide_total_vertical_column"])
-        self.assertEqual(len(ds), 1)
+        assert len(ds) == 1
         for d in ds.values():
-            self.assertEqual(d.attrs["platform_shortname"], "S5P")
-            self.assertIn("area", d.attrs)
-            self.assertIsNotNone(d.attrs["area"])
-            self.assertIn("y", d.dims)
-            self.assertIn("x", d.dims)
+            assert d.attrs["platform_shortname"] == "S5P"
+            assert "area" in d.attrs
+            assert d.attrs["area"] is not None
+            assert "y" in d.dims
+            assert "x" in d.dims

     def test_load_bounds(self):
         """Load bounds dataset."""
@@ -177,12 +177,12 @@ def test_load_bounds(self):
         r.create_filehandlers(loadables)
         keys = ["latitude_bounds", "longitude_bounds"]
         ds = r.load(keys)
-        self.assertEqual(len(ds), 2)
+        assert len(ds) == 2
         for key in keys:
-            self.assertEqual(ds[key].attrs["platform_shortname"], "S5P")
-            self.assertIn("y", ds[key].dims)
-            self.assertIn("x", ds[key].dims)
-            self.assertIn("corner", ds[key].dims)
+            assert ds[key].attrs["platform_shortname"] == "S5P"
+            assert "y" in ds[key].dims
+            assert "x" in ds[key].dims
+            assert "corner" in ds[key].dims
             # check assembled bounds
             left = np.vstack([ds[key][:, :, 0], ds[key][-1:, :, 3]])
             right = np.vstack([ds[key][:, -1:, 1], ds[key][-1:, -1:, 2]])
@@ -191,13 +191,11 @@ def test_load_bounds(self):
                 dims=("y", "x")
             )
             dest.attrs = ds[key].attrs
-            self.assertEqual(dest.attrs["platform_shortname"], "S5P")
-            self.assertIn("y", dest.dims)
-            self.assertIn("x", dest.dims)
-            self.assertEqual(DEFAULT_FILE_SHAPE[0] + 1, dest.shape[0])
-            self.assertEqual(DEFAULT_FILE_SHAPE[1] + 1, dest.shape[1])
-            self.assertIsNone(np.testing.assert_array_equal(dest[:-1, :-1], ds[key][:, :, 0]))
-            self.assertIsNone(np.testing.assert_array_equal(dest[-1, :-1], ds[key][-1, :, 3]))
-            self.assertIsNone(np.testing.assert_array_equal(dest[:, -1],
-                                                            np.append(ds[key][:, -1, 1], ds[key][-1:, -1:, 2]))
-                              )
+            assert dest.attrs["platform_shortname"] == "S5P"
+            assert "y" in dest.dims
+            assert "x" in dest.dims
+            assert DEFAULT_FILE_SHAPE[0] + 1 == dest.shape[0]
+            assert DEFAULT_FILE_SHAPE[1] + 1 == dest.shape[1]
+            np.testing.assert_array_equal(dest[:-1, :-1], ds[key][:, :, 0])
+            np.testing.assert_array_equal(dest[-1, :-1], ds[key][-1, :, 3])
+            np.testing.assert_array_equal(dest[:, -1], np.append(ds[key][:, -1, 1], ds[key][-1:, -1:, 2]))
diff --git a/satpy/tests/reader_tests/test_utils.py b/satpy/tests/reader_tests/test_utils.py
index 12af80ca2a..9deaf1facc 100644
--- a/satpy/tests/reader_tests/test_utils.py
+++ b/satpy/tests/reader_tests/test_utils.py
@@ -162,24 +162,24 @@ def test_geostationary_mask(self):
         # Check results along a couple of lines
         # a) Horizontal
-        self.assertTrue(np.all(mask[50, :8] == 0))
-        self.assertTrue(np.all(mask[50, 8:93] == 1))
-        self.assertTrue(np.all(mask[50, 93:] == 0))
+        assert
np.all(mask[50, :8] == 0) + assert np.all(mask[50, 8:93] == 1) + assert np.all(mask[50, 93:] == 0) # b) Vertical - self.assertTrue(np.all(mask[:31, 50] == 0)) - self.assertTrue(np.all(mask[31:70, 50] == 1)) - self.assertTrue(np.all(mask[70:, 50] == 0)) + assert np.all(mask[:31, 50] == 0) + assert np.all(mask[31:70, 50] == 1) + assert np.all(mask[70:, 50] == 0) # c) Top left to bottom right - self.assertTrue(np.all(mask[range(33), range(33)] == 0)) - self.assertTrue(np.all(mask[range(33, 68), range(33, 68)] == 1)) - self.assertTrue(np.all(mask[range(68, 101), range(68, 101)] == 0)) + assert np.all(mask[range(33), range(33)] == 0) + assert np.all(mask[range(33, 68), range(33, 68)] == 1) + assert np.all(mask[range(68, 101), range(68, 101)] == 0) # d) Bottom left to top right - self.assertTrue(np.all(mask[range(101-1, 68-1, -1), range(33)] == 0)) - self.assertTrue(np.all(mask[range(68-1, 33-1, -1), range(33, 68)] == 1)) - self.assertTrue(np.all(mask[range(33-1, -1, -1), range(68, 101)] == 0)) + assert np.all(mask[range(101 - 1, 68 - 1, -1), range(33)] == 0) + assert np.all(mask[range(68 - 1, 33 - 1, -1), range(33, 68)] == 1) + assert np.all(mask[range(33 - 1, -1, -1), range(68, 101)] == 0) @mock.patch("satpy.readers.utils.AreaDefinition") def test_sub_area(self, adef): @@ -203,15 +203,15 @@ def test_np2str(self): """Test the np2str function.""" # byte object npstring = np.string_("hej") - self.assertEqual(hf.np2str(npstring), "hej") + assert hf.np2str(npstring) == "hej" # single element numpy array np_arr = np.array([npstring]) - self.assertEqual(hf.np2str(np_arr), "hej") + assert hf.np2str(np_arr) == "hej" # scalar numpy array np_arr = np.array(npstring) - self.assertEqual(hf.np2str(np_arr), "hej") + assert hf.np2str(np_arr) == "hej" # multi-element array npstring = np.array([npstring, npstring]) @@ -236,10 +236,10 @@ def re(lat): return n * np.sqrt((1 - e2)**2 * np.sin(lat)**2 + np.cos(lat)**2) for lon in (0, 180, 270): - self.assertEqual(hf.get_earth_radius(lon=lon, lat=0., a=a, b=b), a) + assert hf.get_earth_radius(lon=lon, lat=0.0, a=a, b=b) == a for lat in (90, -90): - self.assertEqual(hf.get_earth_radius(lon=0., lat=lat, a=a, b=b), b) - self.assertTrue(np.isclose(hf.get_earth_radius(lon=123, lat=45., a=a, b=b), re(45.))) + assert hf.get_earth_radius(lon=0.0, lat=lat, a=a, b=b) == b + assert np.isclose(hf.get_earth_radius(lon=123, lat=45.0, a=a, b=b), re(45.0)) def test_reduce_mda(self): """Test metadata size reduction.""" @@ -261,9 +261,9 @@ def test_reduce_mda(self): numpy.testing.assert_equal(hf.reduce_mda(mda, max_size=3), exp) # Make sure, reduce_mda() doesn't modify the original dictionary - self.assertIn("c", mda) - self.assertIn("c", mda["d"]) - self.assertIn("c", mda["d"]["d"]) + assert "c" in mda + assert "c" in mda["d"] + assert "c" in mda["d"]["d"] @mock.patch("satpy.readers.utils.bz2.BZ2File") @mock.patch("satpy.readers.utils.Popen") @@ -389,7 +389,7 @@ def test_pro_reading_gets_unzipped_file(self, fake_unzip_file, fake_remove): expected_filename = filename[:-4] with hf.unzip_context(filename) as new_filename: - self.assertEqual(new_filename, expected_filename) + assert new_filename == expected_filename fake_unzip_file.assert_called_with(filename) fake_remove.assert_called_with(expected_filename) @@ -409,14 +409,14 @@ def test_get_user_calibration_factors(self): "off": -0.0556}} # Test that correct values are returned from the dict slope, offset = hf.get_user_calibration_factors("WV063", radcor_dict) - self.assertEqual(slope, 1.015) - self.assertEqual(offset, -0.0556) + 
assert slope == 1.015 + assert offset == -0.0556 # Test that channels not present in dict return 1.0, 0.0 with self.assertWarns(UserWarning): slope, offset = hf.get_user_calibration_factors("IR097", radcor_dict) - self.assertEqual(slope, 1.) - self.assertEqual(offset, 0.) + assert slope == 1.0 + assert offset == 0.0 # Check that incorrect dict keys throw an error with self.assertRaises(KeyError): @@ -486,7 +486,7 @@ def test_remove_sunearth_corr(self): assert isinstance(out_refl.data, da.Array) -@pytest.mark.parametrize("data, filename, mode", +@pytest.mark.parametrize(("data", "filename", "mode"), [(b"Hello", "dummy.dat", "b"), ("Hello", "dummy.txt", "t")]) def test_generic_open_binary(tmp_path, data, filename, mode): diff --git a/satpy/tests/reader_tests/test_vii_base_nc.py b/satpy/tests/reader_tests/test_vii_base_nc.py index 4f5dbcd141..82c0e6a4e1 100644 --- a/satpy/tests/reader_tests/test_vii_base_nc.py +++ b/satpy/tests/reader_tests/test_vii_base_nc.py @@ -158,15 +158,15 @@ def test_file_reading(self): # Checks that the basic functionalities are correctly executed expected_start_time = datetime.datetime(year=2017, month=9, day=20, hour=17, minute=30, second=40, microsecond=888000) - self.assertEqual(self.reader.start_time, expected_start_time) + assert self.reader.start_time == expected_start_time expected_end_time = datetime.datetime(year=2017, month=9, day=20, hour=17, minute=41, second=17, microsecond=555000) - self.assertEqual(self.reader.end_time, expected_end_time) + assert self.reader.end_time == expected_end_time - self.assertEqual(self.reader.spacecraft_name, "test_spacecraft") - self.assertEqual(self.reader.sensor, "test_instrument") - self.assertEqual(self.reader.ssp_lon, None) + assert self.reader.spacecraft_name == "test_spacecraft" + assert self.reader.sensor == "test_instrument" + assert self.reader.ssp_lon is None # Checks that the global attributes are correctly read expected_global_attributes = { @@ -195,7 +195,7 @@ def test_file_reading(self): # Since the global_attributes dictionary contains numpy arrays, # it is not possible to peform a simple equality test # Must iterate on all keys to confirm that the dictionaries are equal - self.assertEqual(global_attributes.keys(), expected_global_attributes.keys()) + assert global_attributes.keys() == expected_global_attributes.keys() for key in expected_global_attributes: if key not in ["quality_group"]: # Quality check must be valid for both iterable and not iterable elements @@ -203,16 +203,16 @@ def test_file_reading(self): equal = all(global_attributes[key] == expected_global_attributes[key]) except (TypeError, ValueError): equal = global_attributes[key] == expected_global_attributes[key] - self.assertTrue(equal) + assert equal else: - self.assertEqual(global_attributes[key].keys(), expected_global_attributes[key].keys()) + assert global_attributes[key].keys() == expected_global_attributes[key].keys() for inner_key in global_attributes[key]: # Equality check must be valid for both iterable and not iterable elements try: equal = all(global_attributes[key][inner_key] == expected_global_attributes[key][inner_key]) except (TypeError, ValueError): equal = global_attributes[key][inner_key] == expected_global_attributes[key][inner_key] - self.assertTrue(equal) + assert equal @mock.patch("satpy.readers.vii_base_nc.tie_points_interpolation") @mock.patch("satpy.readers.vii_base_nc.tie_points_geo_interpolation") @@ -242,10 +242,10 @@ def test_functions(self, tpgi_, tpi_): return_value = self.reader._perform_interpolation(variable) 
tpi_.assert_called_with([variable], SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR) - self.assertTrue(np.allclose(return_value, np.ones((10, 100)))) - self.assertEqual(return_value.attrs, {"key_1": "value_1", "key_2": "value_2"}) - self.assertEqual(return_value.name, "test_name") - self.assertEqual(return_value.dims, ("num_pixels", "num_lines")) + assert np.allclose(return_value, np.ones((10, 100))) + assert return_value.attrs == {"key_1": "value_1", "key_2": "value_2"} + assert return_value.name == "test_name" + assert return_value.dims == ("num_pixels", "num_lines") # Checks that the _perform_geo_interpolation function is correctly executed variable_lon = xr.DataArray( @@ -282,15 +282,15 @@ def test_functions(self, tpgi_, tpi_): tpgi_.assert_called_with(variable_lon, variable_lat, SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR) - self.assertTrue(np.allclose(return_lon, np.ones((10, 100)))) - self.assertEqual(return_lon.attrs, {"key_1": "value_lon_1", "key_2": "value_lon_2"}) - self.assertEqual(return_lon.name, "test_lon") - self.assertEqual(return_lon.dims, ("num_pixels", "num_lines")) + assert np.allclose(return_lon, np.ones((10, 100))) + assert return_lon.attrs == {"key_1": "value_lon_1", "key_2": "value_lon_2"} + assert return_lon.name == "test_lon" + assert return_lon.dims == ("num_pixels", "num_lines") - self.assertTrue(np.allclose(return_lat, 6 * np.ones((10, 100)))) - self.assertEqual(return_lat.attrs, {"key_1": "value_lat_1", "key_2": "value_lat_2"}) - self.assertEqual(return_lat.name, "test_lat") - self.assertEqual(return_lat.dims, ("num_pixels", "num_lines")) + assert np.allclose(return_lat, 6 * np.ones((10, 100))) + assert return_lat.attrs == {"key_1": "value_lat_1", "key_2": "value_lat_2"} + assert return_lat.name == "test_lat" + assert return_lat.dims == ("num_pixels", "num_lines") def test_standardize_dims(self): """Test the standardize dims function.""" @@ -304,9 +304,9 @@ def test_standardize_dims(self): data=np.ones((10, 100)) * 1. 
) out_variable = self.reader._standardize_dims(test_variable) - self.assertTrue(np.allclose(out_variable.values, np.ones((100, 10)))) - self.assertEqual(out_variable.dims, ("y", "x")) - self.assertEqual(out_variable.attrs["key_1"], "value_lat_1") + assert np.allclose(out_variable.values, np.ones((100, 10))) + assert out_variable.dims == ("y", "x") + assert out_variable.attrs["key_1"] == "value_lat_1" @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_calibration") @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_interpolation") @@ -320,10 +320,10 @@ def test_dataset(self, po_, pi_, pc_): pi_.assert_not_called() po_.assert_not_called() - self.assertTrue(np.allclose(variable.values, np.ones((100, 10)))) - self.assertEqual(variable.dims, ("y", "x")) - self.assertEqual(variable.attrs["test_attr"], "attr") - self.assertEqual(variable.attrs["units"], None) + assert np.allclose(variable.values, np.ones((100, 10))) + assert variable.dims == ("y", "x") + assert variable.attrs["test_attr"] == "attr" + assert variable.attrs["units"] is None # Checks the correct execution of the get_dataset function with a valid file_key # and required calibration and interpolation @@ -346,7 +346,7 @@ def test_dataset(self, po_, pi_, pc_): # Checks the correct execution of the get_dataset function with an invalid file_key invalid_dataset = self.reader.get_dataset(None, {"file_key": "test_invalid", "calibration": None}) # Checks that the function returns None - self.assertEqual(invalid_dataset, None) + assert invalid_dataset is None pc_.reset_mock() pi_.reset_mock() @@ -358,12 +358,12 @@ def test_dataset(self, po_, pi_, pc_): "interpolate": True}) pc_.assert_not_called() pi_.assert_not_called() - self.assertEqual(longitude[0, 0], 1.) + assert longitude[0, 0] == 1.0 # Checks the correct execution of the get_dataset function with a 'cached_latitude' file_key latitude = self.reader.get_dataset(None, {"file_key": "cached_latitude", "calibration": None}) - self.assertEqual(latitude[0, 0], 2.) + assert latitude[0, 0] == 2.0 # Repeats some check with the reader where orthorectification and interpolation are inhibited # by means of the filetype_info flags @@ -392,7 +392,7 @@ def test_dataset(self, po_, pi_, pc_): # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key longitude = self.reader_2.get_dataset(None, {"file_key": "cached_longitude", "calibration": None}) - self.assertEqual(longitude[0, 0], 100.) 
+ assert longitude[0, 0] == 100.0 # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key # in a reader without defined longitude @@ -400,4 +400,4 @@ def test_dataset(self, po_, pi_, pc_): "calibration": "reflectance", "interpolate": True}) # Checks that the function returns None - self.assertEqual(longitude, None) + assert longitude is None diff --git a/satpy/tests/reader_tests/test_vii_l1b_nc.py b/satpy/tests/reader_tests/test_vii_l1b_nc.py index d62673d9f7..d9ee714d09 100644 --- a/satpy/tests/reader_tests/test_vii_l1b_nc.py +++ b/satpy/tests/reader_tests/test_vii_l1b_nc.py @@ -115,14 +115,14 @@ def test_calibration_functions(self): bt = self.reader._calibrate_bt(radiance, cw, a, b) expected_bt = np.array([[675.04993213, 753.10301462, 894.93149648], [963.20401882, 1048.95086402, 1270.95546218]]) - self.assertTrue(np.allclose(bt, expected_bt)) + assert np.allclose(bt, expected_bt) angle_factor = 0.4 isi = 2.0 refl = self.reader._calibrate_refl(radiance, angle_factor, isi) expected_refl = np.array([[62.8318531, 125.6637061, 314.1592654], [439.8229715, 628.3185307, 1256.637061]]) - self.assertTrue(np.allclose(refl, expected_refl)) + assert np.allclose(refl, expected_refl) def test_functions(self): """Test the functions.""" @@ -139,12 +139,12 @@ def test_functions(self): orthorect_variable = self.reader._perform_orthorectification(variable, "data/measurement_data/delta_lat") expected_values = np.degrees(np.ones((600, 72)) / MEAN_EARTH_RADIUS) + np.ones((600, 72)) - self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) + assert np.allclose(orthorect_variable.values, expected_values) # Checks that the _perform_calibration function is correctly executed in all cases # radiance calibration: return value is simply a copy of the variable return_variable = self.reader._perform_calibration(variable, {"calibration": "radiance"}) - self.assertTrue(np.all(return_variable == variable)) + assert np.all(return_variable == variable) # invalid calibration: raises a ValueError with self.assertRaises(ValueError): @@ -156,7 +156,7 @@ def test_functions(self): {"calibration": "brightness_temperature", "chan_thermal_index": 3}) expected_values = np.full((600, 72), 1101.10413712) - self.assertTrue(np.allclose(calibrated_variable.values, expected_values)) + assert np.allclose(calibrated_variable.values, expected_values) # reflectance calibration: checks that the return value is correct calibrated_variable = self.reader._perform_calibration(variable, @@ -164,4 +164,4 @@ def test_functions(self): "wavelength": [0.658, 0.668, 0.678], "chan_solar_index": 2}) expected_values = np.full((600, 72), 173.3181982) - self.assertTrue(np.allclose(calibrated_variable.values, expected_values)) + assert np.allclose(calibrated_variable.values, expected_values) diff --git a/satpy/tests/reader_tests/test_vii_l2_nc.py b/satpy/tests/reader_tests/test_vii_l2_nc.py index 772f783684..8348470d0f 100644 --- a/satpy/tests/reader_tests/test_vii_l2_nc.py +++ b/satpy/tests/reader_tests/test_vii_l2_nc.py @@ -93,5 +93,5 @@ def test_functions(self): orthorect_variable = self.reader._perform_orthorectification(variable, "data/measurement_data/delta_lat") expected_values = 1.1 * np.ones((10, 100)) - self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) - self.assertEqual(orthorect_variable.attrs["key_1"], "value_1") + assert np.allclose(orthorect_variable.values, expected_values) + assert orthorect_variable.attrs["key_1"] == "value_1" diff --git 
a/satpy/tests/reader_tests/test_vii_utils.py b/satpy/tests/reader_tests/test_vii_utils.py index ab90833887..8d9402e926 100644 --- a/satpy/tests/reader_tests/test_vii_utils.py +++ b/satpy/tests/reader_tests/test_vii_utils.py @@ -36,8 +36,8 @@ class TestViiUtils(unittest.TestCase): def test_constants(self): """Test the constant values.""" # Test the value of the constants - self.assertEqual(satpy.readers.vii_utils.C1, C1) - self.assertEqual(satpy.readers.vii_utils.C2, C2) - self.assertEqual(satpy.readers.vii_utils.TIE_POINTS_FACTOR, TIE_POINTS_FACTOR) - self.assertEqual(satpy.readers.vii_utils.SCAN_ALT_TIE_POINTS, SCAN_ALT_TIE_POINTS) - self.assertEqual(satpy.readers.vii_utils.MEAN_EARTH_RADIUS, MEAN_EARTH_RADIUS) + assert satpy.readers.vii_utils.C1 == C1 + assert satpy.readers.vii_utils.C2 == C2 + assert satpy.readers.vii_utils.TIE_POINTS_FACTOR == TIE_POINTS_FACTOR + assert satpy.readers.vii_utils.SCAN_ALT_TIE_POINTS == SCAN_ALT_TIE_POINTS + assert satpy.readers.vii_utils.MEAN_EARTH_RADIUS == MEAN_EARTH_RADIUS diff --git a/satpy/tests/reader_tests/test_vii_wv_nc.py b/satpy/tests/reader_tests/test_vii_wv_nc.py index 9d43f1ded1..63c5604187 100644 --- a/satpy/tests/reader_tests/test_vii_wv_nc.py +++ b/satpy/tests/reader_tests/test_vii_wv_nc.py @@ -93,5 +93,5 @@ def test_functions(self): orthorect_variable = self.reader._perform_orthorectification(variable, "data/measurement_data/delta_lat") expected_values = 1.1 * np.ones((10, 100)) - self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) - self.assertEqual(orthorect_variable.attrs["key_1"], "value_1") + assert np.allclose(orthorect_variable.values, expected_values) + assert orthorect_variable.attrs["key_1"] == "value_1" diff --git a/satpy/tests/reader_tests/test_viirs_atms_utils.py b/satpy/tests/reader_tests/test_viirs_atms_utils.py index cdcc0b9361..cb388a5cab 100644 --- a/satpy/tests/reader_tests/test_viirs_atms_utils.py +++ b/satpy/tests/reader_tests/test_viirs_atms_utils.py @@ -49,12 +49,9 @@ def test_get_scale_factors_for_units_unsupported_units(): factors = xr.DataArray(da.from_array(DEFAULT_FILE_FACTORS, chunks=1)) file_units = "unknown unit" output_units = "%" - with pytest.raises(ValueError) as exec_info: + with pytest.raises(ValueError, match="Don't know how to convert 'unknown unit' to '%'"): _ = _get_scale_factors_for_units(factors, file_units, output_units) - expected = "Don't know how to convert 'unknown unit' to '%'" - assert str(exec_info.value) == expected - def test_get_scale_factors_for_units_reflectances(caplog): """Test get scale factors for units, when variable is supposed to be a reflectance.""" diff --git a/satpy/tests/reader_tests/test_viirs_compact.py b/satpy/tests/reader_tests/test_viirs_compact.py index 006cdfe968..ba8fa6f312 100644 --- a/satpy/tests/reader_tests/test_viirs_compact.py +++ b/satpy/tests/reader_tests/test_viirs_compact.py @@ -30,7 +30,7 @@ # - tmp_path -@pytest.fixture +@pytest.fixture() def fake_dnb(): """Create fake DNB content.""" fake_dnb = { @@ -2418,7 +2418,7 @@ def fake_dnb(): return fake_dnb -@pytest.fixture +@pytest.fixture() def fake_dnb_file(fake_dnb, tmp_path): """Create an hdf5 file in viirs_compact format with DNB data in it.""" filename = tmp_path / "SVDNBC_j01_d20191025_t0611251_e0612478_b10015_c20191025062459000870_eum_ops.h5" @@ -2435,7 +2435,7 @@ class TestCompact: """Test class for reading compact viirs format.""" @pytest.fixture(autouse=True) - def setup_method(self, fake_dnb_file): + def _setup_method(self, fake_dnb_file): """Create a fake file from 
scratch.""" self.filename = fake_dnb_file self.client = None diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index da6dc9a55b..9b13f384e2 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -33,7 +33,7 @@ import pytest import xarray as xr from pyresample import SwathDefinition -from pytest import TempPathFactory +from pytest import TempPathFactory # noqa: PT013 from pytest_lazyfixture import lazy_fixture I_COLS = 6400 diff --git a/satpy/tests/reader_tests/test_viirs_edr_active_fires.py b/satpy/tests/reader_tests/test_viirs_edr_active_fires.py index de55a9c20c..7063814c34 100644 --- a/satpy/tests/reader_tests/test_viirs_edr_active_fires.py +++ b/satpy/tests/reader_tests/test_viirs_edr_active_fires.py @@ -179,9 +179,9 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_dataset(self): """Test loading all datasets.""" @@ -192,23 +192,23 @@ def test_load_dataset(self): ]) r.create_filehandlers(loadables) datasets = r.load(["confidence_pct"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "%") - self.assertEqual(v.attrs["_FillValue"], 255) - self.assertTrue(np.issubdtype(v.dtype, DEFAULT_DETECTION_FILE_DTYPE)) + assert v.attrs["units"] == "%" + assert v.attrs["_FillValue"] == 255 + assert np.issubdtype(v.dtype, DEFAULT_DETECTION_FILE_DTYPE) datasets = r.load(["T13"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "K") + assert v.attrs["units"] == "K" datasets = r.load(["power"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "MW") - self.assertEqual(v.attrs["platform_name"], "NOAA-21") - self.assertEqual(v.attrs["sensor"], "viirs") + assert v.attrs["units"] == "MW" + assert v.attrs["platform_name"] == "NOAA-21" + assert v.attrs["sensor"] == "viirs" class TestImgVIIRSActiveFiresNetCDF4(unittest.TestCase): @@ -236,9 +236,9 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_dataset(self): """Test loading all datasets.""" @@ -249,23 +249,23 @@ def test_load_dataset(self): ]) r.create_filehandlers(loadables) datasets = r.load(["confidence_cat"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "1") - self.assertEqual(v.attrs["flag_meanings"], ["low", "medium", "high"]) - self.assertEqual(v.attrs["flag_values"], [7, 8, 9]) + assert v.attrs["units"] == "1" + assert v.attrs["flag_meanings"] == ["low", "medium", "high"] + assert v.attrs["flag_values"] == [7, 8, 9] datasets = r.load(["T4"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "K") + assert v.attrs["units"] == "K" datasets = r.load(["power"]) - self.assertEqual(len(datasets), 1) + assert 
len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "MW") - self.assertEqual(v.attrs["platform_name"], "Suomi-NPP") - self.assertEqual(v.attrs["sensor"], "viirs") + assert v.attrs["units"] == "MW" + assert v.attrs["platform_name"] == "Suomi-NPP" + assert v.attrs["sensor"] == "viirs" @mock.patch("satpy.readers.viirs_edr_active_fires.dd.read_csv") @@ -294,9 +294,9 @@ def test_init(self, mock_obj): loadables = r.select_files_from_pathnames([ "AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_dataset(self, csv_mock): """Test loading all datasets.""" @@ -307,21 +307,21 @@ def test_load_dataset(self, csv_mock): ]) r.create_filehandlers(loadables) datasets = r.load(["confidence_pct"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "%") + assert v.attrs["units"] == "%" datasets = r.load(["T13"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "K") + assert v.attrs["units"] == "K" datasets = r.load(["power"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "MW") - self.assertEqual(v.attrs["platform_name"], "NOAA-20") - self.assertEqual(v.attrs["sensor"], "VIIRS") + assert v.attrs["units"] == "MW" + assert v.attrs["platform_name"] == "NOAA-20" + assert v.attrs["sensor"] == "VIIRS" @mock.patch("satpy.readers.viirs_edr_active_fires.dd.read_csv") @@ -350,9 +350,9 @@ def test_init(self, mock_obj): loadables = r.select_files_from_pathnames([ "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_dataset(self, mock_obj): """Test loading all datasets.""" @@ -363,20 +363,20 @@ def test_load_dataset(self, mock_obj): ]) r.create_filehandlers(loadables) datasets = r.load(["confidence_cat"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "1") - self.assertEqual(v.attrs["flag_meanings"], ["low", "medium", "high"]) - self.assertEqual(v.attrs["flag_values"], [7, 8, 9]) + assert v.attrs["units"] == "1" + assert v.attrs["flag_meanings"] == ["low", "medium", "high"] + assert v.attrs["flag_values"] == [7, 8, 9] datasets = r.load(["T4"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "K") + assert v.attrs["units"] == "K" datasets = r.load(["power"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "MW") - self.assertEqual(v.attrs["platform_name"], "Suomi-NPP") - self.assertEqual(v.attrs["sensor"], "VIIRS") + assert v.attrs["units"] == "MW" + assert v.attrs["platform_name"] == "Suomi-NPP" + assert v.attrs["sensor"] == "VIIRS" diff --git a/satpy/tests/reader_tests/test_viirs_edr_flood.py b/satpy/tests/reader_tests/test_viirs_edr_flood.py index 0141259784..b7bc9f0319 100644 --- a/satpy/tests/reader_tests/test_viirs_edr_flood.py +++ b/satpy/tests/reader_tests/test_viirs_edr_flood.py @@ -95,9 +95,9 @@ 
def test_init(self): loadables = r.select_files_from_pathnames([ "WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf" ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_dataset(self): """Test loading all datasets from a full swath file.""" @@ -108,9 +108,9 @@ def test_load_dataset(self): ]) r.create_filehandlers(loadables) datasets = r.load(["WaterDetection"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "none") + assert v.attrs["units"] == "none" def test_load_dataset_aoi(self): """Test loading all datasets from an area of interest file.""" @@ -121,6 +121,6 @@ def test_load_dataset_aoi(self): ]) r.create_filehandlers(loadables) datasets = r.load(["WaterDetection"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "none") + assert v.attrs["units"] == "none" diff --git a/satpy/tests/reader_tests/test_viirs_sdr.py b/satpy/tests/reader_tests/test_viirs_sdr.py index fecd9a0b0f..952224daaf 100644 --- a/satpy/tests/reader_tests/test_viirs_sdr.py +++ b/satpy/tests/reader_tests/test_viirs_sdr.py @@ -282,40 +282,40 @@ class TestVIIRSSDRReader(unittest.TestCase): yaml_file = "viirs_sdr.yaml" def _assert_reflectance_properties(self, data_arr, num_scans=16, with_area=True): - self.assertTrue(np.issubdtype(data_arr.dtype, np.float32)) - self.assertEqual(data_arr.attrs["calibration"], "reflectance") - self.assertEqual(data_arr.attrs["units"], "%") - self.assertEqual(data_arr.attrs["rows_per_scan"], num_scans) + assert np.issubdtype(data_arr.dtype, np.float32) + assert data_arr.attrs["calibration"] == "reflectance" + assert data_arr.attrs["units"] == "%" + assert data_arr.attrs["rows_per_scan"] == num_scans if with_area: - self.assertIn("area", data_arr.attrs) - self.assertIsNotNone(data_arr.attrs["area"]) - self.assertEqual(data_arr.attrs["area"].shape, data_arr.shape) + assert "area" in data_arr.attrs + assert data_arr.attrs["area"] is not None + assert data_arr.attrs["area"].shape == data_arr.shape else: - self.assertNotIn("area", data_arr.attrs) + assert "area" not in data_arr.attrs def _assert_bt_properties(self, data_arr, num_scans=16, with_area=True): - self.assertTrue(np.issubdtype(data_arr.dtype, np.float32)) - self.assertEqual(data_arr.attrs["calibration"], "brightness_temperature") - self.assertEqual(data_arr.attrs["units"], "K") - self.assertEqual(data_arr.attrs["rows_per_scan"], num_scans) + assert np.issubdtype(data_arr.dtype, np.float32) + assert data_arr.attrs["calibration"] == "brightness_temperature" + assert data_arr.attrs["units"] == "K" + assert data_arr.attrs["rows_per_scan"] == num_scans if with_area: - self.assertIn("area", data_arr.attrs) - self.assertIsNotNone(data_arr.attrs["area"]) - self.assertEqual(data_arr.attrs["area"].shape, data_arr.shape) + assert "area" in data_arr.attrs + assert data_arr.attrs["area"] is not None + assert data_arr.attrs["area"].shape == data_arr.shape else: - self.assertNotIn("area", data_arr.attrs) + assert "area" not in data_arr.attrs def _assert_dnb_radiance_properties(self, data_arr, with_area=True): - self.assertTrue(np.issubdtype(data_arr.dtype, np.float32)) - self.assertEqual(data_arr.attrs["calibration"], "radiance") - self.assertEqual(data_arr.attrs["units"], "W m-2 sr-1") - self.assertEqual(data_arr.attrs["rows_per_scan"], 
16) + assert np.issubdtype(data_arr.dtype, np.float32) + assert data_arr.attrs["calibration"] == "radiance" + assert data_arr.attrs["units"] == "W m-2 sr-1" + assert data_arr.attrs["rows_per_scan"] == 16 if with_area: - self.assertIn("area", data_arr.attrs) - self.assertIsNotNone(data_arr.attrs["area"]) - self.assertEqual(data_arr.attrs["area"].shape, data_arr.shape) + assert "area" in data_arr.attrs + assert data_arr.attrs["area"] is not None + assert data_arr.attrs["area"].shape == data_arr.shape else: - self.assertNotIn("area", data_arr.attrs) + assert "area" not in data_arr.attrs def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" @@ -338,21 +338,19 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_init_start_time_is_nodate(self): """Test basic init with start_time being set to the no-date 1/1-1958.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with pytest.raises(ValueError) as exec_info: + with pytest.raises(ValueError, match="Datetime invalid 1958-01-01 00:00:00"): _ = r.create_filehandlers([ "SVI01_npp_d19580101_t0000000_e0001261_b01708_c20120226002130255476_noaa_ops.h5", ]) - expected = "Datetime invalid 1958-01-01 00:00:00" - assert str(exec_info.value) == expected def test_init_start_time_beyond(self): """Test basic init with start_time after the provided files.""" @@ -366,7 +364,7 @@ def test_init_start_time_beyond(self): fhs = r.create_filehandlers([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) - self.assertEqual(len(fhs), 0) + assert len(fhs) == 0 def test_init_end_time_beyond(self): """Test basic init with end_time before the provided files.""" @@ -380,7 +378,7 @@ def test_init_end_time_beyond(self): fhs = r.create_filehandlers([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) - self.assertEqual(len(fhs), 0) + assert len(fhs) == 0 def test_init_start_end_time(self): """Test basic init with end_time before the provided files.""" @@ -396,10 +394,10 @@ def test_init_start_end_time(self): loadables = r.select_files_from_pathnames([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_all_m_reflectances_no_geo(self): """Load all M band reflectances with no geo files provided.""" @@ -431,7 +429,7 @@ def test_load_all_m_reflectances_no_geo(self): "M10", "M11", ]) - self.assertEqual(len(ds), 11) + assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=False) @@ -467,7 +465,7 @@ def test_load_all_m_reflectances_find_geo(self): "M11", ]) - self.assertEqual(len(ds), 11) + assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) @@ -503,13 +501,13 @@ def test_load_all_m_reflectances_provided_geo(self): "M10", "M11", ]) - self.assertEqual(len(ds), 11) + assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) - self.assertEqual(d.attrs["area"].lons.min(), 5) - self.assertEqual(d.attrs["area"].lats.min(), 45) - 
self.assertEqual(d.attrs["area"].lons.attrs["rows_per_scan"], 16) - self.assertEqual(d.attrs["area"].lats.attrs["rows_per_scan"], 16) + assert d.attrs["area"].lons.min() == 5 + assert d.attrs["area"].lats.min() == 45 + assert d.attrs["area"].lons.attrs["rows_per_scan"] == 16 + assert d.attrs["area"].lats.attrs["rows_per_scan"] == 16 def test_load_all_m_reflectances_use_nontc(self): """Load all M band reflectances but use non-TC geolocation.""" @@ -544,13 +542,13 @@ def test_load_all_m_reflectances_use_nontc(self): "M10", "M11", ]) - self.assertEqual(len(ds), 11) + assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) - self.assertEqual(d.attrs["area"].lons.min(), 15) - self.assertEqual(d.attrs["area"].lats.min(), 55) - self.assertEqual(d.attrs["area"].lons.attrs["rows_per_scan"], 16) - self.assertEqual(d.attrs["area"].lats.attrs["rows_per_scan"], 16) + assert d.attrs["area"].lons.min() == 15 + assert d.attrs["area"].lats.min() == 55 + assert d.attrs["area"].lons.attrs["rows_per_scan"] == 16 + assert d.attrs["area"].lats.attrs["rows_per_scan"] == 16 def test_load_all_m_reflectances_use_nontc2(self): """Load all M band reflectances but use non-TC geolocation because TC isn't available.""" @@ -584,13 +582,13 @@ def test_load_all_m_reflectances_use_nontc2(self): "M10", "M11", ]) - self.assertEqual(len(ds), 11) + assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) - self.assertEqual(d.attrs["area"].lons.min(), 15) - self.assertEqual(d.attrs["area"].lats.min(), 55) - self.assertEqual(d.attrs["area"].lons.attrs["rows_per_scan"], 16) - self.assertEqual(d.attrs["area"].lats.attrs["rows_per_scan"], 16) + assert d.attrs["area"].lons.min() == 15 + assert d.attrs["area"].lats.min() == 55 + assert d.attrs["area"].lons.attrs["rows_per_scan"] == 16 + assert d.attrs["area"].lats.attrs["rows_per_scan"] == 16 def test_load_all_m_bts(self): """Load all M band brightness temperatures.""" @@ -611,7 +609,7 @@ def test_load_all_m_bts(self): "M15", "M16", ]) - self.assertEqual(len(ds), 5) + assert len(ds) == 5 for d in ds.values(): self._assert_bt_properties(d, with_area=True) @@ -634,13 +632,13 @@ def test_load_dnb_sza_no_factors(self): "dnb_satellite_azimuth_angle", "dnb_lunar_zenith_angle", "dnb_lunar_azimuth_angle"]) - self.assertEqual(len(ds), 6) + assert len(ds) == 6 for d in ds.values(): - self.assertTrue(np.issubdtype(d.dtype, np.float32)) - self.assertEqual(d.attrs["units"], "degrees") - self.assertEqual(d.attrs["rows_per_scan"], 16) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) + assert np.issubdtype(d.dtype, np.float32) + assert d.attrs["units"] == "degrees" + assert d.attrs["rows_per_scan"] == 16 + assert "area" in d.attrs + assert d.attrs["area"] is not None def test_load_all_m_radiances(self): """Load all M band radiances.""" @@ -685,14 +683,14 @@ def test_load_all_m_radiances(self): make_dsq(name="M15", calibration="radiance"), make_dsq(name="M16", calibration="radiance"), ]) - self.assertEqual(len(ds), 16) + assert len(ds) == 16 for d in ds.values(): - self.assertTrue(np.issubdtype(d.dtype, np.float32)) - self.assertEqual(d.attrs["calibration"], "radiance") - self.assertEqual(d.attrs["units"], "W m-2 um-1 sr-1") - self.assertEqual(d.attrs["rows_per_scan"], 16) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) + assert np.issubdtype(d.dtype, np.float32) + assert d.attrs["calibration"] == "radiance" + assert d.attrs["units"] == "W m-2 um-1 sr-1" + assert 
d.attrs["rows_per_scan"] == 16 + assert "area" in d.attrs + assert d.attrs["area"] is not None def test_load_dnb(self): """Load DNB dataset.""" @@ -704,17 +702,17 @@ def test_load_dnb(self): ]) r.create_filehandlers(loadables) ds = r.load(["DNB"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): data = d.values # default scale factors are 2 and offset 1 # multiply DNB by 10000 should mean the first value of 0 should be: # data * factor * 10000 + offset * 10000 # 0 * 2 * 10000 + 1 * 10000 => 10000 - self.assertEqual(data[0, 0], 10000) + assert data[0, 0] == 10000 # the second value of 1 should be: # 1 * 2 * 10000 + 1 * 10000 => 30000 - self.assertEqual(data[0, 1], 30000) + assert data[0, 1] == 30000 self._assert_dnb_radiance_properties(d, with_area=True) def test_load_dnb_no_factors(self): @@ -727,17 +725,17 @@ def test_load_dnb_no_factors(self): ]) r.create_filehandlers(loadables, {"include_factors": False}) ds = r.load(["DNB"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): data = d.values # no scale factors, default factor 1 and offset 0 # multiply DNB by 10000 should mean the first value of 0 should be: # data * factor * 10000 + offset * 10000 # 0 * 1 * 10000 + 0 * 10000 => 0 - self.assertEqual(data[0, 0], 0) + assert data[0, 0] == 0 # the second value of 1 should be: # 1 * 1 * 10000 + 0 * 10000 => 10000 - self.assertEqual(data[0, 1], 10000) + assert data[0, 1] == 10000 self._assert_dnb_radiance_properties(d, with_area=True) def test_load_i_no_files(self): @@ -749,9 +747,9 @@ def test_load_i_no_files(self): "GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - self.assertNotIn("I01", [x["name"] for x in r.available_dataset_ids]) + assert "I01" not in [x["name"] for x in r.available_dataset_ids] ds = r.load(["I01"]) - self.assertEqual(len(ds), 0) + assert len(ds) == 0 def test_load_all_i_reflectances_provided_geo(self): """Load all I band reflectances with geo files provided.""" @@ -768,13 +766,13 @@ def test_load_all_i_reflectances_provided_geo(self): "I02", "I03", ]) - self.assertEqual(len(ds), 3) + assert len(ds) == 3 for d in ds.values(): self._assert_reflectance_properties(d, num_scans=32) - self.assertEqual(d.attrs["area"].lons.min(), 5) - self.assertEqual(d.attrs["area"].lats.min(), 45) - self.assertEqual(d.attrs["area"].lons.attrs["rows_per_scan"], 32) - self.assertEqual(d.attrs["area"].lats.attrs["rows_per_scan"], 32) + assert d.attrs["area"].lons.min() == 5 + assert d.attrs["area"].lats.min() == 45 + assert d.attrs["area"].lons.attrs["rows_per_scan"] == 32 + assert d.attrs["area"].lats.attrs["rows_per_scan"] == 32 def test_load_all_i_bts(self): """Load all I band brightness temperatures.""" @@ -789,7 +787,7 @@ def test_load_all_i_bts(self): ds = r.load(["I04", "I05", ]) - self.assertEqual(len(ds), 2) + assert len(ds) == 2 for d in ds.values(): self._assert_bt_properties(d, num_scans=32) @@ -814,14 +812,14 @@ def test_load_all_i_radiances(self): make_dsq(name="I04", calibration="radiance"), make_dsq(name="I05", calibration="radiance"), ]) - self.assertEqual(len(ds), 5) + assert len(ds) == 5 for d in ds.values(): - self.assertTrue(np.issubdtype(d.dtype, np.float32)) - self.assertEqual(d.attrs["calibration"], "radiance") - self.assertEqual(d.attrs["units"], "W m-2 um-1 sr-1") - self.assertEqual(d.attrs["rows_per_scan"], 32) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) + assert np.issubdtype(d.dtype, np.float32) is True + assert 
d.attrs["calibration"] == "radiance" + assert d.attrs["units"] == "W m-2 um-1 sr-1" + assert d.attrs["rows_per_scan"] == 32 + assert "area" in d.attrs + assert d.attrs["area"] is not None class FakeHDF5FileHandlerAggr(FakeHDF5FileHandler2): @@ -909,7 +907,7 @@ def test_load_truncated_band(self): ]) r.create_filehandlers(loadables) ds = r.load(["I01"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 i01_data = ds["I01"].compute() expected_rows = sum(FakeShortHDF5FileHandlerAggr._num_scans_per_gran) * DEFAULT_FILE_SHAPE[0] - self.assertEqual(i01_data.shape, (expected_rows, 300)) + assert i01_data.shape == (expected_rows, 300) diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index b14ff771d6..49206962e5 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -29,8 +29,8 @@ from netCDF4 import Dataset -@pytest.fixture -def _nc_filename(tmp_path): +@pytest.fixture() +def nc_filename(tmp_path): now = datetime.datetime.utcnow() filename = f"VGAC_VJ10XMOD_A{now:%Y%j_%H%M}_n004946_K005.nc" filename_str = str(tmp_path / filename) @@ -66,14 +66,14 @@ def _nc_filename(tmp_path): class TestVGACREader: """Test the VGACFileHandler reader.""" - def test_read_vgac(self, _nc_filename): + def test_read_vgac(self, nc_filename): """Test reading reflectances and BT.""" from satpy.scene import Scene # Read data scn_ = Scene( reader="viirs_vgac_l1c_nc", - filenames=[_nc_filename]) + filenames=[nc_filename]) scn_.load(["M05", "M15"]) assert (scn_["M05"][0, 0] == 100) assert (scn_["M15"][0, 0] == 400) diff --git a/satpy/tests/reader_tests/test_virr_l1b.py b/satpy/tests/reader_tests/test_virr_l1b.py index ff0f780190..e3fbd73272 100644 --- a/satpy/tests/reader_tests/test_virr_l1b.py +++ b/satpy/tests/reader_tests/test_virr_l1b.py @@ -104,13 +104,13 @@ def tearDown(self): def _band_helper(self, attributes, units, calibration, standard_name, file_type, band_index_size, resolution): - self.assertEqual(units, attributes["units"]) - self.assertEqual(calibration, attributes["calibration"]) - self.assertEqual(standard_name, attributes["standard_name"]) - self.assertEqual(file_type, attributes["file_type"]) - self.assertTrue(attributes["band_index"] in range(band_index_size)) - self.assertEqual(resolution, attributes["resolution"]) - self.assertEqual(("longitude", "latitude"), attributes["coordinates"]) + assert units == attributes["units"] + assert calibration == attributes["calibration"] + assert standard_name == attributes["standard_name"] + assert file_type == attributes["file_type"] + assert (attributes["band_index"] in range(band_index_size)) is True + assert resolution == attributes["resolution"] + assert ("longitude", "latitude") == attributes["coordinates"] def _fy3_helper(self, platform_name, reader, Emissive_units): """Load channels and test accurate metadata.""" @@ -133,13 +133,13 @@ def _fy3_helper(self, platform_name, reader, Emissive_units): # Object returned by get_dataset. 
ds = datasets[dataset["name"]] attributes = ds.attrs - self.assertTrue(isinstance(ds.data, da.Array)) - self.assertEqual("virr", attributes["sensor"]) - self.assertEqual(platform_name, attributes["platform_name"]) - self.assertEqual(datetime.datetime(2018, 12, 25, 21, 41, 47, 90000), attributes["start_time"]) - self.assertEqual(datetime.datetime(2018, 12, 25, 21, 47, 28, 254000), attributes["end_time"]) - self.assertEqual((19, 20), datasets[dataset["name"]].shape) - self.assertEqual(("y", "x"), datasets[dataset["name"]].dims) + assert isinstance(ds.data, da.Array) + assert "virr" == attributes["sensor"] + assert platform_name == attributes["platform_name"] + assert datetime.datetime(2018, 12, 25, 21, 41, 47, 90000) == attributes["start_time"] + assert datetime.datetime(2018, 12, 25, 21, 47, 28, 254000) == attributes["end_time"] + assert (19, 20) == datasets[dataset["name"]].shape + assert ("y", "x") == datasets[dataset["name"]].dims if dataset["name"] in ["1", "2", "6", "7", "8", "9", "10"]: self._band_helper(attributes, "%", "reflectance", "toa_bidirectional_reflectance", "virr_l1b", @@ -148,19 +148,17 @@ def _fy3_helper(self, platform_name, reader, Emissive_units): self._band_helper(attributes, Emissive_units, "brightness_temperature", "toa_brightness_temperature", "virr_l1b", 3, 1000) elif dataset["name"] in ["longitude", "latitude"]: - self.assertEqual("degrees", attributes["units"]) - self.assertTrue(attributes["standard_name"] in ["longitude", "latitude"]) - self.assertEqual(["virr_l1b", "virr_geoxx"], attributes["file_type"]) - self.assertEqual(1000, attributes["resolution"]) + assert "degrees" == attributes["units"] + assert (attributes["standard_name"] in ["longitude", "latitude"]) is True + assert ["virr_l1b", "virr_geoxx"] == attributes["file_type"] + assert 1000 == attributes["resolution"] else: - self.assertEqual("degrees", attributes["units"]) - self.assertTrue( - attributes["standard_name"] in ["solar_zenith_angle", "sensor_zenith_angle", "solar_azimuth_angle", - "sensor_azimuth_angle"]) - self.assertEqual(["virr_geoxx", "virr_l1b"], attributes["file_type"]) - self.assertEqual(("longitude", "latitude"), attributes["coordinates"]) - self.assertEqual(band_values[dataset["name"]], - round(float(np.array(ds[ds.shape[0] // 2][ds.shape[1] // 2])), 6)) + assert "degrees" == attributes["units"] + assert attributes["standard_name"] in ["solar_zenith_angle", "sensor_zenith_angle", + "solar_azimuth_angle", "sensor_azimuth_angle"] + assert ["virr_geoxx", "virr_l1b"] == attributes["file_type"] + assert ("longitude", "latitude") == attributes["coordinates"] + assert band_values[dataset["name"]] == round(float(np.array(ds[ds.shape[0] // 2][ds.shape[1] // 2])), 6) assert "valid_range" not in ds.attrs def test_fy3b_file(self): @@ -168,10 +166,10 @@ def test_fy3b_file(self): from satpy.readers import load_reader FY3B_reader = load_reader(self.reader_configs) FY3B_file = FY3B_reader.select_files_from_pathnames(["tf2018359214943.FY3B-L_VIRRX_L1B.HDF"]) - self.assertEqual(1, len(FY3B_file)) + assert 1 == len(FY3B_file) FY3B_reader.create_filehandlers(FY3B_file) # Make sure we have some files - self.assertTrue(FY3B_reader.file_handlers) + assert FY3B_reader.file_handlers self._fy3_helper("FY3B", FY3B_reader, "milliWstts/m^2/cm^(-1)/steradian") def test_fy3c_file(self): @@ -180,8 +178,8 @@ def test_fy3c_file(self): FY3C_reader = load_reader(self.reader_configs) FY3C_files = FY3C_reader.select_files_from_pathnames(["tf2018359143912.FY3C-L_VIRRX_GEOXX.HDF", "tf2018359143912.FY3C-L_VIRRX_L1B.HDF"]) - 
self.assertEqual(2, len(FY3C_files)) + assert 2 == len(FY3C_files) FY3C_reader.create_filehandlers(FY3C_files) # Make sure we have some files - self.assertTrue(FY3C_reader.file_handlers) + assert FY3C_reader.file_handlers self._fy3_helper("FY3C", FY3C_reader, "1") diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py index 3760249d95..a886c3fa60 100644 --- a/satpy/tests/scene_tests/test_conversions.py +++ b/satpy/tests/scene_tests/test_conversions.py @@ -93,7 +93,7 @@ def test_with_empty_scene(self): assert len(ds.variables) == 0 assert len(ds.coords) == 0 - @pytest.fixture + @pytest.fixture() def single_area_scn(self): """Define Scene with single area.""" from pyresample.geometry import AreaDefinition @@ -108,7 +108,7 @@ def single_area_scn(self): scn["var1"] = data_array return scn - @pytest.fixture + @pytest.fixture() def multi_area_scn(self): """Define Scene with multiple area.""" from pyresample.geometry import AreaDefinition @@ -162,5 +162,5 @@ def test_wrong_dataset_key(self, single_area_scn): def test_to_xarray_with_multiple_area_scene(self, multi_area_scn): """Test converting muiltple area Scene to xarray.""" # TODO: in future adapt for DataTree implementation - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Datasets to be saved .* must have identical projection coordinates."): _ = multi_area_scn.to_xarray() diff --git a/satpy/tests/scene_tests/test_data_access.py b/satpy/tests/scene_tests/test_data_access.py index e446af9c46..66129ad8bb 100644 --- a/satpy/tests/scene_tests/test_data_access.py +++ b/satpy/tests/scene_tests/test_data_access.py @@ -97,7 +97,8 @@ def test_iter_by_area_swath(self): def test_bad_setitem(self): """Test setting an item wrongly.""" scene = Scene() - pytest.raises(ValueError, scene.__setitem__, "1", np.arange(5)) + with pytest.raises(ValueError, match="Key must be a DataID when value is not an xarray DataArray or dict"): + scene.__setitem__("1", np.arange(5)) def test_setitem(self): """Test setting an item.""" @@ -112,7 +113,7 @@ def test_setitem(self): scene[did] = ds1 assert "oranges" in scene nparray = np.arange(5*5).reshape(5, 5) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Key must be a DataID when value is not an xarray DataArray or dict"): scene["apples"] = nparray assert "apples" not in scene did = make_dataid(name="apples") diff --git a/satpy/tests/scene_tests/test_init.py b/satpy/tests/scene_tests/test_init.py index a9b4622769..b745fad9d0 100644 --- a/satpy/tests/scene_tests/test_init.py +++ b/satpy/tests/scene_tests/test_init.py @@ -46,7 +46,8 @@ def test_init(self): def test_init_str_filename(self): """Test initializing with a single string as filenames.""" - pytest.raises(ValueError, Scene, reader="blo", filenames="test.nc") + with pytest.raises(ValueError, match="'filenames' must be a list of files: .*"): + Scene(reader="blo", filenames="test.nc") def test_start_end_times(self): """Test start and end times for a scene.""" @@ -74,7 +75,8 @@ def test_init_alone(self): def test_init_no_files(self): """Test that providing an empty list of filenames fails.""" - pytest.raises(ValueError, Scene, reader="viirs_sdr", filenames=[]) + with pytest.raises(ValueError, match="'filenames' was provided but is empty."): + Scene(reader="viirs_sdr", filenames=[]) def test_create_reader_instances_with_filenames(self): """Test creating a reader providing filenames.""" diff --git a/satpy/tests/scene_tests/test_resampling.py 
b/satpy/tests/scene_tests/test_resampling.py index 286735c093..6b5f74ee59 100644 --- a/satpy/tests/scene_tests/test_resampling.py +++ b/satpy/tests/scene_tests/test_resampling.py @@ -619,7 +619,7 @@ def test_aggregate_with_boundary(self): scene1 = self._create_test_data(x_size, y_size) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Could not coarsen a dimension.*"): scene1.aggregate(func="sum", x=2, y=2, boundary="exact") scene2 = scene1.aggregate(func="sum", x=2, y=2, boundary="trim") diff --git a/satpy/tests/scene_tests/test_saving.py b/satpy/tests/scene_tests/test_saving.py index 0781ae8796..32c6ff61c2 100644 --- a/satpy/tests/scene_tests/test_saving.py +++ b/satpy/tests/scene_tests/test_saving.py @@ -77,7 +77,8 @@ def test_save_datasets_bad_writer(self, tmp_path): pytest.raises(ValueError, scn.save_datasets, writer="_bad_writer_", - base_dir=tmp_path) + base_dir=tmp_path, + match="Unknown writer '_bad_writer_'") def test_save_datasets_missing_wishlist(self, tmp_path): """Calling 'save_datasets' with no valid datasets.""" diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 5c3ededd40..a872ce31c4 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -126,7 +126,7 @@ def test_nondimensional_coords(self): ds["acq_time"] = ("y", [0, 1]) comp = CompositeBase("test_comp") ret_datasets = comp.match_data_arrays([ds, ds]) - self.assertNotIn("acq_time", ret_datasets[0].coords) + assert "acq_time" not in ret_datasets[0].coords class TestRatioSharpenedCompositors: @@ -196,7 +196,7 @@ def setup_method(self): def test_bad_colors(self, init_kwargs): """Test that only valid band colors can be provided.""" from satpy.composites import RatioSharpenedRGB - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="RatioSharpenedRGB..*_band must be one of .*"): RatioSharpenedRGB(name="true_color", **init_kwargs) def test_match_data_arrays(self): @@ -210,14 +210,14 @@ def test_more_than_three_datasets(self): """Test that only 3 datasets can be passed.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name="true_color") - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Expected 3 datasets, got 4"): comp((self.ds1, self.ds2, self.ds3, self.ds1), optional_datasets=(self.ds4_big,)) def test_self_sharpened_no_high_res(self): """Test for exception when no high_res band is specified.""" from satpy.composites import SelfSharpenedRGB comp = SelfSharpenedRGB(name="true_color", high_resolution_band=None) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="SelfSharpenedRGB requires at least one high resolution band, not 'None'"): comp((self.ds1, self.ds2, self.ds3)) def test_basic_no_high_res(self): @@ -355,14 +355,14 @@ def test_bad_areas_diff(self): self.assertRaises(IncompatibleAreas, comp, (self.ds1, self.ds2_big)) -@pytest.fixture +@pytest.fixture() def fake_area(): """Return a fake 2×2 area.""" from pyresample.geometry import create_area_def return create_area_def("skierffe", 4087, area_extent=[-5_000, -5_000, 5_000, 5_000], shape=(2, 2)) -@pytest.fixture +@pytest.fixture() def fake_dataset_pair(fake_area): """Return a fake pair of 2×2 datasets.""" ds1 = xr.DataArray(da.full((2, 2), 8, chunks=2, dtype=np.float32), attrs={"area": fake_area}) @@ -619,7 +619,7 @@ class TestSandwichCompositor: # Test RGB and RGBA @pytest.mark.parametrize( - "input_shape,bands", + ("input_shape", "bands"), [ ((3, 2, 2), ["R", "G", "B"]), ((4, 2, 2), ["R", "G", "B", 
"A"]) @@ -665,28 +665,24 @@ def test_inline_composites(self): # Check that "fog" product has all its prerequisites defined keys = comps["visir"].keys() fog = [comps["visir"][dsid] for dsid in keys if "fog" == dsid["name"]][0] - self.assertEqual(fog.attrs["prerequisites"][0]["name"], "_fog_dep_0") - self.assertEqual(fog.attrs["prerequisites"][1]["name"], "_fog_dep_1") - self.assertEqual(fog.attrs["prerequisites"][2], 10.8) + assert fog.attrs["prerequisites"][0]["name"] == "_fog_dep_0" + assert fog.attrs["prerequisites"][1]["name"] == "_fog_dep_1" + assert fog.attrs["prerequisites"][2] == 10.8 # Check that the sub-composite dependencies use wavelengths # (numeric values) keys = comps["visir"].keys() fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid["name"]] - self.assertEqual(comps["visir"][fog_dep_ids[0]].attrs["prerequisites"], - [12.0, 10.8]) - self.assertEqual(comps["visir"][fog_dep_ids[1]].attrs["prerequisites"], - [10.8, 8.7]) + assert comps["visir"][fog_dep_ids[0]].attrs["prerequisites"] == [12.0, 10.8] + assert comps["visir"][fog_dep_ids[1]].attrs["prerequisites"] == [10.8, 8.7] # Check the same for SEVIRI and verify channel names are used # in the sub-composite dependencies instead of wavelengths comps = load_compositor_configs_for_sensors(["seviri"])[0] keys = comps["seviri"].keys() fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid["name"]] - self.assertEqual(comps["seviri"][fog_dep_ids[0]].attrs["prerequisites"], - ["IR_120", "IR_108"]) - self.assertEqual(comps["seviri"][fog_dep_ids[1]].attrs["prerequisites"], - ["IR_108", "IR_087"]) + assert comps["seviri"][fog_dep_ids[0]].attrs["prerequisites"] == ["IR_120", "IR_108"] + assert comps["seviri"][fog_dep_ids[1]].attrs["prerequisites"] == ["IR_108", "IR_087"] class TestColormapCompositor(unittest.TestCase): @@ -701,8 +697,8 @@ def test_build_colormap_with_int_data_and_without_meanings(self): """Test colormap building.""" palette = np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]) colormap, squeezed_palette = self.colormap_compositor.build_colormap(palette, np.uint8, {}) - self.assertTrue(np.allclose(colormap.values, [0, 1])) - self.assertTrue(np.allclose(squeezed_palette, palette / 255.0)) + assert np.allclose(colormap.values, [0, 1]) + assert np.allclose(squeezed_palette, palette / 255.0) def test_build_colormap_with_int_data_and_with_meanings(self): """Test colormap building.""" @@ -710,8 +706,8 @@ def test_build_colormap_with_int_data_and_with_meanings(self): dims=["value", "band"]) palette.attrs["palette_meanings"] = [2, 3, 4] colormap, squeezed_palette = self.colormap_compositor.build_colormap(palette, np.uint8, {}) - self.assertTrue(np.allclose(colormap.values, [2, 3, 4])) - self.assertTrue(np.allclose(squeezed_palette, palette / 255.0)) + assert np.allclose(colormap.values, [2, 3, 4]) + assert np.allclose(squeezed_palette, palette / 255.0) class TestPaletteCompositor(unittest.TestCase): @@ -733,7 +729,7 @@ def test_call(self): [0., 0.498039, 1.]], [[1., 0.498039, 0.], [0., 0.498039, 1.]]]) - self.assertTrue(np.allclose(res, exp)) + assert np.allclose(res, exp) class TestColorizeCompositor(unittest.TestCase): @@ -758,7 +754,7 @@ def test_colorize_no_fill(self): [0., 0.498039, 1.]], [[1., 0.498039, 0.], [0., 0.498039, 1.]]]) - self.assertTrue(np.allclose(res, exp, atol=1e-4)) + assert np.allclose(res, exp, atol=0.0001) def test_colorize_with_interpolation(self): """Test colorizing with interpolation.""" @@ -940,14 +936,14 @@ def test_call(self): self.comp.attrs["resolution"] = None res = 
self.comp([all_valid], **attrs) # Verify attributes - self.assertEqual(res.attrs.get("sensor"), "foo") - self.assertTrue("foo" in res.attrs) - self.assertEqual(res.attrs.get("foo"), "bar") - self.assertTrue("units" in res.attrs) - self.assertTrue("calibration" in res.attrs) - self.assertFalse("modifiers" in res.attrs) - self.assertEqual(res.attrs["wavelength"], 10.8) - self.assertEqual(res.attrs["resolution"], 333) + assert res.attrs.get("sensor") == "foo" + assert "foo" in res.attrs + assert res.attrs.get("foo") == "bar" + assert "units" in res.attrs + assert "calibration" in res.attrs + assert "modifiers" not in res.attrs + assert res.attrs["wavelength"] == 10.8 + assert res.attrs["resolution"] == 333 class TestCategoricalDataCompositor(unittest.TestCase): @@ -1023,33 +1019,33 @@ def test_concat_datasets(self): from satpy.composites import IncompatibleAreas res = self.comp._concat_datasets([self.all_valid], "L") num_bands = len(res.bands) - self.assertEqual(num_bands, 1) - self.assertEqual(res.shape[0], num_bands) - self.assertEqual(res.bands[0], "L") + assert num_bands == 1 + assert res.shape[0] == num_bands + assert res.bands[0] == "L" res = self.comp._concat_datasets([self.all_valid, self.all_valid], "LA") num_bands = len(res.bands) - self.assertEqual(num_bands, 2) - self.assertEqual(res.shape[0], num_bands) - self.assertEqual(res.bands[0], "L") - self.assertEqual(res.bands[1], "A") + assert num_bands == 2 + assert res.shape[0] == num_bands + assert res.bands[0] == "L" + assert res.bands[1] == "A" self.assertRaises(IncompatibleAreas, self.comp._concat_datasets, [self.all_valid, self.wrong_shape], "LA") def test_get_sensors(self): """Test getting sensors from the dataset attributes.""" res = self.comp._get_sensors([self.all_valid]) - self.assertIsNone(res) + assert res is None dset1 = self.all_valid dset1.attrs["sensor"] = "foo" res = self.comp._get_sensors([dset1]) - self.assertEqual(res, "foo") + assert res == "foo" dset2 = self.first_invalid dset2.attrs["sensor"] = "bar" res = self.comp._get_sensors([dset1, dset2]) - self.assertIn("foo", res) - self.assertIn("bar", res) - self.assertEqual(len(res), 2) - self.assertIsInstance(res, set) + assert "foo" in res + assert "bar" in res + assert len(res) == 2 + assert isinstance(res, set) @mock.patch("satpy.composites.GenericCompositor._get_sensors") @mock.patch("satpy.composites.combine_metadata") @@ -1062,8 +1058,8 @@ def test_call_with_mock(self, match_data_arrays, check_times, combine_metadata, get_sensors.return_value = "foo" # One dataset, no mode given res = self.comp([self.all_valid]) - self.assertEqual(res.shape[0], 1) - self.assertEqual(res.attrs["mode"], "L") + assert res.shape[0] == 1 + assert res.attrs["mode"] == "L" match_data_arrays.assert_not_called() # This compositor has been initialized without common masking, so the # masking shouldn't have been called @@ -1093,15 +1089,15 @@ def test_call(self): self.comp.attrs["resolution"] = None res = self.comp([self.all_valid, self.first_invalid], **attrs) # Verify attributes - self.assertEqual(res.attrs.get("sensor"), "foo") - self.assertIn("foo", res.attrs) - self.assertEqual(res.attrs.get("foo"), "bar") - self.assertNotIn("units", res.attrs) - self.assertNotIn("calibration", res.attrs) - self.assertNotIn("modifiers", res.attrs) - self.assertIsNone(res.attrs["wavelength"]) - self.assertEqual(res.attrs["mode"], "LA") - self.assertEqual(res.attrs["resolution"], 333) + assert res.attrs.get("sensor") == "foo" + assert "foo" in res.attrs + assert res.attrs.get("foo") == "bar" + assert 
"units" not in res.attrs + assert "calibration" not in res.attrs + assert "modifiers" not in res.attrs + assert res.attrs["wavelength"] is None + assert res.attrs["mode"] == "LA" + assert res.attrs["resolution"] == 333 def test_deprecation_warning(self): """Test deprecation warning for dcprecated composite recipes.""" @@ -1125,7 +1121,7 @@ def test_add_bands_l_rgb(self): coords={"bands": ["R", "G", "B"]}) res = add_bands(data, new_bands) res_bands = ["R", "G", "B"] - self.assertEqual(res.attrs["mode"], "".join(res_bands)) + assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) @@ -1140,7 +1136,7 @@ def test_add_bands_l_rgba(self): coords={"bands": ["R", "G", "B", "A"]}) res = add_bands(data, new_bands) res_bands = ["R", "G", "B", "A"] - self.assertEqual(res.attrs["mode"], "".join(res_bands)) + assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) @@ -1155,7 +1151,7 @@ def test_add_bands_la_rgb(self): coords={"bands": ["R", "G", "B"]}) res = add_bands(data, new_bands) res_bands = ["R", "G", "B", "A"] - self.assertEqual(res.attrs["mode"], "".join(res_bands)) + assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) @@ -1171,7 +1167,7 @@ def test_add_bands_rgb_rbga(self): coords={"bands": ["R", "G", "B", "A"]}) res = add_bands(data, new_bands) res_bands = ["R", "G", "B", "A"] - self.assertEqual(res.attrs["mode"], "".join(res_bands)) + assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) @@ -1203,14 +1199,14 @@ def test_init(self, get_area_def): # No area defined comp = StaticImageCompositor("name", filename="/foo.tif") - self.assertEqual(comp._cache_filename, "/foo.tif") - self.assertIsNone(comp.area) + assert comp._cache_filename == "/foo.tif" + assert comp.area is None # Area defined get_area_def.return_value = "bar" comp = StaticImageCompositor("name", filename="/foo.tif", area="euro4") - self.assertEqual(comp._cache_filename, "/foo.tif") - self.assertEqual(comp.area, "bar") + assert comp._cache_filename == "/foo.tif" + assert comp.area == "bar" get_area_def.assert_called_once_with("euro4") @mock.patch("satpy.aux_download.retrieve") @@ -1239,11 +1235,11 @@ def load(self, arg): filenames=["/foo.tif"]) register.assert_not_called() retrieve.assert_not_called() - self.assertIn("start_time", res.attrs) - self.assertIn("end_time", res.attrs) - self.assertIsNone(res.attrs["sensor"]) - self.assertNotIn("modifiers", res.attrs) - self.assertNotIn("calibration", res.attrs) + assert "start_time" in res.attrs + assert "end_time" in res.attrs + assert res.attrs["sensor"] is None + assert "modifiers" not in res.attrs + assert "calibration" not in res.attrs # remote file with local cached version Scene.reset_mock() @@ -1253,11 +1249,11 @@ def load(self, arg): res = comp() Scene.assert_called_once_with(reader="generic_image", filenames=["data_dir/foo.tif"]) - self.assertIn("start_time", res.attrs) - self.assertIn("end_time", res.attrs) - self.assertIsNone(res.attrs["sensor"]) - self.assertNotIn("modifiers", res.attrs) - self.assertNotIn("calibration", res.attrs) + assert "start_time" in res.attrs + assert "end_time" in res.attrs + assert res.attrs["sensor"] is None + assert "modifiers" 
not in res.attrs + assert "calibration" not in res.attrs # Non-georeferenced image, no area given img.attrs.pop("area") @@ -1268,25 +1264,24 @@ def load(self, arg): # Non-georeferenced image, area given comp = StaticImageCompositor("name", filename="/foo.tif", area="euro4") res = comp() - self.assertEqual(res.attrs["area"].area_id, "euro4") + assert res.attrs["area"].area_id == "euro4" # Filename contains environment variable os.environ["TEST_IMAGE_PATH"] = "/path/to/image" comp = StaticImageCompositor("name", filename="${TEST_IMAGE_PATH}/foo.tif", area="euro4") - self.assertEqual(comp._cache_filename, "/path/to/image/foo.tif") + assert comp._cache_filename == "/path/to/image/foo.tif" # URL and filename without absolute path comp = StaticImageCompositor("name", url=remote_tif, filename="bar.tif") - self.assertEqual(comp._url, remote_tif) - self.assertEqual(comp._cache_filename, "bar.tif") + assert comp._url == remote_tif + assert comp._cache_filename == "bar.tif" # No URL, filename without absolute path, use default data_dir from config with mock.patch("os.path.exists") as exists: exists.return_value = True comp = StaticImageCompositor("name", filename="foo.tif") - self.assertEqual(comp._url, None) - self.assertEqual(comp._cache_filename, - os.path.join(os.path.sep, "path", "to", "image", "foo.tif")) + assert comp._url is None + assert comp._cache_filename == os.path.join(os.path.sep, "path", "to", "image", "foo.tif") def _enhance2dataset(dataset, convert_p=False): @@ -1384,7 +1379,7 @@ def test_multiple_sensors(self): class TestMaskingCompositor: """Test case for the simple masking compositor.""" - @pytest.fixture + @pytest.fixture() def conditions_v1(self): """Masking conditions with string values.""" return [{"method": "equal", @@ -1394,7 +1389,7 @@ def conditions_v1(self): "value": "Cloud-free_sea", "transparency": 50}] - @pytest.fixture + @pytest.fixture() def conditions_v2(self): """Masking conditions with numerical values.""" return [{"method": "equal", @@ -1404,12 +1399,12 @@ def conditions_v2(self): "value": 2, "transparency": 50}] - @pytest.fixture + @pytest.fixture() def test_data(self): """Test data to use with masking compositors.""" return xr.DataArray(da.random.random((3, 3)), dims=["y", "x"]) - @pytest.fixture + @pytest.fixture() def test_ct_data(self): """Test 2D CT data array.""" flag_meanings = ["Cloud-free_land", "Cloud-free_sea"] @@ -1422,18 +1417,18 @@ def test_ct_data(self): ct_data.attrs["flag_values"] = flag_values return ct_data - @pytest.fixture + @pytest.fixture() def test_ct_data_v3(self, test_ct_data): """Set ct data to NaN where it originally is 1.""" return test_ct_data.where(test_ct_data == 1) - @pytest.fixture + @pytest.fixture() def reference_data(self, test_data, test_ct_data): """Get reference data to use in masking compositor tests.""" # The data are set to NaN where ct is `1` return test_data.where(test_ct_data > 1) - @pytest.fixture + @pytest.fixture() def reference_alpha(self): """Get reference alpha to use in masking compositor tests.""" ref_alpha = da.array([[0, 0.5, 0.5], @@ -1446,8 +1441,8 @@ def test_init(self): from satpy.composites import MaskingCompositor # No transparency or conditions given raises ValueError - with pytest.raises(ValueError): - comp = MaskingCompositor("name") + with pytest.raises(ValueError, match="Masking conditions not defined."): + _ = MaskingCompositor("name") # transparency defined transparency = {0: 100, 1: 50} @@ -1621,7 +1616,7 @@ def test_incorrect_method(self, test_data, test_ct_data): with 
pytest.raises(AttributeError): comp([test_data, test_ct_data]) # Test with too few projectables. - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Expected 2 datasets, got 1"): comp([test_data]) def test_incorrect_mode(self, conditions_v1): @@ -1629,7 +1624,7 @@ def test_incorrect_mode(self, conditions_v1): from satpy.composites import MaskingCompositor # Incorrect mode raises ValueError - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Invalid mode YCbCrA. Supported modes: .*"): MaskingCompositor("name", conditions=conditions_v1, mode="YCbCrA") @@ -1660,17 +1655,17 @@ def temp_func(*args): match_data_arrays.side_effect = temp_func comp = NaturalEnh("foo", ch16_w=self.ch16_w, ch08_w=self.ch08_w, ch06_w=self.ch06_w) - self.assertEqual(comp.ch16_w, self.ch16_w) - self.assertEqual(comp.ch08_w, self.ch08_w) - self.assertEqual(comp.ch06_w, self.ch06_w) + assert comp.ch16_w == self.ch16_w + assert comp.ch08_w == self.ch08_w + assert comp.ch06_w == self.ch06_w res = comp(projectables) assert mock.call(projectables) in match_data_arrays.mock_calls correct = (self.ch16_w * projectables[0] + self.ch08_w * projectables[1] + self.ch06_w * projectables[2]) - self.assertEqual(res[0], correct) - self.assertEqual(res[1], projectables[1]) - self.assertEqual(res[2], projectables[2]) + assert res[0] == correct + assert res[1] == projectables[1] + assert res[2] == projectables[2] class TestEnhance2Dataset(unittest.TestCase): diff --git a/satpy/tests/test_config.py b/satpy/tests/test_config.py index 5cb1c047d2..df33436b45 100644 --- a/satpy/tests/test_config.py +++ b/satpy/tests/test_config.py @@ -154,7 +154,7 @@ def _fake_importlib_files(module_name: str) -> Path: return _fake_importlib_files -@pytest.fixture +@pytest.fixture() def fake_composite_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake compositor YAML configuration file.""" yield from _create_yamlbased_plugin( @@ -182,7 +182,7 @@ def _write_fake_composite_yaml(yaml_filename: str) -> None: """) -@pytest.fixture +@pytest.fixture() def fake_reader_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake reader YAML configuration file.""" yield from _create_yamlbased_plugin( @@ -205,7 +205,7 @@ def _write_fake_reader_yaml(yaml_filename: str) -> None: """) -@pytest.fixture +@pytest.fixture() def fake_writer_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake writer YAML configuration file.""" yield from _create_yamlbased_plugin( @@ -226,7 +226,7 @@ def _write_fake_writer_yaml(yaml_filename: str) -> None: """) -@pytest.fixture +@pytest.fixture() def fake_enh_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake enhancement YAML configure files. 
@@ -479,7 +479,9 @@ def test_bad_str_config_path(self): # strings are not allowed, lists are with satpy.config.set(config_path="/single/string/paths/are/bad"): - pytest.raises(ValueError, satpy._config.get_config_path_safe) + with pytest.raises(ValueError, + match="Satpy config option 'config_path' must be a list, not ''"): + satpy._config.get_config_path_safe() def test_tmp_dir_is_writable(self): """Check that the default temporary directory is writable.""" diff --git a/satpy/tests/test_crefl_utils.py b/satpy/tests/test_crefl_utils.py index 1e5da8cd9a..57eb4f84a6 100644 --- a/satpy/tests/test_crefl_utils.py +++ b/satpy/tests/test_crefl_utils.py @@ -33,7 +33,7 @@ def test_get_atm_variables_abi(self): 0.0043149700000000004, 0.0037296, 0.014107995000000002, 0.052349, ) sphalb, rhoray, TtotraytH2O, tOG = atm_vars() - self.assertLess(abs(np.array(sphalb) - 0.045213532544630494), 1e-10) - self.assertLess(abs(rhoray - 2.2030281148621356), 1e-10) - self.assertLess(abs(TtotraytH2O - 0.30309880915889087), 1e-10) - self.assertLess(abs(tOG - 0.5969089524560548), 1e-10) + assert abs(np.array(sphalb) - 0.045213532544630494) < 1e-10 + assert abs(rhoray - 2.2030281148621356) < 1e-10 + assert abs(TtotraytH2O - 0.30309880915889087) < 1e-10 + assert abs(tOG - 0.5969089524560548) < 1e-10 diff --git a/satpy/tests/test_data_download.py b/satpy/tests/test_data_download.py index 85cd420951..78edf180af 100644 --- a/satpy/tests/test_data_download.py +++ b/satpy/tests/test_data_download.py @@ -158,7 +158,7 @@ def _setup_custom_configs(self, tmpdir): _setup_custom_writer_config(tmpdir) self.tmpdir = tmpdir - @pytest.mark.parametrize("comp_sensors", [[], None, ["visir"]]) + @pytest.mark.parametrize("comp_sensors", [tuple(), None, ("visir",)]) @pytest.mark.parametrize("writers", [[], None, ["fake"]]) @pytest.mark.parametrize("readers", [[], None, ["fake"]]) def test_find_registerable(self, readers, writers, comp_sensors): diff --git a/satpy/tests/test_dataset.py b/satpy/tests/test_dataset.py index b8df391d30..014a450e0c 100644 --- a/satpy/tests/test_dataset.py +++ b/satpy/tests/test_dataset.py @@ -45,7 +45,7 @@ def test_basic_init(self): calibration="radiance") DataID(dikc, name="a", wavelength=0.86, resolution=250, calibration="radiance", modifiers=("sunz_corrected",)) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Required field name missing."): DataID(dikc, wavelength=0.86) did = DataID(mdkc, name="comp24", resolution=500) assert did["resolution"] == 500 @@ -64,14 +64,14 @@ def test_compare_no_wl(self): d2 = DataID(dikc, name="a", wavelength=None) # this happens when sorting IDs during dependency checks - self.assertFalse(d1 < d2) - self.assertTrue(d2 < d1) + assert not (d1 < d2) + assert d2 < d1 def test_bad_calibration(self): """Test that asking for a bad calibration fails.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="_bad_ invalid value for "): DataID(dikc, name="C05", calibration="_bad_") def test_is_modified(self): @@ -119,20 +119,20 @@ def test_average_datetimes(self): datetime(2018, 2, 1, 12, 2, 0), ) ret = average_datetimes(dts) - self.assertEqual(dts[2], ret) + assert dts[2] == ret def test_combine_times_with_averaging(self): """Test the combine_metadata with times with averaging.""" from satpy.dataset.metadata import combine_metadata ret = combine_metadata(*self.datetime_dts) - self.assertEqual(self.datetime_dts[2]["start_time"], 
ret["start_time"]) + assert self.datetime_dts[2]["start_time"] == ret["start_time"] def test_combine_times_without_averaging(self): """Test the combine_metadata with times without averaging.""" from satpy.dataset.metadata import combine_metadata ret = combine_metadata(*self.datetime_dts, average_times=False) # times are not equal so don't include it in the final result - self.assertNotIn("start_time", ret) + assert "start_time" not in ret def test_combine_arrays(self): """Test the combine_metadata with arrays.""" @@ -387,7 +387,7 @@ def test_dataid(): did = make_dataid(name="cheese_shops", resolution=None) assert "resolution" not in did assert "None" not in did.__repr__() - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Required field name missing."): make_dataid(name=None, resolution=1000) # Check that defaults are applied correctly @@ -404,7 +404,7 @@ def test_dataid(): did["resolution"] = 1000 # Check that a missing required field crashes - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Required field name missing."): make_dataid(resolution=1000) # Check to_dict diff --git a/satpy/tests/test_demo.py b/satpy/tests/test_demo.py index 7ed3a3ac43..d1dddd5e8d 100644 --- a/satpy/tests/test_demo.py +++ b/satpy/tests/test_demo.py @@ -97,7 +97,7 @@ def test_get_us_midlatitude_cyclone_abi(self, gcsfs_mod): filenames = get_us_midlatitude_cyclone_abi() expected = os.path.join(".", "abi_l1b", "20190314_us_midlatitude_cyclone", "a.nc") for fn in filenames: - self.assertEqual(expected, fn) + assert expected == fn @mock.patch("satpy.demo._google_cloud_platform.gcsfs") def test_get_hurricane_florence_abi(self, gcsfs_mod): @@ -114,19 +114,19 @@ def test_get_hurricane_florence_abi(self, gcsfs_mod): gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi() - self.assertEqual(10 * 16, len(filenames)) + assert 10 * 16 == len(filenames) gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi(channels=[2, 3, 4]) - self.assertEqual(10 * 3, len(filenames)) + assert 10 * 3 == len(filenames) gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi(channels=[2, 3, 4], num_frames=5) - self.assertEqual(5 * 3, len(filenames)) + assert 5 * 3 == len(filenames) gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi(num_frames=5) - self.assertEqual(5 * 16, len(filenames)) + assert 5 * 16 == len(filenames) class TestGCPUtils(unittest.TestCase): @@ -137,7 +137,7 @@ def test_is_gcp_instance(self, uo): """Test is_google_cloud_instance.""" from satpy.demo._google_cloud_platform import URLError, is_google_cloud_instance uo.side_effect = URLError("Test Environment") - self.assertFalse(is_google_cloud_instance()) + assert not is_google_cloud_instance() @mock.patch("satpy.demo._google_cloud_platform.gcsfs") def test_get_bucket_files(self, gcsfs_mod): @@ -149,11 +149,11 @@ def test_get_bucket_files(self, gcsfs_mod): gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] filenames = get_bucket_files("*.nc", ".") expected = [os.path.join(".", "a.nc"), os.path.join(".", "b.nc")] - self.assertEqual(expected, filenames) + assert expected == filenames gcsfs_inst.glob.side_effect = _GlobHelper(10) filenames = get_bucket_files(["*.nc", "*.txt"], ".", pattern_slice=slice(2, 5)) - self.assertEqual(len(filenames), 3 * 2) + assert len(filenames) == 3 * 2 gcsfs_inst.glob.side_effect 
= None # reset mock side effect gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] @@ -163,14 +163,14 @@ def test_get_bucket_files(self, gcsfs_mod): gcsfs_inst.get.reset_mock() gcsfs_inst.glob.return_value = ["a.nc"] filenames = get_bucket_files("*.nc", ".") - self.assertEqual([os.path.join(".", "a.nc")], filenames) + assert [os.path.join(".", "a.nc")] == filenames gcsfs_inst.get.assert_not_called() # force redownload gcsfs_inst.get.reset_mock() gcsfs_inst.glob.return_value = ["a.nc"] filenames = get_bucket_files("*.nc", ".", force=True) - self.assertEqual([os.path.join(".", "a.nc")], filenames) + assert [os.path.join(".", "a.nc")] == filenames gcsfs_inst.get.assert_called_once() # if we don't get any results then we expect an exception @@ -284,20 +284,20 @@ class TestVIIRSSDRDemoDownload: "SVDNB") ALL_GEO_PREFIXES = ("GITCO", "GMTCO", "GDNBO") - def test_download(self, _requests, tmpdir): + def test_download(self, requests, tmpdir): """Test downloading VIIRS SDR data.""" from satpy.demo import get_viirs_sdr_20170128_1229 - _requests.get.side_effect = _FakeRequest + requests.get.side_effect = _FakeRequest with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir)) assert len(files) == 10 * (16 + 5 + 1 + 3) # 10 granules * (5 I bands + 16 M bands + 1 DNB + 3 geolocation) self._assert_bands_in_filenames_and_contents(self.ALL_BAND_PREFIXES + self.ALL_GEO_PREFIXES, files, 10) - def test_do_not_download_the_files_twice(self, _requests, tmpdir): + def test_do_not_download_the_files_twice(self, requests, tmpdir): """Test re-downloading VIIRS SDR data.""" from satpy.demo import get_viirs_sdr_20170128_1229 get_mock = mock.MagicMock() - _requests.get.return_value.__enter__ = get_mock + requests.get.return_value.__enter__ = get_mock with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir)) new_files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir)) @@ -307,21 +307,21 @@ def test_do_not_download_the_files_twice(self, _requests, tmpdir): assert get_mock.call_count == total_num_files assert new_files == files - def test_download_channels_num_granules_im(self, _requests, tmpdir): + def test_download_channels_num_granules_im(self, requests, tmpdir): """Test downloading VIIRS SDR I/M data with select granules.""" from satpy.demo import get_viirs_sdr_20170128_1229 - _requests.get.side_effect = _FakeRequest + requests.get.side_effect = _FakeRequest with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir), channels=("I01", "M01")) assert len(files) == 10 * (1 + 1 + 2) # 10 granules * (1 I band + 1 M band + 2 geolocation) self._assert_bands_in_filenames_and_contents(("SVI01", "SVM01", "GITCO", "GMTCO"), files, 10) - def test_download_channels_num_granules_im_twice(self, _requests, tmpdir): + def test_download_channels_num_granules_im_twice(self, requests, tmpdir): """Test re-downloading VIIRS SDR I/M data with select granules.""" from satpy.demo import get_viirs_sdr_20170128_1229 get_mock = mock.MagicMock() - _requests.get.return_value.__enter__ = get_mock + requests.get.return_value.__enter__ = get_mock with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir), channels=("I01", "M01")) @@ -334,10 +334,10 @@ def test_download_channels_num_granules_im_twice(self, _requests, tmpdir): assert len(files) == 2 * (1 + 1 + 2) # 2 granules * (1 I band + 1 M band + 2 geolocation) assert get_mock.call_count == num_first_batch - def test_download_channels_num_granules_dnb(self, _requests, tmpdir): + def 
test_download_channels_num_granules_dnb(self, requests, tmpdir): """Test downloading and re-downloading VIIRS SDR DNB data with select granules.""" from satpy.demo import get_viirs_sdr_20170128_1229 - _requests.get.side_effect = _FakeRequest + requests.get.side_effect = _FakeRequest with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir), channels=("DNB",), diff --git a/satpy/tests/test_dependency_tree.py b/satpy/tests/test_dependency_tree.py index 57b718963f..40433c0032 100644 --- a/satpy/tests/test_dependency_tree.py +++ b/satpy/tests/test_dependency_tree.py @@ -87,10 +87,8 @@ def test_copy_preserves_unique_empty_node(self): new_dependency_tree = self.dependency_tree.copy() assert self.dependency_tree.empty_node is new_dependency_tree.empty_node - self.assertIs(self.dependency_tree._root.children[0].children[0].children[1], - self.dependency_tree.empty_node) - self.assertIs(new_dependency_tree._root.children[0].children[0].children[1], - self.dependency_tree.empty_node) + assert self.dependency_tree._root.children[0].children[0].children[1] is self.dependency_tree.empty_node + assert new_dependency_tree._root.children[0].children[0].children[1] is self.dependency_tree.empty_node def test_new_dependency_tree_preserves_unique_empty_node(self): """Test that dependency tree instantiation preserves the uniqueness of the empty node.""" @@ -216,8 +214,8 @@ def test_compositor_loaded_sensor_order(self): """Test that a compositor is loaded from the first alphabetical sensor.""" self.dependency_tree.populate_with_keys({"comp1"}) comp_nodes = self.dependency_tree.trunk() - self.assertEqual(len(comp_nodes), 1) - self.assertEqual(comp_nodes[0].name["resolution"], 500) + assert len(comp_nodes) == 1 + assert comp_nodes[0].name["resolution"] == 500 def test_modifier_loaded_sensor_order(self): """Test that a modifier is loaded from the first alphabetical sensor.""" @@ -225,5 +223,5 @@ def test_modifier_loaded_sensor_order(self): dq = DataQuery(name="ds5", modifiers=("mod1",)) self.dependency_tree.populate_with_keys({dq}) comp_nodes = self.dependency_tree.trunk() - self.assertEqual(len(comp_nodes), 1) - self.assertEqual(comp_nodes[0].data[0].ret_val, 1) + assert len(comp_nodes) == 1 + assert comp_nodes[0].data[0].ret_val == 1 diff --git a/satpy/tests/test_file_handlers.py b/satpy/tests/test_file_handlers.py index 4282bc86b1..403e686204 100644 --- a/satpy/tests/test_file_handlers.py +++ b/satpy/tests/test_file_handlers.py @@ -56,19 +56,19 @@ def test_combine_times(self): info2 = {"start_time": 2} res = self.fh.combine_info([info1, info2]) exp = {"start_time": 1} - self.assertDictEqual(res, exp) + assert res == exp res = self.fh.combine_info([info2, info1]) exp = {"start_time": 1} - self.assertDictEqual(res, exp) + assert res == exp info1 = {"end_time": 1} info2 = {"end_time": 2} res = self.fh.combine_info([info1, info2]) exp = {"end_time": 2} - self.assertDictEqual(res, exp) + assert res == exp res = self.fh.combine_info([info2, info1]) exp = {"end_time": 2} - self.assertDictEqual(res, exp) + assert res == exp def test_combine_orbits(self): """Combine orbits.""" @@ -76,19 +76,19 @@ def test_combine_orbits(self): info2 = {"start_orbit": 2} res = self.fh.combine_info([info1, info2]) exp = {"start_orbit": 1} - self.assertDictEqual(res, exp) + assert res == exp res = self.fh.combine_info([info2, info1]) exp = {"start_orbit": 1} - self.assertDictEqual(res, exp) + assert res == exp info1 = {"end_orbit": 1} info2 = {"end_orbit": 2} res = self.fh.combine_info([info1, info2]) exp = 
{"end_orbit": 2} - self.assertDictEqual(res, exp) + assert res == exp res = self.fh.combine_info([info2, info1]) exp = {"end_orbit": 2} - self.assertDictEqual(res, exp) + assert res == exp @mock.patch("satpy.readers.file_handlers.SwathDefinition") def test_combine_area(self, sdef): @@ -107,9 +107,9 @@ def test_combine_area(self, sdef): info2 = {"area": area2} self.fh.combine_info([info1, info2]) - self.assertTupleEqual(sdef.call_args[1]["lons"].shape, (2, 5)) - self.assertTupleEqual(sdef.call_args[1]["lats"].shape, (2, 5)) - self.assertEqual(sdef.return_value.name, "area1_area2") + assert sdef.call_args[1]["lons"].shape == (2, 5) + assert sdef.call_args[1]["lats"].shape == (2, 5) + assert sdef.return_value.name == "area1_area2" def test_combine_orbital_parameters(self): """Combine orbital parameters.""" @@ -148,10 +148,10 @@ def test_combine_orbital_parameters(self): "only_in_1": False, "only_in_2": True}} res = self.fh.combine_info([info1, info2]) - self.assertDictEqual(res, exp) + assert res == exp # Identity - self.assertEqual(self.fh.combine_info([info1]), info1) + assert self.fh.combine_info([info1]) == info1 # Empty self.fh.combine_info([{}]) diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 04d32b7ecc..4e41b78c75 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -275,7 +275,7 @@ def test_provide_sunz_and_threshold(self, calculator, apply_modifier_info, sza): info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) - self.assertEqual(res.attrs["sun_zenith_threshold"], 84.0) + assert res.attrs["sun_zenith_threshold"] == 84.0 calculator.assert_called_with("Meteosat-11", "seviri", "IR_039", sunz_threshold=84.0, masking_limit=NIRReflectance.MASKING_LIMIT) @@ -308,7 +308,7 @@ def test_provide_masking_limit(self, calculator, apply_modifier_info, sza): info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) - self.assertIsNone(res.attrs["sun_zenith_masking_limit"]) + assert res.attrs["sun_zenith_masking_limit"] is None calculator.assert_called_with("Meteosat-11", "seviri", "IR_039", sunz_threshold=NIRReflectance.TERMINATOR_LIMIT, masking_limit=None) @@ -383,11 +383,11 @@ def test_compositor(self, calculator, apply_modifier_info, sza): sza.return_value = sunz2 res = comp([nir, ir_], optional_datasets=[sunz], **info) - self.assertEqual(res.attrs["sun_zenith_threshold"], 86.0) - self.assertEqual(res.attrs["units"], "K") - self.assertEqual(res.attrs["platform_name"], platform) - self.assertEqual(res.attrs["sensor"], sensor) - self.assertEqual(res.attrs["name"], chan_name) + assert res.attrs["sun_zenith_threshold"] == 86.0 + assert res.attrs["units"] == "K" + assert res.attrs["platform_name"] == platform + assert res.attrs["sensor"] == sensor + assert res.attrs["name"] == chan_name calculator.assert_called_with("NOAA-20", "viirs", "M12", sunz_threshold=86.0, masking_limit=NIRReflectance.MASKING_LIMIT) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index 3b2888565b..8250f691a0 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -67,7 +67,7 @@ real_import = builtins.__import__ -@pytest.fixture +@pytest.fixture() def viirs_file(tmp_path, monkeypatch): """Create a dummy viirs file.""" filename = "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5" @@ -78,7 +78,7 @@ def viirs_file(tmp_path, monkeypatch): return filename -@pytest.fixture +@pytest.fixture() def atms_file(tmp_path, 
monkeypatch): """Create a dummy atms file.""" filename = "SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5" @@ -132,44 +132,44 @@ def test_init_noargs(self): """Test DatasetDict init with no arguments.""" from satpy import DatasetDict d = DatasetDict() - self.assertIsInstance(d, dict) + assert isinstance(d, dict) def test_init_dict(self): """Test DatasetDict init with a regular dict argument.""" from satpy import DatasetDict regular_dict = {make_dataid(name="test", wavelength=(0, 0.5, 1)): "1", } d = DatasetDict(regular_dict) - self.assertEqual(d, regular_dict) + assert d == regular_dict def test_getitem(self): """Test DatasetDict getitem with different arguments.""" from satpy.tests.utils import make_dsq d = self.test_dict # access by name - self.assertEqual(d["test"], "1") + assert d["test"] == "1" # access by exact wavelength - self.assertEqual(d[1.5], "2") + assert d[1.5] == "2" # access by near wavelength - self.assertEqual(d[1.55], "2") + assert d[1.55] == "2" # access by near wavelength of another dataset - self.assertEqual(d[1.65], "3") + assert d[1.65] == "3" # access by name with multiple levels - self.assertEqual(d["test6"], "6_100") + assert d["test6"] == "6_100" - self.assertEqual(d[make_dsq(wavelength=1.5)], "2") - self.assertEqual(d[make_dsq(wavelength=0.5, resolution=1000)], "1") - self.assertEqual(d[make_dsq(wavelength=0.5, resolution=500)], "1h") - self.assertEqual(d[make_dsq(name="test6", level=100)], "6_100") - self.assertEqual(d[make_dsq(name="test6", level=200)], "6_200") + assert d[make_dsq(wavelength=1.5)] == "2" + assert d[make_dsq(wavelength=0.5, resolution=1000)] == "1" + assert d[make_dsq(wavelength=0.5, resolution=500)] == "1h" + assert d[make_dsq(name="test6", level=100)] == "6_100" + assert d[make_dsq(name="test6", level=200)] == "6_200" # higher resolution is returned - self.assertEqual(d[0.5], "1h") - self.assertEqual(d["test4"], "4refl") - self.assertEqual(d[make_dataid(name="test4", calibration="radiance")], "4rad") + assert d[0.5] == "1h" + assert d["test4"] == "4refl" + assert d[make_dataid(name="test4", calibration="radiance")] == "4rad" self.assertRaises(KeyError, d.getitem, "1h") # test with full tuple - self.assertEqual(d[make_dsq(name="test", wavelength=(0, 0.5, 1), resolution=1000)], "1") + assert d[make_dsq(name="test", wavelength=(0, 0.5, 1), resolution=1000)] == "1" def test_get_key(self): """Test 'get_key' special functions.""" @@ -180,32 +180,27 @@ def test_get_key(self): num_results=0) res3 = get_key(make_dataid(name="test4"), d, calibration="radiance", num_results=3) - self.assertEqual(len(res2), 1) - self.assertEqual(len(res3), 1) + assert len(res2) == 1 + assert len(res3) == 1 res2 = res2[0] res3 = res3[0] - self.assertEqual(res1, res2) - self.assertEqual(res1, res3) + assert res1 == res2 + assert res1 == res3 res1 = get_key("test4", d, query=DataQuery(polarization="V")) - self.assertEqual(res1, make_dataid(name="test4", calibration="radiance", - polarization="V")) + assert res1 == make_dataid(name="test4", calibration="radiance", polarization="V") res1 = get_key(0.5, d, query=DataQuery(resolution=500)) - self.assertEqual(res1, make_dataid(name="testh", - wavelength=(0, 0.5, 1), - resolution=500)) + assert res1 == make_dataid(name="testh", wavelength=(0, 0.5, 1), resolution=500) res1 = get_key("test6", d, query=DataQuery(level=100)) - self.assertEqual(res1, make_dataid(name="test6", - level=100)) + assert res1 == make_dataid(name="test6", level=100) res1 = get_key("test5", d) res2 = get_key("test5", d, 
query=DataQuery(modifiers=("mod2",))) res3 = get_key("test5", d, query=DataQuery(modifiers=("mod1", "mod2",))) - self.assertEqual(res1, make_dataid(name="test5", - modifiers=("mod2",))) - self.assertEqual(res1, res2) - self.assertNotEqual(res1, res3) + assert res1 == make_dataid(name="test5", modifiers=("mod2",)) + assert res1 == res2 + assert res1 != res3 # more than 1 result when default is to ask for 1 result self.assertRaises(KeyError, get_key, "test4", d, best=False) @@ -213,40 +208,39 @@ def test_get_key(self): def test_contains(self): """Test DatasetDict contains method.""" d = self.test_dict - self.assertIn("test", d) - self.assertFalse(d.contains("test")) - self.assertNotIn("test_bad", d) - self.assertIn(0.5, d) - self.assertFalse(d.contains(0.5)) - self.assertIn(1.5, d) - self.assertIn(1.55, d) - self.assertIn(1.65, d) - self.assertIn(make_dataid(name="test4", calibration="radiance"), d) - self.assertIn("test4", d) + assert "test" in d + assert not d.contains("test") + assert "test_bad" not in d + assert 0.5 in d + assert not d.contains(0.5) + assert 1.5 in d + assert 1.55 in d + assert 1.65 in d + assert make_dataid(name="test4", calibration="radiance") in d + assert "test4" in d def test_keys(self): """Test keys method of DatasetDict.""" from satpy.tests.utils import DataID d = self.test_dict - self.assertEqual(len(d.keys()), len(self.regular_dict.keys())) - self.assertTrue(all(isinstance(x, DataID) for x in d.keys())) + assert len(d.keys()) == len(self.regular_dict.keys()) + assert all(isinstance(x, DataID) for x in d.keys()) name_keys = d.keys(names=True) - self.assertListEqual(sorted(set(name_keys))[:4], [ - "test", "test2", "test3", "test4"]) + assert sorted(set(name_keys))[:4] == ["test", "test2", "test3", "test4"] wl_keys = tuple(d.keys(wavelengths=True)) - self.assertIn((0, 0.5, 1), wl_keys) - self.assertIn((1, 1.5, 2, "µm"), wl_keys) - self.assertIn((1.2, 1.7, 2.2, "µm"), wl_keys) - self.assertIn(None, wl_keys) + assert (0, 0.5, 1) in wl_keys + assert (1, 1.5, 2, "µm") in wl_keys + assert (1.2, 1.7, 2.2, "µm") in wl_keys + assert None in wl_keys def test_setitem(self): """Test setitem method of DatasetDict.""" d = self.test_dict d["new_ds"] = {"metadata": "new_ds"} - self.assertEqual(d["new_ds"]["metadata"], "new_ds") + assert d["new_ds"]["metadata"] == "new_ds" d[0.5] = {"calibration": "radiance"} - self.assertEqual(d[0.5]["resolution"], 500) - self.assertEqual(d[0.5]["name"], "testh") + assert d[0.5]["resolution"] == 500 + assert d[0.5]["name"] == "testh" class TestReaderLoader(unittest.TestCase): @@ -276,20 +270,20 @@ def test_no_args(self): """ from satpy.readers import load_readers ri = load_readers() - self.assertDictEqual(ri, {}) + assert ri == {} def test_filenames_only(self): """Test with filenames specified.""" from satpy.readers import load_readers ri = load_readers(filenames=["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"]) - self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) + assert list(ri.keys()) == ["viirs_sdr"] def test_filenames_and_reader(self): """Test with filenames and reader specified.""" from satpy.readers import load_readers ri = load_readers(reader="viirs_sdr", filenames=["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"]) - self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) + assert list(ri.keys()) == ["viirs_sdr"] def test_bad_reader_name_with_filenames(self): """Test bad reader name with filenames provided.""" @@ -306,7 +300,7 @@ def test_filenames_as_path(self): ri = 
load_readers(filenames=[ Path("SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"), ]) - self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) + assert list(ri.keys()) == ["viirs_sdr"] def test_filenames_as_dict(self): """Test loading readers where filenames are organized by reader.""" @@ -315,7 +309,7 @@ def test_filenames_as_dict(self): "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], } ri = load_readers(filenames=filenames) - self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) + assert list(ri.keys()) == ["viirs_sdr"] def test_filenames_as_dict_bad_reader(self): """Test loading with filenames dict but one of the readers is bad.""" @@ -340,7 +334,7 @@ def test_filenames_as_dict_with_reader(self): "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], } ri = load_readers(reader="viirs_sdr", filenames=filenames) - self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) + assert list(ri.keys()) == ["viirs_sdr"] def test_empty_filenames_as_dict(self): """Test passing filenames as a dictionary with an empty list of filenames.""" @@ -357,7 +351,7 @@ def test_empty_filenames_as_dict(self): "viirs_l1b": [], } ri = load_readers(filenames) - self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) + assert list(ri.keys()) == ["viirs_sdr"] @mock.patch("satpy.readers.hrit_base.HRITFileHandler._get_hd") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler._get_header") @@ -436,10 +430,10 @@ def test_almost_all_filtered(self): "end_time": datetime.datetime(2012, 2, 26)} # viirs has data that matches the request, abi doesn't readers = load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) - self.assertIn("viirs_sdr", readers) + assert "viirs_sdr" in readers # abi_l1b reader was created, but no datasets available - self.assertIn("abi_l1b", readers) - self.assertEqual(len(list(readers["abi_l1b"].available_dataset_ids)), 0) + assert "abi_l1b" in readers + assert len(list(readers["abi_l1b"].available_dataset_ids)) == 0 class TestFindFilesAndReaders: @@ -514,7 +508,7 @@ def test_reader_name_unmatched_start_end_time(self, viirs_file): """Test with start and end time matching the filename.""" from datetime import datetime - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="No supported files found"): find_files_and_readers(reader="viirs_sdr", start_time=datetime(2012, 2, 26, 18, 0, 0), end_time=datetime(2012, 2, 26, 19, 0, 0)) @@ -540,7 +534,7 @@ def test_no_parameters_both_atms_and_viirs(self, viirs_file, atms_file): def test_bad_sensor(self): """Test bad sensor doesn't find any files.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Sensor.* not supported by any readers"): find_files_and_readers(sensor="i_dont_exist") def test_sensor(self, viirs_file): @@ -555,7 +549,7 @@ def test_sensor_no_files(self): """Test that readers for the current sensor are loaded.""" # we can't easily know how many readers satpy has that support # 'viirs' so we just pass it and hope that this works - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="No supported files found"): find_files_and_readers(sensor="viirs") assert find_files_and_readers(sensor="viirs", missing_ok=True) == {} @@ -589,7 +583,7 @@ def test_old_reader_name_mapping(self): return pytest.skip("Skipping deprecated reader tests because " "no deprecated readers.") test_reader = sorted(OLD_READER_NAMES.keys())[0] - with pytest.raises(ValueError): 
+ with pytest.raises(ValueError, match="Reader name .* has been deprecated, use .* instead."): get_valid_reader_names([test_reader]) @@ -714,12 +708,12 @@ def test_no_reader(self): # without files it's going to be an empty result assert group_files([]) == [] groups = group_files(self.g16_files) - self.assertEqual(6, len(groups)) + assert 6 == len(groups) def test_unknown_files(self): """Test that error is raised on unknown files.""" from satpy.readers import group_files - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="No matching readers found for these files: .*"): group_files(self.unknown_files, "abi_l1b") def test_bad_reader(self): @@ -737,8 +731,8 @@ def test_default_behavior(self): """Test the default behavior with the 'abi_l1b' reader.""" from satpy.readers import group_files groups = group_files(self.g16_files, reader="abi_l1b") - self.assertEqual(6, len(groups)) - self.assertEqual(2, len(groups[0]["abi_l1b"])) + assert 6 == len(groups) + assert 2 == len(groups[0]["abi_l1b"]) def test_default_behavior_set(self): """Test the default behavior with the 'abi_l1b' reader.""" @@ -747,23 +741,23 @@ def test_default_behavior_set(self): num_files = len(files) groups = group_files(files, reader="abi_l1b") # we didn't modify it - self.assertEqual(len(files), num_files) - self.assertEqual(6, len(groups)) - self.assertEqual(2, len(groups[0]["abi_l1b"])) + assert len(files) == num_files + assert 6 == len(groups) + assert 2 == len(groups[0]["abi_l1b"]) def test_non_datetime_group_key(self): """Test what happens when the start_time isn't used for grouping.""" from satpy.readers import group_files groups = group_files(self.g16_files, reader="abi_l1b", group_keys=("platform_shortname",)) - self.assertEqual(1, len(groups)) - self.assertEqual(12, len(groups[0]["abi_l1b"])) + assert 1 == len(groups) + assert 12 == len(groups[0]["abi_l1b"]) def test_large_time_threshold(self): """Test what happens when the time threshold holds multiple files.""" from satpy.readers import group_files groups = group_files(self.g16_files, reader="abi_l1b", time_threshold=60*8) - self.assertEqual(3, len(groups)) - self.assertEqual(4, len(groups[0]["abi_l1b"])) + assert 3 == len(groups) + assert 4 == len(groups[0]["abi_l1b"]) def test_two_instruments_files(self): """Test the behavior when two instruments files are provided. @@ -777,8 +771,8 @@ def test_two_instruments_files(self): """ from satpy.readers import group_files groups = group_files(self.g16_files + self.g17_files, reader="abi_l1b", group_keys=("start_time",)) - self.assertEqual(6, len(groups)) - self.assertEqual(4, len(groups[0]["abi_l1b"])) + assert 6 == len(groups) + assert 4 == len(groups[0]["abi_l1b"]) def test_two_instruments_files_split(self): """Test the default behavior when two instruments files are provided and split. 
@@ -790,49 +784,49 @@ def test_two_instruments_files_split(self): from satpy.readers import group_files groups = group_files(self.g16_files + self.g17_files, reader="abi_l1b", group_keys=("start_time", "platform_shortname")) - self.assertEqual(12, len(groups)) - self.assertEqual(2, len(groups[0]["abi_l1b"])) + assert 12 == len(groups) + assert 2 == len(groups[0]["abi_l1b"]) # default for abi_l1b should also behave like this groups = group_files(self.g16_files + self.g17_files, reader="abi_l1b") - self.assertEqual(12, len(groups)) - self.assertEqual(2, len(groups[0]["abi_l1b"])) + assert 12 == len(groups) + assert 2 == len(groups[0]["abi_l1b"]) def test_viirs_orbits(self): """Test a reader that doesn't use 'start_time' for default grouping.""" from satpy.readers import group_files groups = group_files(self.noaa20_files + self.npp_files, reader="viirs_sdr") - self.assertEqual(2, len(groups)) + assert 2 == len(groups) # the noaa-20 files will be first because the orbit number is smaller # 5 granules * 3 file types - self.assertEqual(5 * 3, len(groups[0]["viirs_sdr"])) + assert 5 * 3 == len(groups[0]["viirs_sdr"]) # 3 granules * 2 file types - self.assertEqual(6, len(groups[1]["viirs_sdr"])) + assert 6 == len(groups[1]["viirs_sdr"]) def test_viirs_override_keys(self): """Test overriding a group keys to add 'start_time'.""" from satpy.readers import group_files groups = group_files(self.noaa20_files + self.npp_files, reader="viirs_sdr", group_keys=("start_time", "orbit", "platform_shortname")) - self.assertEqual(8, len(groups)) - self.assertEqual(2, len(groups[0]["viirs_sdr"])) # NPP - self.assertEqual(2, len(groups[1]["viirs_sdr"])) # NPP - self.assertEqual(2, len(groups[2]["viirs_sdr"])) # NPP - self.assertEqual(3, len(groups[3]["viirs_sdr"])) # N20 - self.assertEqual(3, len(groups[4]["viirs_sdr"])) # N20 - self.assertEqual(3, len(groups[5]["viirs_sdr"])) # N20 - self.assertEqual(3, len(groups[6]["viirs_sdr"])) # N20 - self.assertEqual(3, len(groups[7]["viirs_sdr"])) # N20 + assert 8 == len(groups) + assert 2 == len(groups[0]["viirs_sdr"]) # NPP + assert 2 == len(groups[1]["viirs_sdr"]) # NPP + assert 2 == len(groups[2]["viirs_sdr"]) # NPP + assert 3 == len(groups[3]["viirs_sdr"]) # N20 + assert 3 == len(groups[4]["viirs_sdr"]) # N20 + assert 3 == len(groups[5]["viirs_sdr"]) # N20 + assert 3 == len(groups[6]["viirs_sdr"]) # N20 + assert 3 == len(groups[7]["viirs_sdr"]) # N20 # Ask for a larger time span with our groups groups = group_files(self.noaa20_files + self.npp_files, reader="viirs_sdr", time_threshold=60 * 60 * 2, group_keys=("start_time", "orbit", "platform_shortname")) - self.assertEqual(2, len(groups)) + assert 2 == len(groups) # NPP is first because it has an earlier time # 3 granules * 2 file types - self.assertEqual(6, len(groups[0]["viirs_sdr"])) + assert 6 == len(groups[0]["viirs_sdr"]) # 5 granules * 3 file types - self.assertEqual(5 * 3, len(groups[1]["viirs_sdr"])) + assert 5 * 3 == len(groups[1]["viirs_sdr"]) def test_multi_readers(self): """Test passing multiple readers.""" @@ -930,7 +924,7 @@ def test_multi_readers_empty_groups_passed(self): def test_multi_readers_invalid_parameter(self): """Verify that invalid missing parameter raises ValueError.""" from satpy.readers import group_files - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Invalid value for ``missing`` argument..*"): group_files( self._filenames_abi_glm, reader=["abi_l1b", "glm_l2"], diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index a9a3b24a01..be55954851 
100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -125,13 +125,13 @@ def test_type_preserve(self): data.attrs["_FillValue"] = 255 data.attrs["area"] = source_area res = resample_dataset(data, dest_area) - self.assertEqual(res.dtype, data.dtype) - self.assertTrue(np.all(res.values == expected_gap)) + assert res.dtype == data.dtype + assert np.all(res.values == expected_gap) expected_filled = np.array([[1, 2], [3, 3]]) res = resample_dataset(data, dest_area, radius_of_influence=1000000) - self.assertEqual(res.dtype, data.dtype) - self.assertTrue(np.all(res.values == expected_filled)) + assert res.dtype == data.dtype + assert np.all(res.values == expected_filled) class TestKDTreeResampler(unittest.TestCase): @@ -155,7 +155,7 @@ def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, xr_resampler.assert_called_once() resampler.resampler.get_neighbour_info.assert_called() # swath definitions should not be cached - self.assertFalse(len(mock_dset.to_zarr.mock_calls), 0) + assert len(mock_dset.to_zarr.mock_calls) == 0 resampler.resampler.reset_mock() cnc.assert_called_once() @@ -170,11 +170,11 @@ def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, zarr_open.side_effect = ValueError() resampler.precompute(cache_dir=the_dir) # assert data was saved to the on-disk cache - self.assertEqual(len(mock_dset.to_zarr.mock_calls), 1) + assert len(mock_dset.to_zarr.mock_calls) == 1 # assert that zarr_open was called to try to zarr_open something from disk - self.assertEqual(len(zarr_open.mock_calls), 1) + assert len(zarr_open.mock_calls) == 1 # we should have cached things in-memory - self.assertEqual(len(resampler._index_caches), 1) + assert len(resampler._index_caches) == 1 nbcalls = len(resampler.resampler.get_neighbour_info.mock_calls) # test reusing the resampler zarr_open.side_effect = None @@ -195,20 +195,20 @@ def astype(self, dtype): distance_array=4) resampler.precompute(cache_dir=the_dir) # we already have things cached in-memory, no need to save again - self.assertEqual(len(mock_dset.to_zarr.mock_calls), 1) + assert len(mock_dset.to_zarr.mock_calls) == 1 # we already have things cached in-memory, don't need to load - self.assertEqual(len(zarr_open.mock_calls), 1) + assert len(zarr_open.mock_calls) == 1 # we should have cached things in-memory - self.assertEqual(len(resampler._index_caches), 1) - self.assertEqual(len(resampler.resampler.get_neighbour_info.mock_calls), nbcalls) + assert len(resampler._index_caches) == 1 + assert len(resampler.resampler.get_neighbour_info.mock_calls) == nbcalls # test loading saved resampler resampler = KDTreeResampler(source_area, target_area) resampler.precompute(cache_dir=the_dir) - self.assertEqual(len(zarr_open.mock_calls), 4) - self.assertEqual(len(resampler.resampler.get_neighbour_info.mock_calls), nbcalls) + assert len(zarr_open.mock_calls) == 4 + assert len(resampler.resampler.get_neighbour_info.mock_calls) == nbcalls # we should have cached things in-memory now - self.assertEqual(len(resampler._index_caches), 1) + assert len(resampler._index_caches) == 1 finally: shutil.rmtree(the_dir) @@ -279,10 +279,10 @@ def test_2d_ewa(self, get_lonlats, ll2cr, fornav): num_chunks = len(source_swath.lons.chunks[0]) * len(source_swath.lons.chunks[1]) new_data = resample_dataset(swath_data, target_area, resampler="ewa") - self.assertTupleEqual(new_data.shape, (200, 100)) - self.assertEqual(new_data.dtype, np.float32) - self.assertEqual(new_data.attrs["test"], "test") - self.assertIs(new_data.attrs["area"], 
target_area) + assert new_data.shape == (200, 100) + assert new_data.dtype == np.float32 + assert new_data.attrs["test"] == "test" + assert new_data.attrs["area"] is target_area # make sure we can actually compute everything new_data.compute() lonlat_calls = get_lonlats.call_count @@ -296,17 +296,17 @@ def test_2d_ewa(self, get_lonlats, ll2cr, fornav): new_data = resample_dataset(data, target_area, resampler="ewa") new_data.compute() # ll2cr will be called once more because of the computation - self.assertEqual(ll2cr.call_count, ll2cr_calls + num_chunks) + assert ll2cr.call_count == ll2cr_calls + num_chunks # but we should already have taken the lonlats from the SwathDefinition - self.assertEqual(get_lonlats.call_count, lonlat_calls) - self.assertIn("y", new_data.coords) - self.assertIn("x", new_data.coords) - self.assertIn("crs", new_data.coords) - self.assertIsInstance(new_data.coords["crs"].item(), CRS) - self.assertIn("lambert", new_data.coords["crs"].item().coordinate_operation.method_name.lower()) - self.assertEqual(new_data.coords["y"].attrs["units"], "meter") - self.assertEqual(new_data.coords["x"].attrs["units"], "meter") - self.assertEqual(target_area.crs, new_data.coords["crs"].item()) + assert get_lonlats.call_count == lonlat_calls + assert "y" in new_data.coords + assert "x" in new_data.coords + assert "crs" in new_data.coords + assert isinstance(new_data.coords["crs"].item(), CRS) + assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower() + assert new_data.coords["y"].attrs["units"] == "meter" + assert new_data.coords["x"].attrs["units"] == "meter" + assert target_area.crs == new_data.coords["crs"].item() @mock.patch("satpy.resample.fornav") @mock.patch("satpy.resample.ll2cr") @@ -329,10 +329,10 @@ def test_3d_ewa(self, get_lonlats, ll2cr, fornav): num_chunks = len(source_swath.lons.chunks[0]) * len(source_swath.lons.chunks[1]) new_data = resample_dataset(swath_data, target_area, resampler="ewa") - self.assertTupleEqual(new_data.shape, (3, 200, 100)) - self.assertEqual(new_data.dtype, np.float32) - self.assertEqual(new_data.attrs["test"], "test") - self.assertIs(new_data.attrs["area"], target_area) + assert new_data.shape == (3, 200, 100) + assert new_data.dtype == np.float32 + assert new_data.attrs["test"] == "test" + assert new_data.attrs["area"] is target_area # make sure we can actually compute everything new_data.compute() lonlat_calls = get_lonlats.call_count @@ -346,20 +346,20 @@ def test_3d_ewa(self, get_lonlats, ll2cr, fornav): new_data = resample_dataset(swath_data, target_area, resampler="ewa") new_data.compute() # ll2cr will be called once more because of the computation - self.assertEqual(ll2cr.call_count, ll2cr_calls + num_chunks) + assert ll2cr.call_count == ll2cr_calls + num_chunks # but we should already have taken the lonlats from the SwathDefinition - self.assertEqual(get_lonlats.call_count, lonlat_calls) - self.assertIn("y", new_data.coords) - self.assertIn("x", new_data.coords) - self.assertIn("bands", new_data.coords) - self.assertIn("crs", new_data.coords) - self.assertIsInstance(new_data.coords["crs"].item(), CRS) - self.assertIn("lambert", new_data.coords["crs"].item().coordinate_operation.method_name.lower()) - self.assertEqual(new_data.coords["y"].attrs["units"], "meter") - self.assertEqual(new_data.coords["x"].attrs["units"], "meter") + assert get_lonlats.call_count == lonlat_calls + assert "y" in new_data.coords + assert "x" in new_data.coords + assert "bands" in new_data.coords + assert "crs" in new_data.coords + 
assert isinstance(new_data.coords["crs"].item(), CRS) + assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower() + assert new_data.coords["y"].attrs["units"] == "meter" + assert new_data.coords["x"].attrs["units"] == "meter" np.testing.assert_equal(new_data.coords["bands"].values, ["R", "G", "B"]) - self.assertEqual(target_area.crs, new_data.coords["crs"].item()) + assert target_area.crs == new_data.coords["crs"].item() class TestNativeResampler: @@ -388,7 +388,7 @@ def test_expand_reduce_aggregate_identity(self): @pytest.mark.parametrize("dim0_factor", [1. / 4, 0.333323423, 1.333323423]) def test_expand_reduce_aggregate_invalid(self, dim0_factor): """Test classmethod 'expand_reduce' fails when factor does not divide evenly.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="[Aggregation, Expand] .*"): NativeResampler._expand_reduce(self.d_arr, {0: dim0_factor, 1: 1.}) def test_expand_reduce_agg_rechunk(self): @@ -469,7 +469,7 @@ def test_expand_without_dims_4D(self): input_shape=(2, 3, 100, 50), input_dims=None) # source geo def doesn't actually matter resampler = NativeResampler(source_area, target_area) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Can only handle 2D or 3D arrays without dimensions."): resampler.resample(ds1) @@ -500,14 +500,14 @@ def test_bil_resampling(self, xr_resampler, create_filename, new_data = resampler.compute(data, fill_value=fill_value) resampler.resampler.get_sample_from_bil_info.assert_called_with( data, fill_value=fill_value, output_shape=target_area.shape) - self.assertIn("y", new_data.coords) - self.assertIn("x", new_data.coords) - self.assertIn("crs", new_data.coords) - self.assertIsInstance(new_data.coords["crs"].item(), CRS) - self.assertIn("lambert", new_data.coords["crs"].item().coordinate_operation.method_name.lower()) - self.assertEqual(new_data.coords["y"].attrs["units"], "meter") - self.assertEqual(new_data.coords["x"].attrs["units"], "meter") - self.assertEqual(target_area.crs, new_data.coords["crs"].item()) + assert "y" in new_data.coords + assert "x" in new_data.coords + assert "crs" in new_data.coords + assert isinstance(new_data.coords["crs"].item(), CRS) + assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower() + assert new_data.coords["y"].attrs["units"] == "meter" + assert new_data.coords["x"].attrs["units"] == "meter" + assert target_area.crs == new_data.coords["crs"].item() # Test that the resampling info is tried to read from the disk resampler = BilinearResampler(source_swath, target_area) @@ -533,13 +533,13 @@ def test_bil_resampling(self, xr_resampler, create_filename, # we already have things cached in-memory, no need to save again resampler.resampler.save_resampling_info.assert_called_once() # we already have things cached in-memory, don't need to load - self.assertEqual(resampler.resampler.get_bil_info.call_count, nbcalls) + assert resampler.resampler.get_bil_info.call_count == nbcalls # test loading saved resampler resampler = BilinearResampler(source_area, target_area) resampler.precompute(cache_dir=the_dir) - self.assertEqual(resampler.resampler.load_resampling_info.call_count, 3) - self.assertEqual(resampler.resampler.get_bil_info.call_count, nbcalls) + assert resampler.resampler.load_resampling_info.call_count == 3 + assert resampler.resampler.get_bil_info.call_count == nbcalls resampler = BilinearResampler(source_area, target_area) resampler.precompute(cache_dir=the_dir) @@ -564,10 +564,8 @@ def 
test_move_existing_caches(self): fid.write("42") from satpy.resample import _move_existing_caches _move_existing_caches(the_dir, zarr_file) - self.assertFalse(os.path.exists(zarr_file)) - self.assertTrue(os.path.exists( - os.path.join(the_dir, "moved_by_satpy", - "test.zarr"))) + assert not os.path.exists(zarr_file) + assert os.path.exists(os.path.join(the_dir, "moved_by_satpy", "test.zarr")) # Run again to see that the existing dir doesn't matter with open(zarr_file, "w") as fid: fid.write("42") @@ -594,18 +592,16 @@ def test_area_def_coordinates(self): dims=("y", "x"), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) - self.assertIn("y", new_data_arr.coords) - self.assertIn("x", new_data_arr.coords) - - self.assertIn("units", new_data_arr.coords["y"].attrs) - self.assertEqual( - new_data_arr.coords["y"].attrs["units"], "meter") - self.assertIn("units", new_data_arr.coords["x"].attrs) - self.assertEqual( - new_data_arr.coords["x"].attrs["units"], "meter") - self.assertIn("crs", new_data_arr.coords) - self.assertIsInstance(new_data_arr.coords["crs"].item(), CRS) - self.assertEqual(area_def.crs, new_data_arr.coords["crs"].item()) + assert "y" in new_data_arr.coords + assert "x" in new_data_arr.coords + + assert "units" in new_data_arr.coords["y"].attrs + assert new_data_arr.coords["y"].attrs["units"] == "meter" + assert "units" in new_data_arr.coords["x"].attrs + assert new_data_arr.coords["x"].attrs["units"] == "meter" + assert "crs" in new_data_arr.coords + assert isinstance(new_data_arr.coords["crs"].item(), CRS) + assert area_def.crs == new_data_arr.coords["crs"].item() # already has coords data_arr = xr.DataArray( @@ -615,15 +611,15 @@ def test_area_def_coordinates(self): coords={"y": np.arange(2, 202), "x": np.arange(100)} ) new_data_arr = add_crs_xy_coords(data_arr, area_def) - self.assertIn("y", new_data_arr.coords) - self.assertNotIn("units", new_data_arr.coords["y"].attrs) - self.assertIn("x", new_data_arr.coords) - self.assertNotIn("units", new_data_arr.coords["x"].attrs) + assert "y" in new_data_arr.coords + assert "units" not in new_data_arr.coords["y"].attrs + assert "x" in new_data_arr.coords + assert "units" not in new_data_arr.coords["x"].attrs np.testing.assert_equal(new_data_arr.coords["y"], np.arange(2, 202)) - self.assertIn("crs", new_data_arr.coords) - self.assertIsInstance(new_data_arr.coords["crs"].item(), CRS) - self.assertEqual(area_def.crs, new_data_arr.coords["crs"].item()) + assert "crs" in new_data_arr.coords + assert isinstance(new_data_arr.coords["crs"].item(), CRS) + assert area_def.crs == new_data_arr.coords["crs"].item() # lat/lon area area_def = AreaDefinition( @@ -636,18 +632,16 @@ def test_area_def_coordinates(self): dims=("y", "x"), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) - self.assertIn("y", new_data_arr.coords) - self.assertIn("x", new_data_arr.coords) - - self.assertIn("units", new_data_arr.coords["y"].attrs) - self.assertEqual( - new_data_arr.coords["y"].attrs["units"], "degrees_north") - self.assertIn("units", new_data_arr.coords["x"].attrs) - self.assertEqual( - new_data_arr.coords["x"].attrs["units"], "degrees_east") - self.assertIn("crs", new_data_arr.coords) - self.assertIsInstance(new_data_arr.coords["crs"].item(), CRS) - self.assertEqual(area_def.crs, new_data_arr.coords["crs"].item()) + assert "y" in new_data_arr.coords + assert "x" in new_data_arr.coords + + assert "units" in new_data_arr.coords["y"].attrs + assert new_data_arr.coords["y"].attrs["units"] == "degrees_north" + assert "units" in 
new_data_arr.coords["x"].attrs + assert new_data_arr.coords["x"].attrs["units"] == "degrees_east" + assert "crs" in new_data_arr.coords + assert isinstance(new_data_arr.coords["crs"].item(), CRS) + assert area_def.crs == new_data_arr.coords["crs"].item() def test_swath_def_coordinates(self): """Test coordinates being added with an SwathDefinition.""" @@ -679,11 +673,11 @@ def test_swath_def_coordinates(self): # new_data_arr.coords['latitude'].attrs['units'], 'degrees_north') # self.assertIsInstance(new_data_arr.coords['latitude'].data, da.Array) - self.assertIn("crs", new_data_arr.coords) + assert "crs" in new_data_arr.coords crs = new_data_arr.coords["crs"].item() - self.assertIsInstance(crs, CRS) + assert isinstance(crs, CRS) assert crs.is_geographic - self.assertIsInstance(new_data_arr.coords["crs"].item(), CRS) + assert isinstance(new_data_arr.coords["crs"].item(), CRS) class TestBucketAvg(unittest.TestCase): @@ -702,16 +696,16 @@ def setUp(self): def test_init(self): """Test bucket resampler initialization.""" - self.assertIsNone(self.bucket.resampler) - self.assertTrue(self.bucket.source_geo_def == self.source_geo_def) - self.assertTrue(self.bucket.target_geo_def == self.target_geo_def) + assert self.bucket.resampler is None + assert self.bucket.source_geo_def == self.source_geo_def + assert self.bucket.target_geo_def == self.target_geo_def @mock.patch("pyresample.bucket.BucketResampler") def test_precompute(self, bucket): """Test bucket resampler precomputation.""" bucket.return_value = True self.bucket.precompute() - self.assertTrue(self.bucket.resampler) + assert self.bucket.resampler bucket.assert_called_once_with(self.target_geo_def, 1, 2) def _compute_mocked_bucket_avg(self, data, return_data=None, **kwargs): @@ -729,16 +723,16 @@ def test_compute(self): # 1D data data = da.ones((5,)) res = self._compute_mocked_bucket_avg(data, fill_value=2) - self.assertEqual(res.shape, (1, 5)) + assert res.shape == (1, 5) # 2D data data = da.ones((5, 5)) res = self._compute_mocked_bucket_avg(data, fill_value=2) - self.assertEqual(res.shape, (1, 5, 5)) + assert res.shape == (1, 5, 5) # 3D data data = da.ones((3, 5, 5)) self.bucket.resampler.get_average.return_value = data[0, :, :] res = self._compute_mocked_bucket_avg(data, return_data=data[0, :, :], fill_value=2) - self.assertEqual(res.shape, (3, 5, 5)) + assert res.shape == (3, 5, 5) @mock.patch("satpy.resample.PR_USE_SKIPNA", True) def test_compute_and_use_skipna_handling(self): @@ -805,33 +799,33 @@ def test_resample(self, pyresample_bucket): res = self.bucket.resample(data) self.bucket.precompute.assert_called_once() self.bucket.compute.assert_called_once() - self.assertEqual(res.shape, (5, 5)) - self.assertEqual(res.dims, ("y", "x")) - self.assertTrue("bar" in res.attrs) - self.assertEqual(res.attrs["bar"], "baz") + assert res.shape == (5, 5) + assert res.dims == ("y", "x") + assert "bar" in res.attrs + assert res.attrs["bar"] == "baz" # 2D input data data = xr.DataArray(da.ones((5, 5)), dims=("foo", "bar")) self.bucket.compute.return_value = da.ones((5, 5)) res = self.bucket.resample(data) - self.assertEqual(res.shape, (5, 5)) - self.assertEqual(res.dims, ("y", "x")) + assert res.shape == (5, 5) + assert res.dims == ("y", "x") # 3D input data with 'bands' dim data = xr.DataArray(da.ones((1, 5, 5)), dims=("bands", "foo", "bar"), coords={"bands": ["L"]}) self.bucket.compute.return_value = da.ones((1, 5, 5)) res = self.bucket.resample(data) - self.assertEqual(res.shape, (1, 5, 5)) - self.assertEqual(res.dims, ("bands", "y", "x")) - 
self.assertEqual(res.coords["bands"], ["L"]) + assert res.shape == (1, 5, 5) + assert res.dims == ("bands", "y", "x") + assert res.coords["bands"] == ["L"] # 3D input data with misc dim names data = xr.DataArray(da.ones((3, 5, 5)), dims=("foo", "bar", "baz")) self.bucket.compute.return_value = da.ones((3, 5, 5)) res = self.bucket.resample(data) - self.assertEqual(res.shape, (3, 5, 5)) - self.assertEqual(res.dims, ("foo", "bar", "baz")) + assert res.shape == (3, 5, 5) + assert res.dims == ("foo", "bar", "baz") class TestBucketSum(unittest.TestCase): @@ -861,15 +855,15 @@ def test_compute(self): # 1D data data = da.ones((5,)) res = self._compute_mocked_bucket_sum(data) - self.assertEqual(res.shape, (1, 5)) + assert res.shape == (1, 5) # 2D data data = da.ones((5, 5)) res = self._compute_mocked_bucket_sum(data) - self.assertEqual(res.shape, (1, 5, 5)) + assert res.shape == (1, 5, 5) # 3D data data = da.ones((3, 5, 5)) res = self._compute_mocked_bucket_sum(data, return_data=data[0, :, :]) - self.assertEqual(res.shape, (3, 5, 5)) + assert res.shape == (3, 5, 5) @mock.patch("satpy.resample.PR_USE_SKIPNA", True) def test_compute_and_use_skipna_handling(self): @@ -946,16 +940,16 @@ def test_compute(self): data = da.ones((5,)) res = self._compute_mocked_bucket_count(data) self.bucket.resampler.get_count.assert_called_once_with() - self.assertEqual(res.shape, (1, 5)) + assert res.shape == (1, 5) # 2D data data = da.ones((5, 5)) res = self._compute_mocked_bucket_count(data) self.bucket.resampler.get_count.assert_called_once_with() - self.assertEqual(res.shape, (1, 5, 5)) + assert res.shape == (1, 5, 5) # 3D data data = da.ones((3, 5, 5)) res = self._compute_mocked_bucket_count(data, return_data=data[0, :, :]) - self.assertEqual(res.shape, (3, 5, 5)) + assert res.shape == (3, 5, 5) class TestBucketFraction(unittest.TestCase): @@ -1007,6 +1001,6 @@ def test_resample(self, pyresample_bucket): arr = da.ones((5, 5)) self.bucket.compute.return_value = {0: arr, 1: arr, 2: arr} res = self.bucket.resample(data) - self.assertTrue("categories" in res.coords) - self.assertTrue("categories" in res.dims) - self.assertTrue(np.all(res.coords["categories"] == np.array([0, 1, 2]))) + assert "categories" in res.coords + assert "categories" in res.dims + assert np.all(res.coords["categories"] == np.array([0, 1, 2])) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 29d940fbdc..2e38e00b3c 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -27,6 +27,7 @@ import numpy as np import pytest import xarray as xr +from pytest import approx # noqa: PT013 from satpy.utils import ( angle2xyz, @@ -50,176 +51,176 @@ class TestUtils(unittest.TestCase): def test_lonlat2xyz(self): """Test the lonlat2xyz function.""" x__, y__, z__ = lonlat2xyz(0, 0) - self.assertAlmostEqual(x__, 1) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 0) + assert x__ == approx(1) + assert y__ == approx(0) + assert z__ == approx(0) x__, y__, z__ = lonlat2xyz(90, 0) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 1) - self.assertAlmostEqual(z__, 0) + assert x__ == approx(0) + assert y__ == approx(1) + assert z__ == approx(0) x__, y__, z__ = lonlat2xyz(0, 90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 1) + assert x__ == approx(0) + assert y__ == approx(0) + assert z__ == approx(1) x__, y__, z__ = lonlat2xyz(180, 0) - self.assertAlmostEqual(x__, -1) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 0) + assert x__ == 
approx(-1) + assert y__ == approx(0) + assert z__ == approx(0) x__, y__, z__ = lonlat2xyz(-90, 0) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, -1) - self.assertAlmostEqual(z__, 0) + assert x__ == approx(0) + assert y__ == approx(-1) + assert z__ == approx(0) x__, y__, z__ = lonlat2xyz(0, -90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, -1) + assert x__ == approx(0) + assert y__ == approx(0) + assert z__ == approx(-1) x__, y__, z__ = lonlat2xyz(0, 45) - self.assertAlmostEqual(x__, np.sqrt(2) / 2) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, np.sqrt(2) / 2) + assert x__ == approx(np.sqrt(2) / 2) + assert y__ == approx(0) + assert z__ == approx(np.sqrt(2) / 2) x__, y__, z__ = lonlat2xyz(0, 60) - self.assertAlmostEqual(x__, np.sqrt(1) / 2) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, np.sqrt(3) / 2) + assert x__ == approx(np.sqrt(1) / 2) + assert y__ == approx(0) + assert z__ == approx(np.sqrt(3) / 2) def test_angle2xyz(self): """Test the lonlat2xyz function.""" x__, y__, z__ = angle2xyz(0, 0) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 1) + assert x__ == approx(0) + assert y__ == approx(0) + assert z__ == approx(1) x__, y__, z__ = angle2xyz(90, 0) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 1) + assert x__ == approx(0) + assert y__ == approx(0) + assert z__ == approx(1) x__, y__, z__ = angle2xyz(0, 90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 1) - self.assertAlmostEqual(z__, 0) + assert x__ == approx(0) + assert y__ == approx(1) + assert z__ == approx(0) x__, y__, z__ = angle2xyz(180, 0) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 1) + assert x__ == approx(0) + assert y__ == approx(0) + assert z__ == approx(1) x__, y__, z__ = angle2xyz(-90, 0) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 1) + assert x__ == approx(0) + assert y__ == approx(0) + assert z__ == approx(1) x__, y__, z__ = angle2xyz(0, -90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, -1) - self.assertAlmostEqual(z__, 0) + assert x__ == approx(0) + assert y__ == approx(-1) + assert z__ == approx(0) x__, y__, z__ = angle2xyz(90, 90) - self.assertAlmostEqual(x__, 1) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 0) + assert x__ == approx(1) + assert y__ == approx(0) + assert z__ == approx(0) x__, y__, z__ = angle2xyz(-90, 90) - self.assertAlmostEqual(x__, -1) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 0) + assert x__ == approx(-1) + assert y__ == approx(0) + assert z__ == approx(0) x__, y__, z__ = angle2xyz(180, 90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, -1) - self.assertAlmostEqual(z__, 0) + assert x__ == approx(0) + assert y__ == approx(-1) + assert z__ == approx(0) x__, y__, z__ = angle2xyz(0, -90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, -1) - self.assertAlmostEqual(z__, 0) + assert x__ == approx(0) + assert y__ == approx(-1) + assert z__ == approx(0) x__, y__, z__ = angle2xyz(0, 45) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, np.sqrt(2) / 2) - self.assertAlmostEqual(z__, np.sqrt(2) / 2) + assert x__ == approx(0) + assert y__ == approx(np.sqrt(2) / 2) + assert z__ == approx(np.sqrt(2) / 2) x__, y__, z__ = angle2xyz(0, 60) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 
np.sqrt(3) / 2) - self.assertAlmostEqual(z__, np.sqrt(1) / 2) + assert x__ == approx(0) + assert y__ == approx(np.sqrt(3) / 2) + assert z__ == approx(np.sqrt(1) / 2) def test_xyz2lonlat(self): """Test xyz2lonlat.""" lon, lat = xyz2lonlat(1, 0, 0) - self.assertAlmostEqual(lon, 0) - self.assertAlmostEqual(lat, 0) + assert lon == approx(0) + assert lat == approx(0) lon, lat = xyz2lonlat(0, 1, 0) - self.assertAlmostEqual(lon, 90) - self.assertAlmostEqual(lat, 0) + assert lon == approx(90) + assert lat == approx(0) lon, lat = xyz2lonlat(0, 0, 1, asin=True) - self.assertAlmostEqual(lon, 0) - self.assertAlmostEqual(lat, 90) + assert lon == approx(0) + assert lat == approx(90) lon, lat = xyz2lonlat(0, 0, 1) - self.assertAlmostEqual(lon, 0) - self.assertAlmostEqual(lat, 90) + assert lon == approx(0) + assert lat == approx(90) lon, lat = xyz2lonlat(np.sqrt(2) / 2, np.sqrt(2) / 2, 0) - self.assertAlmostEqual(lon, 45) - self.assertAlmostEqual(lat, 0) + assert lon == approx(45) + assert lat == approx(0) def test_xyz2angle(self): """Test xyz2angle.""" azi, zen = xyz2angle(1, 0, 0) - self.assertAlmostEqual(azi, 90) - self.assertAlmostEqual(zen, 90) + assert azi == approx(90) + assert zen == approx(90) azi, zen = xyz2angle(0, 1, 0) - self.assertAlmostEqual(azi, 0) - self.assertAlmostEqual(zen, 90) + assert azi == approx(0) + assert zen == approx(90) azi, zen = xyz2angle(0, 0, 1) - self.assertAlmostEqual(azi, 0) - self.assertAlmostEqual(zen, 0) + assert azi == approx(0) + assert zen == approx(0) azi, zen = xyz2angle(0, 0, 1, acos=True) - self.assertAlmostEqual(azi, 0) - self.assertAlmostEqual(zen, 0) + assert azi == approx(0) + assert zen == approx(0) azi, zen = xyz2angle(np.sqrt(2) / 2, np.sqrt(2) / 2, 0) - self.assertAlmostEqual(azi, 45) - self.assertAlmostEqual(zen, 90) + assert azi == approx(45) + assert zen == approx(90) azi, zen = xyz2angle(-1, 0, 0) - self.assertAlmostEqual(azi, -90) - self.assertAlmostEqual(zen, 90) + assert azi == approx(-90) + assert zen == approx(90) azi, zen = xyz2angle(0, -1, 0) - self.assertAlmostEqual(azi, 180) - self.assertAlmostEqual(zen, 90) + assert azi == approx(180) + assert zen == approx(90) def test_proj_units_to_meters(self): """Test proj units to meters conversion.""" prj = "+asd=123123123123" res = proj_units_to_meters(prj) - self.assertEqual(res, prj) + assert res == prj prj = "+a=6378.137" res = proj_units_to_meters(prj) - self.assertEqual(res, "+a=6378137.000") + assert res == "+a=6378137.000" prj = "+a=6378.137 +units=km" res = proj_units_to_meters(prj) - self.assertEqual(res, "+a=6378137.000") + assert res == "+a=6378137.000" prj = "+a=6378.137 +b=6378.137" res = proj_units_to_meters(prj) - self.assertEqual(res, "+a=6378137.000 +b=6378137.000") + assert res == "+a=6378137.000 +b=6378137.000" prj = "+a=6378.137 +b=6378.137 +h=35785.863" res = proj_units_to_meters(prj) - self.assertEqual(res, "+a=6378137.000 +b=6378137.000 +h=35785863.000") + assert res == "+a=6378137.000 +b=6378137.000 +h=35785863.000" class TestGetSatPos: @@ -271,11 +272,11 @@ def test_get_satpos(self, included_prefixes, preference, expected_result): @pytest.mark.parametrize( "attrs", - ( + [ {}, {"orbital_parameters": {"projection_longitude": 1}}, {"satellite_altitude": 1} - ) + ] ) def test_get_satpos_fails_with_informative_error(self, attrs): """Test that get_satpos raises an informative error message.""" @@ -358,10 +359,9 @@ def test_specific_check_satpy(self): checked_fake = False for call in print_mock.mock_calls: if len(call[1]) > 0 and "__fake" in call[1][0]: - self.assertNotIn("ok", 
call[1][1]) + assert "ok" not in call[1][1] checked_fake = True - self.assertTrue(checked_fake, "Did not find __fake module " - "mentioned in checks") + assert checked_fake, "Did not find __fake module mentioned in checks" def test_debug_on(caplog): @@ -605,7 +605,7 @@ def test_convert_remote_files_to_fsspec_storage_options(open_files): def test_import_error_helper(): """Test the import error helper.""" module = "some_crazy_name_for_unknow_dependency_module" - with pytest.raises(ImportError) as err: + with pytest.raises(ImportError) as err: # noqa: PT012 with import_error_helper(module): import unknow_dependency_module # noqa assert module in str(err) diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py index 6e1ce7f2e2..c2d049dae1 100644 --- a/satpy/tests/test_writers.py +++ b/satpy/tests/test_writers.py @@ -79,7 +79,7 @@ def test_show(self, mock_get_image): data = np.arange(25).reshape((5, 5)) p = xr.DataArray(data, dims=["y", "x"]) show(p) - self.assertTrue(mock_get_image.return_value.show.called) + assert mock_get_image.return_value.show.called class TestEnhancer(unittest.TestCase): @@ -89,13 +89,13 @@ def test_basic_init_no_args(self): """Test Enhancer init with no arguments passed.""" from satpy.writers import Enhancer e = Enhancer() - self.assertIsNotNone(e.enhancement_tree) + assert e.enhancement_tree is not None def test_basic_init_no_enh(self): """Test Enhancer init requesting no enhancements.""" from satpy.writers import Enhancer e = Enhancer(enhancement_config_file=False) - self.assertIsNone(e.enhancement_tree) + assert e.enhancement_tree is None def test_basic_init_provided_enh(self): """Test Enhancer init with string enhancement configs.""" @@ -108,7 +108,7 @@ def test_basic_init_provided_enh(self): method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} """]) - self.assertIsNotNone(e.enhancement_tree) + assert e.enhancement_tree is not None def test_init_nonexistent_enh_file(self): """Test Enhancer init with a nonexistent enhancement configuration file.""" @@ -522,23 +522,21 @@ def _ignore_all_tags(self, tag_suffix, node): writer_fn_name = os.path.splitext(writer_fn)[0] writer_info = read_writer_config([writer_config], loader=IgnoreLoader) - self.assertEqual(writer_fn_name, writer_info["name"], - "Writer YAML filename doesn't match writer " - "name in the YAML file.") + assert writer_fn_name == writer_info["name"] def test_available_writers(self): """Test the 'available_writers' function.""" from satpy import available_writers writer_names = available_writers() - self.assertGreater(len(writer_names), 0) - self.assertIsInstance(writer_names[0], str) - self.assertIn("geotiff", writer_names) + assert len(writer_names) > 0 + assert isinstance(writer_names[0], str) + assert "geotiff" in writer_names writer_infos = available_writers(as_dict=True) - self.assertEqual(len(writer_names), len(writer_infos)) - self.assertIsInstance(writer_infos[0], dict) + assert len(writer_names) == len(writer_infos) + assert isinstance(writer_infos[0], dict) for writer_info in writer_infos: - self.assertIn("name", writer_info) + assert "name" in writer_info class TestComputeWriterResults(unittest.TestCase): @@ -584,7 +582,7 @@ def test_simple_image(self): writer="simple_image", compute=False) compute_writer_results([res]) - self.assertTrue(os.path.isfile(fname)) + assert os.path.isfile(fname) def test_geotiff(self): """Test writing to mitiff file.""" @@ -594,7 +592,7 @@ def test_geotiff(self): datasets=["test"], writer="geotiff", compute=False) 
compute_writer_results([res]) - self.assertTrue(os.path.isfile(fname)) + assert os.path.isfile(fname) # FIXME: This reader needs more information than exist at the moment # def test_mitiff(self): @@ -628,8 +626,8 @@ def test_multiple_geotiff(self): datasets=["test"], writer="geotiff", compute=False) compute_writer_results([res1, res2]) - self.assertTrue(os.path.isfile(fname1)) - self.assertTrue(os.path.isfile(fname2)) + assert os.path.isfile(fname1) + assert os.path.isfile(fname2) def test_multiple_simple(self): """Test writing to geotiff files.""" @@ -643,8 +641,8 @@ def test_multiple_simple(self): datasets=["test"], writer="simple_image", compute=False) compute_writer_results([res1, res2]) - self.assertTrue(os.path.isfile(fname1)) - self.assertTrue(os.path.isfile(fname2)) + assert os.path.isfile(fname1) + assert os.path.isfile(fname2) def test_mixed(self): """Test writing to multiple mixed-type files.""" @@ -659,8 +657,8 @@ def test_mixed(self): writer="geotiff", compute=False) res3 = [] compute_writer_results([res1, res2, res3]) - self.assertTrue(os.path.isfile(fname1)) - self.assertTrue(os.path.isfile(fname2)) + assert os.path.isfile(fname1) + assert os.path.isfile(fname2) class TestBaseWriter: @@ -805,9 +803,9 @@ def test_add_overlay_basic_rgb(self): with mock.patch.object(self.orig_rgb_img, "apply_pil") as apply_pil: apply_pil.return_value = self.orig_rgb_img new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir, fill_value=0) - self.assertEqual(self.orig_rgb_img.mode, new_img.mode) + assert self.orig_rgb_img.mode == new_img.mode new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir) - self.assertEqual(self.orig_rgb_img.mode + "A", new_img.mode) + assert self.orig_rgb_img.mode + "A" == new_img.mode with mock.patch.object(self.orig_rgb_img, "convert") as convert: convert.return_value = self.orig_rgb_img @@ -849,21 +847,21 @@ def test_add_overlay_basic_l(self): """Test basic add_overlay usage with L data.""" from satpy.writers import add_overlay new_img = add_overlay(self.orig_l_img, self.area_def, "", fill_value=0) - self.assertEqual("RGB", new_img.mode) + assert "RGB" == new_img.mode new_img = add_overlay(self.orig_l_img, self.area_def, "") - self.assertEqual("RGBA", new_img.mode) + assert "RGBA" == new_img.mode def test_add_decorate_basic_rgb(self): """Test basic add_decorate usage with RGB data.""" from satpy.writers import add_decorate new_img = add_decorate(self.orig_rgb_img, **self.decorate) - self.assertEqual("RGBA", new_img.mode) + assert "RGBA" == new_img.mode def test_add_decorate_basic_l(self): """Test basic add_decorate usage with L data.""" from satpy.writers import add_decorate new_img = add_decorate(self.orig_l_img, **self.decorate) - self.assertEqual("RGBA", new_img.mode) + assert "RGBA" == new_img.mode def test_group_results_by_output_file(tmp_path): diff --git a/satpy/tests/test_yaml_reader.py b/satpy/tests/test_yaml_reader.py index b829f46d23..35752cd237 100644 --- a/satpy/tests/test_yaml_reader.py +++ b/satpy/tests/test_yaml_reader.py @@ -124,7 +124,7 @@ def test_get_filebase(self): pattern = os.path.join(*pattern.split("/")) filename = os.path.join(base_dir, "Oa05_radiance.nc") expected = os.path.join(base_data, "Oa05_radiance.nc") - self.assertEqual(yr._get_filebase(filename, pattern), expected) + assert yr._get_filebase(filename, pattern) == expected def test_match_filenames(self): """Check that matching filenames works.""" @@ -143,7 +143,7 @@ def test_match_filenames(self): filenames = [os.path.join(base_dir, "Oa05_radiance.nc"), 
os.path.join(base_dir, "geo_coordinates.nc")] expected = os.path.join(base_dir, "geo_coordinates.nc") - self.assertEqual(yr._match_filenames(filenames, pattern), {expected}) + assert yr._match_filenames(filenames, pattern) == {expected} def test_match_filenames_windows_forward_slash(self): """Check that matching filenames works on Windows with forward slashes. @@ -166,14 +166,13 @@ def test_match_filenames_windows_forward_slash(self): filenames = [os.path.join(base_dir, "Oa05_radiance.nc").replace(os.sep, "/"), os.path.join(base_dir, "geo_coordinates.nc").replace(os.sep, "/")] expected = os.path.join(base_dir, "geo_coordinates.nc").replace(os.sep, "/") - self.assertEqual(yr._match_filenames(filenames, pattern), {expected}) + assert yr._match_filenames(filenames, pattern) == {expected} def test_listify_string(self): """Check listify_string.""" - self.assertEqual(yr.listify_string(None), []) - self.assertEqual(yr.listify_string("some string"), ["some string"]) - self.assertEqual(yr.listify_string(["some", "string"]), - ["some", "string"]) + assert yr.listify_string(None) == [] + assert yr.listify_string("some string") == ["some string"] + assert yr.listify_string(["some", "string"]) == ["some", "string"] class DummyReader(BaseFileHandler): @@ -237,8 +236,8 @@ def test_select_from_pathnames(self): res = self.reader.select_files_from_pathnames(filelist) for expected in ["a001.bla", "a002.bla", "abcd.bla"]: - self.assertIn(expected, res) - self.assertEqual(len(res), 3) + assert expected in res + assert len(res) == 3 def test_fn_items_for_ft(self): """Check filename_items_for_filetype.""" @@ -247,7 +246,7 @@ def test_fn_items_for_ft(self): fiter = self.reader.filename_items_for_filetype(filelist, ft_info) filenames = dict(fname for fname in fiter) - self.assertEqual(len(filenames.keys()), 3) + assert len(filenames.keys()) == 3 def test_create_filehandlers(self): """Check create_filehandlers.""" @@ -255,7 +254,7 @@ def test_create_filehandlers(self): "abcd.bla", "k001.bla", "a003.bli"] self.reader.create_filehandlers(filelist) - self.assertEqual(len(self.reader.file_handlers["ftype1"]), 3) + assert len(self.reader.file_handlers["ftype1"]) == 3 def test_serializable(self): """Check that a reader is serializable by dask. 
@@ -342,29 +341,22 @@ def test_all_data_ids(self): def test_all_dataset_names(self): """Get all dataset names.""" - self.assertSetEqual(self.reader.all_dataset_names, - set(["ch01", "ch02", "lons", "lats"])) + assert self.reader.all_dataset_names == set(["ch01", "ch02", "lons", "lats"]) def test_available_dataset_ids(self): """Get ids of the available datasets.""" loadables = self.reader.select_files_from_pathnames(["a001.bla"]) self.reader.create_filehandlers(loadables) - self.assertSetEqual(set(self.reader.available_dataset_ids), - {make_dataid(name="ch02", - wavelength=(0.7, 0.75, 0.8), - calibration="counts", - modifiers=()), - make_dataid(name="ch01", - wavelength=(0.5, 0.6, 0.7), - calibration="reflectance", - modifiers=())}) + assert set(self.reader.available_dataset_ids) == {make_dataid(name="ch02", wavelength=(0.7, 0.75, 0.8), + calibration="counts", modifiers=()), + make_dataid(name="ch01", wavelength=(0.5, 0.6, 0.7), + calibration="reflectance", modifiers=())} def test_available_dataset_names(self): """Get ids of the available datasets.""" loadables = self.reader.select_files_from_pathnames(["a001.bla"]) self.reader.create_filehandlers(loadables) - self.assertSetEqual(set(self.reader.available_dataset_names), - set(["ch01", "ch02"])) + assert set(self.reader.available_dataset_names) == set(["ch01", "ch02"]) def test_filter_fh_by_time(self): """Check filtering filehandlers by time.""" @@ -383,11 +375,11 @@ def test_filter_fh_by_time(self): for idx, fh in enumerate([fh0, fh1, fh2, fh3, fh4, fh5]): res = self.reader.time_matches(fh.start_time, fh.end_time) # only the first one should be false - self.assertEqual(res, idx not in [0, 4]) + assert res == (idx not in [0, 4]) for idx, fh in enumerate([fh0, fh1, fh2, fh3, fh4, fh5]): res = self.reader.time_matches(fh.start_time, None) - self.assertEqual(res, idx not in [0, 1, 4, 5]) + assert res == (idx not in [0, 1, 4, 5]) @patch("satpy.readers.yaml_reader.get_area_def") @patch("satpy.readers.yaml_reader.AreaDefBoundary") @@ -401,17 +393,17 @@ def test_file_covers_area(self, bnd, adb, gad): bnd.return_value.contour_poly.intersection.return_value = True adb.return_value.contour_poly.intersection.return_value = True res = self.reader.check_file_covers_area(file_handler, True) - self.assertTrue(res) + assert res bnd.return_value.contour_poly.intersection.return_value = False adb.return_value.contour_poly.intersection.return_value = False res = self.reader.check_file_covers_area(file_handler, True) - self.assertFalse(res) + assert not res file_handler.get_bounding_box.side_effect = NotImplementedError() self.reader.filter_parameters["area"] = True res = self.reader.check_file_covers_area(file_handler, True) - self.assertTrue(res) + assert res def test_start_end_time(self): """Check start and end time behaviours.""" @@ -446,8 +438,8 @@ def get_end_time(): "2": [fh2, fh3], } - self.assertEqual(self.reader.start_time, datetime(1999, 12, 30, 0, 0)) - self.assertEqual(self.reader.end_time, datetime(2000, 1, 3, 12, 30)) + assert self.reader.start_time == datetime(1999, 12, 30, 0, 0) + assert self.reader.end_time == datetime(2000, 1, 3, 12, 30) def test_select_from_pathnames(self): """Check select_files_from_pathnames.""" @@ -455,9 +447,9 @@ def test_select_from_pathnames(self): res = self.reader.select_files_from_pathnames(filelist) for expected in ["a001.bla", "a002.bla", "abcd.bla"]: - self.assertIn(expected, res) + assert expected in res - self.assertEqual(0, len(self.reader.select_files_from_pathnames([]))) + assert 0 == 
len(self.reader.select_files_from_pathnames([])) def test_select_from_directory(self): """Check select_files_from_directory.""" @@ -469,12 +461,11 @@ def test_select_from_directory(self): res = self.reader.select_files_from_directory(dpath) for expected in ["a001.bla", "a002.bla", "abcd.bla"]: - self.assertIn(os.path.join(dpath, expected), res) + assert os.path.join(dpath, expected) in res for fname in filelist: os.remove(os.path.join(dpath, fname)) - self.assertEqual(0, - len(self.reader.select_files_from_directory(dpath))) + assert 0 == len(self.reader.select_files_from_directory(dpath)) os.rmdir(dpath) from fsspec.implementations.local import LocalFileSystem @@ -484,14 +475,12 @@ def glob(self, pattern): return ["/grocery/apricot.nc", "/grocery/aubergine.nc"] res = self.reader.select_files_from_directory(dpath, fs=Silly()) - self.assertEqual( - res, - {"/grocery/apricot.nc", "/grocery/aubergine.nc"}) + assert res == {"/grocery/apricot.nc", "/grocery/aubergine.nc"} def test_supports_sensor(self): """Check supports_sensor.""" - self.assertTrue(self.reader.supports_sensor("canon")) - self.assertFalse(self.reader.supports_sensor("nikon")) + assert self.reader.supports_sensor("canon") + assert not self.reader.supports_sensor("nikon") @patch("satpy.readers.yaml_reader.StackedAreaDefinition") def test_load_area_def(self, sad): @@ -502,33 +491,31 @@ def test_load_area_def(self, sad): for _i in range(items): file_handlers.append(MagicMock()) final_area = self.reader._load_area_def(dataid, file_handlers) - self.assertEqual(final_area, sad.return_value.squeeze.return_value) + assert final_area == sad.return_value.squeeze.return_value args, kwargs = sad.call_args - self.assertEqual(len(args), items) + assert len(args) == items def test_preferred_filetype(self): """Test finding the preferred filetype.""" self.reader.file_handlers = {"a": "a", "b": "b", "c": "c"} - self.assertEqual(self.reader._preferred_filetype(["c", "a"]), "c") - self.assertEqual(self.reader._preferred_filetype(["a", "c"]), "a") - self.assertEqual(self.reader._preferred_filetype(["d", "e"]), None) + assert self.reader._preferred_filetype(["c", "a"]) == "c" + assert self.reader._preferred_filetype(["a", "c"]) == "a" + assert self.reader._preferred_filetype(["d", "e"]) is None def test_get_coordinates_for_dataset_key(self): """Test getting coordinates for a key.""" ds_q = DataQuery(name="ch01", wavelength=(0.5, 0.6, 0.7, "µm"), calibration="reflectance", modifiers=()) res = self.reader._get_coordinates_for_dataset_key(ds_q) - self.assertListEqual(res, - [make_dataid(name="lons"), - make_dataid(name="lats")]) + assert res == [make_dataid(name="lons"), make_dataid(name="lats")] def test_get_coordinates_for_dataset_key_without(self): """Test getting coordinates for a key without coordinates.""" ds_id = make_dataid(name="lons", modifiers=()) res = self.reader._get_coordinates_for_dataset_key(ds_id) - self.assertListEqual(res, []) + assert res == [] def test_get_coordinates_for_dataset_keys(self): """Test getting coordinates for keys.""" @@ -543,7 +530,7 @@ def test_get_coordinates_for_dataset_keys(self): lons]) expected = {ds_id1: [lons, lats], ds_id2: [lons, lats], lons: []} - self.assertDictEqual(res, expected) + assert res == expected def test_get_file_handlers(self): """Test getting filehandler to load a dataset.""" @@ -551,10 +538,10 @@ def test_get_file_handlers(self): calibration="reflectance", modifiers=()) self.reader.file_handlers = {"ftype1": "bla"} - self.assertEqual(self.reader._get_file_handlers(ds_id1), "bla") + assert 
self.reader._get_file_handlers(ds_id1) == "bla" lons = make_dataid(name="lons", modifiers=()) - self.assertEqual(self.reader._get_file_handlers(lons), None) + assert self.reader._get_file_handlers(lons) is None @patch("satpy.readers.yaml_reader.xr") def test_load_entire_dataset(self, xarray): @@ -564,7 +551,7 @@ def test_load_entire_dataset(self, xarray): proj = self.reader._load_dataset(None, {}, file_handlers) - self.assertIs(proj, xarray.concat.return_value) + assert proj is xarray.concat.return_value class TestFileYAMLReaderLoading(unittest.TestCase): @@ -711,7 +698,7 @@ def test_update_ds_ids_from_file_handlers(self): if not isinstance(file_types, list): file_types = [file_types] if ftype in file_types: - self.assertEqual(resol, ds_id["resolution"]) + assert resol == ds_id["resolution"] # Test methods @@ -1016,13 +1003,13 @@ def test_get_expected_segments(self, cfh): # default (1) created_fhs = reader.create_filehandlers(["fake.nc"]) es = created_fhs["ft1"][0].filetype_info["expected_segments"] - self.assertEqual(es, 1) + assert es == 1 # YAML defined for each file type fake_fh.filetype_info["expected_segments"] = 2 created_fhs = reader.create_filehandlers(["fake.nc"]) es = created_fhs["ft1"][0].filetype_info["expected_segments"] - self.assertEqual(es, 2) + assert es == 2 # defined both in the filename and the YAML metadata # YAML has priority @@ -1030,20 +1017,20 @@ def test_get_expected_segments(self, cfh): fake_fh.filetype_info = {"expected_segments": 2} created_fhs = reader.create_filehandlers(["fake.nc"]) es = created_fhs["ft1"][0].filetype_info["expected_segments"] - self.assertEqual(es, 2) + assert es == 2 # defined in the filename fake_fh.filename_info = {"total_segments": 3} fake_fh.filetype_info = {} created_fhs = reader.create_filehandlers(["fake.nc"]) es = created_fhs["ft1"][0].filetype_info["expected_segments"] - self.assertEqual(es, 3) + assert es == 3 # check correct FCI segment (aka chunk in the FCI world) number reading into segment fake_fh.filename_info = {"count_in_repeat_cycle": 5} created_fhs = reader.create_filehandlers(["fake.nc"]) es = created_fhs["ft1"][0].filename_info["segment"] - self.assertEqual(es, 5) + assert es == 5 @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch("satpy.readers.yaml_reader.FileYAMLReader._load_dataset") @@ -1082,8 +1069,8 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): # No missing segments res = reader._load_dataset(dataid, ds_info, file_handlers) - self.assertTrue(res.attrs is file_handlers[0].combine_info.return_value) - self.assertTrue(empty_segment not in slice_list) + assert res.attrs is file_handlers[0].combine_info.return_value + assert empty_segment not in slice_list # One missing segment in the middle slice_list[4] = None @@ -1091,7 +1078,7 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) - self.assertTrue(slice_list[4] is empty_segment) + assert slice_list[4] is empty_segment # The last segment is missing slice_list = expected_segments * [seg, ] @@ -1100,7 +1087,7 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) - self.assertTrue(slice_list[-1] is empty_segment) + assert slice_list[-1] is empty_segment # The last two segments are missing slice_list = expected_segments * [seg, ] @@ -1109,8 
+1096,8 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) - self.assertTrue(slice_list[-1] is empty_segment) - self.assertTrue(slice_list[-2] is empty_segment) + assert slice_list[-1] is empty_segment + assert slice_list[-2] is empty_segment # The first segment is missing slice_list = expected_segments * [seg, ] @@ -1119,7 +1106,7 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) - self.assertTrue(slice_list[0] is empty_segment) + assert slice_list[0] is empty_segment # The first two segments are missing slice_list = expected_segments * [seg, ] @@ -1129,8 +1116,8 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) - self.assertTrue(slice_list[0] is empty_segment) - self.assertTrue(slice_list[1] is empty_segment) + assert slice_list[0] is empty_segment + assert slice_list[1] is empty_segment # Disable padding res = reader._load_dataset(dataid, ds_info, file_handlers, @@ -1174,7 +1161,7 @@ def test_pad_later_segments_area(self, AreaDefinition): file_handlers = [fh_1] dataid = "dataid" res = reader._pad_later_segments_area(file_handlers, dataid) - self.assertEqual(len(res), 2) + assert len(res) == 2 seg2_extent = (0, 1500, 200, 1000) expected_call = ("fill", "fill", "fill", "some_crs", 500, 200, seg2_extent) @@ -1197,7 +1184,7 @@ def test_pad_earlier_segments_area(self, AreaDefinition): dataid = "dataid" area_defs = {2: seg2_area} res = reader._pad_earlier_segments_area(file_handlers, dataid, area_defs) - self.assertEqual(len(res), 2) + assert len(res) == 2 seg1_extent = (0, 500, 200, 0) expected_call = ("fill", "fill", "fill", "some_crs", 500, 200, seg1_extent) @@ -1219,11 +1206,11 @@ def test_find_missing_segments(self): dataid = "dataid" res = fms(file_handlers, ds_info, dataid) counter, expected_segments, slice_list, failure, proj = res - self.assertEqual(counter, 2) - self.assertEqual(expected_segments, 1) - self.assertTrue(projectable in slice_list) - self.assertFalse(failure) - self.assertTrue(proj is projectable) + assert counter == 2 + assert expected_segments == 1 + assert projectable in slice_list + assert failure is False + assert proj is projectable # Three expected segments, first and last missing filename_info = {"segment": 2} @@ -1240,14 +1227,14 @@ def test_find_missing_segments(self): dataid = "dataid" res = fms(file_handlers, ds_info, dataid) counter, expected_segments, slice_list, failure, proj = res - self.assertEqual(counter, 3) - self.assertEqual(expected_segments, 3) - self.assertEqual(slice_list, [None, projectable, None]) - self.assertFalse(failure) - self.assertTrue(proj is projectable) + assert counter == 3 + assert expected_segments == 3 + assert slice_list == [None, projectable, None] + assert failure is False + assert proj is projectable -@pytest.fixture +@pytest.fixture() @patch.object(yr.GEOVariableSegmentYAMLReader, "__init__", lambda x: None) def GVSYReader(): """Get a fixture of the GEOVariableSegmentYAMLReader.""" @@ -1258,28 +1245,28 @@ def GVSYReader(): return reader -@pytest.fixture +@pytest.fixture() def fake_geswh(): """Get a fixture of the patched _get_empty_segment_with_height.""" with 
patch("satpy.readers.yaml_reader._get_empty_segment_with_height") as geswh: yield geswh -@pytest.fixture +@pytest.fixture() def fake_xr(): """Get a fixture of the patched xarray.""" with patch("satpy.readers.yaml_reader.xr") as xr: yield xr -@pytest.fixture +@pytest.fixture() def fake_mss(): """Get a fixture of the patched _find_missing_segments.""" with patch("satpy.readers.yaml_reader._find_missing_segments") as mss: yield mss -@pytest.fixture +@pytest.fixture() def fake_adef(): """Get a fixture of the patched AreaDefinition.""" with patch("satpy.readers.yaml_reader.AreaDefinition") as adef: diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index 70f1ec80e5..ca958fce37 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -156,8 +156,8 @@ def __call__(self, datasets, optional_datasets=None, **kwargs): opt_dep_name = opt_dep if isinstance(opt_dep, str) else opt_dep.get("name", "") if "NOPE" in opt_dep_name or "fail" in opt_dep_name: continue - assert (optional_datasets is not None and - len(optional_datasets)) + assert optional_datasets is not None + assert len(optional_datasets) resolution = datasets[0].attrs.get("resolution") mod_name = self.attrs["modifiers"][-1] data = datasets[0].data diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 0c9ca9f234..6c8977662a 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -354,7 +354,7 @@ def test_groups(self): # Different projection coordinates in one group are not supported with TempFile() as filename: - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Datasets .* must have identical projection coordinates..*"): scn.save_datasets(datasets=["VIS006", "HRV"], filename=filename, writer="cf") def test_single_time_value(self): @@ -731,7 +731,7 @@ def test_assert_xy_unique(self): assert_xy_unique(datas) datas["c"] = xr.DataArray(data=dummy, dims=("y", "x"), coords={"y": [1, 3], "x": [3, 4]}) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Datasets .* must have identical projection coordinates..*"): assert_xy_unique(datas) def test_link_coords(self): @@ -1149,7 +1149,7 @@ def test_global_attr_history_and_Conventions(self): class TestCFWriterData: """Test case for CF writer where data arrays are needed.""" - @pytest.fixture + @pytest.fixture() def datasets(self): """Create test dataset.""" data = [[75, 2], [3, 4]] @@ -1226,7 +1226,7 @@ def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): class EncodingUpdateTest: """Test update of netCDF encoding.""" - @pytest.fixture + @pytest.fixture() def fake_ds(self): """Create fake data for testing.""" ds = xr.Dataset({"foo": (("y", "x"), [[1, 2], [3, 4]]), @@ -1236,7 +1236,7 @@ def fake_ds(self): "lon": (("y", "x"), [[7, 8], [9, 10]])}) return ds - @pytest.fixture + @pytest.fixture() def fake_ds_digit(self): """Create fake data for testing.""" ds_digit = xr.Dataset({"CHANNEL_1": (("y", "x"), [[1, 2], [3, 4]]), @@ -1327,7 +1327,7 @@ def test_with_time(self, fake_ds): class TestEncodingKwarg: """Test CF writer with 'encoding' keyword argument.""" - @pytest.fixture + @pytest.fixture() def scene(self): """Create a fake scene.""" scn = Scene() @@ -1343,7 +1343,7 @@ def compression_on(self, request): """Get compression options.""" return request.param - @pytest.fixture + @pytest.fixture() def encoding(self, compression_on): """Get encoding.""" enc = { @@ -1359,19 +1359,19 @@ def encoding(self, compression_on): enc["test-array"].update(comp_params) 
return enc - @pytest.fixture + @pytest.fixture() def filename(self, tmp_path): """Get output filename.""" return str(tmp_path / "test.nc") - @pytest.fixture + @pytest.fixture() def complevel_exp(self, compression_on): """Get expected compression level.""" if compression_on: return 7 return 0 - @pytest.fixture + @pytest.fixture() def expected(self, complevel_exp): """Get expectated file contents.""" return { @@ -1419,7 +1419,7 @@ def test_no_warning_if_backends_match(self, scene, filename, monkeypatch): class TestEncodingAttribute(TestEncodingKwarg): """Test CF writer with 'encoding' dataset attribute.""" - @pytest.fixture + @pytest.fixture() def scene_with_encoding(self, scene, encoding): """Create scene with a dataset providing the 'encoding' attribute.""" scene["test-array"].encoding = encoding["test-array"] diff --git a/satpy/tests/writer_tests/test_mitiff.py b/satpy/tests/writer_tests/test_mitiff.py index 4e5c8b7c9c..b4ff371dab 100644 --- a/satpy/tests/writer_tests/test_mitiff.py +++ b/satpy/tests/writer_tests/test_mitiff.py @@ -593,7 +593,7 @@ def test_save_one_dataset(self): imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, os.listdir(self.base_dir)[0])) for key in imgdesc: if "In this file" in key: - self.assertEqual(key, " Channels: 1 In this file: 1") + assert key == " Channels: 1 In this file: 1" def test_save_one_dataset_sensor_set(self): """Test basic writer operation with one dataset ie. no bands.""" @@ -604,7 +604,7 @@ def test_save_one_dataset_sensor_set(self): imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, os.listdir(self.base_dir)[0])) for key in imgdesc: if "In this file" in key: - self.assertEqual(key, " Channels: 1 In this file: 1") + assert key == " Channels: 1 In this file: 1" def test_save_dataset_with_calibration(self): """Test writer operation with calibration.""" @@ -751,29 +751,29 @@ def test_save_dataset_with_calibration(self): if "Table_calibration" in key: found_table_calibration = True if "1-VIS0.63" in key: - self.assertEqual(key, expected_key_channel[0]) + assert key == expected_key_channel[0] number_of_calibrations += 1 elif "2-VIS0.86" in key: - self.assertEqual(key, expected_key_channel[1]) + assert key == expected_key_channel[1] number_of_calibrations += 1 elif "3(3B)-IR3.7" in key: - self.assertEqual(key, expected_key_channel[2]) + assert key == expected_key_channel[2] number_of_calibrations += 1 elif "4-IR10.8" in key: - self.assertEqual(key, expected_key_channel[3]) + assert key == expected_key_channel[3] number_of_calibrations += 1 elif "5-IR11.5" in key: - self.assertEqual(key, expected_key_channel[4]) + assert key == expected_key_channel[4] number_of_calibrations += 1 elif "6(3A)-VIS1.6" in key: - self.assertEqual(key, expected_key_channel[5]) + assert key == expected_key_channel[5] number_of_calibrations += 1 else: self.fail("Not a valid channel description i the given key.") - self.assertTrue(found_table_calibration, "Table_calibration is not found in the imagedescription.") - self.assertEqual(number_of_calibrations, 6) + assert found_table_calibration, "Table_calibration is not found in the imagedescription." 
+ assert number_of_calibrations == 6 pillow_tif = Image.open(os.path.join(self.base_dir, filename)) - self.assertEqual(pillow_tif.n_frames, 6) + assert pillow_tif.n_frames == 6 self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_save_dataset_with_calibration_one_dataset(self): @@ -817,10 +817,10 @@ def test_save_dataset_with_calibration_one_dataset(self): if "Table_calibration" in key: found_table_calibration = True if "BT" in key: - self.assertEqual(key, expected_key_channel[0]) + assert key == expected_key_channel[0] number_of_calibrations += 1 - self.assertTrue(found_table_calibration, "Expected table_calibration is not found in the imagedescription.") - self.assertEqual(number_of_calibrations, 1) + assert found_table_calibration, "Expected table_calibration is not found in the imagedescription." + assert number_of_calibrations == 1 self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_save_dataset_with_bad_value(self): @@ -883,7 +883,7 @@ def test_convert_proj4_string(self): w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir) proj4_string = w._add_proj4_string(ds1, ds1) - self.assertEqual(proj4_string, check["proj4"]) + assert proj4_string == check["proj4"] def test_save_dataset_palette(self): """Test writer operation as palette.""" @@ -934,11 +934,11 @@ def test_save_dataset_palette(self): dataset.attrs["start_time"]) pillow_tif = Image.open(os.path.join(self.base_dir, filename)) # Need to check PHOTOMETRIC is 3, ie palette - self.assertEqual(pillow_tif.tag_v2.get(262), 3) + assert pillow_tif.tag_v2.get(262) == 3 # Check the colormap of the palette image palette = pillow_tif.palette colormap = list((palette.getdata())[1]) - self.assertEqual(colormap, exp_c) + assert colormap == exp_c imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, filename)) found_color_info = False unit_name_found = False @@ -961,11 +961,11 @@ def test_save_dataset_palette(self): elif "COLOR INFO:" in key: found_color_info = True # Check the name of the palette description - self.assertEqual(name_length, 2) + assert name_length == 2 # Check the name and unit name of the palette - self.assertEqual(unit_name, " Test") + assert unit_name == " Test" # Check the palette description of the palette - self.assertEqual(names, [" test", " test2"]) + assert names == [" test", " test2"] self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_simple_write_two_bands(self): @@ -987,7 +987,7 @@ def test_get_test_dataset_three_bands_prereq(self): imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, filename)) for element in imgdesc: if " Channels:" in element: - self.assertEqual(element, " Channels: 3 In this file: 1 2 3") + assert element == " Channels: 3 In this file: 1 2 3" def test_save_dataset_with_calibration_error_one_dataset(self): """Test saving if mitiff as dataset with only one channel with invalid calibration.""" @@ -1010,7 +1010,7 @@ def test_save_dataset_with_calibration_error_one_dataset(self): with self.assertLogs(logger) as lc: w._add_calibration_datasets(4, dataset, _reverse_offset, _reverse_scale, _decimals) for _op in lc.output: - self.assertIn("Unknown calib type. Must be Radiance, Reflectance or BT.", _op) + assert "Unknown calib type. Must be Radiance, Reflectance or BT." 
in _op finally: logger.removeHandler(stream_handler) @@ -1039,7 +1039,6 @@ def test_save_dataset_with_missing_palette(self): with self.assertLogs(logger, logging.ERROR) as lc: w._save_as_palette(dataset.compute(), os.path.join(self.base_dir, filename), tiffinfo, **palette) for _op in lc.output: - self.assertIn(("In a mitiff palette image a color map must be provided: " - "palette_color_map is missing."), _op) + assert "In a mitiff palette image a color map must be provided: palette_color_map is missing." in _op finally: logger.removeHandler(stream_handler) diff --git a/satpy/tests/writer_tests/test_ninjogeotiff.py b/satpy/tests/writer_tests/test_ninjogeotiff.py index bba3e9b44e..a9c60bdf90 100644 --- a/satpy/tests/writer_tests/test_ninjogeotiff.py +++ b/satpy/tests/writer_tests/test_ninjogeotiff.py @@ -463,8 +463,8 @@ def ntg_latlon(test_image_latlon): SatelliteNameID=654321) -@pytest.fixture -def patch_datetime_now(monkeypatch): +@pytest.fixture() +def _patch_datetime_now(monkeypatch): """Get a fake datetime.datetime.now().""" # Source: https://stackoverflow.com/a/20503374/974555, CC-BY-SA 4.0 @@ -741,11 +741,13 @@ def test_calc_single_tag_by_name(ntg1, ntg2, ntg3): assert ntg2.get_tag("DataType") == "GORN" assert ntg3.get_tag("DataType") == "PPRN" assert ntg1.get_tag("DataSource") == "dowsing rod" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unknown tag: invalid"): ntg1.get_tag("invalid") - with pytest.raises(ValueError): + with pytest.raises(ValueError, + match="Optional tag OriginalHeader must be supplied by user if user wants to request the value," + " but wasn't."): ntg1.get_tag("OriginalHeader") - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Tag Gradient is added later by the GeoTIFF writer."): ntg1.get_tag("Gradient") @@ -773,11 +775,12 @@ def test_get_color_depth(ntg1, ntg2, ntg3, ntg_weird, ntg_rgba, ntg_cmyk): assert ntg3.get_color_depth() == 8 # mode P assert ntg_weird.get_color_depth() == 16 # mode LA assert ntg_rgba.get_color_depth() == 32 # mode RGBA - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unsupported image mode: CMYK"): ntg_cmyk.get_color_depth() -def test_get_creation_date_id(ntg1, ntg2, ntg3, patch_datetime_now): +@pytest.mark.usefixtures("_patch_datetime_now") +def test_get_creation_date_id(ntg1, ntg2, ntg3): """Test getting the creation date ID. This is the time at which the file was created. 
@@ -887,7 +890,7 @@ def test_get_projection(ntg1, ntg2, ntg3, ntg_weird, ntg_rgba, ntg_cmyk, assert ntg_cmyk.get_projection() == "SPOL" assert ntg_rgba.get_projection() == "MERC" assert ntg_latlon.get_projection() == "PLAT" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unknown mapping from area .*"): ntg_weird.get_projection() @@ -898,7 +901,7 @@ def test_get_ref_lat_1(ntg1, ntg2, ntg3, ntg_weird, ntg_latlon): np.testing.assert_allclose(rl1, 0.0) np.testing.assert_allclose(ntg2.get_ref_lat_1(), 2.5) np.testing.assert_allclose(ntg3.get_ref_lat_1(), 75) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Could not find reference latitude for area test-area-north-stereo"): ntg_weird.get_ref_lat_1() with pytest.raises(AttributeError): ntg_latlon.get_ref_lat_1() @@ -945,7 +948,7 @@ def test_get_ymax(ntg1, ntg2, ntg3): def test_create_unknown_tags(test_image_small_arctic_P): """Test that unknown tags raise ValueError.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="The following tags were not recognised: Locatie"): NinJoTagGenerator( test_image_small_arctic_P, 42, diff --git a/satpy/tests/writer_tests/test_ninjotiff.py b/satpy/tests/writer_tests/test_ninjotiff.py index b8c311f9ed..ea6cf07f95 100644 --- a/satpy/tests/writer_tests/test_ninjotiff.py +++ b/satpy/tests/writer_tests/test_ninjotiff.py @@ -53,7 +53,7 @@ def test_init(self): from satpy.writers.ninjotiff import NinjoTIFFWriter ninjo_tags = {40000: "NINJO"} ntw = NinjoTIFFWriter(tags=ninjo_tags) - self.assertDictEqual(ntw.tags, ninjo_tags) + assert ntw.tags == ninjo_tags @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_dataset") @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) @@ -65,7 +65,7 @@ def test_dataset(self, iwsd): with mock.patch("satpy.writers.ninjotiff.convert_units") as uconv: ntw.save_dataset(dataset, physic_unit="CELSIUS") uconv.assert_called_once_with(dataset, "K", "CELSIUS") - self.assertEqual(iwsd.call_count, 1) + assert iwsd.call_count == 1 @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_dataset") @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) @@ -78,7 +78,7 @@ def test_dataset_skip_unit_conversion(self, iwsd): ntw.save_dataset(dataset, physic_unit="CELSIUS", convert_temperature_units=False) uconv.assert_not_called() - self.assertEqual(iwsd.call_count, 1) + assert iwsd.call_count == 1 @mock.patch("satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset") @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_image") diff --git a/satpy/tests/writer_tests/test_simple_image.py b/satpy/tests/writer_tests/test_simple_image.py index b3e92c9510..01d89a22ad 100644 --- a/satpy/tests/writer_tests/test_simple_image.py +++ b/satpy/tests/writer_tests/test_simple_image.py @@ -72,6 +72,6 @@ def test_simple_delayed_write(self): w = PillowWriter(base_dir=self.base_dir) res = w.save_datasets(datasets, compute=False) for r__ in res: - self.assertIsInstance(r__, Delayed) + assert isinstance(r__, Delayed) r__.compute() compute_writer_results(res) diff --git a/satpy/tests/writer_tests/test_utils.py b/satpy/tests/writer_tests/test_utils.py index a0cf88e54f..e71e3a7e1e 100644 --- a/satpy/tests/writer_tests/test_utils.py +++ b/satpy/tests/writer_tests/test_utils.py @@ -32,4 +32,4 @@ def test_flatten_dict(self): "b_c": 1, "b_d_e": 1, "b_d_f_g": [1, 2]} - self.assertDictEqual(wutils.flatten_dict(d), expected) + assert wutils.flatten_dict(d) == expected From 
2875ca1be33ae9e0396b95878e1b7faefe938307 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 13:21:50 +0200 Subject: [PATCH 337/702] Add other ruff rules --- pyproject.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 64c036b07f..f1dabc9473 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,8 +15,7 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ -# select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"] -select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q"] +select = ["E", "W", "F", "I", "A", "B", "S", "N", "D","PT", "TID", "C90", "Q", "T10", "T20"] ignore = ["B905"] # only available from python 3.10 line-length = 120 From c67e92ff5fab9d0e3c784e8f1e651b6d91abd787 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 13:46:08 +0200 Subject: [PATCH 338/702] Fix after merge --- doc/source/reader_table.py | 2 + pyproject.toml | 3 +- satpy/_scene_converters.py | 2 +- satpy/composites/spectral.py | 2 +- satpy/modifiers/angles.py | 3 +- satpy/readers/_geos_area.py | 2 + satpy/readers/abi_l2_nc.py | 4 +- satpy/readers/ahi_l2_nc.py | 50 +++---- satpy/readers/gerb_l2_hr_h5.py | 16 +- satpy/readers/gms/gms5_vissr_l1b.py | 2 +- satpy/readers/gms/gms5_vissr_navigation.py | 3 + satpy/readers/goes_imager_nc.py | 4 +- satpy/readers/hrit_jma.py | 2 +- satpy/readers/iasi_l2_so2_bufr.py | 2 +- satpy/readers/modis_l2.py | 2 +- satpy/readers/mviri_l1b_fiduceo_nc.py | 4 +- satpy/readers/satpy_cf_nc.py | 6 +- satpy/readers/seviri_base.py | 31 ++-- satpy/readers/seviri_l1b_hrit.py | 2 +- satpy/readers/seviri_l1b_icare.py | 2 +- satpy/readers/seviri_l1b_native.py | 4 +- satpy/readers/seviri_l2_grib.py | 3 + satpy/scene.py | 4 +- satpy/tests/compositor_tests/test_spectral.py | 10 +- satpy/tests/multiscene_tests/test_blend.py | 4 +- .../modis_tests/test_modis_l1b.py | 14 +- satpy/tests/reader_tests/test_abi_l2_nc.py | 34 ++--- satpy/tests/reader_tests/test_ahi_hsd.py | 18 +-- .../reader_tests/test_ahi_l1b_gridded_bin.py | 1 - satpy/tests/reader_tests/test_ahi_l2_nc.py | 40 ++--- .../tests/reader_tests/test_gerb_l2_hr_h5.py | 138 +++++++++--------- satpy/tests/reader_tests/test_seviri_base.py | 28 ++-- satpy/tests/reader_tests/test_utils.py | 8 +- satpy/tests/test_modifiers.py | 28 ++-- satpy/tests/test_resample.py | 8 +- satpy/tests/test_utils.py | 5 +- satpy/tests/writer_tests/test_cf.py | 50 +++---- satpy/utils.py | 25 +--- satpy/writers/cf_writer.py | 30 ++-- setup.py | 8 +- utils/coord2area_def.py | 15 -- utils/fetch_avhrr_calcoeffs.py | 2 - 42 files changed, 303 insertions(+), 318 deletions(-) diff --git a/doc/source/reader_table.py b/doc/source/reader_table.py index 1c6760a390..3ddec3444b 100644 --- a/doc/source/reader_table.py +++ b/doc/source/reader_table.py @@ -28,6 +28,7 @@ def rst_table_row(columns=None): Args: columns (list[str]): Content of each column. + Returns: str """ @@ -48,6 +49,7 @@ def rst_table_header(name=None, header=None, header_rows=1, widths="auto"): width (optional[list[int]]): Width of each column as a list. 
If not specified defaults to auto and will therefore determined by the backend (see ) + Returns: str """ diff --git a/pyproject.toml b/pyproject.toml index f1dabc9473..8d81b23818 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,8 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ -select = ["E", "W", "F", "I", "A", "B", "S", "N", "D","PT", "TID", "C90", "Q", "T10", "T20"] +# "A", "B", "S", "N", "D", +select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] ignore = ["B905"] # only available from python 3.10 line-length = 120 diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py index 0aa903d2f8..4eb9826850 100644 --- a/satpy/_scene_converters.py +++ b/satpy/_scene_converters.py @@ -84,7 +84,7 @@ def to_xarray(scn, Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. - Returns + Returns: ------- ds, xr.Dataset A CF-compliant xr.Dataset diff --git a/satpy/composites/spectral.py b/satpy/composites/spectral.py index 7d05a000d6..59e8518a7e 100644 --- a/satpy/composites/spectral.py +++ b/satpy/composites/spectral.py @@ -149,7 +149,7 @@ class NDVIHybridGreen(SpectralBlender): def __init__(self, *args, ndvi_min=0.0, ndvi_max=1.0, limits=(0.15, 0.05), strength=1.0, **kwargs): """Initialize class and set the NDVI limits, blending fraction limits and strength.""" if strength <= 0.0: - raise ValueError(f"Expected stength greater than 0.0, got {strength}.") + raise ValueError(f"Expected strength greater than 0.0, got {strength}.") self.ndvi_min = ndvi_min self.ndvi_max = ndvi_max diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index a41e000254..1d059e1f5a 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -331,6 +331,7 @@ def compute_relative_azimuth(sat_azi: xr.DataArray, sun_azi: xr.DataArray) -> xr Args: sat_azi: DataArray for the satellite azimuth angles, typically in 0-360 degree range. sun_azi: DataArray for the solar azimuth angles, should be in same range as sat_azi. + Returns: A DataArray containing the relative azimuth angle in the 0-180 degree range. @@ -571,7 +572,7 @@ def _sunzen_reduction_ndarray(data: np.ndarray, reduction_factor = reduction_factor.clip(0., 1.) # invert the reduction factor such that minimum reduction is done at `limit` and gradually increases towards max_sza - with np.errstate(invalid='ignore'): # we expect space pixels to be invalid + with np.errstate(invalid="ignore"): # we expect space pixels to be invalid reduction_factor = 1. - np.log(reduction_factor + 1) / np.log(2) # apply non-linearity to the reduction factor for a non-linear reduction of the signal. This can be used for a diff --git a/satpy/readers/_geos_area.py b/satpy/readers/_geos_area.py index e777d78ca7..03dabfa9a0 100644 --- a/satpy/readers/_geos_area.py +++ b/satpy/readers/_geos_area.py @@ -72,6 +72,7 @@ def get_area_extent(pdict): coff: Column offset factor loff: Line offset factor scandir: 'N2S' for standard (N->S), 'S2N' for inverse (S->N) + Returns: aex: An area extent for the scene @@ -178,6 +179,7 @@ def get_geos_area_naming(input_dict): input_dict: dict Dictionary with keys `platform_name`, `instrument_name`, `service_name`, `service_desc`, `resolution` . The resolution is expected in meters. + Returns: area_naming_dict with `area_id`, `description` keys, values are strings. 
diff --git a/satpy/readers/abi_l2_nc.py b/satpy/readers/abi_l2_nc.py index ef30629c5c..2324d3e1fd 100644 --- a/satpy/readers/abi_l2_nc.py +++ b/satpy/readers/abi_l2_nc.py @@ -43,9 +43,9 @@ def get_dataset(self, key, info): self._remove_problem_attrs(variable) # convert to satpy standard units - if variable.attrs['units'] == '1' and key['calibration'] == 'reflectance': + if variable.attrs["units"] == "1" and key["calibration"] == "reflectance": variable *= 100.0 - variable.attrs['units'] = '%' + variable.attrs["units"] = "%" return variable diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 5159931819..17823fed1e 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -54,7 +54,7 @@ logger = logging.getLogger(__name__) -EXPECTED_DATA_AREA = 'Full Disk' +EXPECTED_DATA_AREA = "Full Disk" class HIML2NCFileHandler(BaseFileHandler): @@ -69,39 +69,39 @@ def __init__(self, filename, filename_info, filetype_info): chunks={"xc": "auto", "yc": "auto"}) # Check that file is a full disk scene, we don't know the area for anything else - if self.nc.attrs['cdm_data_type'] != EXPECTED_DATA_AREA: - raise ValueError('File is not a full disk scene') - - self.sensor = self.nc.attrs['instrument_name'].lower() - self.nlines = self.nc.dims['Columns'] - self.ncols = self.nc.dims['Rows'] - self.platform_name = self.nc.attrs['satellite_name'] - self.platform_shortname = filename_info['platform'] + if self.nc.attrs["cdm_data_type"] != EXPECTED_DATA_AREA: + raise ValueError("File is not a full disk scene") + + self.sensor = self.nc.attrs["instrument_name"].lower() + self.nlines = self.nc.dims["Columns"] + self.ncols = self.nc.dims["Rows"] + self.platform_name = self.nc.attrs["satellite_name"] + self.platform_shortname = filename_info["platform"] self._meta = None @property def start_time(self): """Start timestamp of the dataset.""" - dt = self.nc.attrs['time_coverage_start'] - return datetime.strptime(dt, '%Y-%m-%dT%H:%M:%SZ') + dt = self.nc.attrs["time_coverage_start"] + return datetime.strptime(dt, "%Y-%m-%dT%H:%M:%SZ") @property def end_time(self): """End timestamp of the dataset.""" - dt = self.nc.attrs['time_coverage_end'] - return datetime.strptime(dt, '%Y-%m-%dT%H:%M:%SZ') + dt = self.nc.attrs["time_coverage_end"] + return datetime.strptime(dt, "%Y-%m-%dT%H:%M:%SZ") def get_dataset(self, key, info): """Load a dataset.""" - var = info['file_key'] - logger.debug('Reading in get_dataset %s.', var) + var = info["file_key"] + logger.debug("Reading in get_dataset %s.", var) variable = self.nc[var] # Data has 'Latitude' and 'Longitude' coords, these must be replaced. - variable = variable.rename({'Rows': 'y', 'Columns': 'x'}) + variable = variable.rename({"Rows": "y", "Columns": "x"}) - variable = variable.drop('Latitude') - variable = variable.drop('Longitude') + variable = variable.drop("Latitude") + variable = variable.drop("Longitude") variable.attrs.update(key.to_dict()) return variable @@ -117,20 +117,20 @@ def get_area_def(self, dsid): return self.area def _get_area_def(self): - logger.info('The AHI L2 cloud products do not have the metadata required to produce an area definition.' - ' Assuming standard Himawari-8/9 full disk projection.') + logger.info("The AHI L2 cloud products do not have the metadata required to produce an area definition." 
+ " Assuming standard Himawari-8/9 full disk projection.") # Basic check to ensure we're processing a full disk (2km) scene.n if self.nlines != 5500 or self.ncols != 5500: raise ValueError("Input L2 file is not a full disk Himawari scene. Only full disk data is supported.") - pdict = {'cfac': 20466275, 'lfac': 20466275, 'coff': 2750.5, 'loff': 2750.5, 'a': 6378137.0, 'h': 35785863.0, - 'b': 6356752.3, 'ssp_lon': 140.7, 'nlines': self.nlines, 'ncols': self.ncols, 'scandir': 'N2S'} + pdict = {"cfac": 20466275, "lfac": 20466275, "coff": 2750.5, "loff": 2750.5, "a": 6378137.0, "h": 35785863.0, + "b": 6356752.3, "ssp_lon": 140.7, "nlines": self.nlines, "ncols": self.ncols, "scandir": "N2S"} aex = get_area_extent(pdict) - pdict['a_name'] = 'Himawari_Area' - pdict['a_desc'] = "AHI Full Disk area" - pdict['p_id'] = f'geos{self.platform_shortname}' + pdict["a_name"] = "Himawari_Area" + pdict["a_desc"] = "AHI Full Disk area" + pdict["p_id"] = f"geos{self.platform_shortname}" return get_area_definition(pdict, aex) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index f663b3040f..0bf918d68f 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -40,10 +40,10 @@ def gerb_get_dataset(ds, ds_info): The routine takes into account the quantisation factor and fill values. """ ds_attrs = ds.attrs - ds_fill = ds_info['fill_value'] + ds_fill = ds_info["fill_value"] fill_mask = ds != ds_fill - if 'Quantisation Factor' in ds_attrs and 'Unit' in ds_attrs: - ds = ds*ds_attrs['Quantisation Factor'] + if "Quantisation Factor" in ds_attrs and "Unit" in ds_attrs: + ds = ds*ds_attrs["Quantisation Factor"] else: ds = ds*1. ds = ds.where(fill_mask) @@ -61,17 +61,17 @@ def end_time(self): @property def start_time(self): """Get start time.""" - return self.filename_info['sensing_time'] + return self.filename_info["sensing_time"] def get_dataset(self, ds_id, ds_info): """Read a HDF5 file into an xarray DataArray.""" - ds_name = ds_id['name'] - if ds_name not in ['Solar Flux', 'Thermal Flux', 'Solar Radiance', 'Thermal Radiance']: + ds_name = ds_id["name"] + if ds_name not in ["Solar Flux", "Thermal Flux", "Solar Radiance", "Thermal Radiance"]: raise KeyError(f"{ds_name} is an unknown dataset for this reader.") - ds = gerb_get_dataset(self[f'Radiometry/{ds_name}'], ds_info) + ds = gerb_get_dataset(self[f"Radiometry/{ds_name}"], ds_info) - ds.attrs.update({'start_time': self.start_time, 'data_time': self.start_time, 'end_time': self.end_time}) + ds.attrs.update({"start_time": self.start_time, "data_time": self.start_time, "end_time": self.end_time}) return ds diff --git a/satpy/readers/gms/gms5_vissr_l1b.py b/satpy/readers/gms/gms5_vissr_l1b.py index f3c6898f65..0e1a5df483 100644 --- a/satpy/readers/gms/gms5_vissr_l1b.py +++ b/satpy/readers/gms/gms5_vissr_l1b.py @@ -29,7 +29,7 @@ scene.load(["VIS", "IR1"]) -References +References: ~~~~~~~~~~ Details about platform, instrument and data format can be found in the diff --git a/satpy/readers/gms/gms5_vissr_navigation.py b/satpy/readers/gms/gms5_vissr_navigation.py index 8a811b2210..6335ce13b9 100644 --- a/satpy/readers/gms/gms5_vissr_navigation.py +++ b/satpy/readers/gms/gms5_vissr_navigation.py @@ -445,6 +445,7 @@ def get_lon_lat(pixel, nav_params): pixel (Pixel): Point in image coordinates. nav_params (PixelNavigationParameters): Navigation parameters for a single pixel. + Returns: Longitude and latitude in degrees. 
""" @@ -481,6 +482,7 @@ def transform_image_coords_to_scanning_angles(point, image_offset, scanning_angl point (Pixel): Point in image coordinates. image_offset (ImageOffset): Image offset. scanning_angles (ScanningAngles): Scanning angles. + Returns: Scanning angles (x, y) at the pixel center (rad). """ @@ -677,6 +679,7 @@ def intersect_with_earth(view_vector, sat_pos, ellipsoid): coordinates. sat_pos (Vector3D): Satellite position in earth-fixed coordinates. ellipsoid (EarthEllipsoid): Earth ellipsoid. + Returns: Intersection (Vector3D) with the earth's surface. """ diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index c343b7f7c5..4cb7cf8610 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -200,7 +200,7 @@ 6. Data is received via EumetCast -References +References: ========== - `[GVAR]`_ GVAR transmission format @@ -887,6 +887,7 @@ def _viscounts2radiance(counts, slope, offset): counts: Raw detector counts slope: Slope [W m-2 um-1 sr-1] offset: Offset [W m-2 um-1 sr-1] + Returns: Radiance [W m-2 um-1 sr-1] """ @@ -913,6 +914,7 @@ def _calibrate_vis(radiance, k): k: pi / H, where H is the solar spectral irradiance at annual-average sun-earth distance, averaged over the spectral response function of the detector). Units of k: [m2 um sr W-1] + Returns: Reflectance [%] """ diff --git a/satpy/readers/hrit_jma.py b/satpy/readers/hrit_jma.py index 2a85a95cd4..865cbc5dd7 100644 --- a/satpy/readers/hrit_jma.py +++ b/satpy/readers/hrit_jma.py @@ -34,7 +34,7 @@ - `AHI sample data`_ -Example +Example: ------- Here is an example how to read Himwari-8 HRIT data with Satpy: diff --git a/satpy/readers/iasi_l2_so2_bufr.py b/satpy/readers/iasi_l2_so2_bufr.py index 500c2b29df..a63d434a86 100644 --- a/satpy/readers/iasi_l2_so2_bufr.py +++ b/satpy/readers/iasi_l2_so2_bufr.py @@ -31,7 +31,7 @@ Scene(reader="iasi_l2_so2_bufr", filenames=fnames) -Example +Example: ------- Here is an example how to read the data in satpy: diff --git a/satpy/readers/modis_l2.py b/satpy/readers/modis_l2.py index 0ad1b95ba9..8fdf1c69bb 100644 --- a/satpy/readers/modis_l2.py +++ b/satpy/readers/modis_l2.py @@ -245,7 +245,7 @@ def _bits_strip(bit_start, bit_count, value): value : int Number from which to extract the bits - Returns + Returns: ------- int Value of the extracted bits diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index 979483513a..043c45d4cc 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -43,7 +43,7 @@ name in the reader as in the netCDF file. -Example +Example: ------- This is how to read FIDUCEO MVIRI FCDR data in satpy: @@ -143,7 +143,7 @@ sza_vis = scn[query_vis] -References +References: ---------- - `[Handbook]`_ MFG User Handbook - `[PUG]`_ FIDUCEO MVIRI FCDR Product User Guide diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index cf99b57e7d..120a14be36 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -36,9 +36,8 @@ '{platform_name}-{sensor}-{start_time:%Y%m%d%H%M%S}-{end_time:%Y%m%d%H%M%S}.nc' -Example +Example: ------- - Here is an example how to read the data in satpy: .. code-block:: python @@ -92,9 +91,8 @@ ''AVHRR-GAC_FDR_1C_{platform}_{start_time:%Y%m%dT%H%M%SZ}_{end_time:%Y%m%dT%H%M%SZ}_{processing_mode}_{disposition_mode}_{creation_time}_{version_int:04d}.nc' -Example +Example: ------- - Here is an example how to read the data in satpy: .. 
code-block:: python diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 1d7b7576e7..25e6ed1a8b 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -390,34 +390,34 @@ MEIRINK_EPOCH = datetime(2000, 1, 1) MEIRINK_COEFS: dict[str, dict[int, dict[str, tuple[float, float]]]] = {} -MEIRINK_COEFS['2023'] = {} +MEIRINK_COEFS["2023"] = {} # Meteosat-8 -MEIRINK_COEFS['2023'][321] = {'VIS006': (24.346, 0.3739), - 'VIS008': (30.989, 0.3111), - 'IR_016': (22.869, 0.0065) +MEIRINK_COEFS["2023"][321] = {"VIS006": (24.346, 0.3739), + "VIS008": (30.989, 0.3111), + "IR_016": (22.869, 0.0065) } # Meteosat-9 -MEIRINK_COEFS['2023'][322] = {'VIS006': (21.026, 0.2556), - 'VIS008': (26.875, 0.1835), - 'IR_016': (21.394, 0.0498) +MEIRINK_COEFS["2023"][322] = {"VIS006": (21.026, 0.2556), + "VIS008": (26.875, 0.1835), + "IR_016": (21.394, 0.0498) } # Meteosat-10 -MEIRINK_COEFS['2023'][323] = {'VIS006': (19.829, 0.5856), - 'VIS008': (25.284, 0.6787), - 'IR_016': (23.066, -0.0286) +MEIRINK_COEFS["2023"][323] = {"VIS006": (19.829, 0.5856), + "VIS008": (25.284, 0.6787), + "IR_016": (23.066, -0.0286) } # Meteosat-11 -MEIRINK_COEFS['2023'][324] = {'VIS006': (20.515, 0.3600), - 'VIS008': (25.803, 0.4844), - 'IR_016': (22.354, -0.0187) +MEIRINK_COEFS["2023"][324] = {"VIS006": (20.515, 0.3600), + "VIS008": (25.803, 0.4844), + "IR_016": (22.354, -0.0187) } @@ -440,7 +440,7 @@ def get_meirink_slope(meirink_coefs, acquisition_time): def should_apply_meirink(calib_mode, channel_name): """Decide whether to use the Meirink calibration coefficients.""" - return "MEIRINK" in calib_mode and channel_name in ['VIS006', 'VIS008', 'IR_016'] + return "MEIRINK" in calib_mode and channel_name in ["VIS006", "VIS008", "IR_016"] class MeirinkCalibrationHandler: @@ -448,7 +448,7 @@ class MeirinkCalibrationHandler: def __init__(self, calib_mode): """Initialize the calibration handler.""" - self.coefs = MEIRINK_COEFS[calib_mode.split('-')[1]] + self.coefs = MEIRINK_COEFS[calib_mode.split("-")[1]] def get_slope(self, platform, channel, time): """Return the slope using the provided calibration coefficients.""" @@ -963,6 +963,7 @@ def calculate_area_extent(area_dict): line_step: Pixel resolution in meters in south-north direction [column_offset: Column offset, defaults to 0 if not given] [line_offset: Line offset, defaults to 0 if not given] + Returns: tuple: An area extent for the scene defined by the lower left and upper right corners diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 83fc82f687..8e3fb148bc 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -84,7 +84,7 @@ } -Example +Example: ------- Here is an example how to read the data in satpy: diff --git a/satpy/readers/seviri_l1b_icare.py b/satpy/readers/seviri_l1b_icare.py index b2ceb30313..5c151d64a2 100644 --- a/satpy/readers/seviri_l1b_icare.py +++ b/satpy/readers/seviri_l1b_icare.py @@ -32,7 +32,7 @@ CHANN is the channel (i.e: HRV, IR016, WV073, etc) VX-XX is the processing version number -Example +Example: ------- Here is an example how to read the data in satpy: diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 81f2d01300..4593d3fe3d 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -37,7 +37,7 @@ To see the full list of arguments that can be provided, look into the documentation of :class:`NativeMSGFileHandler`. 
-Example +Example: ------- Here is an example how to read the data in satpy. @@ -749,7 +749,7 @@ def __init__(self, header, trailer, mda): def get_img_bounds(self, dataset_id, is_roi): """Get image line and column boundaries. - returns: + Returns: Dictionary with the four keys 'south_bound', 'north_bound', 'east_bound' and 'west_bound', each containing a list of the respective line/column numbers of the image boundaries. diff --git a/satpy/readers/seviri_l2_grib.py b/satpy/readers/seviri_l2_grib.py index ca021a7bc0..b69c60e7ac 100644 --- a/satpy/readers/seviri_l2_grib.py +++ b/satpy/readers/seviri_l2_grib.py @@ -151,6 +151,7 @@ def _get_proj_area(self, gid): Args: gid: The ID of the GRIB message. + Returns: tuple: A tuple of two dictionaries for the projection and the area definition. pdict: @@ -232,6 +233,7 @@ def _get_xarray_from_msg(self, gid): Args: gid: The ID of the GRIB message. + Returns: DataArray: The array containing the retrieved values. """ @@ -268,6 +270,7 @@ def _get_from_msg(gid, key): Args: gid: The ID of the GRIB message. key: The key of the required attribute. + Returns: The retrieved attribute or None if the key is missing. """ diff --git a/satpy/scene.py b/satpy/scene.py index f0a6e2468a..d96c81a0e4 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -785,7 +785,7 @@ def aggregate(self, dataset_ids=None, boundary="trim", side="left", func="mean", Returns: A new aggregated scene - See also: + See Also: xarray.DataArray.coarsen Example: @@ -1146,7 +1146,7 @@ def to_xarray(self, Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. - Returns + Returns: ------- ds, xr.Dataset A CF-compliant xr.Dataset diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 7386bb066a..36a3dd9355 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -67,13 +67,13 @@ def test_hybrid_green(self): def test_green_corrector(self): """Test the deprecated class for green corrections.""" - comp = GreenCorrector('blended_channel', fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), - standard_name='toa_bidirectional_reflectance') + comp = GreenCorrector("blended_channel", fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), + standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'blended_channel' - assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' + assert res.attrs["name"] == "blended_channel" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.23) @@ -114,6 +114,6 @@ def test_nonliniear_scaling(self): def test_invalid_strength(self): """Test using invalid `strength` term for non-linear scaling.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Expected strength greater than 0.0, got 0.0."): _ = NDVIHybridGreen("ndvi_hybrid_green", strength=0.0, prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") diff --git a/satpy/tests/multiscene_tests/test_blend.py b/satpy/tests/multiscene_tests/test_blend.py index 120af51930..f9d7e35462 100644 --- a/satpy/tests/multiscene_tests/test_blend.py +++ b/satpy/tests/multiscene_tests/test_blend.py @@ -394,7 +394,7 @@ def _check_stacked_metadata(data_arr: xr.DataArray, exp_name: str) -> None: class TestTemporalRGB: """Test the temporal 
RGB blending method.""" - @pytest.fixture + @pytest.fixture() def nominal_data(self): """Return the input arrays for the nominal use case.""" da1 = xr.DataArray([1, 0, 0], attrs={"start_time": datetime(2023, 5, 22, 9, 0, 0)}) @@ -403,7 +403,7 @@ def nominal_data(self): return [da1, da2, da3] - @pytest.fixture + @pytest.fixture() def expected_result(self): """Return the expected result arrays.""" return [[1, 0, 0], [0, 1, 0], [0, 0, 1]] diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py index 8976d03fb8..11068b6577 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py @@ -50,7 +50,7 @@ def _check_shared_metadata(data_arr): assert data_arr.attrs["platform_name"] == "EOS-Terra" assert "rows_per_scan" in data_arr.attrs assert isinstance(data_arr.attrs["rows_per_scan"], int) - assert data_arr.attrs['reader'] == "modis_l1b" + assert data_arr.attrs["reader"] == "modis_l1b" assert "resolution" in data_arr.attrs res = data_arr.attrs["resolution"] if res == 5000: @@ -160,7 +160,7 @@ def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, d shape_250m = _shape_for_resolution(250) default_shape = _shape_for_resolution(default_res) scheduler = CustomScheduler(max_computes=1 + has_5km + has_500 + has_250) - with dask.config.set({'scheduler': scheduler, 'array.chunk-size': '1 MiB'}): + with dask.config.set({"scheduler": scheduler, "array.chunk-size": "1 MiB"}): _load_and_check_geolocation(scene, "*", default_res, default_shape, True) _load_and_check_geolocation(scene, 5000, 5000, shape_5km, has_5km) _load_and_check_geolocation(scene, 500, 500, shape_500m, has_500) @@ -179,9 +179,9 @@ def test_load_sat_zenith_angle(self, modis_l1b_nasa_mod021km_file): def test_load_vis(self, modis_l1b_nasa_mod021km_file): """Test loading visible band.""" - scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file) - dataset_name = '1' - with dask.config.set({'array.chunk-size': '1 MiB'}): + scene = Scene(reader="modis_l1b", filenames=modis_l1b_nasa_mod021km_file) + dataset_name = "1" + with dask.config.set({"array.chunk-size": "1 MiB"}): scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset[0, 0] == 300.0 @@ -194,8 +194,8 @@ def test_load_vis_saturation(self, mask_saturated, modis_l1b_nasa_mod021km_file) """Test loading visible band.""" scene = Scene(reader="modis_l1b", filenames=modis_l1b_nasa_mod021km_file, reader_kwargs={"mask_saturated": mask_saturated}) - dataset_name = '2' - with dask.config.set({'array.chunk-size': '1 MiB'}): + dataset_name = "2" + with dask.config.set({"array.chunk-size": "1 MiB"}): scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset.shape == _shape_for_resolution(1000) diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py index 68d40017a7..4b8d3a9578 100644 --- a/satpy/tests/reader_tests/test_abi_l2_nc.py +++ b/satpy/tests/reader_tests/test_abi_l2_nc.py @@ -169,23 +169,23 @@ def test_mcmip_get_dataset(self, xr_, product, exp_metadata): exp_data *= 100 exp_attrs = { - 'instrument_ID': None, - 'modifiers': (), - 'name': product, - 'observation_type': 'MCMIP', - 'orbital_slot': None, - 'reader': 'abi_l2_nc', - 'platform_name': 'GOES-16', - 'platform_shortname': 'G16', - 'production_site': None, - 'scan_mode': 'M6', - 'scene_abbr': 'F', - 'scene_id': None, - 'sensor': 'abi', - 'timeline_ID': None, - 'start_time': datetime(2017, 9, 
20, 17, 30, 40, 800000), - 'end_time': datetime(2017, 9, 20, 17, 41, 17, 500000), - 'ancillary_variables': [], + "instrument_ID": None, + "modifiers": (), + "name": product, + "observation_type": "MCMIP", + "orbital_slot": None, + "reader": "abi_l2_nc", + "platform_name": "GOES-16", + "platform_shortname": "G16", + "production_site": None, + "scan_mode": "M6", + "scene_abbr": "F", + "scene_id": None, + "sensor": "abi", + "timeline_ID": None, + "start_time": datetime(2017, 9, 20, 17, 30, 40, 800000), + "end_time": datetime(2017, 9, 20, 17, 41, 17, 500000), + "ancillary_variables": [], } exp_attrs.update(exp_metadata) diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 1ceb14e733..9338440246 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -569,10 +569,10 @@ def test_updated_calibrate(self): def test_user_calibration(self): """Test user-defined calibration modes.""" # This is for radiance correction - self.fh.user_calibration = {'B13': {'slope': 0.95, - 'offset': -0.1}} - self.fh.band_name = 'B13' - rad = self.fh.calibrate(data=self.counts, calibration='radiance') + self.fh.user_calibration = {"B13": {"slope": 0.95, + "offset": -0.1}} + self.fh.band_name = "B13" + rad = self.fh.calibrate(data=self.counts, calibration="radiance") rad_np = rad.compute() assert rad.dtype == rad_np.dtype assert rad.dtype == np.float32 @@ -581,11 +581,11 @@ def test_user_calibration(self): assert np.allclose(rad, rad_exp) # This is for DN calibration - self.fh.user_calibration = {'B13': {'slope': -0.0032, - 'offset': 15.20}, - 'type': 'DN'} - self.fh.band_name = 'B13' - rad = self.fh.calibrate(data=self.counts, calibration='radiance') + self.fh.user_calibration = {"B13": {"slope": -0.0032, + "offset": 15.20}, + "type": "DN"} + self.fh.band_name = "B13" + rad = self.fh.calibrate(data=self.counts, calibration="radiance") rad_np = rad.compute() assert rad.dtype == rad_np.dtype assert rad.dtype == np.float32 diff --git a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py index 73206e7ffd..05abef600b 100644 --- a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py +++ b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py @@ -267,7 +267,6 @@ def tearDown(self): def test_get_luts(self): """Check that the function to download LUTs operates successfully.""" tempdir = tempfile.gettempdir() - print(self.fh.lut_dir) self.fh._get_luts() assert not os.path.exists(os.path.join(tempdir, "count2tbb_v102/")) for lut_name in AHI_LUT_NAMES: diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index 39de4e1053..7d4050ecf0 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -18,7 +18,7 @@ start_time = datetime(2023, 8, 24, 5, 40, 21) end_time = datetime(2023, 8, 24, 5, 49, 40) -dimensions = {'Columns': 5500, 'Rows': 5500} +dimensions = {"Columns": 5500, "Rows": 5500} exp_ext = (-5499999.9012, -5499999.9012, 5499999.9012, 5499999.9012) @@ -30,12 +30,12 @@ } badarea_attrs = global_attrs.copy() -badarea_attrs['cdm_data_type'] = 'bad_area' +badarea_attrs["cdm_data_type"] = "bad_area" -def ahil2_filehandler(fname, platform='h09'): +def ahil2_filehandler(fname, platform="h09"): """Instantiate a Filehandler.""" - fileinfo = {'platform': platform} + fileinfo = {"platform": platform} filetype = None fh = HIML2NCFileHandler(fname, fileinfo, filetype) return fh @@ -45,9 +45,9 @@ 
def ahil2_filehandler(fname, platform='h09'): def himl2_filename(tmp_path_factory): """Create a fake himawari l2 file.""" fname = f'{tmp_path_factory.mktemp("data")}/AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc' - ds = xr.Dataset({'CloudMask': (['Rows', 'Columns'], clmk_data)}, - coords={'Latitude': (['Rows', 'Columns'], lat_data), - 'Longitude': (['Rows', 'Columns'], lon_data)}, + ds = xr.Dataset({"CloudMask": (["Rows", "Columns"], clmk_data)}, + coords={"Latitude": (["Rows", "Columns"], lat_data), + "Longitude": (["Rows", "Columns"], lon_data)}, attrs=global_attrs) ds.to_netcdf(fname) return fname @@ -57,9 +57,9 @@ def himl2_filename(tmp_path_factory): def himl2_filename_bad(tmp_path_factory): """Create a fake himawari l2 file.""" fname = f'{tmp_path_factory.mktemp("data")}/AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc' - ds = xr.Dataset({'CloudMask': (['Rows', 'Columns'], clmk_data)}, - coords={'Latitude': (['Rows', 'Columns'], lat_data), - 'Longitude': (['Rows', 'Columns'], lon_data)}, + ds = xr.Dataset({"CloudMask": (["Rows", "Columns"], clmk_data)}, + coords={"Latitude": (["Rows", "Columns"], lat_data), + "Longitude": (["Rows", "Columns"], lon_data)}, attrs=badarea_attrs) ds.to_netcdf(fname) @@ -75,35 +75,35 @@ def test_startend(himl2_filename): def test_ahi_l2_area_def(himl2_filename, caplog): """Test reader handles area definition correctly.""" - ps = '+proj=geos +lon_0=140.7 +h=35785863 +x_0=0 +y_0=0 +a=6378137 +rf=298.257024882273 +units=m +no_defs +type=crs' + ps = "+proj=geos +lon_0=140.7 +h=35785863 +x_0=0 +y_0=0 +a=6378137 +rf=298.257024882273 +units=m +no_defs +type=crs" # Check case where input data is correct size. fh = ahil2_filehandler(himl2_filename) clmk_id = make_dataid(name="cloudmask") area_def = fh.get_area_def(clmk_id) - assert area_def.width == dimensions['Columns'] - assert area_def.height == dimensions['Rows'] + assert area_def.width == dimensions["Columns"] + assert area_def.height == dimensions["Rows"] assert np.allclose(area_def.area_extent, exp_ext) assert area_def.proj4_string == ps # Check case where input data is incorrect size. 
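    # Editor's note (sketch, not part of the original patch): pytest treats the
    # ``match`` argument as a regular expression and checks it with ``re.search``
    # against the string form of the raised exception, so the patterns added in
    # this patch only need to cover a fragment of the error message.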
- with pytest.raises(ValueError): - fh = ahil2_filehandler(himl2_filename) - fh.nlines = 3000 + fh = ahil2_filehandler(himl2_filename) + fh.nlines = 3000 + with pytest.raises(ValueError, match="Input L2 file is not a full disk Himawari scene..*"): fh.get_area_def(clmk_id) def test_bad_area_name(himl2_filename_bad): """Check case where area name is not correct.""" - global_attrs['cdm_data_type'] = 'bad_area' - with pytest.raises(ValueError): + global_attrs["cdm_data_type"] = "bad_area" + with pytest.raises(ValueError, match="File is not a full disk scene"): ahil2_filehandler(himl2_filename_bad) - global_attrs['cdm_data_type'] = 'Full Disk' + global_attrs["cdm_data_type"] = "Full Disk" def test_load_data(himl2_filename): """Test that data is loaded successfully.""" fh = ahil2_filehandler(himl2_filename) clmk_id = make_dataid(name="cloudmask") - clmk = fh.get_dataset(clmk_id, {'file_key': 'CloudMask'}) + clmk = fh.get_dataset(clmk_id, {"file_key": "CloudMask"}) assert np.allclose(clmk.data, clmk_data) diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py index eb06362831..0333f3df2b 100644 --- a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py +++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py @@ -37,10 +37,10 @@ def make_h5_null_string(length): def write_h5_null_string_att(loc_id, name, s): """Write a NULL terminated string attribute at loc_id.""" dt = make_h5_null_string(length=7) - name = bytes(name.encode('ascii')) - s = bytes(s.encode('ascii')) + name = bytes(name.encode("ascii")) + s = bytes(s.encode("ascii")) at = h5py.h5a.create(loc_id, name, dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(s, dtype=f'|S{len(s)+1}')) + at.write(np.array(s, dtype=f"|S{len(s)+1}")) @pytest.fixture(scope="session") @@ -48,74 +48,74 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): """Create a dummy HDF5 file for the GERB L2 HR product.""" filename = tmp_path_factory.mktemp("data") / FNAME - with h5py.File(filename, 'w') as fid: - fid.create_group('/Angles') - fid['/Angles/Relative Azimuth'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Angles/Relative Azimuth'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - fid['/Angles/Solar Zenith'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Angles/Solar Zenith'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - write_h5_null_string_att(fid['/Angles/Relative Azimuth'].id, 'Unit', 'Degree') - fid['/Angles/Viewing Azimuth'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Angles/Viewing Azimuth'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - write_h5_null_string_att(fid['/Angles/Viewing Azimuth'].id, 'Unit', 'Degree') - fid['/Angles/Viewing Zenith'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Angles/Viewing Zenith'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - write_h5_null_string_att(fid['/Angles/Viewing Zenith'].id, 'Unit', 'Degree') - fid.create_group('/GERB') + with h5py.File(filename, "w") as fid: + fid.create_group("/Angles") + fid["/Angles/Relative Azimuth"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Angles/Relative Azimuth"].attrs["Quantisation Factor"] = np.array(0.1, dtype="float64") + fid["/Angles/Solar Zenith"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Angles/Solar Zenith"].attrs["Quantisation Factor"] = np.array(0.1, dtype="float64") + write_h5_null_string_att(fid["/Angles/Relative Azimuth"].id, "Unit", "Degree") + 
fid["/Angles/Viewing Azimuth"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Angles/Viewing Azimuth"].attrs["Quantisation Factor"] = np.array(0.1, dtype="float64") + write_h5_null_string_att(fid["/Angles/Viewing Azimuth"].id, "Unit", "Degree") + fid["/Angles/Viewing Zenith"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Angles/Viewing Zenith"].attrs["Quantisation Factor"] = np.array(0.1, dtype="float64") + write_h5_null_string_att(fid["/Angles/Viewing Zenith"].id, "Unit", "Degree") + fid.create_group("/GERB") dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) dt.set_size(3) dt.set_strpad(h5py.h5t.STR_NULLTERM) - write_h5_null_string_att(fid['/GERB'].id, 'Instrument Identifier', 'G4') - fid.create_group('/GGSPS') - fid['/GGSPS'].attrs['L1.5 NANRG Product Version'] = np.array(-1, dtype='int32') - fid.create_group('/Geolocation') - write_h5_null_string_att(fid['/Geolocation'].id, 'Geolocation File Name', - 'G4_SEV4_L20_HR_GEO_20180111_181500_V010.hdf') - fid['/Geolocation'].attrs['Nominal Satellite Longitude (degrees)'] = np.array(0.0, dtype='float64') - fid.create_group('/Imager') - fid['/Imager'].attrs['Instrument Identifier'] = np.array(4, dtype='int32') - write_h5_null_string_att(fid['/Imager'].id, 'Type', 'SEVIRI') - fid.create_group('/RMIB') - fid.create_group('/Radiometry') - fid['/Radiometry'].attrs['SEVIRI Radiance Definition Flag'] = np.array(2, dtype='int32') - fid['/Radiometry/A Values (per GERB detector cell)'] = np.ones(shape=(256,), dtype=np.dtype('>f8')) - fid['/Radiometry/C Values (per GERB detector cell)'] = np.ones(shape=(256,), dtype=np.dtype('>f8')) - fid['/Radiometry/Longwave Correction'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Radiometry/Longwave Correction'].attrs['Offset'] = np.array(1.0, dtype='float64') - fid['/Radiometry/Longwave Correction'].attrs['Quantisation Factor'] = np.array(0.005, dtype='float64') - fid['/Radiometry/Shortwave Correction'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Radiometry/Shortwave Correction'].attrs['Offset'] = np.array(1.0, dtype='float64') - fid['/Radiometry/Shortwave Correction'].attrs['Quantisation Factor'] = np.array(0.005, dtype='float64') - fid['/Radiometry/Solar Flux'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Radiometry/Solar Flux'].attrs['Quantisation Factor'] = np.array(0.25, dtype='float64') - write_h5_null_string_att(fid['/Radiometry/Solar Flux'].id, 'Unit', 'Watt per square meter') - fid['/Radiometry/Solar Radiance'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Radiometry/Solar Radiance'].attrs['Quantisation Factor'] = np.array(0.05, dtype='float64') - write_h5_null_string_att(fid['/Radiometry/Solar Radiance'].id, 'Unit', 'Watt per square meter per steradian') - fid['/Radiometry/Thermal Flux'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Radiometry/Thermal Flux'].attrs['Quantisation Factor'] = np.array(0.25, dtype='float64') - write_h5_null_string_att(fid['/Radiometry/Thermal Flux'].id, 'Unit', 'Watt per square meter') - fid['/Radiometry/Thermal Radiance'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Radiometry/Thermal Radiance'].attrs['Quantisation Factor'] = np.array(0.05, dtype='float64') - write_h5_null_string_att(fid['/Radiometry/Thermal Radiance'].id, 'Unit', 'Watt per square meter per steradian') - fid.create_group('/Scene Identification') - write_h5_null_string_att(fid['/Scene Identification'].id, - 'Solar Angular Dependency Models Set Version', 'CERES_TRMM.1') - 
write_h5_null_string_att(fid['/Scene Identification'].id, - 'Thermal Angular Dependency Models Set Version', 'RMIB.3') - fid['/Scene Identification/Cloud Cover'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) - fid['/Scene Identification/Cloud Cover'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') - write_h5_null_string_att(fid['/Scene Identification/Cloud Cover'].id, 'Unit', 'Percent') - fid['/Scene Identification/Cloud Optical Depth (logarithm)'] = \ - np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Scene Identification/Cloud Optical Depth (logarithm)'].attrs['Quantisation Factor'] = \ - np.array(0.00025, dtype='float64') - fid['/Scene Identification/Cloud Phase'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) - fid['/Scene Identification/Cloud Phase'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') - write_h5_null_string_att(fid['/Scene Identification/Cloud Phase'].id, 'Unit', - 'Percent (Water=0%,Mixed,Ice=100%)') - fid.create_group('/Times') - fid['/Times/Time (per row)'] = np.ones(shape=(1237,), dtype=np.dtype('|S22')) + write_h5_null_string_att(fid["/GERB"].id, "Instrument Identifier", "G4") + fid.create_group("/GGSPS") + fid["/GGSPS"].attrs["L1.5 NANRG Product Version"] = np.array(-1, dtype="int32") + fid.create_group("/Geolocation") + write_h5_null_string_att(fid["/Geolocation"].id, "Geolocation File Name", + "G4_SEV4_L20_HR_GEO_20180111_181500_V010.hdf") + fid["/Geolocation"].attrs["Nominal Satellite Longitude (degrees)"] = np.array(0.0, dtype="float64") + fid.create_group("/Imager") + fid["/Imager"].attrs["Instrument Identifier"] = np.array(4, dtype="int32") + write_h5_null_string_att(fid["/Imager"].id, "Type", "SEVIRI") + fid.create_group("/RMIB") + fid.create_group("/Radiometry") + fid["/Radiometry"].attrs["SEVIRI Radiance Definition Flag"] = np.array(2, dtype="int32") + fid["/Radiometry/A Values (per GERB detector cell)"] = np.ones(shape=(256,), dtype=np.dtype(">f8")) + fid["/Radiometry/C Values (per GERB detector cell)"] = np.ones(shape=(256,), dtype=np.dtype(">f8")) + fid["/Radiometry/Longwave Correction"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Radiometry/Longwave Correction"].attrs["Offset"] = np.array(1.0, dtype="float64") + fid["/Radiometry/Longwave Correction"].attrs["Quantisation Factor"] = np.array(0.005, dtype="float64") + fid["/Radiometry/Shortwave Correction"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Radiometry/Shortwave Correction"].attrs["Offset"] = np.array(1.0, dtype="float64") + fid["/Radiometry/Shortwave Correction"].attrs["Quantisation Factor"] = np.array(0.005, dtype="float64") + fid["/Radiometry/Solar Flux"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Radiometry/Solar Flux"].attrs["Quantisation Factor"] = np.array(0.25, dtype="float64") + write_h5_null_string_att(fid["/Radiometry/Solar Flux"].id, "Unit", "Watt per square meter") + fid["/Radiometry/Solar Radiance"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Radiometry/Solar Radiance"].attrs["Quantisation Factor"] = np.array(0.05, dtype="float64") + write_h5_null_string_att(fid["/Radiometry/Solar Radiance"].id, "Unit", "Watt per square meter per steradian") + fid["/Radiometry/Thermal Flux"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Radiometry/Thermal Flux"].attrs["Quantisation Factor"] = np.array(0.25, dtype="float64") + write_h5_null_string_att(fid["/Radiometry/Thermal Flux"].id, "Unit", "Watt per square meter") + fid["/Radiometry/Thermal Radiance"] = 
np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Radiometry/Thermal Radiance"].attrs["Quantisation Factor"] = np.array(0.05, dtype="float64") + write_h5_null_string_att(fid["/Radiometry/Thermal Radiance"].id, "Unit", "Watt per square meter per steradian") + fid.create_group("/Scene Identification") + write_h5_null_string_att(fid["/Scene Identification"].id, + "Solar Angular Dependency Models Set Version", "CERES_TRMM.1") + write_h5_null_string_att(fid["/Scene Identification"].id, + "Thermal Angular Dependency Models Set Version", "RMIB.3") + fid["/Scene Identification/Cloud Cover"] = np.ones(shape=(1237, 1237), dtype=np.dtype("uint8")) + fid["/Scene Identification/Cloud Cover"].attrs["Quantisation Factor"] = np.array(0.01, dtype="float64") + write_h5_null_string_att(fid["/Scene Identification/Cloud Cover"].id, "Unit", "Percent") + fid["/Scene Identification/Cloud Optical Depth (logarithm)"] = \ + np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Scene Identification/Cloud Optical Depth (logarithm)"].attrs["Quantisation Factor"] = \ + np.array(0.00025, dtype="float64") + fid["/Scene Identification/Cloud Phase"] = np.ones(shape=(1237, 1237), dtype=np.dtype("uint8")) + fid["/Scene Identification/Cloud Phase"].attrs["Quantisation Factor"] = np.array(0.01, dtype="float64") + write_h5_null_string_att(fid["/Scene Identification/Cloud Phase"].id, "Unit", + "Percent (Water=0%,Mixed,Ice=100%)") + fid.create_group("/Times") + fid["/Times/Time (per row)"] = np.ones(shape=(1237,), dtype=np.dtype("|S22")) return filename @@ -123,7 +123,7 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): @pytest.mark.parametrize("name", ["Solar Flux", "Thermal Flux", "Solar Radiance", "Thermal Radiance"]) def test_dataset_load(gerb_l2_hr_h5_dummy_file, name): """Test loading the solar flux component.""" - scene = Scene(reader='gerb_l2_hr_h5', filenames=[gerb_l2_hr_h5_dummy_file]) + scene = Scene(reader="gerb_l2_hr_h5", filenames=[gerb_l2_hr_h5_dummy_file]) scene.load([name]) assert scene[name].shape == (1237, 1237) assert np.nanmax((scene[name].to_numpy().flatten() - 0.25)) < 1e-6 diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index 6c7fcfb671..ced24a77ea 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -357,17 +357,17 @@ def test_get_orbit_polynomial_exceptions(self, orbit_polynomials, time): class TestMeirinkSlope: """Unit tests for the slope of Meirink calibration.""" - @pytest.mark.parametrize('platform_id', [321, 322, 323, 324]) - @pytest.mark.parametrize('channel_name', ['VIS006', 'VIS008', 'IR_016']) + @pytest.mark.parametrize("platform_id", [321, 322, 323, 324]) + @pytest.mark.parametrize("channel_name", ["VIS006", "VIS008", "IR_016"]) def test_get_meirink_slope_epoch(self, platform_id, channel_name): """Test the value of the slope of the Meirink calibration on 2000-01-01.""" - coefs = {'coefs': {}} - coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} - coefs['coefs']['EXTERNAL'] = {} - calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK-2023', MEIRINK_EPOCH) - assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS['2023'][platform_id][channel_name][0]/1000. 
+ coefs = {"coefs": {}} + coefs["coefs"]["NOMINAL"] = {"gain": -1, "offset": -1} + coefs["coefs"]["EXTERNAL"] = {} + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, "MEIRINK-2023", MEIRINK_EPOCH) + assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS["2023"][platform_id][channel_name][0]/1000. - @pytest.mark.parametrize('platform_id,time,expected', ( + @pytest.mark.parametrize(("platform_id", "time", "expected"), [ (321, datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]), (321, datetime(2010, 12, 31, 0, 0), [0.0258479563, 0.0322386887, 0.022895110500000003]), (322, datetime(2010, 1, 18, 0, 0), [0.021964051999999998, 0.027548445, 0.021576766]), @@ -376,12 +376,12 @@ def test_get_meirink_slope_epoch(self, platform_id, channel_name): (323, datetime(2010, 12, 31, 0, 0), [0.022181355200000002, 0.0280103379, 0.0229511138]), (324, datetime(2010, 1, 18, 0, 0), [0.0218362, 0.027580748, 0.022285370999999998]), (324, datetime(2015, 6, 1, 0, 0), [0.0225418, 0.028530172, 0.022248718999999997]), - )) + ]) def test_get_meirink_slope_2020(self, platform_id, time, expected): """Test the value of the slope of the Meirink calibration.""" - coefs = {'coefs': {}} - coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} - coefs['coefs']['EXTERNAL'] = {} - for i, channel_name in enumerate(['VIS006', 'VIS008', 'IR_016']): - calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK-2023', time) + coefs = {"coefs": {}} + coefs["coefs"]["NOMINAL"] = {"gain": -1, "offset": -1} + coefs["coefs"]["EXTERNAL"] = {} + for i, channel_name in enumerate(["VIS006", "VIS008", "IR_016"]): + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, "MEIRINK-2023", time) assert abs(calibration_handler.get_gain_offset()[0] - expected[i]) < 1e-6 diff --git a/satpy/tests/reader_tests/test_utils.py b/satpy/tests/reader_tests/test_utils.py index 89a79ded0d..6471159449 100644 --- a/satpy/tests/reader_tests/test_utils.py +++ b/satpy/tests/reader_tests/test_utils.py @@ -202,16 +202,16 @@ def test_sub_area(self, adef): def test_np2str(self): """Test the np2str function.""" # byte object - npbytes = np.bytes_('hej') - self.assertEqual(hf.np2str(npbytes), 'hej') + npbytes = np.bytes_("hej") + assert hf.np2str(npbytes) == "hej" # single element numpy array np_arr = np.array([npbytes]) - self.assertEqual(hf.np2str(np_arr), 'hej') + assert hf.np2str(np_arr) == "hej" # scalar numpy array np_arr = np.array(npbytes) - self.assertEqual(hf.np2str(np_arr), 'hej') + assert hf.np2str(np_arr) == "hej" # multi-element array npbytes = np.array([npbytes, npbytes]) diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 61b94dc3d9..4aece73487 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -117,18 +117,18 @@ def test_basic_default_not_provided(self, sunz_ds1, as_32bit): if as_32bit: sunz_ds1 = sunz_ds1.astype(np.float32) - comp = SunZenithCorrector(name='sza_test', modifiers=tuple()) - res = comp((sunz_ds1,), test_attr='test') + comp = SunZenithCorrector(name="sza_test", modifiers=tuple()) + res = comp((sunz_ds1,), test_attr="test") np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) - assert 'y' in res.coords - assert 'x' in res.coords - ds1 = sunz_ds1.copy().drop_vars(('y', 'x')) - res = comp((ds1,), test_attr='test') + assert "y" in res.coords + assert "x" in res.coords + ds1 = sunz_ds1.copy().drop_vars(("y", "x")) + res = 
comp((ds1,), test_attr="test") res_np = res.compute() np.testing.assert_allclose(res_np.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) assert res.dtype == res_np.dtype - assert 'y' not in res.coords - assert 'x' not in res.coords + assert "y" not in res.coords + assert "x" not in res.coords def test_basic_lims_not_provided(self, sunz_ds1): """Test custom limits when SZA isn't provided.""" @@ -169,20 +169,20 @@ class TestSunZenithReducer: def setup_class(cls): """Initialze SunZenithReducer classes that shall be tested.""" from satpy.modifiers.geometry import SunZenithReducer - cls.default = SunZenithReducer(name='sza_reduction_test_default', modifiers=tuple()) - cls.custom = SunZenithReducer(name='sza_reduction_test_custom', modifiers=tuple(), + cls.default = SunZenithReducer(name="sza_reduction_test_default", modifiers=tuple()) + cls.custom = SunZenithReducer(name="sza_reduction_test_custom", modifiers=tuple(), correction_limit=70, max_sza=95, strength=3.0) def test_default_settings(self, sunz_ds1, sunz_sza): """Test default settings with sza data available.""" - res = self.default((sunz_ds1, sunz_sza), test_attr='test') + res = self.default((sunz_ds1, sunz_sza), test_attr="test") np.testing.assert_allclose(res.values, np.array([[0.00242814, 0.00235669], [0.00245885, 0.00238707]]), rtol=1e-5) def test_custom_settings(self, sunz_ds1, sunz_sza): """Test custom settings with sza data available.""" - res = self.custom((sunz_ds1, sunz_sza), test_attr='test') + res = self.custom((sunz_ds1, sunz_sza), test_attr="test") np.testing.assert_allclose(res.values, np.array([[0.01041319, 0.01030033], [0.01046164, 0.01034834]]), rtol=1e-5) @@ -190,8 +190,8 @@ def test_custom_settings(self, sunz_ds1, sunz_sza): def test_invalid_max_sza(self, sunz_ds1, sunz_sza): """Test invalid max_sza with sza data available.""" from satpy.modifiers.geometry import SunZenithReducer - with pytest.raises(ValueError): - SunZenithReducer(name='sza_reduction_test_invalid', modifiers=tuple(), max_sza=None) + with pytest.raises(ValueError, match="`max_sza` must be defined when using the SunZenithReducer."): + SunZenithReducer(name="sza_reduction_test_invalid", modifiers=tuple(), max_sza=None) class TestNIRReflectance(unittest.TestCase): diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index 49036d871f..66e93009d2 100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -132,10 +132,10 @@ def test_type_preserve(self): class TestKDTreeResampler(unittest.TestCase): """Test the kd-tree resampler.""" - @mock.patch('satpy.resample.xr.Dataset') - @mock.patch('satpy.resample.zarr.open') - @mock.patch('satpy.resample.KDTreeResampler._create_cache_filename') - @mock.patch('pyresample.kd_tree.XArrayResamplerNN') + @mock.patch("satpy.resample.xr.Dataset") + @mock.patch("satpy.resample.zarr.open") + @mock.patch("satpy.resample.KDTreeResampler._create_cache_filename") + @mock.patch("pyresample.kd_tree.XArrayResamplerNN") def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, xr_dset): """Test the kd resampler.""" diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index f03ca60fae..c9717921df 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -28,7 +28,6 @@ import numpy as np import pytest import xarray as xr -from pytest import approx # noqa: PT013 from satpy.utils import ( angle2xyz, @@ -193,8 +192,8 @@ def test_get_satpos(self, included_prefixes, preference, expected_result): "attrs", [ {}, - {'orbital_parameters': 
{'projection_longitude': 1}}, - {'satellite_altitude': 1} + {"orbital_parameters": {"projection_longitude": 1}}, + {"satellite_altitude": 1} ] ) def test_get_satpos_fails_with_informative_error(self, attrs): diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 3a26e6c981..18b5947eb6 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -526,31 +526,31 @@ def get_test_attrs(self): Attributes, encoded attributes, encoded and flattened attributes """ - attrs = {'name': 'IR_108', - 'start_time': datetime(2018, 1, 1, 0), - 'end_time': datetime(2018, 1, 1, 0, 15), - 'int': 1, - 'float': 1.0, - 'none': None, # should be dropped - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': True, - 'numpy_void': np.void(0), - 'numpy_bytes': np.bytes_('test'), - 'numpy_string': np.str_('test'), - 'list': [1, 2, np.float64(3)], - 'nested_list': ["1", ["2", [3]]], - 'bool': True, - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': np.array([True, False, True]), - 'array_2d': np.array([[1, 2], [3, 4]]), - 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), - 'dict': {'a': 1, 'b': 2}, - 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, - 'raw_metadata': OrderedDict([ - ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), - ('flag', np.bool_(True)), - ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) + attrs = {"name": "IR_108", + "start_time": datetime(2018, 1, 1, 0), + "end_time": datetime(2018, 1, 1, 0, 15), + "int": 1, + "float": 1.0, + "none": None, # should be dropped + "numpy_int": np.uint8(1), + "numpy_float": np.float32(1), + "numpy_bool": True, + "numpy_void": np.void(0), + "numpy_bytes": np.bytes_("test"), + "numpy_string": np.str_("test"), + "list": [1, 2, np.float64(3)], + "nested_list": ["1", ["2", [3]]], + "bool": True, + "array": np.array([1, 2, 3], dtype="uint8"), + "array_bool": np.array([True, False, True]), + "array_2d": np.array([[1, 2], [3, 4]]), + "array_3d": np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), + "dict": {"a": 1, "b": 2}, + "nested_dict": {"l1": {"l2": {"l3": np.array([1, 2, 3], dtype="uint8")}}}, + "raw_metadata": OrderedDict([ + ("recarray", np.zeros(3, dtype=[("x", "i4"), ("y", "u1")])), + ("flag", np.bool_(True)), + ("dict", OrderedDict([("a", 1), ("b", np.array([1, 2, 3], dtype="uint8"))])) ])} encoded = {"name": "IR_108", "start_time": "2018-01-01 00:00:00", diff --git a/satpy/utils.py b/satpy/utils.py index 7ee3243d93..3996968043 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -505,24 +505,15 @@ def check_satpy(readers=None, writers=None, extras=None): from satpy.readers import configs_for_reader from satpy.writers import configs_for_writer - print("Readers") - print("=======") - for reader, res in sorted(_check_yaml_configs(configs_for_reader(reader=readers), "reader").items()): - print(reader + ": ", res) - print() - - print("Writers") - print("=======") - for writer, res in sorted(_check_yaml_configs(configs_for_writer(writer=writers), "writer").items()): - print(writer + ": ", res) - print() - - print("Extras") - print("======") + for _reader, _res in sorted(_check_yaml_configs(configs_for_reader(reader=readers), "reader").items()): + pass + + for _writer, _res in sorted(_check_yaml_configs(configs_for_writer(writer=writers), "writer").items()): + pass + module_names = extras if extras is not None else ("cartopy", "geoviews") - for module_name, res in 
sorted(_check_import(module_names).items()): - print(module_name + ": ", res) - print() + for _module_name, _res in sorted(_check_import(module_names).items()): + pass def unify_chunks(*data_arrays: xr.DataArray) -> tuple[xr.DataArray, ...]: diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 2d56d9f292..506a8bf561 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -195,20 +195,20 @@ # Numpy datatypes compatible with all netCDF4 backends. ``np.str_`` is # excluded because h5py (and thus h5netcdf) has problems with unicode, see # https://github.com/h5py/h5py/issues/624.""" -NC4_DTYPES = [np.dtype('int8'), np.dtype('uint8'), - np.dtype('int16'), np.dtype('uint16'), - np.dtype('int32'), np.dtype('uint32'), - np.dtype('int64'), np.dtype('uint64'), - np.dtype('float32'), np.dtype('float64'), +NC4_DTYPES = [np.dtype("int8"), np.dtype("uint8"), + np.dtype("int16"), np.dtype("uint16"), + np.dtype("int32"), np.dtype("uint32"), + np.dtype("int64"), np.dtype("uint64"), + np.dtype("float32"), np.dtype("float64"), np.bytes_] # Unsigned and int64 isn't CF 1.7 compatible # Note: Unsigned and int64 are CF 1.9 compatible -CF_DTYPES = [np.dtype('int8'), - np.dtype('int16'), - np.dtype('int32'), - np.dtype('float32'), - np.dtype('float64'), +CF_DTYPES = [np.dtype("int8"), + np.dtype("int16"), + np.dtype("int32"), + np.dtype("float32"), + np.dtype("float64"), np.bytes_] CF_VERSION = "CF-1.7" @@ -581,8 +581,8 @@ def _remove_satpy_attrs(new_data): def _format_prerequisites_attrs(dataarray): """Reformat prerequisites attribute value to string.""" - if 'prerequisites' in dataarray.attrs: - dataarray.attrs['prerequisites'] = [np.bytes_(str(prereq)) for prereq in dataarray.attrs['prerequisites']] + if "prerequisites" in dataarray.attrs: + dataarray.attrs["prerequisites"] = [np.bytes_(str(prereq)) for prereq in dataarray.attrs["prerequisites"]] return dataarray @@ -809,7 +809,7 @@ def make_cf_dataarray(dataarray, Prepend dataset name with this if starting with a digit. The default is ``"CHANNEL_"``. - Returns + Returns: ------- new_data : xr.DataArray CF-compliant xr.DataArray. @@ -859,7 +859,7 @@ def _collect_cf_dataset(list_dataarrays, Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. - Returns + Returns: ------- ds : xr.Dataset A partially CF-compliant xr.Dataset @@ -977,7 +977,7 @@ def collect_cf_datasets(list_dataarrays, It is used to create grouped netCDFs using the CF_Writer. If None (the default), no groups will be created. 
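        (Editor's sketch, not part of the original patch.) A hedged example of the
        user-facing call that reaches this function through the CF writer; the file
        path and dataset names below are placeholders:

        .. code-block:: python

            from satpy import Scene

            my_files = ["<path to a SEVIRI Native file>"]  # placeholder input
            scn = Scene(filenames=my_files, reader="seviri_l1b_native")
            scn.load(["VIS006", "IR_108"])
            # One netCDF group per key; omit ``groups`` for a flat, ungrouped file.
            scn.save_datasets(writer="cf", filename="grouped.nc",
                              groups={"visir": ["VIS006", "IR_108"]})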
- Returns + Returns: ------- grouped_datasets : dict A dictionary of CF-compliant xr.Dataset: {group_name: xr.Dataset} diff --git a/setup.py b/setup.py index 6bea7bf639..cd1c43422e 100644 --- a/setup.py +++ b/setup.py @@ -22,10 +22,10 @@ from setuptools import find_packages, setup -requires = ['numpy >=1.21', 'pillow', 'pyresample >=1.24.0', 'trollsift', - 'trollimage >=1.20', 'pykdtree', 'pyyaml >=5.1', 'xarray >=0.10.1, !=0.13.0', - 'dask[array] >=0.17.1', 'pyproj>=2.2', 'zarr', 'donfig', 'appdirs', - 'packaging', 'pooch', 'pyorbital'] +requires = ["numpy >=1.21", "pillow", "pyresample >=1.24.0", "trollsift", + "trollimage >=1.20", "pykdtree", "pyyaml >=5.1", "xarray >=0.10.1, !=0.13.0", + "dask[array] >=0.17.1", "pyproj>=2.2", "zarr", "donfig", "appdirs", + "packaging", "pooch", "pyorbital"] test_requires = ["behave", "h5py", "netCDF4", "pyhdf", "imageio", "rasterio", "geoviews", "trollimage", "fsspec", "bottleneck", diff --git a/utils/coord2area_def.py b/utils/coord2area_def.py index 8b6aa0478b..81fb93678b 100644 --- a/utils/coord2area_def.py +++ b/utils/coord2area_def.py @@ -126,21 +126,6 @@ " +".join(("proj=" + proj + ",lat_0=" + str(lat_0) + ",lon_0=" + str(lon_0) + ",ellps=WGS84").split(",")) - print("### " + proj4_string) - print() - print(name + ":") - print(" description: " + name) - print(" projection:") - print(" proj: " + proj) - print(" ellps: WGS84") - print(" lat_0: " + str(lat_0)) - print(" lon_0: " + str(lon_0)) - print(" shape:") - print(" height: " + str(ysize)) - print(" width: " + str(xsize)) - print(" area_extent:") - print(" lower_left_xy: [%f, %f]" % (area_extent[0], area_extent[1])) - print(" upper_right_xy: [%f, %f]" % (area_extent[2], area_extent[3])) if args.shapes is None: sys.exit(0) diff --git a/utils/fetch_avhrr_calcoeffs.py b/utils/fetch_avhrr_calcoeffs.py index f73975df95..8212c5531e 100644 --- a/utils/fetch_avhrr_calcoeffs.py +++ b/utils/fetch_avhrr_calcoeffs.py @@ -112,7 +112,6 @@ def get_all_coeffs(): coeffs[platform] = {} for chan in URLS[platform].keys(): url = URLS[platform][chan] - print(url) page = get_page(url) coeffs[platform][chan] = get_coeffs(page) @@ -134,7 +133,6 @@ def save_coeffs(coeffs, out_dir=""): fid[chan]["intercept2"] = coeffs[platform][chan]["intercept2"] fid.close() - print("Calibration coefficients saved for %s" % platform) def main(): From b75008beba074be25b279dc423f94d49eb11ab2b Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 13:46:35 +0200 Subject: [PATCH 339/702] Add ruff rules --- pyproject.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 8d81b23818..9677cbf09d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,8 +15,7 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ -# "A", "B", "S", "N", "D", -select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] +select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20", "A", "B", "S", "N", "D"] ignore = ["B905"] # only available from python 3.10 line-length = 120 From fd9be77662d4cdcaf74723ceb1df1df69607b7fb Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 13:49:17 +0200 Subject: [PATCH 340/702] Fix ruff rules --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9677cbf09d..a550f275e8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,8 +15,8 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ -select = ["E", "W", "F", "I", "PT", 
"TID", "C90", "Q", "T10", "T20", "A", "B", "S", "N", "D"] -ignore = ["B905"] # only available from python 3.10 +# In the future, add "A", "B", "S", "N", "D" +select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] line-length = 120 [tool.ruff.per-file-ignores] From f2f91938d4281efd2a4057f4f5f0d8f009139ca0 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 13:52:00 +0200 Subject: [PATCH 341/702] Remove lint ci job --- .github/workflows/ci.yaml | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ee31213bf2..6e1fdfc781 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -11,28 +11,9 @@ env: CACHE_NUMBER: 0 jobs: - lint: - name: lint and style checks - runs-on: ubuntu-latest - steps: - - name: Checkout source - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.9 - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install ruff pytest - - name: Install Satpy - run: | - pip install -e . - test: runs-on: ${{ matrix.os }} continue-on-error: ${{ matrix.experimental }} - needs: [lint] strategy: fail-fast: true matrix: From 8b7938f6c0d96a5995031652d7997e31afd82427 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 26 Oct 2023 09:16:38 -0500 Subject: [PATCH 342/702] Fix failing demo tests --- satpy/tests/test_demo.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/satpy/tests/test_demo.py b/satpy/tests/test_demo.py index d1dddd5e8d..32e8016f58 100644 --- a/satpy/tests/test_demo.py +++ b/satpy/tests/test_demo.py @@ -89,7 +89,7 @@ def test_get_us_midlatitude_cyclone_abi(self, gcsfs_mod): gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] # expected 16 files, got 2 - self.assertRaises(AssertionError, get_us_midlatitude_cyclone_abi) + self.assertRaises(RuntimeError, get_us_midlatitude_cyclone_abi) # unknown access method self.assertRaises(NotImplementedError, get_us_midlatitude_cyclone_abi, method="unknown") @@ -109,7 +109,7 @@ def test_get_hurricane_florence_abi(self, gcsfs_mod): # only return 5 results total gcsfs_inst.glob.side_effect = _GlobHelper([5, 0]) # expected 16 files * 10 frames, got 16 * 5 - self.assertRaises(AssertionError, get_hurricane_florence_abi) + self.assertRaises(RuntimeError, get_hurricane_florence_abi) self.assertRaises(NotImplementedError, get_hurricane_florence_abi, method="unknown") gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) @@ -244,11 +244,12 @@ class _FakeRequest: requests_log: list[str] = [] - def __init__(self, url, stream=None): + def __init__(self, url, stream=None, timeout=None): self._filename = os.path.basename(url) self.headers = {} self.requests_log.append(url) del stream # just mimicking requests 'get' + del timeout # just mimicking requests 'get' def __enter__(self): return self From 6bf2ce9efcd9486251d95a2520e895423d2baac3 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 26 Oct 2023 09:26:35 -0500 Subject: [PATCH 343/702] Revert changes to check_satpy logic --- satpy/utils.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/satpy/utils.py b/satpy/utils.py index 3996968043..f9ea05ca79 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -505,15 +505,24 @@ def check_satpy(readers=None, writers=None, extras=None): from satpy.readers import configs_for_reader from satpy.writers import 
configs_for_writer - for _reader, _res in sorted(_check_yaml_configs(configs_for_reader(reader=readers), "reader").items()): - pass - - for _writer, _res in sorted(_check_yaml_configs(configs_for_writer(writer=writers), "writer").items()): - pass - + print("Readers") # noqa: T201 + print("=======") # noqa: T201 + for reader, res in sorted(_check_yaml_configs(configs_for_reader(reader=readers), "reader").items()): + print(reader + ": ", res) # noqa: T201 + print() # noqa: T201 + + print("Writers") # noqa: T201 + print("=======") # noqa: T201 + for writer, res in sorted(_check_yaml_configs(configs_for_writer(writer=writers), "writer").items()): + print(writer + ": ", res) # noqa: T201 + print() # noqa: T201 + + print("Extras") # noqa: T201 + print("======") # noqa: T201 module_names = extras if extras is not None else ("cartopy", "geoviews") - for _module_name, _res in sorted(_check_import(module_names).items()): - pass + for module_name, res in sorted(_check_import(module_names).items()): + print(module_name + ": ", res) # noqa: T201 + print() # noqa: T201 def unify_chunks(*data_arrays: xr.DataArray) -> tuple[xr.DataArray, ...]: From 529055e8e35ce1480ff43fa7c14b3d0513aa6cc2 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 26 Oct 2023 09:30:52 -0500 Subject: [PATCH 344/702] Fix various numpy related test failures --- satpy/tests/reader_tests/test_generic_image.py | 2 +- satpy/tests/reader_tests/test_seviri_l2_grib.py | 2 +- satpy/tests/reader_tests/test_tropomi_l2.py | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py index 393bbfa98d..0ea143269f 100644 --- a/satpy/tests/reader_tests/test_generic_image.py +++ b/satpy/tests/reader_tests/test_generic_image.py @@ -267,7 +267,7 @@ def test_GenericImageFileHandler_nodata(self): info = {"nodata_handling": "nan_mask"} dataset = reader.get_dataset(foo, info) assert isinstance(dataset, xr.DataArray) - assert np.all(np.isnan(dataset.data[0][:10, :10].compute())) is True + assert np.all(np.isnan(dataset.data[0][:10, :10].compute())) assert np.isnan(dataset.attrs["_FillValue"]) info = {"nodata_handling": "fill_value"} diff --git a/satpy/tests/reader_tests/test_seviri_l2_grib.py b/satpy/tests/reader_tests/test_seviri_l2_grib.py index d57fda4e79..d3b40d6caa 100644 --- a/satpy/tests/reader_tests/test_seviri_l2_grib.py +++ b/satpy/tests/reader_tests/test_seviri_l2_grib.py @@ -133,7 +133,7 @@ def test_data_reading(self, da_, xr_): # Checks that dask.array has been called with the correct arguments name, args, kwargs = da_.mock_calls[0] - assert np.all(args[0] == np.ones((1200, 1000))) is True + assert np.all(args[0] == np.ones((1200, 1000))) assert args[1] == CHUNK_SIZE # Checks that xarray.DataArray has been called with the correct arguments diff --git a/satpy/tests/reader_tests/test_tropomi_l2.py b/satpy/tests/reader_tests/test_tropomi_l2.py index 05d0717538..7305bf365c 100644 --- a/satpy/tests/reader_tests/test_tropomi_l2.py +++ b/satpy/tests/reader_tests/test_tropomi_l2.py @@ -196,6 +196,6 @@ def test_load_bounds(self): assert "x" in dest.dims assert DEFAULT_FILE_SHAPE[0] + 1 == dest.shape[0] assert DEFAULT_FILE_SHAPE[1] + 1 == dest.shape[1] - assert np.testing.assert_array_equal(dest[:-1, :-1], ds[key][:, :, 0]) - assert np.testing.assert_array_equal(dest[-1, :-1], ds[key][-1, :, 3]) - assert np.testing.assert_array_equal(dest[:, -1], np.append(ds[key][:, -1, 1], ds[key][-1:, -1:, 2])) + np.testing.assert_array_equal(dest[:-1, :-1], 
ds[key][:, :, 0]) + np.testing.assert_array_equal(dest[-1, :-1], ds[key][-1, :, 3]) + np.testing.assert_array_equal(dest[:, -1], np.append(ds[key][:, -1, 1], ds[key][-1:, -1:, 2])) From 9c9cc875286f37fe08b0248c877f927b4fe62ede Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 26 Oct 2023 09:42:06 -0500 Subject: [PATCH 345/702] Fix extra whitespace after commas --- satpy/readers/eum_base.py | 10 +++++----- satpy/readers/goes_imager_nc.py | 2 +- satpy/readers/seviri_l2_bufr.py | 2 +- satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py | 6 +++--- satpy/tests/reader_tests/test_seviri_l2_bufr.py | 2 +- .../tests/reader_tests/test_viirs_edr_active_fires.py | 4 ++-- satpy/tests/test_utils.py | 2 +- 7 files changed, 14 insertions(+), 14 deletions(-) diff --git a/satpy/readers/eum_base.py b/satpy/readers/eum_base.py index cc82ee008d..916ba9d444 100644 --- a/satpy/readers/eum_base.py +++ b/satpy/readers/eum_base.py @@ -88,14 +88,14 @@ def recarray2dict(arr): def get_service_mode(instrument_name, ssp_lon): """Get information about service mode for a given instrument and subsatellite longitude.""" - service_modes = {"seviri": {"0.0": {"service_name": "fes", "service_desc": "Full Earth Scanning service"}, - "9.5": {"service_name": "rss", "service_desc": "Rapid Scanning Service"}, + service_modes = {"seviri": {"0.0": {"service_name": "fes", "service_desc": "Full Earth Scanning service"}, + "9.5": {"service_name": "rss", "service_desc": "Rapid Scanning Service"}, "41.5": {"service_name": "iodc", "service_desc": "Indian Ocean Data Coverage service"}, "45.5": {"service_name": "iodc", "service_desc": "Indian Ocean Data Coverage service"} }, - "fci": {"0.0": {"service_name": "fdss", "service_desc": "Full Disk Scanning Service"}, - "9.5": {"service_name": "rss", "service_desc": "Rapid Scanning Service"}, - }, + "fci": {"0.0": {"service_name": "fdss", "service_desc": "Full Disk Scanning Service"}, + "9.5": {"service_name": "rss", "service_desc": "Rapid Scanning Service"}, + }, } unknown_modes = {"service_name": "unknown", "service_desc": "unknown"} diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index 4cb7cf8610..7bb8fac84c 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -317,7 +317,7 @@ "GOES-14": {"00_7": {"slope": [5.874693E-1, 5.865367E-1, 5.862807E-1, 5.864086E-1, 5.857146E-1, 5.852004E-1, 5.860814E-1, 5.841697E-1], - "offset": [-17.037, -17.010, -17.002, -17.006, + "offset": [-17.037, -17.010, -17.002, -17.006, -16.986, -16.971, -16.996, -16.941], "x0": 29, "k": 1.88772E-3}, diff --git a/satpy/readers/seviri_l2_bufr.py b/satpy/readers/seviri_l2_bufr.py index 3b7188945c..02aa0c2767 100644 --- a/satpy/readers/seviri_l2_bufr.py +++ b/satpy/readers/seviri_l2_bufr.py @@ -46,7 +46,7 @@ CHUNK_SIZE = get_legacy_chunk_size() logger = logging.getLogger("SeviriL2Bufr") -data_center_dict = {55: {"ssp": "E0415", "name": "08"}, 56: {"ssp": "E0455", "name": "09"}, +data_center_dict = {55: {"ssp": "E0415", "name": "08"}, 56: {"ssp": "E0455", "name": "09"}, 57: {"ssp": "E0095", "name": "10"}, 70: {"ssp": "E0000", "name": "11"}} seg_size_dict = {"seviri_l2_bufr_asr": 16, "seviri_l2_bufr_cla": 16, diff --git a/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py b/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py index 20a537a4a9..7055a4df6d 100644 --- a/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py +++ b/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py @@ -310,9 +310,9 @@ def setUp(self): self._header = np.zeros(1, dtype=_HEADERTYPE) 
self._header["satid"][0] = 3 self._header["instrument"][0] = 12 - self._header["tempradcnv"][0] = [[2968720, 0, 1000000, 5236956, 0], - [1000000, 6114597, 0, 1000000, 6114597], - [-3100, 1000270, 6348092, 0, 1000000]] + self._header["tempradcnv"][0] = [[2968720, 0, 1000000, 5236956, 0], + [1000000, 6114597, 0, 1000000, 6114597], + [-3100, 1000270, 6348092, 0, 1000000]] self._data = np.zeros(3, dtype=_SCANTYPE) self._data["scnlinyr"][:] = 2020 self._data["scnlindy"][:] = 261 diff --git a/satpy/tests/reader_tests/test_seviri_l2_bufr.py b/satpy/tests/reader_tests/test_seviri_l2_bufr.py index 2979084974..ec3fdf7b56 100644 --- a/satpy/tests/reader_tests/test_seviri_l2_bufr.py +++ b/satpy/tests/reader_tests/test_seviri_l2_bufr.py @@ -29,7 +29,7 @@ from satpy.tests.utils import make_dataid -FILETYPE_INFO = {"file_type": "seviri_l2_bufr_asr"} +FILETYPE_INFO = {"file_type": "seviri_l2_bufr_asr"} FILENAME_INFO = {"start_time": "20191112000000", "spacecraft": "MSG2"} diff --git a/satpy/tests/reader_tests/test_viirs_edr_active_fires.py b/satpy/tests/reader_tests/test_viirs_edr_active_fires.py index 7063814c34..7bede07292 100644 --- a/satpy/tests/reader_tests/test_viirs_edr_active_fires.py +++ b/satpy/tests/reader_tests/test_viirs_edr_active_fires.py @@ -73,7 +73,7 @@ def get_test_content(self, filename, filename_info, filename_type): file_content["Fire Pixels/attr/units"] = "none" file_content["Fire Pixels/shape"] = DEFAULT_FILE_SHAPE - attrs = ("FP_latitude", "FP_longitude", "FP_T13", "FP_confidence") + attrs = ("FP_latitude", "FP_longitude", "FP_T13", "FP_confidence") convert_file_content_to_data_array( file_content, attrs=attrs, dims=("z", "fakeDim0", "fakeDim1")) @@ -97,7 +97,7 @@ def get_test_content(self, filename, filename_info, filename_type): file_content["Fire Pixels/FP_T4/attr/units"] = "kelvins" file_content["Fire Pixels/FP_confidence"] = DEFAULT_DETECTION_FILE_DATA - attrs = ("FP_latitude", "FP_longitude", "FP_T13", "FP_confidence") + attrs = ("FP_latitude", "FP_longitude", "FP_T13", "FP_confidence") convert_file_content_to_data_array( file_content, attrs=attrs, dims=("z", "fakeDim0", "fakeDim1")) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index c9717921df..6f5db02087 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -283,7 +283,7 @@ def test_specific_check_satpy(self): if len(call[1]) > 0 and "__fake" in call[1][0]: assert "ok" not in call[1][1] checked_fake = True - assert checked_fake, "Did not find __fake module mentioned in checks" + assert checked_fake, "Did not find __fake module mentioned in checks" def test_debug_on(caplog): From d3743fe2d6e223a3071895d054280b1cc4ead20d Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 17:00:28 +0200 Subject: [PATCH 346/702] Restore removed prints --- pyproject.toml | 2 ++ utils/coord2area_def.py | 15 +++++++++++++++ utils/fetch_avhrr_calcoeffs.py | 2 ++ 3 files changed, 19 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index a550f275e8..1282120a59 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,8 @@ line-length = 120 [tool.ruff.per-file-ignores] "satpy/tests/*" = ["S101"] # assert allowed in tests +"utils/coord2area_def.py" = ["T201"] # allow print +"fetch_avhrr_calcoeffs.py" = ["T201"] # allow print [tool.ruff.pydocstyle] convention = "google" diff --git a/utils/coord2area_def.py b/utils/coord2area_def.py index 81fb93678b..8b6aa0478b 100644 --- a/utils/coord2area_def.py +++ b/utils/coord2area_def.py @@ -126,6 +126,21 @@ " +".join(("proj=" + proj + 
",lat_0=" + str(lat_0) + ",lon_0=" + str(lon_0) + ",ellps=WGS84").split(",")) + print("### " + proj4_string) + print() + print(name + ":") + print(" description: " + name) + print(" projection:") + print(" proj: " + proj) + print(" ellps: WGS84") + print(" lat_0: " + str(lat_0)) + print(" lon_0: " + str(lon_0)) + print(" shape:") + print(" height: " + str(ysize)) + print(" width: " + str(xsize)) + print(" area_extent:") + print(" lower_left_xy: [%f, %f]" % (area_extent[0], area_extent[1])) + print(" upper_right_xy: [%f, %f]" % (area_extent[2], area_extent[3])) if args.shapes is None: sys.exit(0) diff --git a/utils/fetch_avhrr_calcoeffs.py b/utils/fetch_avhrr_calcoeffs.py index 8212c5531e..f73975df95 100644 --- a/utils/fetch_avhrr_calcoeffs.py +++ b/utils/fetch_avhrr_calcoeffs.py @@ -112,6 +112,7 @@ def get_all_coeffs(): coeffs[platform] = {} for chan in URLS[platform].keys(): url = URLS[platform][chan] + print(url) page = get_page(url) coeffs[platform][chan] = get_coeffs(page) @@ -133,6 +134,7 @@ def save_coeffs(coeffs, out_dir=""): fid[chan]["intercept2"] = coeffs[platform][chan]["intercept2"] fid.close() + print("Calibration coefficients saved for %s" % platform) def main(): From 3f2bbc48d23263e7bc605d222174f989657c4930 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 17:23:37 +0200 Subject: [PATCH 347/702] Fix docstrings --- satpy/readers/goes_imager_nc.py | 2 +- satpy/readers/hrit_jma.py | 2 +- satpy/readers/iasi_l2_so2_bufr.py | 2 +- satpy/readers/mviri_l1b_fiduceo_nc.py | 2 +- satpy/readers/satpy_cf_nc.py | 4 ++-- satpy/readers/seviri_l1b_hrit.py | 2 +- satpy/readers/seviri_l1b_icare.py | 2 +- satpy/readers/seviri_l1b_native.py | 2 +- 8 files changed, 9 insertions(+), 9 deletions(-) diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index 7bb8fac84c..214852fffd 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -201,7 +201,7 @@ References: -========== +=========== - `[GVAR]`_ GVAR transmission format - `[BOOK-N]`_ GOES-N databook diff --git a/satpy/readers/hrit_jma.py b/satpy/readers/hrit_jma.py index 865cbc5dd7..c273b9b578 100644 --- a/satpy/readers/hrit_jma.py +++ b/satpy/readers/hrit_jma.py @@ -35,7 +35,7 @@ Example: -------- +-------- Here is an example how to read Himwari-8 HRIT data with Satpy: .. code-block:: python diff --git a/satpy/readers/iasi_l2_so2_bufr.py b/satpy/readers/iasi_l2_so2_bufr.py index a63d434a86..b5088aa041 100644 --- a/satpy/readers/iasi_l2_so2_bufr.py +++ b/satpy/readers/iasi_l2_so2_bufr.py @@ -32,7 +32,7 @@ Scene(reader="iasi_l2_so2_bufr", filenames=fnames) Example: -------- +-------- Here is an example how to read the data in satpy: .. code-block:: python diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index 043c45d4cc..d50ecab97f 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -44,7 +44,7 @@ Example: -------- +-------- This is how to read FIDUCEO MVIRI FCDR data in satpy: .. code-block:: python diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 120a14be36..7a26ead72b 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -37,7 +37,7 @@ Example: -------- +-------- Here is an example how to read the data in satpy: .. code-block:: python @@ -92,7 +92,7 @@ Example: -------- +-------- Here is an example how to read the data in satpy: .. 
code-block:: python diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 8e3fb148bc..3b3aa82277 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -85,7 +85,7 @@ Example: -------- +-------- Here is an example how to read the data in satpy: .. code-block:: python diff --git a/satpy/readers/seviri_l1b_icare.py b/satpy/readers/seviri_l1b_icare.py index 5c151d64a2..2024c46532 100644 --- a/satpy/readers/seviri_l1b_icare.py +++ b/satpy/readers/seviri_l1b_icare.py @@ -33,7 +33,7 @@ VX-XX is the processing version number Example: -------- +-------- Here is an example how to read the data in satpy: .. code-block:: python diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 4593d3fe3d..361dd1bb50 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -38,7 +38,7 @@ of :class:`NativeMSGFileHandler`. Example: -------- +-------- Here is an example how to read the data in satpy. NOTE: When loading the data, the orientation From 887137e9ba4dd5945b132e07b99c10de50d782ed Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 17:34:57 +0200 Subject: [PATCH 348/702] Fix underline in doc --- satpy/readers/mviri_l1b_fiduceo_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index d50ecab97f..9a309a0bb8 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -144,7 +144,7 @@ References: ----------- +----------- - `[Handbook]`_ MFG User Handbook - `[PUG]`_ FIDUCEO MVIRI FCDR Product User Guide From 41759a8a925dc223f68fb02cd359f6873426be24 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 17:42:49 +0200 Subject: [PATCH 349/702] Fix underline --- satpy/readers/gms/gms5_vissr_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/gms/gms5_vissr_l1b.py b/satpy/readers/gms/gms5_vissr_l1b.py index 0e1a5df483..c8a88dfe25 100644 --- a/satpy/readers/gms/gms5_vissr_l1b.py +++ b/satpy/readers/gms/gms5_vissr_l1b.py @@ -30,7 +30,7 @@ References: -~~~~~~~~~~ +~~~~~~~~~~~ Details about platform, instrument and data format can be found in the following references: From 5d6a1062ea4f4a448f8a79361a88282bdba218cc Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 26 Oct 2023 11:35:03 -0500 Subject: [PATCH 350/702] Remove coordinates during DayNightCompositor masking And add a lot more type annotations --- satpy/composites/__init__.py | 91 +++++++++++++++++++++++++----------- 1 file changed, 65 insertions(+), 26 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index c6883f9ab9..100dc3e293 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -19,6 +19,7 @@ import logging import os import warnings +from typing import Optional, Sequence import dask.array as da import numpy as np @@ -119,7 +120,12 @@ def id(self): id_keys = self.attrs.get('_satpy_id_keys', minimal_default_keys_config) return DataID(id_keys, **self.attrs) - def __call__(self, datasets, optional_datasets=None, **info): + def __call__( + self, + datasets: Sequence[xr.DataArray], + optional_datasets: Optional[Sequence[xr.DataArray]] = None, + **info + ) -> xr.DataArray: """Generate a composite.""" raise NotImplementedError() @@ -422,7 +428,12 @@ def _get_sensors(self, projectables): sensor = list(sensor)[0] return sensor - def __call__(self, projectables, 
nonprojectables=None, **attrs): + def __call__( + self, + datasets: Sequence[xr.DataArray], + optional_datasets: Optional[Sequence[xr.DataArray]] = None, + **attrs + ) -> xr.DataArray: """Build the composite.""" if 'deprecation_warning' in self.attrs: warnings.warn( @@ -431,29 +442,29 @@ def __call__(self, projectables, nonprojectables=None, **attrs): stacklevel=2 ) self.attrs.pop('deprecation_warning', None) - num = len(projectables) + num = len(datasets) mode = attrs.get('mode') if mode is None: # num may not be in `self.modes` so only check if we need to mode = self.modes[num] - if len(projectables) > 1: - projectables = self.match_data_arrays(projectables) - data = self._concat_datasets(projectables, mode) + if len(datasets) > 1: + datasets = self.match_data_arrays(datasets) + data = self._concat_datasets(datasets, mode) # Skip masking if user wants it or a specific alpha channel is given. if self.common_channel_mask and mode[-1] != 'A': data = data.where(data.notnull().all(dim='bands')) else: - data = projectables[0] + data = datasets[0] # if inputs have a time coordinate that may differ slightly between # themselves then find the mid time and use that as the single # time coordinate value - if len(projectables) > 1: - time = check_times(projectables) + if len(datasets) > 1: + time = check_times(datasets) if time is not None and 'time' in data.dims: data['time'] = [time] - new_attrs = combine_metadata(*projectables) + new_attrs = combine_metadata(*datasets) # remove metadata that shouldn't make sense in a composite new_attrs["wavelength"] = None new_attrs.pop("units", None) @@ -467,7 +478,7 @@ def __call__(self, projectables, nonprojectables=None, **attrs): new_attrs.update(self.attrs) if resolution is not None: new_attrs['resolution'] = resolution - new_attrs["sensor"] = self._get_sensors(projectables) + new_attrs["sensor"] = self._get_sensors(datasets) new_attrs["mode"] = mode return xr.DataArray(data=data.data, attrs=new_attrs, @@ -692,22 +703,27 @@ def __init__(self, name, lim_low=85., lim_high=88., day_night="day_night", inclu self._has_sza = False super(DayNightCompositor, self).__init__(name, **kwargs) - def __call__(self, projectables, **kwargs): + def __call__( + self, + datasets: Sequence[xr.DataArray], + optional_datasets: Optional[Sequence[xr.DataArray]] = None, + **attrs + ) -> xr.DataArray: """Generate the composite.""" - projectables = self.match_data_arrays(projectables) + datasets = self.match_data_arrays(datasets) # At least one composite is requested. 
- foreground_data = projectables[0] + foreground_data = datasets[0] - weights = self._get_coszen_blending_weights(projectables) + weights = self._get_coszen_blending_weights(datasets) # Apply enhancements to the foreground data foreground_data = enhance2dataset(foreground_data) if "only" in self.day_night: - attrs = foreground_data.attrs.copy() + fg_attrs = foreground_data.attrs.copy() day_data, night_data, weights = self._get_data_for_single_side_product(foreground_data, weights) else: - day_data, night_data, attrs = self._get_data_for_combined_product(foreground_data, projectables[1]) + day_data, night_data, fg_attrs = self._get_data_for_combined_product(foreground_data, datasets[1]) # The computed coszen is for the full area, so it needs to be masked for missing and off-swath data if self.include_alpha and not self._has_sza: @@ -718,11 +734,18 @@ def __call__(self, projectables, **kwargs): day_data = zero_missing_data(day_data, night_data) night_data = zero_missing_data(night_data, day_data) - data = self._weight_data(day_data, night_data, weights, attrs) + data = self._weight_data(day_data, night_data, weights, fg_attrs) - return super(DayNightCompositor, self).__call__(data, **kwargs) + return super(DayNightCompositor, self).__call__( + data, + optional_datasets=optional_datasets, + **attrs + ) - def _get_coszen_blending_weights(self, projectables): + def _get_coszen_blending_weights( + self, + projectables: Sequence[xr.DataArray], + ) -> xr.DataArray: lim_low = np.cos(np.deg2rad(self.lim_low)) lim_high = np.cos(np.deg2rad(self.lim_high)) try: @@ -739,7 +762,11 @@ def _get_coszen_blending_weights(self, projectables): return coszen.clip(0, 1) - def _get_data_for_single_side_product(self, foreground_data, weights): + def _get_data_for_single_side_product( + self, + foreground_data: xr.DataArray, + weights: xr.DataArray, + ) -> tuple[xr.DataArray, xr.DataArray, xr.DataArray]: # Only one portion (day or night) is selected. One composite is requested. # Add alpha band to single L/RGB composite to make the masked-out portion transparent when needed # L -> LA @@ -778,7 +805,12 @@ def _get_data_for_combined_product(self, day_data, night_data): return day_data, night_data, attrs - def _mask_weights_with_data(self, weights, day_data, night_data): + def _mask_weights_with_data( + self, + weights: xr.DataArray, + day_data: xr.DataArray, + night_data: xr.DataArray, + ) -> xr.DataArray: data_a = _get_single_channel(day_data) data_b = _get_single_channel(night_data) if "only" in self.day_night: @@ -788,12 +820,16 @@ def _mask_weights_with_data(self, weights, day_data, night_data): return weights.where(mask, np.nan) - def _weight_data(self, day_data, night_data, weights, attrs): + def _weight_data( + self, + day_data: xr.DataArray, + night_data: xr.DataArray, + weights: xr.DataArray, + attrs: dict, + ) -> list[xr.DataArray]: if not self.include_alpha: fill = 1 if self.day_night == "night_only" else 0 weights = weights.where(~np.isnan(weights), fill) - if isinstance(weights, xr.DataArray): - weights = weights.data data = [] for b in _get_band_names(day_data, night_data): day_band = _get_single_band_data(day_data, b) @@ -823,9 +859,12 @@ def _get_single_band_data(data, band): return data.sel(bands=band) -def _get_single_channel(data): +def _get_single_channel(data: xr.DataArray) -> xr.DataArray: try: data = data[0, :, :] + # remove coordinates that may be band-specific (ex. 
"bands") + # and we don't care about anymore + data = data.reset_coords(drop=True) except (IndexError, TypeError): pass return data From be120d9696c5e2dcc617796ed2cbb478586ae4eb Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 26 Oct 2023 13:46:39 -0500 Subject: [PATCH 351/702] Fix type annotation in crefl function --- satpy/modifiers/_crefl_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/modifiers/_crefl_utils.py b/satpy/modifiers/_crefl_utils.py index c8d6920056..5d1b06977b 100644 --- a/satpy/modifiers/_crefl_utils.py +++ b/satpy/modifiers/_crefl_utils.py @@ -318,7 +318,7 @@ def __call__(self, sensor_azimuth, sensor_zenith, solar_azimuth, solar_zenith, a def _run_crefl(self, mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs): raise NotImplementedError() - def _height_from_avg_elevation(self, avg_elevation: Optional[np.ndarray]) -> da.Array: + def _height_from_avg_elevation(self, avg_elevation: Optional[np.ndarray]) -> da.Array | float: """Get digital elevation map data for our granule with ocean fill value set to 0.""" if avg_elevation is None: LOG.debug("No average elevation information provided in CREFL") From f182d7424933d1e437a067a2b4419700a8ec17c6 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 22 Oct 2023 19:29:19 -0500 Subject: [PATCH 352/702] Add initial hacky chunking and float32 handling to ABI L1b reader --- satpy/readers/abi_base.py | 18 +++++++++++++----- satpy/readers/abi_l1b.py | 4 ++-- 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 0b80045767..69c059e569 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -62,17 +62,25 @@ def __init__(self, filename, filename_info, filetype_info): @cached_property def nc(self): """Get the xarray dataset for this file.""" + import math + + from satpy.utils import get_dask_chunk_size_in_bytes + chunk_size_for_high_res = math.sqrt(get_dask_chunk_size_in_bytes() / 4) # 32-bit floats + chunk_size_for_high_res = np.round(chunk_size_for_high_res / 226) * 226 + ft = self.filetype_info["file_type"] + low_res_factor = 1 if ft == "c02" else (2 if ft in ("c01", "c03", "c05") else 4) + chunk_size = int(chunk_size_for_high_res / low_res_factor) f_obj = open_file_or_filename(self.filename) try: nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, - chunks={"x": CHUNK_SIZE, "y": CHUNK_SIZE}, ) + chunks={'x': chunk_size, 'y': chunk_size}, ) except ValueError: nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, - chunks={"lon": CHUNK_SIZE, "lat": CHUNK_SIZE}, ) + chunks={'lon': chunk_size, 'lat': chunk_size}, ) nc = self._rename_dims(nc) return nc @@ -137,7 +145,7 @@ def is_int(val): new_fill = fill else: new_fill = np.nan - data = data.where(data != fill, new_fill) + data = data.where(data != fill, np.float32(new_fill)) if factor != 1 and item in ("x", "y"): # be more precise with x/y coordinates # see get_area_def for more information @@ -147,8 +155,8 @@ def is_int(val): # can't do this in place since data is most likely uint16 # and we are making it a 64-bit float if not is_int(factor): - factor = float(factor) - data = data * factor + offset + factor = np.float32(factor) + data = data * np.float32(factor) + np.float32(offset) return data def _adjust_coords(self, data, item): diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py index 3a22397cde..4d0276bf79 100644 --- a/satpy/readers/abi_l1b.py +++ b/satpy/readers/abi_l1b.py @@ -136,11 +136,11 @@ def 
_raw_calibrate(self, data): def _vis_calibrate(self, data): """Calibrate visible channels to reflectance.""" solar_irradiance = self["esun"] - esd = self["earth_sun_distance_anomaly_in_AU"].astype(float) + esd = self["earth_sun_distance_anomaly_in_AU"].astype(np.float32) factor = np.pi * esd * esd / solar_irradiance - res = data * factor + res = data * np.float32(factor) res.attrs = data.attrs res.attrs["units"] = "1" res.attrs["long_name"] = "Bidirectional Reflectance" From 878e5c6c4dbc4d6200df208660102d060786907d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 27 Oct 2023 12:34:39 -0500 Subject: [PATCH 353/702] Use filetype info for ABI resolution-based chunking --- satpy/etc/readers/abi_l1b.yaml | 4 ++++ satpy/readers/abi_base.py | 19 +++++++------------ satpy/tests/reader_tests/test_abi_l1b.py | 2 +- 3 files changed, 12 insertions(+), 13 deletions(-) diff --git a/satpy/etc/readers/abi_l1b.yaml b/satpy/etc/readers/abi_l1b.yaml index d9de341ff1..f4986ba106 100644 --- a/satpy/etc/readers/abi_l1b.yaml +++ b/satpy/etc/readers/abi_l1b.yaml @@ -25,16 +25,19 @@ file_types: # "suffix" is an arbitrary suffix that may be added during third-party testing (see PR #1380) c01: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B + resolution: 1000 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c02: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B + resolution: 500 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c03: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B + resolution: 1000 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] @@ -44,6 
+47,7 @@ file_types: '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C04_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c05: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B + resolution: 1000 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 69c059e569..956bec278e 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -66,21 +66,16 @@ def nc(self): from satpy.utils import get_dask_chunk_size_in_bytes chunk_size_for_high_res = math.sqrt(get_dask_chunk_size_in_bytes() / 4) # 32-bit floats - chunk_size_for_high_res = np.round(chunk_size_for_high_res / 226) * 226 - ft = self.filetype_info["file_type"] - low_res_factor = 1 if ft == "c02" else (2 if ft in ("c01", "c03", "c05") else 4) - chunk_size = int(chunk_size_for_high_res / low_res_factor) - f_obj = open_file_or_filename(self.filename) - try: + chunk_size_for_high_res = np.round(chunk_size_for_high_res / (4 * 226)) * (4 * 226) + low_res_factor = int(self.filetype_info.get("resolution", 2000) // 500) + res_chunk_bytes = int(chunk_size_for_high_res / low_res_factor) * 4 + import dask + with dask.config.set({"array.chunk-size": res_chunk_bytes}): + f_obj = open_file_or_filename(self.filename) nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, - chunks={'x': chunk_size, 'y': chunk_size}, ) - except ValueError: - nc = xr.open_dataset(f_obj, - decode_cf=True, - mask_and_scale=False, - chunks={'lon': chunk_size, 'lat': chunk_size}, ) + chunks="auto") nc = self._rename_dims(nc) return nc diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index ab2b1eec54..7563f6e13d 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -404,7 +404,7 @@ def test_open_dataset(self, _): # noqa: PT019 openable_thing = mock.MagicMock() - NC_ABI_L1B(openable_thing, {"platform_shortname": "g16"}, None) + NC_ABI_L1B(openable_thing, {"platform_shortname": "g16"}, {}) openable_thing.open.assert_called() From 85360970007207e6e0f5b611801491d56dfd70ba Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 27 Oct 2023 16:29:13 -0500 Subject: [PATCH 354/702] Start refactoring ABI L1b tests --- satpy/tests/reader_tests/test_abi_l1b.py | 77 +++++++++++++++--------- 1 file changed, 50 insertions(+), 27 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 7563f6e13d..f8bd7e4e9f 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -16,8 +16,10 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""The abi_l1b reader tests package.""" +from __future__ import annotations import unittest +from typing import Any from unittest import mock import numpy as np @@ -26,13 +28,23 @@ from satpy.tests.utils import make_dataid +RAD_SHAPE = { + 500: (3000, 5000), # conus - 500m + 1000: (1500, 2500), # conus - 1km + 2000: (750, 1250), # conus - 2km +} -def _create_fake_rad_dataarray(rad=None): + +def _create_fake_rad_dataarray( + rad: xr.DataArray | None = None, + # resolution: int = 2000, +): x_image = xr.DataArray(0.) y_image = xr.DataArray(0.) time = xr.DataArray(0.) + shape = (2, 5) # RAD_SHAPE[resolution] if rad is None: - rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50. + rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.) * 50. rad_data = (rad_data + 1.) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( @@ -115,17 +127,28 @@ class Test_NC_ABI_L1B_Base(unittest.TestCase): """Common setup for NC_ABI_L1B tests.""" @mock.patch("satpy.readers.abi_base.xr") - def setUp(self, xr_, rad=None, clip_negative_radiances=False): + def setUp( + self, + xr_, + rad: xr.DataArray | None = None, + clip_negative_radiances: bool = False, + filetype_resolution: int = 0 + ) -> None: """Create a fake dataset using the given radiance data.""" from satpy.readers.abi_l1b import NC_ABI_L1B xr_.open_dataset.return_value = _create_fake_rad_dataset(rad=rad) - self.reader = NC_ABI_L1B("filename", - {"platform_shortname": "G16", "observation_type": "Rad", - "suffix": "custom", - "scene_abbr": "C", "scan_mode": "M3"}, - {"filetype": "info"}, - clip_negative_radiances=clip_negative_radiances) + ft_info: dict[str, Any] = {"filetype": "info"} + if filetype_resolution: + ft_info["resolution"] = filetype_resolution + self.file_handler = NC_ABI_L1B( + "filename", + {"platform_shortname": "G16", "observation_type": "Rad", + "suffix": "custom", + "scene_abbr": "C", "scan_mode": "M3"}, + ft_info, + clip_negative_radiances=clip_negative_radiances + ) class TestABIYAML: @@ -157,13 +180,13 @@ class Test_NC_ABI_L1B(Test_NC_ABI_L1B_Base): def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import datetime - assert self.reader.start_time == datetime(2017, 9, 20, 17, 30, 40, 800000) - assert self.reader.end_time == datetime(2017, 9, 20, 17, 41, 17, 500000) + assert self.file_handler.start_time == datetime(2017, 9, 20, 17, 30, 40, 800000) + assert self.file_handler.end_time == datetime(2017, 9, 20, 17, 41, 17, 500000) def test_get_dataset(self): """Test the get_dataset method.""" key = make_dataid(name="Rad", calibration="radiance") - res = self.reader.get_dataset(key, {"info": "info"}) + res = self.file_handler.get_dataset(key, {"info": "info"}) exp = {"calibration": "radiance", "instrument_ID": None, "modifiers": (), @@ -198,14 +221,14 @@ def test_get_dataset(self): @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def(self, adef): """Test the area generation.""" - self.reader.get_area_def(None) + self.file_handler.get_area_def(None) assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] assert call_args[3] == {"a": 1.0, "b": 1.0, "h": 1.0, "lon_0": -90.0, "proj": "geos", "sweep": "x", "units": "m"} - assert call_args[4] == self.reader.ncols - assert call_args[5] == self.reader.nlines + assert call_args[4] == self.file_handler.ncols + assert call_args[5] == self.file_handler.nlines np.testing.assert_allclose(call_args[6], (-2, -2, 8, 2)) @@ -226,11 +249,11 @@ def setUp(self): "_FillValue": 1002, # last rad_data value } ) - 
super(Test_NC_ABI_L1B_ir_cal, self).setUp(rad=rad) + super(Test_NC_ABI_L1B_ir_cal, self).setUp(rad=rad, filetype_resolution=2000) def test_ir_calibration_attrs(self): """Test IR calibrated DataArray attributes.""" - res = self.reader.get_dataset( + res = self.file_handler.get_dataset( make_dataid(name="C05", calibration="brightness_temperature"), {}) # make sure the attributes from the file are in the data array @@ -241,11 +264,11 @@ def test_ir_calibration_attrs(self): def test_clip_negative_radiances_attribute(self): """Assert that clip_negative_radiances is set to False.""" - assert not self.reader.clip_negative_radiances + assert not self.file_handler.clip_negative_radiances def test_ir_calibrate(self): """Test IR calibration.""" - res = self.reader.get_dataset( + res = self.file_handler.get_dataset( make_dataid(name="C05", calibration="brightness_temperature"), {}) expected = np.array([[267.55572248, 305.15576503, 332.37383249, 354.73895301, 374.19710115], @@ -273,15 +296,15 @@ def setUp(self): } ) - super().setUp(rad=rad, clip_negative_radiances=True) + super().setUp(rad=rad, clip_negative_radiances=True, filetype_resolution=2000) def test_clip_negative_radiances_attribute(self): """Assert that clip_negative_radiances has been set to True.""" - assert self.reader.clip_negative_radiances + assert self.file_handler.clip_negative_radiances def test_ir_calibrate(self): """Test IR calibration.""" - res = self.reader.get_dataset( + res = self.file_handler.get_dataset( make_dataid(name="C07", calibration="brightness_temperature"), {}) clipped_ir = 267.07775531 @@ -319,11 +342,11 @@ def setUp(self): "_FillValue": 20, } ) - super(Test_NC_ABI_L1B_vis_cal, self).setUp(rad=rad) + super(Test_NC_ABI_L1B_vis_cal, self).setUp(rad=rad, filetype_resolution=1000) def test_vis_calibrate(self): """Test VIS calibration.""" - res = self.reader.get_dataset( + res = self.file_handler.get_dataset( make_dataid(name="C05", calibration="reflectance"), {}) expected = np.array([[0.15265617, 0.30531234, 0.45796851, 0.61062468, 0.76328085], @@ -352,11 +375,11 @@ def setUp(self): "_FillValue": 20, } ) - super(Test_NC_ABI_L1B_raw_cal, self).setUp(rad=rad) + super(Test_NC_ABI_L1B_raw_cal, self).setUp(rad=rad, filetype_resolution=1000) def test_raw_calibrate(self): """Test RAW calibration.""" - res = self.reader.get_dataset( + res = self.file_handler.get_dataset( make_dataid(name="C05", calibration="counts"), {}) # We expect the raw data to be unchanged @@ -391,7 +414,7 @@ def to_dict(self): with self.assertRaises(ValueError, msg="Did not detect invalid cal"): did = FakeDataID(name="C05", calibration="invalid", modifiers=()) - self.reader.get_dataset(did, {}) + self.file_handler.get_dataset(did, {}) class Test_NC_ABI_File(unittest.TestCase): From 55e52484c05b254737863abb0648e1483d63e1b5 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sat, 28 Oct 2023 19:30:06 -0500 Subject: [PATCH 355/702] Remove unnecessary duplication in ABI L1b tests --- satpy/readers/abi_l1b.py | 8 +- satpy/tests/reader_tests/test_abi_l1b.py | 451 ++++++++++++----------- 2 files changed, 231 insertions(+), 228 deletions(-) diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py index 4d0276bf79..c3da53c9c7 100644 --- a/satpy/readers/abi_l1b.py +++ b/satpy/readers/abi_l1b.py @@ -59,12 +59,8 @@ def get_dataset(self, key, info): "radiance": self._rad_calibrate, "counts": self._raw_calibrate, } - - try: - func = cal_dictionary[key["calibration"]] - res = func(radiances) - except KeyError: - raise ValueError("Unknown calibration 
'{}'".format(key["calibration"])) + func = cal_dictionary[key["calibration"]] + res = func(radiances) # convert to satpy standard units if res.attrs["units"] == "1" and key["calibration"] != "counts": diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index f8bd7e4e9f..bdaa03f9e5 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -18,14 +18,17 @@ """The abi_l1b reader tests package.""" from __future__ import annotations -import unittest -from typing import Any +import contextlib +from pathlib import Path +from typing import Any, Iterator from unittest import mock +import dask.array as da import numpy as np import pytest import xarray as xr +from satpy.readers.abi_l1b import NC_ABI_L1B from satpy.tests.utils import make_dataid RAD_SHAPE = { @@ -36,27 +39,27 @@ def _create_fake_rad_dataarray( - rad: xr.DataArray | None = None, - # resolution: int = 2000, -): - x_image = xr.DataArray(0.) - y_image = xr.DataArray(0.) - time = xr.DataArray(0.) + rad: xr.DataArray | None = None, + # resolution: int = 2000, +) -> xr.DataArray: + x_image = xr.DataArray(0.0) + y_image = xr.DataArray(0.0) + time = xr.DataArray(0.0) shape = (2, 5) # RAD_SHAPE[resolution] if rad is None: - rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.) * 50. - rad_data = (rad_data + 1.) / 0.5 + rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.0) * 50.0 + rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( rad_data, dims=("y", "x"), attrs={ "scale_factor": 0.5, - "add_offset": -1., + "add_offset": -1.0, "_FillValue": 1002, "units": "W m-2 um-1 sr-1", "valid_range": (0, 4095), - } + }, ) rad.coords["t"] = time rad.coords["x_image"] = x_image @@ -68,25 +71,21 @@ def _create_fake_rad_dataset(rad=None): rad = _create_fake_rad_dataarray(rad=rad) x__ = xr.DataArray( - range(5), - attrs={"scale_factor": 2., "add_offset": -1.}, - dims=("x",) + range(5), attrs={"scale_factor": 2.0, "add_offset": -1.0}, dims=("x",) ) y__ = xr.DataArray( - range(2), - attrs={"scale_factor": -2., "add_offset": 1.}, - dims=("y",) + range(2), attrs={"scale_factor": -2.0, "add_offset": 1.0}, dims=("y",) ) proj = xr.DataArray( [], attrs={ - "semi_major_axis": 1., - "semi_minor_axis": 1., - "perspective_point_height": 1., - "longitude_of_projection_origin": -90., - "latitude_of_projection_origin": 0., - "sweep_angle_axis": u"x" - } + "semi_major_axis": 1.0, + "semi_minor_axis": 1.0, + "perspective_point_height": 1.0, + "longitude_of_projection_origin": -90.0, + "latitude_of_projection_origin": 0.0, + "sweep_angle_axis": "x", + }, ) fake_dataset = xr.Dataset( @@ -95,8 +94,8 @@ def _create_fake_rad_dataset(rad=None): "band_id": np.array(8), # 'x': x__, # 'y': y__, - "x_image": xr.DataArray(0.), - "y_image": xr.DataArray(0.), + "x_image": xr.DataArray(0.0), + "y_image": xr.DataArray(0.0), "goes_imager_projection": proj, "yaw_flip_flag": np.array([1]), "planck_fk1": np.array(13432.1), @@ -107,13 +106,12 @@ def _create_fake_rad_dataset(rad=None): "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02), - "earth_sun_distance_anomaly_in_AU": np.array(0.99) + "earth_sun_distance_anomaly_in_AU": np.array(0.99), }, coords={ "t": rad.coords["t"], "x": x__, "y": y__, - }, attrs={ "time_coverage_start": "2017-09-20T17:30:40.8Z", @@ -123,93 +121,139 @@ def _create_fake_rad_dataset(rad=None): return fake_dataset -class 
Test_NC_ABI_L1B_Base(unittest.TestCase): - """Common setup for NC_ABI_L1B tests.""" +def generate_l1b_filename(chan_name: str) -> str: + return f"OR_ABI-L1b-RadC-M4{chan_name}_G16_s20161811540362_e20161811545170_c20161811545230.nc" + + +@pytest.fixture(scope="module") +def l1b_c01_file(tmp_path_factory) -> list[Path]: + filename = generate_l1b_filename("C01") + data_path = tmp_path_factory.mktemp("abi_l1b").join(filename) + dataset = _create_fake_rad_dataset() + dataset.to_netcdf(data_path) + return [data_path] + + +@pytest.fixture(scope="module") +def l1b_all_files( + l1b_c01_file, +) -> list[Path]: + return l1b_c01_file - @mock.patch("satpy.readers.abi_base.xr") - def setUp( - self, - xr_, - rad: xr.DataArray | None = None, - clip_negative_radiances: bool = False, - filetype_resolution: int = 0 - ) -> None: - """Create a fake dataset using the given radiance data.""" - from satpy.readers.abi_l1b import NC_ABI_L1B +@contextlib.contextmanager +def create_file_handler( + rad: xr.DataArray | None = None, + clip_negative_radiances: bool = False, + filetype_resolution: int = 0, +) -> Iterator[NC_ABI_L1B]: + """Create a fake dataset using the given radiance data.""" + + ft_info: dict[str, Any] = {"filetype": "info"} + if filetype_resolution: + ft_info["resolution"] = filetype_resolution + + with mock.patch("satpy.readers.abi_base.xr") as xr_: xr_.open_dataset.return_value = _create_fake_rad_dataset(rad=rad) - ft_info: dict[str, Any] = {"filetype": "info"} - if filetype_resolution: - ft_info["resolution"] = filetype_resolution - self.file_handler = NC_ABI_L1B( + file_handler = NC_ABI_L1B( "filename", - {"platform_shortname": "G16", "observation_type": "Rad", - "suffix": "custom", - "scene_abbr": "C", "scan_mode": "M3"}, + { + "platform_shortname": "G16", + "observation_type": "Rad", + "suffix": "custom", + "scene_abbr": "C", + "scan_mode": "M3", + }, ft_info, - clip_negative_radiances=clip_negative_radiances + clip_negative_radiances=clip_negative_radiances, ) + yield file_handler class TestABIYAML: """Tests for the ABI L1b reader's YAML configuration.""" - @pytest.mark.parametrize(("channel", "suffix"), - [("C{:02d}".format(num), suffix) - for num in range(1, 17) - for suffix in ("", "_test_suffix")]) + @pytest.mark.parametrize( + ("channel", "suffix"), + [ + ("C{:02d}".format(num), suffix) + for num in range(1, 17) + for suffix in ("", "_test_suffix") + ], + ) def test_file_patterns_match(self, channel, suffix): """Test that the configured file patterns work.""" from satpy.readers import configs_for_reader, load_reader + reader_configs = list(configs_for_reader("abi_l1b"))[0] reader = load_reader(reader_configs) - fn1 = ("OR_ABI-L1b-RadM1-M3{}_G16_s20182541300210_e20182541300267" - "_c20182541300308{}.nc").format(channel, suffix) + fn1 = ( + "OR_ABI-L1b-RadM1-M3{}_G16_s20182541300210_e20182541300267" + "_c20182541300308{}.nc" + ).format(channel, suffix) loadables = reader.select_files_from_pathnames([fn1]) assert len(loadables) == 1 if not suffix and channel in ["C01", "C02", "C03", "C05"]: - fn2 = ("OR_ABI-L1b-RadM1-M3{}_G16_s20182541300210_e20182541300267" - "_c20182541300308-000000_0.nc").format(channel) + fn2 = ( + "OR_ABI-L1b-RadM1-M3{}_G16_s20182541300210_e20182541300267" + "_c20182541300308-000000_0.nc" + ).format(channel) loadables = reader.select_files_from_pathnames([fn2]) assert len(loadables) == 1 -class Test_NC_ABI_L1B(Test_NC_ABI_L1B_Base): +class Test_NC_ABI_L1B: """Test the NC_ABI_L1B reader.""" + @property + def fake_rad(self): + """Create fake data for these tests. 
+ + Needs to be an instance method so the subclass can override it. + + """ + return None + def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import datetime - assert self.file_handler.start_time == datetime(2017, 9, 20, 17, 30, 40, 800000) - assert self.file_handler.end_time == datetime(2017, 9, 20, 17, 41, 17, 500000) + + with create_file_handler(rad=self.fake_rad) as file_handler: + assert file_handler.start_time == datetime(2017, 9, 20, 17, 30, 40, 800000) + assert file_handler.end_time == datetime(2017, 9, 20, 17, 41, 17, 500000) def test_get_dataset(self): """Test the get_dataset method.""" key = make_dataid(name="Rad", calibration="radiance") - res = self.file_handler.get_dataset(key, {"info": "info"}) - exp = {"calibration": "radiance", - "instrument_ID": None, - "modifiers": (), - "name": "Rad", - "observation_type": "Rad", - "orbital_parameters": {"projection_altitude": 1.0, - "projection_latitude": 0.0, - "projection_longitude": -90.0, - "satellite_nominal_altitude": 35786020., - "satellite_nominal_latitude": 0.0, - "satellite_nominal_longitude": -89.5, - "yaw_flip": True}, - "orbital_slot": None, - "platform_name": "GOES-16", - "platform_shortname": "G16", - "production_site": None, - "scan_mode": "M3", - "scene_abbr": "C", - "scene_id": None, - "sensor": "abi", - "timeline_ID": None, - "suffix": "custom", - "units": "W m-2 um-1 sr-1"} + with create_file_handler(rad=self.fake_rad) as file_handler: + res = file_handler.get_dataset(key, {"info": "info"}) + exp = { + "calibration": "radiance", + "instrument_ID": None, + "modifiers": (), + "name": "Rad", + "observation_type": "Rad", + "orbital_parameters": { + "projection_altitude": 1.0, + "projection_latitude": 0.0, + "projection_longitude": -90.0, + "satellite_nominal_altitude": 35786020.0, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_longitude": -89.5, + "yaw_flip": True, + }, + "orbital_slot": None, + "platform_name": "GOES-16", + "platform_shortname": "G16", + "production_site": None, + "scan_mode": "M3", + "scene_abbr": "C", + "scene_id": None, + "sensor": "abi", + "timeline_ID": None, + "suffix": "custom", + "units": "W m-2 um-1 sr-1", + } assert res.attrs == exp # we remove any time dimension information @@ -221,40 +265,47 @@ def test_get_dataset(self): @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def(self, adef): """Test the area generation.""" - self.file_handler.get_area_def(None) - - assert adef.call_count == 1 - call_args = tuple(adef.call_args)[0] - assert call_args[3] == {"a": 1.0, "b": 1.0, "h": 1.0, - "lon_0": -90.0, "proj": "geos", "sweep": "x", "units": "m"} - assert call_args[4] == self.file_handler.ncols - assert call_args[5] == self.file_handler.nlines - np.testing.assert_allclose(call_args[6], (-2, -2, 8, 2)) + with create_file_handler(rad=self.fake_rad) as file_handler: + file_handler.get_area_def(None) + + assert adef.call_count == 1 + call_args = tuple(adef.call_args)[0] + assert call_args[3] == { + "a": 1.0, + "b": 1.0, + "h": 1.0, + "lon_0": -90.0, + "proj": "geos", + "sweep": "x", + "units": "m", + } + assert call_args[4] == file_handler.ncols + assert call_args[5] == file_handler.nlines + np.testing.assert_allclose(call_args[6], (-2, -2, 8, 2)) -class Test_NC_ABI_L1B_ir_cal(Test_NC_ABI_L1B_Base): +class Test_NC_ABI_L1B_ir_cal: """Test the NC_ABI_L1B reader's default IR calibration.""" - def setUp(self): - """Create fake data for the tests.""" - rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50. 
- rad_data = (rad_data + 1.) / 0.5 - rad_data = rad_data.astype(np.int16) - rad = xr.DataArray( - rad_data, - dims=("y", "x"), - attrs={ - "scale_factor": 0.5, - "add_offset": -1., - "_FillValue": 1002, # last rad_data value - } + @pytest.mark.parametrize("clip_negative_radiances", [False, True]) + def test_ir_calibrate(self, clip_negative_radiances): + """Test IR calibration.""" + with _ir_file_handler( + clip_negative_radiances=clip_negative_radiances + ) as file_handler: + res = file_handler.get_dataset( + make_dataid(name="C07", calibration="brightness_temperature"), {} + ) + assert file_handler.clip_negative_radiances == clip_negative_radiances + + clipped_ir = 134.68753 if clip_negative_radiances else np.nan + expected = np.array( + [ + [clipped_ir, 304.97037, 332.22778, 354.6147, 374.08688], + [391.58655, 407.64786, 422.60635, 436.68802, np.nan], + ] ) - super(Test_NC_ABI_L1B_ir_cal, self).setUp(rad=rad, filetype_resolution=2000) - - def test_ir_calibration_attrs(self): - """Test IR calibrated DataArray attributes.""" - res = self.file_handler.get_dataset( - make_dataid(name="C05", calibration="brightness_temperature"), {}) + np.testing.assert_allclose(res.data, expected, equal_nan=True, atol=1e-04) # make sure the attributes from the file are in the data array assert "scale_factor" not in res.attrs @@ -262,95 +313,69 @@ def test_ir_calibration_attrs(self): assert res.attrs["standard_name"] == "toa_brightness_temperature" assert res.attrs["long_name"] == "Brightness Temperature" - def test_clip_negative_radiances_attribute(self): - """Assert that clip_negative_radiances is set to False.""" - assert not self.file_handler.clip_negative_radiances - - def test_ir_calibrate(self): - """Test IR calibration.""" - res = self.file_handler.get_dataset( - make_dataid(name="C05", calibration="brightness_temperature"), {}) - - expected = np.array([[267.55572248, 305.15576503, 332.37383249, 354.73895301, 374.19710115], - [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]]) - assert np.allclose(res.data, expected, equal_nan=True) - - -class Test_NC_ABI_L1B_clipped_ir_cal(Test_NC_ABI_L1B_Base): - """Test the NC_ABI_L1B reader's IR calibration (clipping negative radiance).""" - def setUp(self): - """Create fake data for the tests.""" - values = np.arange(10.) - values[0] = -0.0001 # introduce below minimum expected radiance - rad_data = (values.reshape((2, 5)) + 1.) * 50. - rad_data = (rad_data + 1.) 
/ 0.5 - rad_data = rad_data.astype(np.int16) - rad = xr.DataArray( - rad_data, - dims=("y", "x"), - attrs={ - "scale_factor": 0.5, - "add_offset": -1., - "_FillValue": 1002, - } - ) - - super().setUp(rad=rad, clip_negative_radiances=True, filetype_resolution=2000) - - def test_clip_negative_radiances_attribute(self): - """Assert that clip_negative_radiances has been set to True.""" - assert self.file_handler.clip_negative_radiances - - def test_ir_calibrate(self): - """Test IR calibration.""" - res = self.file_handler.get_dataset( - make_dataid(name="C07", calibration="brightness_temperature"), {}) +@contextlib.contextmanager +def _ir_file_handler( + data: da.Array | None = None, clip_negative_radiances: bool = False +): + """Create fake data for the tests.""" + if data is None: + data = _fake_ir_data() + rad = xr.DataArray( + data, + dims=("y", "x"), + attrs={ + "scale_factor": 0.5, + "add_offset": -1.3, + "_FillValue": np.int16( + np.floor(((9 + 1) * 50.0 + 1.3) / 0.5) + ), # last rad_data value + }, + ) + with create_file_handler( + rad=rad, + clip_negative_radiances=clip_negative_radiances, + filetype_resolution=2000, + ) as file_handler: + yield file_handler - clipped_ir = 267.07775531 - expected = np.array([[clipped_ir, 305.15576503, 332.37383249, 354.73895301, 374.19710115], - [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]]) - assert np.allclose(res.data, expected, equal_nan=True) - def test_get_minimum_radiance(self): - """Test get_minimum_radiance from Rad DataArray.""" - from satpy.readers.abi_l1b import NC_ABI_L1B - data = xr.DataArray( - attrs={ - "scale_factor": 0.5, - "add_offset": -1., - "_FillValue": 1002, - } - ) - np.testing.assert_allclose(NC_ABI_L1B._get_minimum_radiance(NC_ABI_L1B, data), 0.0) +def _fake_ir_data(): + values = np.arange(10.0) + rad_data = (values.reshape((2, 5)) + 1.0) * 50.0 + rad_data[0, 0] = -0.0001 # introduce below minimum expected radiance + rad_data = (rad_data + 1.3) / 0.5 + return rad_data.astype(np.int16) -class Test_NC_ABI_L1B_vis_cal(Test_NC_ABI_L1B_Base): +class Test_NC_ABI_L1B_vis_cal: """Test the NC_ABI_L1B reader.""" - def setUp(self): - """Create fake data for the tests.""" - rad_data = (np.arange(10.).reshape((2, 5)) + 1.) - rad_data = (rad_data + 1.) 
/ 0.5 + def test_vis_calibrate(self): + """Test VIS calibration.""" + rad_data = np.arange(10.0).reshape((2, 5)) + 1.0 + rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( rad_data, dims=("y", "x"), attrs={ "scale_factor": 0.5, - "add_offset": -1., + "add_offset": -1.0, "_FillValue": 20, - } + }, + ) + with create_file_handler(rad=rad, filetype_resolution=1000) as file_handler: + res = file_handler.get_dataset( + make_dataid(name="C05", calibration="reflectance"), {} + ) + + expected = np.array( + [ + [0.15265617, 0.30531234, 0.45796851, 0.61062468, 0.76328085], + [0.91593702, 1.06859319, 1.22124936, np.nan, 1.52656171], + ] ) - super(Test_NC_ABI_L1B_vis_cal, self).setUp(rad=rad, filetype_resolution=1000) - - def test_vis_calibrate(self): - """Test VIS calibration.""" - res = self.file_handler.get_dataset( - make_dataid(name="C05", calibration="reflectance"), {}) - - expected = np.array([[0.15265617, 0.30531234, 0.45796851, 0.61062468, 0.76328085], - [0.91593702, 1.06859319, 1.22124936, np.nan, 1.52656171]]) assert np.allclose(res.data, expected, equal_nan=True) assert "scale_factor" not in res.attrs assert "_FillValue" not in res.attrs @@ -358,29 +383,27 @@ def test_vis_calibrate(self): assert res.attrs["long_name"] == "Bidirectional Reflectance" -class Test_NC_ABI_L1B_raw_cal(Test_NC_ABI_L1B_Base): +class Test_NC_ABI_L1B_raw_cal: """Test the NC_ABI_L1B reader raw calibration.""" - def setUp(self): - """Create fake data for the tests.""" - rad_data = (np.arange(10.).reshape((2, 5)) + 1.) - rad_data = (rad_data + 1.) / 0.5 + def test_raw_calibrate(self): + """Test RAW calibration.""" + rad_data = np.arange(10.0).reshape((2, 5)) + 1.0 + rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( rad_data, dims=("y", "x"), attrs={ "scale_factor": 0.5, - "add_offset": -1., + "add_offset": -1.0, "_FillValue": 20, - } + }, ) - super(Test_NC_ABI_L1B_raw_cal, self).setUp(rad=rad, filetype_resolution=1000) - - def test_raw_calibrate(self): - """Test RAW calibration.""" - res = self.file_handler.get_dataset( - make_dataid(name="C05", calibration="counts"), {}) + with create_file_handler(rad=rad) as file_handler: + res = file_handler.get_dataset( + make_dataid(name="C05", calibration="counts"), {} + ) # We expect the raw data to be unchanged expected = res.data @@ -400,24 +423,7 @@ def test_raw_calibrate(self): assert res.attrs["long_name"] == "Raw Counts" -class Test_NC_ABI_L1B_invalid_cal(Test_NC_ABI_L1B_Base): - """Test the NC_ABI_L1B reader with invalid calibration.""" - - def test_invalid_calibration(self): - """Test detection of invalid calibration values.""" - # Need to use a custom DataID class because the real DataID class is - # smart enough to detect the invalid calibration before the ABI L1B - # get_dataset method gets a chance to run. 
- class FakeDataID(dict): - def to_dict(self): - return self - - with self.assertRaises(ValueError, msg="Did not detect invalid cal"): - did = FakeDataID(name="C05", calibration="invalid", modifiers=()) - self.file_handler.get_dataset(did, {}) - - -class Test_NC_ABI_File(unittest.TestCase): +class Test_NC_ABI_File: """Test file opening.""" @mock.patch("satpy.readers.abi_base.xr") @@ -434,17 +440,18 @@ def test_open_dataset(self, _): # noqa: PT019 class Test_NC_ABI_L1B_H5netcdf(Test_NC_ABI_L1B): """Allow h5netcdf peculiarities.""" - def setUp(self): + @property + def fake_rad(self): """Create fake data for the tests.""" rad_data = np.int16(50) rad = xr.DataArray( rad_data, attrs={ "scale_factor": 0.5, - "add_offset": -1., + "add_offset": -1.0, "_FillValue": np.array([1002]), "units": "W m-2 um-1 sr-1", "valid_range": (0, 4095), - } + }, ) - super(Test_NC_ABI_L1B_H5netcdf, self).setUp(rad=rad) + return rad From deac453b30cf4936d06fb8eabc0a27941645f2bf Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sat, 28 Oct 2023 20:48:12 -0500 Subject: [PATCH 356/702] Use dask arrays in abi l1b tests --- satpy/tests/reader_tests/test_abi_l1b.py | 32 ++++++++++-------------- 1 file changed, 13 insertions(+), 19 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index bdaa03f9e5..bc7c5351e8 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -51,7 +51,7 @@ def _create_fake_rad_dataarray( rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( - rad_data, + da.from_array(rad_data), dims=("y", "x"), attrs={ "scale_factor": 0.5, @@ -212,7 +212,7 @@ def fake_rad(self): Needs to be an instance method so the subclass can override it. 
""" - return None + return None # use default from file handler creator def test_basic_attributes(self): """Test getting basic file attributes.""" @@ -315,14 +315,16 @@ def test_ir_calibrate(self, clip_negative_radiances): @contextlib.contextmanager -def _ir_file_handler( - data: da.Array | None = None, clip_negative_radiances: bool = False -): +def _ir_file_handler(clip_negative_radiances: bool = False): """Create fake data for the tests.""" - if data is None: - data = _fake_ir_data() + values = np.arange(10.0) + rad_data = (values.reshape((2, 5)) + 1.0) * 50.0 + rad_data[0, 0] = -0.0001 # introduce below minimum expected radiance + rad_data = (rad_data + 1.3) / 0.5 + data = rad_data.astype(np.int16) + rad = xr.DataArray( - data, + da.from_array(data), dims=("y", "x"), attrs={ "scale_factor": 0.5, @@ -340,14 +342,6 @@ def _ir_file_handler( yield file_handler -def _fake_ir_data(): - values = np.arange(10.0) - rad_data = (values.reshape((2, 5)) + 1.0) * 50.0 - rad_data[0, 0] = -0.0001 # introduce below minimum expected radiance - rad_data = (rad_data + 1.3) / 0.5 - return rad_data.astype(np.int16) - - class Test_NC_ABI_L1B_vis_cal: """Test the NC_ABI_L1B reader.""" @@ -357,7 +351,7 @@ def test_vis_calibrate(self): rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( - rad_data, + da.from_array(rad_data), dims=("y", "x"), attrs={ "scale_factor": 0.5, @@ -392,7 +386,7 @@ def test_raw_calibrate(self): rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( - rad_data, + da.from_array(rad_data), dims=("y", "x"), attrs={ "scale_factor": 0.5, @@ -445,7 +439,7 @@ def fake_rad(self): """Create fake data for the tests.""" rad_data = np.int16(50) rad = xr.DataArray( - rad_data, + da.from_array(rad_data), attrs={ "scale_factor": 0.5, "add_offset": -1.0, From e24832416dca7aa2d285a3a51a00521f6acf2843 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 29 Oct 2023 10:03:02 -0500 Subject: [PATCH 357/702] Switch some tests to on-disk files --- satpy/readers/abi_base.py | 2 +- satpy/tests/reader_tests/test_abi_l1b.py | 64 +++++++++++++++--------- 2 files changed, 40 insertions(+), 26 deletions(-) diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 956bec278e..28ff91ce38 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -66,7 +66,7 @@ def nc(self): from satpy.utils import get_dask_chunk_size_in_bytes chunk_size_for_high_res = math.sqrt(get_dask_chunk_size_in_bytes() / 4) # 32-bit floats - chunk_size_for_high_res = np.round(chunk_size_for_high_res / (4 * 226)) * (4 * 226) + chunk_size_for_high_res = np.round(max(chunk_size_for_high_res / (4 * 226), 1)) * (4 * 226) low_res_factor = int(self.filetype_info.get("resolution", 2000) // 500) res_chunk_bytes = int(chunk_size_for_high_res / low_res_factor) * 4 import dask diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index bc7c5351e8..6d8a001918 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -19,8 +19,9 @@ from __future__ import annotations import contextlib +from datetime import datetime from pathlib import Path -from typing import Any, Iterator +from typing import Any, Callable, Iterator from unittest import mock import dask.array as da @@ -28,6 +29,7 @@ import pytest import xarray as xr +from satpy import Scene from satpy.readers.abi_l1b import NC_ABI_L1B from satpy.tests.utils import make_dataid @@ -77,7 +79,7 @@ def 
_create_fake_rad_dataset(rad=None): range(2), attrs={"scale_factor": -2.0, "add_offset": 1.0}, dims=("y",) ) proj = xr.DataArray( - [], + np.int64(0), attrs={ "semi_major_axis": 1.0, "semi_minor_axis": 1.0, @@ -122,16 +124,27 @@ def _create_fake_rad_dataset(rad=None): def generate_l1b_filename(chan_name: str) -> str: - return f"OR_ABI-L1b-RadC-M4{chan_name}_G16_s20161811540362_e20161811545170_c20161811545230.nc" + return f"OR_ABI-L1b-RadC-M4{chan_name}_G16_s20161811540362_e20161811545170_c20161811545230_suffix.nc" @pytest.fixture(scope="module") -def l1b_c01_file(tmp_path_factory) -> list[Path]: - filename = generate_l1b_filename("C01") - data_path = tmp_path_factory.mktemp("abi_l1b").join(filename) - dataset = _create_fake_rad_dataset() - dataset.to_netcdf(data_path) - return [data_path] +def l1b_c01_file(tmp_path_factory) -> Callable: + def _create_file_handler( + rad: xr.DataArray | None = None, + clip_negative_radiances: bool = False, + ): + filename = generate_l1b_filename("C01") + data_path = tmp_path_factory.mktemp("abi_l1b") / filename + dataset = _create_fake_rad_dataset(rad=rad) + dataset.to_netcdf(data_path) + scn = Scene( + reader="abi_l1b", + filenames=[str(data_path)], + reader_kwargs={"clip_negative_radiances": clip_negative_radiances} + ) + return scn + + return _create_file_handler @pytest.fixture(scope="module") @@ -214,24 +227,17 @@ def fake_rad(self): """ return None # use default from file handler creator - def test_basic_attributes(self): - """Test getting basic file attributes.""" - from datetime import datetime - - with create_file_handler(rad=self.fake_rad) as file_handler: - assert file_handler.start_time == datetime(2017, 9, 20, 17, 30, 40, 800000) - assert file_handler.end_time == datetime(2017, 9, 20, 17, 41, 17, 500000) - - def test_get_dataset(self): + def test_get_dataset(self, l1b_c01_file): """Test the get_dataset method.""" - key = make_dataid(name="Rad", calibration="radiance") - with create_file_handler(rad=self.fake_rad) as file_handler: - res = file_handler.get_dataset(key, {"info": "info"}) + scn = l1b_c01_file(rad=self.fake_rad) + key = make_dataid(name="C01", calibration="radiance") + scn.load([key]) + exp = { "calibration": "radiance", "instrument_ID": None, "modifiers": (), - "name": "Rad", + "name": "C01", "observation_type": "Rad", "orbital_parameters": { "projection_altitude": 1.0, @@ -246,16 +252,24 @@ def test_get_dataset(self): "platform_name": "GOES-16", "platform_shortname": "G16", "production_site": None, - "scan_mode": "M3", + "reader": "abi_l1b", + "resolution": 1000, + "scan_mode": "M4", "scene_abbr": "C", "scene_id": None, "sensor": "abi", "timeline_ID": None, - "suffix": "custom", + "suffix": "suffix", "units": "W m-2 um-1 sr-1", + "start_time": datetime(2017, 9, 20, 17, 30, 40, 800000), + "end_time": datetime(2017, 9, 20, 17, 41, 17, 500000), } - assert res.attrs == exp + res = scn["C01"] + assert "area" in res.attrs + for exp_key, exp_val in exp.items(): + assert res.attrs[exp_key] == exp_val + # we remove any time dimension information assert "t" not in res.coords assert "t" not in res.dims From 07e841c3540982f2d1638d34ba4795bdcf1f4559 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 29 Oct 2023 14:57:55 -0500 Subject: [PATCH 358/702] Move more ABI L1b tests to on-disk files --- satpy/readers/abi_base.py | 6 ++-- satpy/tests/reader_tests/test_abi_l1b.py | 42 +++++++++++++++--------- satpy/utils.py | 19 +++++++++++ 3 files changed, 48 insertions(+), 19 deletions(-) diff --git a/satpy/readers/abi_base.py 
b/satpy/readers/abi_base.py index 28ff91ce38..3574349c71 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -23,7 +23,7 @@ import numpy as np import xarray as xr -from pyresample import geometry +from pyresample.geometry import AreaDefinition from satpy._compat import cached_property from satpy.readers import open_file_or_filename @@ -212,7 +212,7 @@ def _get_areadef_latlon(self, key): "fi": float(fi), "pm": float(pm)} - ll_area_def = geometry.AreaDefinition( + ll_area_def = AreaDefinition( self.nc.attrs.get("orbital_slot", "abi_geos"), self.nc.attrs.get("spatial_resolution", "ABI file area"), "abi_latlon", @@ -262,7 +262,7 @@ def _get_areadef_fixedgrid(self, key): "units": "m", "sweep": sweep_axis} - fg_area_def = geometry.AreaDefinition( + fg_area_def = AreaDefinition( self.nc.attrs.get("orbital_slot", "abi_geos"), self.nc.attrs.get("spatial_resolution", "ABI file area"), "abi_fixed_grid", diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 6d8a001918..7ee9d36acd 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -32,6 +32,7 @@ from satpy import Scene from satpy.readers.abi_l1b import NC_ABI_L1B from satpy.tests.utils import make_dataid +from satpy.utils import ignore_pyproj_proj_warnings RAD_SHAPE = { 500: (3000, 5000), # conus - 500m @@ -276,26 +277,34 @@ def test_get_dataset(self, l1b_c01_file): assert "time" not in res.coords assert "time" not in res.dims - @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") - def test_get_area_def(self, adef): + def test_get_area_def(self, l1b_c01_file): """Test the area generation.""" - with create_file_handler(rad=self.fake_rad) as file_handler: - file_handler.get_area_def(None) - - assert adef.call_count == 1 - call_args = tuple(adef.call_args)[0] - assert call_args[3] == { - "a": 1.0, - "b": 1.0, + from pyresample.geometry import AreaDefinition + + scn = l1b_c01_file(rad=self.fake_rad) + scn.load(["C01"]) + area_def = scn["C01"].attrs["area"] + assert isinstance(area_def, AreaDefinition) + + with ignore_pyproj_proj_warnings(): + proj_dict = area_def.crs.to_dict() + exp_dict = { "h": 1.0, "lon_0": -90.0, "proj": "geos", "sweep": "x", "units": "m", } - assert call_args[4] == file_handler.ncols - assert call_args[5] == file_handler.nlines - np.testing.assert_allclose(call_args[6], (-2, -2, 8, 2)) + if "R" in proj_dict: + assert proj_dict["R"] == 1 + else: + assert proj_dict["a"] == 1 + assert proj_dict["b"] == 1 + for proj_key, proj_val in exp_dict.items(): + assert proj_dict[proj_key] == proj_val + + assert area_def.shape == scn["C01"].shape + assert area_def.area_extent == (-2, -2, 8, 2) class Test_NC_ABI_L1B_ir_cal: @@ -437,8 +446,6 @@ class Test_NC_ABI_File: @mock.patch("satpy.readers.abi_base.xr") def test_open_dataset(self, _): # noqa: PT019 """Test openning a dataset.""" - from satpy.readers.abi_l1b import NC_ABI_L1B - openable_thing = mock.MagicMock() NC_ABI_L1B(openable_thing, {"platform_shortname": "g16"}, {}) @@ -451,7 +458,10 @@ class Test_NC_ABI_L1B_H5netcdf(Test_NC_ABI_L1B): @property def fake_rad(self): """Create fake data for the tests.""" - rad_data = np.int16(50) + shape = (2, 5) + rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.0) * 50.0 + rad_data = (rad_data + 1.0) / 0.5 + rad_data = rad_data.astype(np.int16) rad = xr.DataArray( da.from_array(rad_data), attrs={ diff --git a/satpy/utils.py b/satpy/utils.py index f9ea05ca79..dfedc30803 100644 --- a/satpy/utils.py +++ 
b/satpy/utils.py @@ -576,6 +576,25 @@ def ignore_invalid_float_warnings(): yield +@contextlib.contextmanager +def ignore_pyproj_proj_warnings(): + """Wrap operations that we know will produce a PROJ.4 precision warning. + + Only to be used internally to Pyresample when we have no other choice but + to use PROJ.4 strings/dicts. For example, serialization to YAML or other + human-readable formats or testing the methods that produce the PROJ.4 + versions of the CRS. + + """ + with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", + "You will likely lose important projection information", + UserWarning, + ) + yield + + def get_chunk_size_limit(dtype=float): """Compute the chunk size limit in bytes given *dtype* (float by default). From b6411c7dc34afbbde38efca5dd13d5a5fe18dd69 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 29 Oct 2023 20:47:53 -0500 Subject: [PATCH 359/702] Switch all ABI L1b tests to on-disk files --- satpy/tests/reader_tests/test_abi_l1b.py | 101 ++++++++--------------- 1 file changed, 33 insertions(+), 68 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 7ee9d36acd..e60453f9a3 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -18,10 +18,8 @@ """The abi_l1b reader tests package.""" from __future__ import annotations -import contextlib from datetime import datetime -from pathlib import Path -from typing import Any, Callable, Iterator +from typing import Callable from unittest import mock import dask.array as da @@ -29,7 +27,7 @@ import pytest import xarray as xr -from satpy import Scene +from satpy import DataQuery, Scene from satpy.readers.abi_l1b import NC_ABI_L1B from satpy.tests.utils import make_dataid from satpy.utils import ignore_pyproj_proj_warnings @@ -130,10 +128,7 @@ def generate_l1b_filename(chan_name: str) -> str: @pytest.fixture(scope="module") def l1b_c01_file(tmp_path_factory) -> Callable: - def _create_file_handler( - rad: xr.DataArray | None = None, - clip_negative_radiances: bool = False, - ): + def _create_file_handler(rad: xr.DataArray | None = None): filename = generate_l1b_filename("C01") data_path = tmp_path_factory.mktemp("abi_l1b") / filename dataset = _create_fake_rad_dataset(rad=rad) @@ -141,7 +136,6 @@ def _create_file_handler( scn = Scene( reader="abi_l1b", filenames=[str(data_path)], - reader_kwargs={"clip_negative_radiances": clip_negative_radiances} ) return scn @@ -149,39 +143,23 @@ def _create_file_handler( @pytest.fixture(scope="module") -def l1b_all_files( - l1b_c01_file, -) -> list[Path]: - return l1b_c01_file - - -@contextlib.contextmanager -def create_file_handler( - rad: xr.DataArray | None = None, - clip_negative_radiances: bool = False, - filetype_resolution: int = 0, -) -> Iterator[NC_ABI_L1B]: - """Create a fake dataset using the given radiance data.""" - - ft_info: dict[str, Any] = {"filetype": "info"} - if filetype_resolution: - ft_info["resolution"] = filetype_resolution - - with mock.patch("satpy.readers.abi_base.xr") as xr_: - xr_.open_dataset.return_value = _create_fake_rad_dataset(rad=rad) - file_handler = NC_ABI_L1B( - "filename", - { - "platform_shortname": "G16", - "observation_type": "Rad", - "suffix": "custom", - "scene_abbr": "C", - "scan_mode": "M3", - }, - ft_info, - clip_negative_radiances=clip_negative_radiances, +def l1b_c07_file(tmp_path_factory) -> Callable: + def _create_file_handler( + rad: xr.DataArray | None = None, + clip_negative_radiances: bool = False, + ): + 
filename = generate_l1b_filename("C07") + data_path = tmp_path_factory.mktemp("abi_l1b") / filename + dataset = _create_fake_rad_dataset(rad=rad) + dataset.to_netcdf(data_path) + scn = Scene( + reader="abi_l1b", + filenames=[str(data_path)], + reader_kwargs={"clip_negative_radiances": clip_negative_radiances} ) - yield file_handler + return scn + + return _create_file_handler class TestABIYAML: @@ -311,15 +289,11 @@ class Test_NC_ABI_L1B_ir_cal: """Test the NC_ABI_L1B reader's default IR calibration.""" @pytest.mark.parametrize("clip_negative_radiances", [False, True]) - def test_ir_calibrate(self, clip_negative_radiances): + def test_ir_calibrate(self, l1b_c07_file, clip_negative_radiances): """Test IR calibration.""" - with _ir_file_handler( - clip_negative_radiances=clip_negative_radiances - ) as file_handler: - res = file_handler.get_dataset( - make_dataid(name="C07", calibration="brightness_temperature"), {} - ) - assert file_handler.clip_negative_radiances == clip_negative_radiances + scn = l1b_c07_file(rad=_fake_ir_data(), clip_negative_radiances=clip_negative_radiances) + scn.load([DataQuery(name="C07", calibration="brightness_temperature")]) + res = scn["C07"] clipped_ir = 134.68753 if clip_negative_radiances else np.nan expected = np.array( @@ -337,9 +311,7 @@ def test_ir_calibrate(self, clip_negative_radiances): assert res.attrs["long_name"] == "Brightness Temperature" -@contextlib.contextmanager -def _ir_file_handler(clip_negative_radiances: bool = False): - """Create fake data for the tests.""" +def _fake_ir_data(): values = np.arange(10.0) rad_data = (values.reshape((2, 5)) + 1.0) * 50.0 rad_data[0, 0] = -0.0001 # introduce below minimum expected radiance @@ -357,18 +329,13 @@ def _ir_file_handler(clip_negative_radiances: bool = False): ), # last rad_data value }, ) - with create_file_handler( - rad=rad, - clip_negative_radiances=clip_negative_radiances, - filetype_resolution=2000, - ) as file_handler: - yield file_handler + return rad class Test_NC_ABI_L1B_vis_cal: """Test the NC_ABI_L1B reader.""" - def test_vis_calibrate(self): + def test_vis_calibrate(self, l1b_c01_file): """Test VIS calibration.""" rad_data = np.arange(10.0).reshape((2, 5)) + 1.0 rad_data = (rad_data + 1.0) / 0.5 @@ -382,10 +349,9 @@ def test_vis_calibrate(self): "_FillValue": 20, }, ) - with create_file_handler(rad=rad, filetype_resolution=1000) as file_handler: - res = file_handler.get_dataset( - make_dataid(name="C05", calibration="reflectance"), {} - ) + scn = l1b_c01_file(rad=rad) + scn.load(["C01"]) + res = scn["C01"] expected = np.array( [ @@ -403,7 +369,7 @@ def test_vis_calibrate(self): class Test_NC_ABI_L1B_raw_cal: """Test the NC_ABI_L1B reader raw calibration.""" - def test_raw_calibrate(self): + def test_raw_calibrate(self, l1b_c01_file): """Test RAW calibration.""" rad_data = np.arange(10.0).reshape((2, 5)) + 1.0 rad_data = (rad_data + 1.0) / 0.5 @@ -417,10 +383,9 @@ def test_raw_calibrate(self): "_FillValue": 20, }, ) - with create_file_handler(rad=rad) as file_handler: - res = file_handler.get_dataset( - make_dataid(name="C05", calibration="counts"), {} - ) + scn = l1b_c01_file(rad=rad) + scn.load([DataQuery(name="C01", calibration="counts")]) + res = scn["C01"] # We expect the raw data to be unchanged expected = res.data From f9efd963ba6a50cbead6979095db4786c456c31d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 29 Oct 2023 21:16:10 -0500 Subject: [PATCH 360/702] Use more realistic sizes in ABI tests --- satpy/tests/reader_tests/test_abi_l1b.py | 49 +++++++++++++----------- 1 file 
changed, 27 insertions(+), 22 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index e60453f9a3..fb08ef9361 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -41,12 +41,12 @@ def _create_fake_rad_dataarray( rad: xr.DataArray | None = None, - # resolution: int = 2000, + resolution: int = 2000, ) -> xr.DataArray: x_image = xr.DataArray(0.0) y_image = xr.DataArray(0.0) time = xr.DataArray(0.0) - shape = (2, 5) # RAD_SHAPE[resolution] + shape = RAD_SHAPE[resolution] if rad is None: rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.0) * 50.0 rad_data = (rad_data + 1.0) / 0.5 @@ -68,14 +68,14 @@ def _create_fake_rad_dataarray( return rad -def _create_fake_rad_dataset(rad=None): - rad = _create_fake_rad_dataarray(rad=rad) +def _create_fake_rad_dataset(rad: xr.DataArray, resolution: int) -> xr.Dataset: + rad = _create_fake_rad_dataarray(rad=rad, resolution=resolution) x__ = xr.DataArray( - range(5), attrs={"scale_factor": 2.0, "add_offset": -1.0}, dims=("x",) + range(rad.shape[1]), attrs={"scale_factor": 2.0, "add_offset": -1.0}, dims=("x",) ) y__ = xr.DataArray( - range(2), attrs={"scale_factor": -2.0, "add_offset": 1.0}, dims=("y",) + range(rad.shape[0]), attrs={"scale_factor": -2.0, "add_offset": 1.0}, dims=("y",) ) proj = xr.DataArray( np.int64(0), @@ -131,7 +131,7 @@ def l1b_c01_file(tmp_path_factory) -> Callable: def _create_file_handler(rad: xr.DataArray | None = None): filename = generate_l1b_filename("C01") data_path = tmp_path_factory.mktemp("abi_l1b") / filename - dataset = _create_fake_rad_dataset(rad=rad) + dataset = _create_fake_rad_dataset(rad=rad, resolution=1000) dataset.to_netcdf(data_path) scn = Scene( reader="abi_l1b", @@ -145,12 +145,12 @@ def _create_file_handler(rad: xr.DataArray | None = None): @pytest.fixture(scope="module") def l1b_c07_file(tmp_path_factory) -> Callable: def _create_file_handler( - rad: xr.DataArray | None = None, + rad: xr.DataArray, clip_negative_radiances: bool = False, ): filename = generate_l1b_filename("C07") data_path = tmp_path_factory.mktemp("abi_l1b") / filename - dataset = _create_fake_rad_dataset(rad=rad) + dataset = _create_fake_rad_dataset(rad=rad, resolution=2000) dataset.to_netcdf(data_path) scn = Scene( reader="abi_l1b", @@ -204,7 +204,7 @@ def fake_rad(self): Needs to be an instance method so the subclass can override it. 
""" - return None # use default from file handler creator + return None def test_get_dataset(self, l1b_c01_file): """Test the get_dataset method.""" @@ -282,7 +282,7 @@ def test_get_area_def(self, l1b_c01_file): assert proj_dict[proj_key] == proj_val assert area_def.shape == scn["C01"].shape - assert area_def.area_extent == (-2, -2, 8, 2) + assert area_def.area_extent == (-2.0, -2998.0, 4998.0, 2.0) class Test_NC_ABI_L1B_ir_cal: @@ -298,11 +298,12 @@ def test_ir_calibrate(self, l1b_c07_file, clip_negative_radiances): clipped_ir = 134.68753 if clip_negative_radiances else np.nan expected = np.array( [ - [clipped_ir, 304.97037, 332.22778, 354.6147, 374.08688], - [391.58655, 407.64786, 422.60635, 436.68802, np.nan], + clipped_ir, 304.97037, 332.22778, 354.6147, 374.08688, + 391.58655, 407.64786, 422.60635, 436.68802, np.nan, ] ) - np.testing.assert_allclose(res.data, expected, equal_nan=True, atol=1e-04) + data_np = res.data.compute() + np.testing.assert_allclose(data_np[0, :10], expected, equal_nan=True, atol=1e-04) # make sure the attributes from the file are in the data array assert "scale_factor" not in res.attrs @@ -312,8 +313,9 @@ def test_ir_calibrate(self, l1b_c07_file, clip_negative_radiances): def _fake_ir_data(): - values = np.arange(10.0) - rad_data = (values.reshape((2, 5)) + 1.0) * 50.0 + shape = RAD_SHAPE[2000] + values = np.arange(shape[0] * shape[1]) + rad_data = (values.reshape(shape) + 1.0) * 50.0 rad_data[0, 0] = -0.0001 # introduce below minimum expected radiance rad_data = (rad_data + 1.3) / 0.5 data = rad_data.astype(np.int16) @@ -337,7 +339,8 @@ class Test_NC_ABI_L1B_vis_cal: def test_vis_calibrate(self, l1b_c01_file): """Test VIS calibration.""" - rad_data = np.arange(10.0).reshape((2, 5)) + 1.0 + shape = RAD_SHAPE[1000] + rad_data = np.arange(shape[0] * shape[1]).reshape(shape) + 1.0 rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( @@ -355,11 +358,12 @@ def test_vis_calibrate(self, l1b_c01_file): expected = np.array( [ - [0.15265617, 0.30531234, 0.45796851, 0.61062468, 0.76328085], - [0.91593702, 1.06859319, 1.22124936, np.nan, 1.52656171], + 0.15265617, 0.30531234, 0.45796851, 0.61062468, 0.76328085, + 0.91593702, 1.06859319, 1.22124936, np.nan, 1.52656171, ] ) - assert np.allclose(res.data, expected, equal_nan=True) + data_np = res.data.compute() + assert np.allclose(data_np[0, :10], expected, equal_nan=True) assert "scale_factor" not in res.attrs assert "_FillValue" not in res.attrs assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" @@ -371,7 +375,8 @@ class Test_NC_ABI_L1B_raw_cal: def test_raw_calibrate(self, l1b_c01_file): """Test RAW calibration.""" - rad_data = np.arange(10.0).reshape((2, 5)) + 1.0 + shape = RAD_SHAPE[1000] + rad_data = np.arange(shape[0] * shape[1]).reshape(shape) + 1.0 rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( @@ -423,7 +428,7 @@ class Test_NC_ABI_L1B_H5netcdf(Test_NC_ABI_L1B): @property def fake_rad(self): """Create fake data for the tests.""" - shape = (2, 5) + shape = RAD_SHAPE[1000] rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.0) * 50.0 rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) From 1e0e21f083cbf2cde44dbabbe5c9cc4287df091e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 29 Oct 2023 21:29:33 -0500 Subject: [PATCH 361/702] Revert AreaDefinition import for easier test mocking --- satpy/readers/abi_base.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 3574349c71..28ff91ce38 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -23,7 +23,7 @@ import numpy as np import xarray as xr -from pyresample.geometry import AreaDefinition +from pyresample import geometry from satpy._compat import cached_property from satpy.readers import open_file_or_filename @@ -212,7 +212,7 @@ def _get_areadef_latlon(self, key): "fi": float(fi), "pm": float(pm)} - ll_area_def = AreaDefinition( + ll_area_def = geometry.AreaDefinition( self.nc.attrs.get("orbital_slot", "abi_geos"), self.nc.attrs.get("spatial_resolution", "ABI file area"), "abi_latlon", @@ -262,7 +262,7 @@ def _get_areadef_fixedgrid(self, key): "units": "m", "sweep": sweep_axis} - fg_area_def = AreaDefinition( + fg_area_def = geometry.AreaDefinition( self.nc.attrs.get("orbital_slot", "abi_geos"), self.nc.attrs.get("spatial_resolution", "ABI file area"), "abi_fixed_grid", From 514f5e1bef90d57282993ee04501c70cf7b4ea28 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 30 Oct 2023 11:52:33 -0500 Subject: [PATCH 362/702] More abi l1b test refactoring --- satpy/tests/reader_tests/test_abi_l1b.py | 446 +++++++++++------------ 1 file changed, 214 insertions(+), 232 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index fb08ef9361..54c78e1089 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -19,17 +19,18 @@ from __future__ import annotations from datetime import datetime -from typing import Callable +from pathlib import Path +from typing import Any, Callable from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr +from pytest_lazyfixture import lazy_fixture from satpy import DataQuery, Scene from satpy.readers.abi_l1b import NC_ABI_L1B -from satpy.tests.utils import make_dataid from satpy.utils import ignore_pyproj_proj_warnings RAD_SHAPE = { @@ -72,10 +73,14 @@ def _create_fake_rad_dataset(rad: xr.DataArray, resolution: int) -> xr.Dataset: rad = _create_fake_rad_dataarray(rad=rad, resolution=resolution) x__ = xr.DataArray( - range(rad.shape[1]), attrs={"scale_factor": 2.0, "add_offset": -1.0}, dims=("x",) + range(rad.shape[1]), + attrs={"scale_factor": 2.0, "add_offset": -1.0}, + dims=("x",), ) y__ = xr.DataArray( - range(rad.shape[0]), attrs={"scale_factor": -2.0, "add_offset": 1.0}, dims=("y",) + range(rad.shape[0]), + attrs={"scale_factor": -2.0, "add_offset": 1.0}, + dims=("y",), ) proj = xr.DataArray( np.int64(0), @@ -126,92 +131,144 @@ def generate_l1b_filename(chan_name: str) -> str: return f"OR_ABI-L1b-RadC-M4{chan_name}_G16_s20161811540362_e20161811545170_c20161811545230_suffix.nc" -@pytest.fixture(scope="module") -def l1b_c01_file(tmp_path_factory) -> Callable: - def _create_file_handler(rad: xr.DataArray | None = None): - filename = generate_l1b_filename("C01") - data_path = tmp_path_factory.mktemp("abi_l1b") / filename - dataset = _create_fake_rad_dataset(rad=rad, resolution=1000) - dataset.to_netcdf(data_path) - scn = Scene( - reader="abi_l1b", - filenames=[str(data_path)], - ) - return scn +@pytest.fixture() +def c01_refl(tmp_path) -> xr.DataArray: + scn = _create_scene_for_data(tmp_path, "C01", None, 1000) + scn.load(["C01"]) + return scn["C01"] + + +@pytest.fixture() +def c01_rad(tmp_path) -> xr.DataArray: + scn = _create_scene_for_data(tmp_path, "C01", None, 1000) + scn.load([DataQuery(name="C01", calibration="radiance")]) + return scn["C01"] + - 
return _create_file_handler +@pytest.fixture() +def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: + shape = RAD_SHAPE[1000] + rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.0) * 50.0 + rad_data = (rad_data + 1.0) / 0.5 + rad_data = rad_data.astype(np.int16) + rad = xr.DataArray( + da.from_array(rad_data), + attrs={ + "scale_factor": 0.5, + "add_offset": -1.0, + "_FillValue": np.array([1002]), + "units": "W m-2 um-1 sr-1", + "valid_range": (0, 4095), + }, + ) + scn = _create_scene_for_data(tmp_path, "C01", rad, 1000) + scn.load([DataQuery(name="C01", calibration="radiance")]) + return scn["C01"] + + +@pytest.fixture() +def c01_counts(tmp_path) -> xr.DataArray: + scn = _create_scene_for_data(tmp_path, "C01", None, 1000) + scn.load([DataQuery(name="C01", calibration="counts")]) + return scn["C01"] + + +def _create_scene_for_data( + tmp_path: Path, + channel_name: str, + rad: xr.DataArray | None, + resolution: int, + reader_kwargs: dict[str, Any] | None = None, +) -> Scene: + filename = generate_l1b_filename(channel_name) + data_path = tmp_path / filename + dataset = _create_fake_rad_dataset(rad=rad, resolution=resolution) + dataset.to_netcdf(data_path) + scn = Scene( + reader="abi_l1b", + filenames=[str(data_path)], + reader_kwargs=reader_kwargs, + ) + return scn -@pytest.fixture(scope="module") -def l1b_c07_file(tmp_path_factory) -> Callable: - def _create_file_handler( - rad: xr.DataArray, - clip_negative_radiances: bool = False, +@pytest.fixture() +def c07_bt_creator(tmp_path) -> Callable: + def _load_data_array( + clip_negative_radiances: bool = False, ): - filename = generate_l1b_filename("C07") - data_path = tmp_path_factory.mktemp("abi_l1b") / filename - dataset = _create_fake_rad_dataset(rad=rad, resolution=2000) - dataset.to_netcdf(data_path) - scn = Scene( - reader="abi_l1b", - filenames=[str(data_path)], - reader_kwargs={"clip_negative_radiances": clip_negative_radiances} + rad = _fake_c07_data() + scn = _create_scene_for_data( + tmp_path, + "C07", + rad, + 2000, + {"clip_negative_radiances": clip_negative_radiances}, ) - return scn + scn.load(["C07"]) + return scn["C07"] - return _create_file_handler + return _load_data_array -class TestABIYAML: - """Tests for the ABI L1b reader's YAML configuration.""" - - @pytest.mark.parametrize( - ("channel", "suffix"), - [ - ("C{:02d}".format(num), suffix) - for num in range(1, 17) - for suffix in ("", "_test_suffix") - ], +def _fake_c07_data() -> xr.DataArray: + shape = RAD_SHAPE[2000] + values = np.arange(shape[0] * shape[1]) + rad_data = (values.reshape(shape) + 1.0) * 50.0 + rad_data[0, 0] = -0.0001 # introduce below minimum expected radiance + rad_data = (rad_data + 1.3) / 0.5 + data = rad_data.astype(np.int16) + rad = xr.DataArray( + da.from_array(data), + dims=("y", "x"), + attrs={ + "scale_factor": 0.5, + "add_offset": -1.3, + "_FillValue": np.int16( + np.floor(((9 + 1) * 50.0 + 1.3) / 0.5) + ), # last rad_data value + }, ) - def test_file_patterns_match(self, channel, suffix): - """Test that the configured file patterns work.""" - from satpy.readers import configs_for_reader, load_reader + return rad - reader_configs = list(configs_for_reader("abi_l1b"))[0] - reader = load_reader(reader_configs) - fn1 = ( + +@pytest.mark.parametrize( + ("channel", "suffix"), + [ + ("C{:02d}".format(num), suffix) + for num in range(1, 17) + for suffix in ("", "_test_suffix") + ], +) +def test_file_patterns_match(channel, suffix): + """Test that the configured file patterns work.""" + from satpy.readers import configs_for_reader, 
load_reader + + reader_configs = list(configs_for_reader("abi_l1b"))[0] + reader = load_reader(reader_configs) + fn1 = ( + "OR_ABI-L1b-RadM1-M3{}_G16_s20182541300210_e20182541300267" + "_c20182541300308{}.nc" + ).format(channel, suffix) + loadables = reader.select_files_from_pathnames([fn1]) + assert len(loadables) == 1 + if not suffix and channel in ["C01", "C02", "C03", "C05"]: + fn2 = ( "OR_ABI-L1b-RadM1-M3{}_G16_s20182541300210_e20182541300267" - "_c20182541300308{}.nc" - ).format(channel, suffix) - loadables = reader.select_files_from_pathnames([fn1]) + "_c20182541300308-000000_0.nc" + ).format(channel) + loadables = reader.select_files_from_pathnames([fn2]) assert len(loadables) == 1 - if not suffix and channel in ["C01", "C02", "C03", "C05"]: - fn2 = ( - "OR_ABI-L1b-RadM1-M3{}_G16_s20182541300210_e20182541300267" - "_c20182541300308-000000_0.nc" - ).format(channel) - loadables = reader.select_files_from_pathnames([fn2]) - assert len(loadables) == 1 +@pytest.mark.parametrize( + "c01_data_arr", [lazy_fixture("c01_rad"), lazy_fixture("c01_rad_h5netcdf")] +) class Test_NC_ABI_L1B: """Test the NC_ABI_L1B reader.""" - @property - def fake_rad(self): - """Create fake data for these tests. - - Needs to be an instance method so the subclass can override it. - - """ - return None - - def test_get_dataset(self, l1b_c01_file): + def test_get_dataset(self, c01_data_arr): """Test the get_dataset method.""" - scn = l1b_c01_file(rad=self.fake_rad) - key = make_dataid(name="C01", calibration="radiance") - scn.load([key]) - exp = { "calibration": "radiance", "instrument_ID": None, @@ -244,7 +301,7 @@ def test_get_dataset(self, l1b_c01_file): "end_time": datetime(2017, 9, 20, 17, 41, 17, 500000), } - res = scn["C01"] + res = c01_data_arr assert "area" in res.attrs for exp_key, exp_val in exp.items(): assert res.attrs[exp_key] == exp_val @@ -255,13 +312,11 @@ def test_get_dataset(self, l1b_c01_file): assert "time" not in res.coords assert "time" not in res.dims - def test_get_area_def(self, l1b_c01_file): + def test_get_area_def(self, c01_data_arr): """Test the area generation.""" from pyresample.geometry import AreaDefinition - scn = l1b_c01_file(rad=self.fake_rad) - scn.load(["C01"]) - area_def = scn["C01"].attrs["area"] + area_def = c01_data_arr.attrs["area"] assert isinstance(area_def, AreaDefinition) with ignore_pyproj_proj_warnings(): @@ -281,165 +336,92 @@ def test_get_area_def(self, l1b_c01_file): for proj_key, proj_val in exp_dict.items(): assert proj_dict[proj_key] == proj_val - assert area_def.shape == scn["C01"].shape + assert area_def.shape == c01_data_arr.shape assert area_def.area_extent == (-2.0, -2998.0, 4998.0, 2.0) -class Test_NC_ABI_L1B_ir_cal: - """Test the NC_ABI_L1B reader's default IR calibration.""" - - @pytest.mark.parametrize("clip_negative_radiances", [False, True]) - def test_ir_calibrate(self, l1b_c07_file, clip_negative_radiances): - """Test IR calibration.""" - scn = l1b_c07_file(rad=_fake_ir_data(), clip_negative_radiances=clip_negative_radiances) - scn.load([DataQuery(name="C07", calibration="brightness_temperature")]) - res = scn["C07"] - - clipped_ir = 134.68753 if clip_negative_radiances else np.nan - expected = np.array( - [ - clipped_ir, 304.97037, 332.22778, 354.6147, 374.08688, - 391.58655, 407.64786, 422.60635, 436.68802, np.nan, - ] - ) - data_np = res.data.compute() - np.testing.assert_allclose(data_np[0, :10], expected, equal_nan=True, atol=1e-04) - - # make sure the attributes from the file are in the data array - assert "scale_factor" not in res.attrs - 
assert "_FillValue" not in res.attrs - assert res.attrs["standard_name"] == "toa_brightness_temperature" - assert res.attrs["long_name"] == "Brightness Temperature" - - -def _fake_ir_data(): - shape = RAD_SHAPE[2000] - values = np.arange(shape[0] * shape[1]) - rad_data = (values.reshape(shape) + 1.0) * 50.0 - rad_data[0, 0] = -0.0001 # introduce below minimum expected radiance - rad_data = (rad_data + 1.3) / 0.5 - data = rad_data.astype(np.int16) - - rad = xr.DataArray( - da.from_array(data), - dims=("y", "x"), - attrs={ - "scale_factor": 0.5, - "add_offset": -1.3, - "_FillValue": np.int16( - np.floor(((9 + 1) * 50.0 + 1.3) / 0.5) - ), # last rad_data value - }, +@pytest.mark.parametrize("clip_negative_radiances", [False, True]) +def test_ir_calibrate(self, c07_bt_creator, clip_negative_radiances): + """Test IR calibration.""" + res = c07_bt_creator(clip_negative_radiances=clip_negative_radiances) + clipped_ir = 134.68753 if clip_negative_radiances else np.nan + expected = np.array( + [ + clipped_ir, + 304.97037, + 332.22778, + 354.6147, + 374.08688, + 391.58655, + 407.64786, + 422.60635, + 436.68802, + np.nan, + ] + ) + data_np = res.data.compute() + np.testing.assert_allclose( + data_np[0, :10], expected, equal_nan=True, atol=1e-04 ) - return rad - - -class Test_NC_ABI_L1B_vis_cal: - """Test the NC_ABI_L1B reader.""" - - def test_vis_calibrate(self, l1b_c01_file): - """Test VIS calibration.""" - shape = RAD_SHAPE[1000] - rad_data = np.arange(shape[0] * shape[1]).reshape(shape) + 1.0 - rad_data = (rad_data + 1.0) / 0.5 - rad_data = rad_data.astype(np.int16) - rad = xr.DataArray( - da.from_array(rad_data), - dims=("y", "x"), - attrs={ - "scale_factor": 0.5, - "add_offset": -1.0, - "_FillValue": 20, - }, - ) - scn = l1b_c01_file(rad=rad) - scn.load(["C01"]) - res = scn["C01"] - - expected = np.array( - [ - 0.15265617, 0.30531234, 0.45796851, 0.61062468, 0.76328085, - 0.91593702, 1.06859319, 1.22124936, np.nan, 1.52656171, - ] - ) - data_np = res.data.compute() - assert np.allclose(data_np[0, :10], expected, equal_nan=True) - assert "scale_factor" not in res.attrs - assert "_FillValue" not in res.attrs - assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" - assert res.attrs["long_name"] == "Bidirectional Reflectance" - - -class Test_NC_ABI_L1B_raw_cal: - """Test the NC_ABI_L1B reader raw calibration.""" - - def test_raw_calibrate(self, l1b_c01_file): - """Test RAW calibration.""" - shape = RAD_SHAPE[1000] - rad_data = np.arange(shape[0] * shape[1]).reshape(shape) + 1.0 - rad_data = (rad_data + 1.0) / 0.5 - rad_data = rad_data.astype(np.int16) - rad = xr.DataArray( - da.from_array(rad_data), - dims=("y", "x"), - attrs={ - "scale_factor": 0.5, - "add_offset": -1.0, - "_FillValue": 20, - }, - ) - scn = l1b_c01_file(rad=rad) - scn.load([DataQuery(name="C01", calibration="counts")]) - res = scn["C01"] - - # We expect the raw data to be unchanged - expected = res.data - assert np.allclose(res.data, expected, equal_nan=True) - - # check for the presence of typical attributes - assert "scale_factor" in res.attrs - assert "add_offset" in res.attrs - assert "_FillValue" in res.attrs - assert "orbital_parameters" in res.attrs - assert "platform_shortname" in res.attrs - assert "scene_id" in res.attrs - - # determine if things match their expected values/types. 
- assert res.data.dtype == np.int16 - assert res.attrs["standard_name"] == "counts" - assert res.attrs["long_name"] == "Raw Counts" - - -class Test_NC_ABI_File: - """Test file opening.""" - - @mock.patch("satpy.readers.abi_base.xr") - def test_open_dataset(self, _): # noqa: PT019 - """Test openning a dataset.""" - openable_thing = mock.MagicMock() - - NC_ABI_L1B(openable_thing, {"platform_shortname": "g16"}, {}) - openable_thing.open.assert_called() + # make sure the attributes from the file are in the data array + assert "scale_factor" not in res.attrs + assert "_FillValue" not in res.attrs + assert res.attrs["standard_name"] == "toa_brightness_temperature" + assert res.attrs["long_name"] == "Brightness Temperature" -class Test_NC_ABI_L1B_H5netcdf(Test_NC_ABI_L1B): - """Allow h5netcdf peculiarities.""" - @property - def fake_rad(self): - """Create fake data for the tests.""" - shape = RAD_SHAPE[1000] - rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.0) * 50.0 - rad_data = (rad_data + 1.0) / 0.5 - rad_data = rad_data.astype(np.int16) - rad = xr.DataArray( - da.from_array(rad_data), - attrs={ - "scale_factor": 0.5, - "add_offset": -1.0, - "_FillValue": np.array([1002]), - "units": "W m-2 um-1 sr-1", - "valid_range": (0, 4095), - }, - ) - return rad +def test_vis_calibrate(c01_refl): + """Test VIS calibration.""" + res = c01_refl + expected = np.array( + [ + 7.632808, + 15.265616, + 22.898426, + 30.531233, + 38.164043, + 45.796852, + 53.429657, + 61.062466, + 68.695274, + np.nan, + ] + ) + data_np = res.data.compute() + np.testing.assert_allclose(data_np[0, :10], expected, equal_nan=True) + assert "scale_factor" not in res.attrs + assert "_FillValue" not in res.attrs + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" + assert res.attrs["long_name"] == "Bidirectional Reflectance" + + +def test_raw_calibrate(c01_counts): + """Test RAW calibration.""" + res = c01_counts + + # We expect the raw data to be unchanged + expected = res.data + assert np.allclose(res.data, expected, equal_nan=True) + + # check for the presence of typical attributes + assert "scale_factor" in res.attrs + assert "add_offset" in res.attrs + assert "_FillValue" in res.attrs + assert "orbital_parameters" in res.attrs + assert "platform_shortname" in res.attrs + assert "scene_id" in res.attrs + + # determine if things match their expected values/types. 
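+    # (Context for the assertions below: the "counts" calibration path in the reader
+    # returns the file's raw scaled integers without applying scale_factor/add_offset,
+    # so the int16 dtype and the encoding attributes written to the fake file are
+    # expected to pass through to the loaded DataArray unchanged.)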
+ assert res.data.dtype == np.int16 + assert res.attrs["standard_name"] == "counts" + assert res.attrs["long_name"] == "Raw Counts" + + +@mock.patch("satpy.readers.abi_base.xr") +def test_open_dataset(_): # noqa: PT019 + """Test opening a dataset.""" + openable_thing = mock.MagicMock() + + NC_ABI_L1B(openable_thing, {"platform_shortname": "g16"}, {}) + openable_thing.open.assert_called() From 83609cca303a5ae3abe3e4b1959f16d77f26b6bb Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 30 Oct 2023 13:11:47 -0500 Subject: [PATCH 363/702] Undo forcing GRB fill to floating point Caused failure in GLM L2 DQF processing --- satpy/readers/abi_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 28ff91ce38..3fdd724e12 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -140,7 +140,7 @@ def is_int(val): new_fill = fill else: new_fill = np.nan - data = data.where(data != fill, np.float32(new_fill)) + data = data.where(data != fill, new_fill) if factor != 1 and item in ("x", "y"): # be more precise with x/y coordinates # see get_area_def for more information From 8b5c450509105cff28881a621d7044cda309f665 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 30 Oct 2023 14:38:07 -0500 Subject: [PATCH 364/702] Fix various inconsistencies in ABI L1b DataArrays --- satpy/readers/abi_base.py | 7 +- satpy/readers/abi_l1b.py | 1 + satpy/tests/reader_tests/test_abi_l1b.py | 141 +++++++++++++---------- 3 files changed, 84 insertions(+), 65 deletions(-) diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 3fdd724e12..1c0824ab27 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -139,18 +139,13 @@ def is_int(val): if is_int(data) and is_int(factor) and is_int(offset): new_fill = fill else: - new_fill = np.nan + new_fill = np.float32(np.nan) data = data.where(data != fill, new_fill) if factor != 1 and item in ("x", "y"): # be more precise with x/y coordinates # see get_area_def for more information data = data * np.round(float(factor), 6) + np.round(float(offset), 6) elif factor != 1: - # make sure the factor is a 64-bit float - # can't do this in place since data is most likely uint16 - # and we are making it a 64-bit float - if not is_int(factor): - factor = np.float32(factor) data = data * np.float32(factor) + np.float32(offset) return data diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py index c3da53c9c7..4933b0982a 100644 --- a/satpy/readers/abi_l1b.py +++ b/satpy/readers/abi_l1b.py @@ -49,6 +49,7 @@ def get_dataset(self, key, info): # For raw cal, don't apply scale and offset, return raw file counts if key["calibration"] == "counts": radiances = self.nc["Rad"].copy() + radiances = self._adjust_coords(radiances, "Rad") else: radiances = self["Rad"] diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 54c78e1089..61f1746bf9 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -25,6 +25,7 @@ import dask.array as da import numpy as np +import numpy.typing as npt import pytest import xarray as xr from pytest_lazyfixture import lazy_fixture @@ -153,6 +154,7 @@ def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: rad_data = rad_data.astype(np.int16) rad = xr.DataArray( da.from_array(rad_data), + dims=("y", "x"), attrs={ "scale_factor": 0.5, "add_offset": -1.0, @@ -173,25 +175,6 @@ def c01_counts(tmp_path) -> xr.DataArray: return scn["C01"] -def 
_create_scene_for_data( - tmp_path: Path, - channel_name: str, - rad: xr.DataArray | None, - resolution: int, - reader_kwargs: dict[str, Any] | None = None, -) -> Scene: - filename = generate_l1b_filename(channel_name) - data_path = tmp_path / filename - dataset = _create_fake_rad_dataset(rad=rad, resolution=resolution) - dataset.to_netcdf(data_path) - scn = Scene( - reader="abi_l1b", - filenames=[str(data_path)], - reader_kwargs=reader_kwargs, - ) - return scn - - @pytest.fixture() def c07_bt_creator(tmp_path) -> Callable: def _load_data_array( @@ -232,6 +215,73 @@ def _fake_c07_data() -> xr.DataArray: return rad +def _create_scene_for_data( + tmp_path: Path, + channel_name: str, + rad: xr.DataArray | None, + resolution: int, + reader_kwargs: dict[str, Any] | None = None, +) -> Scene: + filename = generate_l1b_filename(channel_name) + data_path = tmp_path / filename + dataset = _create_fake_rad_dataset(rad=rad, resolution=resolution) + dataset.to_netcdf(data_path) + scn = Scene( + reader="abi_l1b", + filenames=[str(data_path)], + reader_kwargs=reader_kwargs, + ) + return scn + + +def _get_and_check_array(data_arr: xr.DataArray, exp_dtype: npt.DTypeLike) -> npt.NDArray: + data_np = data_arr.data.compute() + assert data_np.dtype == data_arr.dtype + assert data_np.dtype == exp_dtype + return data_np + +def _check_area(data_arr: xr.DataArray) -> None: + from pyresample.geometry import AreaDefinition + + area_def = data_arr.attrs["area"] + assert isinstance(area_def, AreaDefinition) + + with ignore_pyproj_proj_warnings(): + proj_dict = area_def.crs.to_dict() + exp_dict = { + "h": 1.0, + "lon_0": -90.0, + "proj": "geos", + "sweep": "x", + "units": "m", + } + if "R" in proj_dict: + assert proj_dict["R"] == 1 + else: + assert proj_dict["a"] == 1 + assert proj_dict["b"] == 1 + for proj_key, proj_val in exp_dict.items(): + assert proj_dict[proj_key] == proj_val + + assert area_def.shape == data_arr.shape + if area_def.shape[0] == RAD_SHAPE[1000][0]: + exp_extent = (-2.0, -2998.0, 4998.0, 2.0) + else: + exp_extent = (-2.0, -1498.0, 2498.0, 2.0) + assert area_def.area_extent == exp_extent + + +def _check_dims_and_coords(data_arr: xr.DataArray) -> None: + assert "y" in data_arr.dims + assert "x" in data_arr.dims + + # we remove any time dimension information + assert "t" not in data_arr.coords + assert "t" not in data_arr.dims + assert "time" not in data_arr.coords + assert "time" not in data_arr.dims + + @pytest.mark.parametrize( ("channel", "suffix"), [ @@ -302,46 +352,15 @@ def test_get_dataset(self, c01_data_arr): } res = c01_data_arr - assert "area" in res.attrs + _get_and_check_array(res, np.float32) + _check_area(res) + _check_dims_and_coords(res) for exp_key, exp_val in exp.items(): assert res.attrs[exp_key] == exp_val - # we remove any time dimension information - assert "t" not in res.coords - assert "t" not in res.dims - assert "time" not in res.coords - assert "time" not in res.dims - - def test_get_area_def(self, c01_data_arr): - """Test the area generation.""" - from pyresample.geometry import AreaDefinition - - area_def = c01_data_arr.attrs["area"] - assert isinstance(area_def, AreaDefinition) - - with ignore_pyproj_proj_warnings(): - proj_dict = area_def.crs.to_dict() - exp_dict = { - "h": 1.0, - "lon_0": -90.0, - "proj": "geos", - "sweep": "x", - "units": "m", - } - if "R" in proj_dict: - assert proj_dict["R"] == 1 - else: - assert proj_dict["a"] == 1 - assert proj_dict["b"] == 1 - for proj_key, proj_val in exp_dict.items(): - assert proj_dict[proj_key] == proj_val - - assert 
area_def.shape == c01_data_arr.shape - assert area_def.area_extent == (-2.0, -2998.0, 4998.0, 2.0) - @pytest.mark.parametrize("clip_negative_radiances", [False, True]) -def test_ir_calibrate(self, c07_bt_creator, clip_negative_radiances): +def test_ir_calibrate(c07_bt_creator, clip_negative_radiances): """Test IR calibration.""" res = c07_bt_creator(clip_negative_radiances=clip_negative_radiances) clipped_ir = 134.68753 if clip_negative_radiances else np.nan @@ -359,7 +378,9 @@ def test_ir_calibrate(self, c07_bt_creator, clip_negative_radiances): np.nan, ] ) - data_np = res.data.compute() + data_np = _get_and_check_array(res, np.float32) + _check_area(res) + _check_dims_and_coords(res) np.testing.assert_allclose( data_np[0, :10], expected, equal_nan=True, atol=1e-04 ) @@ -388,7 +409,9 @@ def test_vis_calibrate(c01_refl): np.nan, ] ) - data_np = res.data.compute() + data_np = _get_and_check_array(res, np.float32) + _check_area(res) + _check_dims_and_coords(res) np.testing.assert_allclose(data_np[0, :10], expected, equal_nan=True) assert "scale_factor" not in res.attrs assert "_FillValue" not in res.attrs @@ -401,8 +424,9 @@ def test_raw_calibrate(c01_counts): res = c01_counts # We expect the raw data to be unchanged - expected = res.data - assert np.allclose(res.data, expected, equal_nan=True) + _get_and_check_array(res, np.int16) + _check_area(res) + _check_dims_and_coords(res) # check for the presence of typical attributes assert "scale_factor" in res.attrs @@ -413,7 +437,6 @@ def test_raw_calibrate(c01_counts): assert "scene_id" in res.attrs # determine if things match their expected values/types. - assert res.data.dtype == np.int16 assert res.attrs["standard_name"] == "counts" assert res.attrs["long_name"] == "Raw Counts" From 14f59c49e4b327c349f510dd97b73e046eb14f72 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 31 Oct 2023 15:19:58 -0500 Subject: [PATCH 365/702] Add dask chunk size checks to ABI l1b tests --- satpy/readers/abi_base.py | 42 ++++++++++---- satpy/tests/reader_tests/test_abi_l1b.py | 70 ++++++++++++++++-------- 2 files changed, 79 insertions(+), 33 deletions(-) diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 1c0824ab27..07a29e3043 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -18,9 +18,11 @@ """Advance Baseline Imager reader base class for the Level 1b and l2+ reader.""" import logging +import math from contextlib import suppress from datetime import datetime +import dask import numpy as np import xarray as xr from pyresample import geometry @@ -28,11 +30,10 @@ from satpy._compat import cached_property from satpy.readers import open_file_or_filename from satpy.readers.file_handlers import BaseFileHandler -from satpy.utils import get_legacy_chunk_size +from satpy.utils import get_dask_chunk_size_in_bytes logger = logging.getLogger(__name__) -CHUNK_SIZE = get_legacy_chunk_size() PLATFORM_NAMES = { "g16": "GOES-16", "g17": "GOES-17", @@ -62,15 +63,8 @@ def __init__(self, filename, filename_info, filetype_info): @cached_property def nc(self): """Get the xarray dataset for this file.""" - import math - - from satpy.utils import get_dask_chunk_size_in_bytes - chunk_size_for_high_res = math.sqrt(get_dask_chunk_size_in_bytes() / 4) # 32-bit floats - chunk_size_for_high_res = np.round(max(chunk_size_for_high_res / (4 * 226), 1)) * (4 * 226) - low_res_factor = int(self.filetype_info.get("resolution", 2000) // 500) - res_chunk_bytes = int(chunk_size_for_high_res / low_res_factor) * 4 - import dask - with 
dask.config.set({"array.chunk-size": res_chunk_bytes}): + chunk_bytes = self._chunk_bytes_for_resolution() + with dask.config.set({"array.chunk-size": chunk_bytes}): f_obj = open_file_or_filename(self.filename) nc = xr.open_dataset(f_obj, decode_cf=True, @@ -79,6 +73,32 @@ def nc(self): nc = self._rename_dims(nc) return nc + def _chunk_bytes_for_resolution(self) -> int: + """Get a best-guess optimal chunk size for resolution-based chunking. + + First a chunk size is chosen for the provided Dask setting `array.chunk-size` + and then aligned with a hardcoded on-disk chunk size of 226. This is then + adjusted to match the current resolution. + + This should result in 500 meter data having 4 times as many pixels per + dask array chunk (2 in each dimension) as 1km data and 8 times as many + as 2km data. As data is combined or upsampled geographically the arrays + should not need to be rechunked. Care is taken to make sure that array + chunks are aligned with on-disk file chunks at all resolutions, but at + the cost of flexibility due to a hardcoded on-disk chunk size of 226 + elements per dimension. + + """ + num_high_res_elems_per_dim = math.sqrt(get_dask_chunk_size_in_bytes() / 4) # 32-bit floats + # assume on-disk chunk size of 226 + # this is true for all CSPP Geo GRB output (226 for all sectors) and full disk from other sources + # 250 has been seen for AWS/CLASS CONUS, Mesoscale 1, and Mesoscale 2 files + # we align this with 4 on-disk chunks at 500m, so it will be 2 on-disk chunks for 1km, and 1 for 2km + high_res_elems_disk_aligned = np.round(max(num_high_res_elems_per_dim / (4 * 226), 1)) * (4 * 226) + low_res_factor = int(self.filetype_info.get("resolution", 2000) // 500) + res_elems_per_dim = int(high_res_elems_disk_aligned / low_res_factor) + return (res_elems_per_dim ** 2) * 4 + @staticmethod def _rename_dims(nc): if "t" in nc.dims or "t" in nc.coords: diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 61f1746bf9..ec3a0334cc 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -23,6 +23,7 @@ from typing import Any, Callable from unittest import mock +import dask import dask.array as da import numpy as np import numpy.typing as npt @@ -36,9 +37,12 @@ RAD_SHAPE = { 500: (3000, 5000), # conus - 500m - 1000: (1500, 2500), # conus - 1km - 2000: (750, 1250), # conus - 2km } +# RAD_SHAPE = { +# 500: (21696, 21696), # fldk - 500m +# } +RAD_SHAPE[1000] = (RAD_SHAPE[500][0] // 2, RAD_SHAPE[500][1] // 2) +RAD_SHAPE[2000] = (RAD_SHAPE[500][0] // 4, RAD_SHAPE[500][1] // 4) def _create_fake_rad_dataarray( @@ -54,7 +58,7 @@ def _create_fake_rad_dataarray( rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( - da.from_array(rad_data), + da.from_array(rad_data, chunks=226), dims=("y", "x"), attrs={ "scale_factor": 0.5, @@ -134,15 +138,21 @@ def generate_l1b_filename(chan_name: str) -> str: @pytest.fixture() def c01_refl(tmp_path) -> xr.DataArray: - scn = _create_scene_for_data(tmp_path, "C01", None, 1000) - scn.load(["C01"]) + # 4 bytes for 32-bit floats + # 4 on-disk chunks for 500 meter data + # 226 on-disk chunk size + # Square (**2) for 2D size + with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + scn = _create_scene_for_data(tmp_path, "C01", None, 1000) + scn.load(["C01"]) return scn["C01"] @pytest.fixture() def c01_rad(tmp_path) -> xr.DataArray: - scn = _create_scene_for_data(tmp_path, "C01", None, 1000) - scn.load([DataQuery(name="C01", 
calibration="radiance")]) + with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + scn = _create_scene_for_data(tmp_path, "C01", None, 1000) + scn.load([DataQuery(name="C01", calibration="radiance")]) return scn["C01"] @@ -153,7 +163,7 @@ def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( - da.from_array(rad_data), + da.from_array(rad_data, chunks=226), dims=("y", "x"), attrs={ "scale_factor": 0.5, @@ -163,15 +173,17 @@ def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: "valid_range": (0, 4095), }, ) - scn = _create_scene_for_data(tmp_path, "C01", rad, 1000) - scn.load([DataQuery(name="C01", calibration="radiance")]) + with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + scn = _create_scene_for_data(tmp_path, "C01", rad, 1000) + scn.load([DataQuery(name="C01", calibration="radiance")]) return scn["C01"] @pytest.fixture() def c01_counts(tmp_path) -> xr.DataArray: - scn = _create_scene_for_data(tmp_path, "C01", None, 1000) - scn.load([DataQuery(name="C01", calibration="counts")]) + with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + scn = _create_scene_for_data(tmp_path, "C01", None, 1000) + scn.load([DataQuery(name="C01", calibration="counts")]) return scn["C01"] @@ -181,14 +193,15 @@ def _load_data_array( clip_negative_radiances: bool = False, ): rad = _fake_c07_data() - scn = _create_scene_for_data( - tmp_path, - "C07", - rad, - 2000, - {"clip_negative_radiances": clip_negative_radiances}, - ) - scn.load(["C07"]) + with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + scn = _create_scene_for_data( + tmp_path, + "C07", + rad, + 2000, + {"clip_negative_radiances": clip_negative_radiances}, + ) + scn.load(["C07"]) return scn["C07"] return _load_data_array @@ -202,7 +215,7 @@ def _fake_c07_data() -> xr.DataArray: rad_data = (rad_data + 1.3) / 0.5 data = rad_data.astype(np.int16) rad = xr.DataArray( - da.from_array(data), + da.from_array(data, chunks=226), dims=("y", "x"), attrs={ "scale_factor": 0.5, @@ -225,7 +238,12 @@ def _create_scene_for_data( filename = generate_l1b_filename(channel_name) data_path = tmp_path / filename dataset = _create_fake_rad_dataset(rad=rad, resolution=resolution) - dataset.to_netcdf(data_path) + dataset.to_netcdf( + data_path, + encoding={ + "Rad": {"chunksizes": [226, 226]}, + }, + ) scn = Scene( reader="abi_l1b", filenames=[str(data_path)], @@ -236,10 +254,18 @@ def _create_scene_for_data( def _get_and_check_array(data_arr: xr.DataArray, exp_dtype: npt.DTypeLike) -> npt.NDArray: data_np = data_arr.data.compute() + assert isinstance(data_arr, xr.DataArray) + assert isinstance(data_arr.data, da.Array) + assert isinstance(data_np, np.ndarray) + res = 1000 if RAD_SHAPE[1000][0] == data_np.shape[0] else 2000 + assert data_arr.chunks[0][0] == 226 * (4 / (res / 500)) + assert data_arr.chunks[1][0] == 226 * (4 / (res / 500)) + assert data_np.dtype == data_arr.dtype assert data_np.dtype == exp_dtype return data_np + def _check_area(data_arr: xr.DataArray) -> None: from pyresample.geometry import AreaDefinition From 4d9fcbf84433f4adb05d0e1ba18480568de91b77 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 1 Nov 2023 09:41:31 +0200 Subject: [PATCH 366/702] Use a custom scheduler to check number of computes in NDVIHybridGreen compositor --- satpy/tests/compositor_tests/test_spectral.py | 33 +++++++++++-------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/satpy/tests/compositor_tests/test_spectral.py 
b/satpy/tests/compositor_tests/test_spectral.py index 36a3dd9355..55b24b1982 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -15,12 +15,14 @@ # satpy. If not, see . """Tests for spectral correction compositors.""" +import dask import dask.array as da import numpy as np import pytest import xarray as xr from satpy.composites.spectral import GreenCorrector, HybridGreen, NDVIHybridGreen, SpectralBlender +from satpy.tests.utils import CustomScheduler class TestSpectralComposites: @@ -92,25 +94,28 @@ def setup_method(self): def test_ndvi_hybrid_green(self): """Test General functionality with linear scaling from ndvi to blend fraction.""" - comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), - standard_name="toa_bidirectional_reflectance") - - # Test General functionality with linear strength (=1.0) - res = comp((self.c01, self.c02, self.c03)) - assert isinstance(res, xr.DataArray) - assert isinstance(res.data, da.Array) - assert res.attrs["name"] == "ndvi_hybrid_green" - assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" - data = res.values + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), + standard_name="toa_bidirectional_reflectance") + + # Test General functionality with linear strength (=1.0) + res = comp((self.c01, self.c02, self.c03)) + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs["name"] == "ndvi_hybrid_green" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" + data = res.values np.testing.assert_array_almost_equal(data, np.array([[0.2633, 0.3071], [0.2115, 0.3420]]), decimal=4) def test_nonliniear_scaling(self): """Test non-linear scaling using `strength` term.""" - comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), strength=2.0, prerequisites=(0.51, 0.65, 0.85), - standard_name="toa_bidirectional_reflectance") + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), strength=2.0, + prerequisites=(0.51, 0.65, 0.85), + standard_name="toa_bidirectional_reflectance") - res = comp((self.c01, self.c02, self.c03)) - np.testing.assert_array_almost_equal(res.values, np.array([[0.2646, 0.3075], [0.2120, 0.3471]]), decimal=4) + res = comp((self.c01, self.c02, self.c03)).compute() + np.testing.assert_array_almost_equal(res.data, np.array([[0.2646, 0.3075], [0.2120, 0.3471]]), decimal=4) def test_invalid_strength(self): """Test using invalid `strength` term for non-linear scaling.""" From 51f90e6c6a716fec0e64f55660289c49c07b395b Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 1 Nov 2023 09:42:51 +0200 Subject: [PATCH 367/702] Clip values instead of using da.where() twice --- satpy/composites/spectral.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/satpy/composites/spectral.py b/satpy/composites/spectral.py index 59e8518a7e..448d7cb26a 100644 --- a/satpy/composites/spectral.py +++ b/satpy/composites/spectral.py @@ -18,8 +18,6 @@ import logging import warnings -import dask.array as da - from satpy.composites import GenericCompositor from satpy.dataset import combine_metadata @@ -166,8 +164,7 @@ def __call__(self, projectables, optional_datasets=None, **attrs): ndvi = (ndvi_input[1] - ndvi_input[0]) / (ndvi_input[1] + ndvi_input[0]) - ndvi.data = da.where(ndvi > 
self.ndvi_min, ndvi, self.ndvi_min) - ndvi.data = da.where(ndvi < self.ndvi_max, ndvi, self.ndvi_max) + ndvi = ndvi.clip(self.ndvi_min, self.ndvi_max) # Introduce non-linearity to ndvi for non-linear scaling to NIR blend fraction if self.strength != 1.0: # self._apply_strength() has no effect if strength = 1.0 -> no non-linear behaviour From 5db79f55232dcada3b984c5d886f48420a515524 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 1 Nov 2023 14:09:13 +0100 Subject: [PATCH 368/702] Update sgli reader, add tests --- satpy/etc/readers/sgli_l1b.yaml | 40 +++-- satpy/readers/sgli_l1b.py | 189 +++++++++++++++------- satpy/tests/reader_tests/test_sgli_l1b.py | 104 ++++++++++++ 3 files changed, 264 insertions(+), 69 deletions(-) diff --git a/satpy/etc/readers/sgli_l1b.yaml b/satpy/etc/readers/sgli_l1b.yaml index 41cb3bed9e..cbf5c4989d 100644 --- a/satpy/etc/readers/sgli_l1b.yaml +++ b/satpy/etc/readers/sgli_l1b.yaml @@ -6,6 +6,36 @@ reader: default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + data_identification_keys: + name: + required: true + wavelength: + type: !!python/name:satpy.dataset.dataid.WavelengthRange + polarization: + transitive: true + resolution: + transitive: false + calibration: + enum: + - reflectance + - brightness_temperature + - radiance + - counts + transitive: true + modifiers: + default: [] + type: !!python/name:satpy.dataset.dataid.ModifierTuple + + coord_identification_keys: + name: + required: true + polarization: + transitive: true + resolution: + transitive: false + + + file_types: gcom-c_l1b_v: file_reader: !!python/name:satpy.readers.sgli_l1b.HDF5SGLI @@ -31,7 +61,6 @@ datasets: longitude_v: name: longitude_v resolution: [250, 1000] - file_type: gcom-c_l1b standard_name: longitude units: degree file_key: Geometry_data/Longitude @@ -40,7 +69,6 @@ datasets: latitude_v: name: latitude_v resolution: [250, 1000] - file_type: gcom-c_l1b standard_name: latitude units: degree file_key: Geometry_data/Latitude @@ -50,7 +78,6 @@ datasets: name: longitude_p resolution: 1000 polarization: [0, -60, 60] - file_type: gcom-c_l1b standard_name: longitude units: degree file_key: Geometry_data/Longitude @@ -60,7 +87,6 @@ datasets: name: latitude_p resolution: 1000 polarization: [0, -60, 60] - file_type: gcom-c_l1b standard_name: latitude units: degree file_key: Geometry_data/Latitude @@ -69,7 +95,6 @@ datasets: longitude_ir: name: longitude_ir resolution: [250, 500, 1000] - file_type: gcom-c_l1b standard_name: longitude units: degree file_key: Geometry_data/Longitude @@ -78,7 +103,6 @@ datasets: latitude_ir: name: latitude_ir resolution: [250, 500, 1000] - file_type: gcom-c_l1b standard_name: latitude units: degree file_key: Geometry_data/Latitude @@ -87,7 +111,6 @@ datasets: solar_zenith_angle: name: solar_zenith_angle sensor: sgli - wavelength: [0.3925,0.4,0.4075] resolution: [250, 1000] coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v @@ -96,7 +119,6 @@ datasets: solar_azimuth_angle: name: solar_azimuth_angle sensor: sgli - wavelength: [0.3925,0.4,0.4075] resolution: [250, 1000] coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v @@ -105,7 +127,6 @@ datasets: satellite_zenith_angle: name: satellite_zenith_angle sensor: sgli - wavelength: [0.3925,0.4,0.4075] resolution: [250, 1000] coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v @@ -114,7 +135,6 @@ datasets: satellite_azimuth_angle: name: satellite_azimuth_angle sensor: sgli - wavelength: [0.3925,0.4,0.4075] resolution: [250, 1000] coordinates: 
[longitude_v, latitude_v] file_type: gcom-c_l1b_v diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 97db07a756..25289b5de3 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -34,18 +34,19 @@ import h5py import numpy as np import xarray as xr +from dask.array.core import normalize_chunks from xarray import Dataset, Variable from xarray.backends import BackendArray, BackendEntrypoint from xarray.core import indexing -from satpy import CHUNK_SIZE +# from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) -resolutions = {'Q': 250, - 'K': 1000, - 'L': 1000} +resolutions = {"Q": 250, + "K": 1000, + "L": 1000} def interpolate(arr, sampling, full_shape): @@ -58,13 +59,14 @@ def interpolate(arr, sampling, full_shape): full_x = np.arange(0, full_shape[0]) full_y = np.arange(0, full_shape[1]) + from scipy.interpolate import RectBivariateSpline spl = RectBivariateSpline( tie_x, tie_y, arr) values = spl(full_x, full_y) - return da.from_array(values, chunks=(CHUNK_SIZE, CHUNK_SIZE)) + return da.from_array(values, chunks=(1000, 1000)) class HDF5SGLI(BaseFileHandler): @@ -73,78 +75,147 @@ class HDF5SGLI(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info): """Initialize the filehandler.""" super().__init__(filename, filename_info, filetype_info) - self.resolution = resolutions[self.filename_info['resolution']] - self.fh = h5py.File(self.filename, 'r') + self.resolution = resolutions[self.filename_info["resolution"]] + self.h5file = h5py.File(self.filename, "r") @property def start_time(self): """Get the start time.""" - the_time = self.fh['Global_attributes'].attrs['Scene_start_time'].item() - return datetime.strptime(the_time.decode('ascii'), '%Y%m%d %H:%M:%S.%f') + the_time = self.h5file["Global_attributes"].attrs["Scene_start_time"].item() + return datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f") @property def end_time(self): """Get the end time.""" - the_time = self.fh['Global_attributes'].attrs['Scene_end_time'].item() - return datetime.strptime(the_time.decode('ascii'), '%Y%m%d %H:%M:%S.%f') + the_time = self.h5file["Global_attributes"].attrs["Scene_end_time"].item() + return datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f") def get_dataset(self, key, info): """Get the dataset.""" if key["resolution"] != self.resolution: return - if key["polarization"] is not None: - pols = {0: '0', -60: 'm60', 60: 'p60'} - file_key = info['file_key'].format(pol=pols[key["polarization"]]) - else: - file_key = info['file_key'] - - h5dataset = self.fh[file_key] - - resampling_interval = h5dataset.attrs.get('Resampling_interval', 1) - if resampling_interval != 1: - logger.debug('Interpolating %s.', key["name"]) - full_shape = (self.fh['Image_data'].attrs['Number_of_lines'], - self.fh['Image_data'].attrs['Number_of_pixels']) - dataset = interpolate(h5dataset, resampling_interval, full_shape) - else: - dataset = da.from_array(h5dataset[:].astype(' 116 + +def test_loading_lon_lat(sgli_file): + handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) + did = dict(name="longitude_v", resolution=1000, polarization=None) + res = handler.get_dataset(did, {"file_key": "Geometry_data/Longitude"}) + assert res.shape == (1955, 1250) + assert res.chunks is not None + assert res.dtype == np.float32 From bfb96c73ba042a1976b8a6e4b79d4f4618fdf511 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 1 Nov 2023 14:52:34 +0100 Subject: [PATCH 369/702] Start replacing 
assertRaises --- satpy/tests/test_readers.py | 34 ++++++++++++++++++--------------- satpy/tests/test_resample.py | 2 +- satpy/tests/test_utils.py | 1 - satpy/tests/test_writers.py | 7 ++++--- satpy/tests/test_yaml_reader.py | 22 ++++++++++----------- 5 files changed, 34 insertions(+), 32 deletions(-) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index 8250f691a0..378f3fdb5a 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -166,7 +166,8 @@ def test_getitem(self): assert d[0.5] == "1h" assert d["test4"] == "4refl" assert d[make_dataid(name="test4", calibration="radiance")] == "4rad" - self.assertRaises(KeyError, d.getitem, "1h") + with pytest.raises(KeyError): + d.getitem("1h") # test with full tuple assert d[make_dsq(name="test", wavelength=(0, 0.5, 1), resolution=1000)] == "1" @@ -203,7 +204,8 @@ def test_get_key(self): assert res1 != res3 # more than 1 result when default is to ask for 1 result - self.assertRaises(KeyError, get_key, "test4", d, best=False) + with pytest.raises(KeyError): + get_key("test4", d, best=False) def test_contains(self): """Test DatasetDict contains method.""" @@ -288,9 +290,9 @@ def test_filenames_and_reader(self): def test_bad_reader_name_with_filenames(self): """Test bad reader name with filenames provided.""" from satpy.readers import load_readers - self.assertRaises(ValueError, load_readers, reader="i_dont_exist", filenames=[ - "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", - ]) + with pytest.raises(ValueError, match="No reader named: i_dont_exist"): + load_readers(reader="i_dont_exist", + filenames=["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"]) def test_filenames_as_path(self): """Test with filenames specified as pathlib.Path.""" @@ -318,9 +320,8 @@ def test_filenames_as_dict_bad_reader(self): "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], "__fake__": ["fake.txt"], } - self.assertRaisesRegex(ValueError, - r"(?=.*__fake__)(?!.*viirs)(^No reader.+)", - load_readers, filenames=filenames) + with pytest.raises(ValueError, match=r"(?=.*__fake__)(?!.*viirs)(^No reader.+)"): + load_readers(filenames=filenames) def test_filenames_as_dict_with_reader(self): """Test loading from a filenames dict with a single reader specified. 
@@ -343,7 +344,8 @@ def test_empty_filenames_as_dict(self): filenames = { "viirs_sdr": [], } - self.assertRaises(ValueError, load_readers, filenames=filenames) + with pytest.raises(ValueError, match="No supported files found"): + load_readers(filenames=filenames) # two readers, one is empty filenames = { @@ -370,7 +372,8 @@ def test_missing_requirements(self, *mocks): with warnings.catch_warnings(): warnings.filterwarnings("ignore", message=r"No handler for reading requirement.*", category=UserWarning) for filenames in [epi_miss, pro_miss, epi_pro_miss]: - self.assertRaises(ValueError, load_readers, reader="seviri_l1b_hrit", filenames=filenames) + with pytest.raises(ValueError, match="No dataset could be loaded.*"): + load_readers(reader="seviri_l1b_hrit", filenames=filenames) # Filenames from multiple scans at_least_one_complete = [ @@ -400,8 +403,8 @@ def test_all_filtered(self): filter_params = {"start_time": datetime.datetime(1970, 1, 1), "end_time": datetime.datetime(1970, 1, 2), "area": None} - self.assertRaises(ValueError, load_readers, - filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) + with pytest.raises(ValueError, match="No dataset could be loaded.*"): + load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) def test_all_filtered_multiple(self): """Test behaviour if no file matches the filter parameters.""" @@ -414,8 +417,8 @@ def test_all_filtered_multiple(self): } filter_params = {"start_time": datetime.datetime(1970, 1, 1), "end_time": datetime.datetime(1970, 1, 2)} - self.assertRaises(ValueError, load_readers, - filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) + with pytest.raises(ValueError, match="No dataset could be loaded."): + load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) def test_almost_all_filtered(self): """Test behaviour if only one reader has datasets.""" @@ -725,7 +728,8 @@ def test_bad_reader(self): # touch the file so it exists on disk with mock.patch("yaml.load") as load: load.side_effect = yaml.YAMLError("Import problems") - self.assertRaises(yaml.YAMLError, group_files, [], reader="abi_l1b") + with pytest.raises(yaml.YAMLError): + group_files([], reader="abi_l1b") def test_default_behavior(self): """Test the default behavior with the 'abi_l1b' reader.""" diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index 66e93009d2..7135661578 100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -834,7 +834,7 @@ def test_compute(self): # Too many dimensions data = da.ones((3, 5, 5)) - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="BucketFraction not implemented for 3D datasets"): _ = self.bucket.compute(data) @mock.patch("pyresample.bucket.BucketResampler") diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 6f5db02087..18ff839599 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -80,7 +80,6 @@ def test_lonlat2xyz(self, lonlat, xyz): ((90, 90), (1, 0, 0)), ((-90, 90), (-1, 0, 0)), ((180, 90), (0, -1, 0)), - ((0, -90), (0, -1, 0)), ((0, 45), (0, sqrt(2) / 2, sqrt(2) / 2)), ((0, 60), (0, sqrt(3) / 2, sqrt(1) / 2)), ], diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py index c2d049dae1..dd26b06c82 100644 --- a/satpy/tests/test_writers.py +++ b/satpy/tests/test_writers.py @@ -40,7 +40,8 @@ def test_to_image_1d(self): # 1D from satpy.writers import to_image p = xr.DataArray(np.arange(25), dims=["y"]) - 
self.assertRaises(ValueError, to_image, p) + with pytest.raises(ValueError, match="Need at least a 2D array to make an image."): + to_image(p) @mock.patch("satpy.writers.XRImage") def test_to_image_2d(self, mock_geoimage): @@ -113,8 +114,8 @@ def test_basic_init_provided_enh(self): def test_init_nonexistent_enh_file(self): """Test Enhancer init with a nonexistent enhancement configuration file.""" from satpy.writers import Enhancer - self.assertRaises( - ValueError, Enhancer, enhancement_config_file="is_not_a_valid_filename_?.yaml") + with pytest.raises(ValueError, match="YAML file doesn't exist or string is not YAML dict:.*"): + Enhancer(enhancement_config_file="is_not_a_valid_filename_?.yaml") class _BaseCustomEnhancementConfigTests: diff --git a/satpy/tests/test_yaml_reader.py b/satpy/tests/test_yaml_reader.py index 35752cd237..41439a1ac6 100644 --- a/satpy/tests/test_yaml_reader.py +++ b/satpy/tests/test_yaml_reader.py @@ -327,7 +327,9 @@ def setUp(self): def test_deprecated_passing_config_files(self): """Test that we get an exception when config files are passed to inti.""" - self.assertRaises(ValueError, yr.FileYAMLReader, "/path/to/some/file.yaml") + with pytest.raises(ValueError, + match="Passing config files to create a Reader is deprecated.*"): + yr.FileYAMLReader("/path/to/some/file.yaml") def test_all_data_ids(self): """Check that all datasets ids are returned.""" @@ -409,15 +411,11 @@ def test_start_end_time(self): """Check start and end time behaviours.""" self.reader.file_handlers = {} - def get_start_time(): - return self.reader.start_time + with pytest.raises(RuntimeError): + self.reader.start_time - self.assertRaises(RuntimeError, get_start_time) - - def get_end_time(): - return self.reader.end_time - - self.assertRaises(RuntimeError, get_end_time) + with pytest.raises(RuntimeError): + self.reader.end_time fh0 = FakeFH(datetime(1999, 12, 30, 0, 0), datetime(1999, 12, 31, 0, 0)) @@ -780,7 +778,7 @@ def test_load_dataset_with_area_for_single_areas(self, ldwa): np.testing.assert_equal(res.coords["time"], np.arange(2)) # check wrong input - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="Target orientation for Dataset unknown_name not recognized.*"): _ = reader._load_dataset_with_area(dsid, coords, "wronginput") # check native orientation, nothing should change @@ -1043,11 +1041,11 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): # Projectable is None mss.return_value = [0, 0, 0, False, None] - with self.assertRaises(KeyError): + with pytest.raises(KeyError): res = reader._load_dataset(None, None, None) # Failure is True mss.return_value = [0, 0, 0, True, 0] - with self.assertRaises(KeyError): + with pytest.raises(KeyError): res = reader._load_dataset(None, None, None) # Setup input, and output of mocked functions From 7db371af672a9ea15db69866631ae4a0597334e3 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 1 Nov 2023 17:11:49 +0200 Subject: [PATCH 370/702] Test that NDVIHybridGreenCompositor doesn't up-cast the data --- satpy/tests/compositor_tests/test_spectral.py | 25 ++++++++++++++----- 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 55b24b1982..3f9d65a78d 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -85,12 +85,15 @@ class TestNdviHybridGreenCompositor: def setup_method(self): """Initialize channels.""" - self.c01 = 
xr.DataArray(da.from_array([[0.25, 0.30], [0.20, 0.30]], chunks=25), - dims=("y", "x"), attrs={"name": "C02"}) - self.c02 = xr.DataArray(da.from_array([[0.25, 0.30], [0.25, 0.35]], chunks=25), - dims=("y", "x"), attrs={"name": "C03"}) - self.c03 = xr.DataArray(da.from_array([[0.35, 0.35], [0.28, 0.65]], chunks=25), - dims=("y", "x"), attrs={"name": "C04"}) + self.c01 = xr.DataArray( + da.from_array(np.array([[0.25, 0.30], [0.20, 0.30]], dtype=np.float32), chunks=25), + dims=("y", "x"), attrs={"name": "C02"}) + self.c02 = xr.DataArray( + da.from_array(np.array([[0.25, 0.30], [0.25, 0.35]], dtype=np.float32), chunks=25), + dims=("y", "x"), attrs={"name": "C03"}) + self.c03 = xr.DataArray( + da.from_array(np.array([[0.35, 0.35], [0.28, 0.65]], dtype=np.float32), chunks=25), + dims=("y", "x"), attrs={"name": "C04"}) def test_ndvi_hybrid_green(self): """Test General functionality with linear scaling from ndvi to blend fraction.""" @@ -107,6 +110,16 @@ def test_ndvi_hybrid_green(self): data = res.values np.testing.assert_array_almost_equal(data, np.array([[0.2633, 0.3071], [0.2115, 0.3420]]), decimal=4) + def test_ndvi_hybrid_green_dtype(self): + """Test that the datatype is not altered by the compositor.""" + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), + standard_name="toa_bidirectional_reflectance") + + # Test General functionality with linear strength (=1.0) + res = comp((self.c01, self.c02, self.c03)).compute() + assert res.data.dtype == np.float32 + def test_nonliniear_scaling(self): """Test non-linear scaling using `strength` term.""" with dask.config.set(scheduler=CustomScheduler(max_computes=1)): From edd0632df09e0456b3738b8da3728723de2a33fe Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 1 Nov 2023 15:23:40 -0500 Subject: [PATCH 371/702] Remove unnecessary float cast in satpy/readers/abi_l1b.py --- satpy/readers/abi_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py index 4933b0982a..29ed6f668c 100644 --- a/satpy/readers/abi_l1b.py +++ b/satpy/readers/abi_l1b.py @@ -133,7 +133,7 @@ def _raw_calibrate(self, data): def _vis_calibrate(self, data): """Calibrate visible channels to reflectance.""" solar_irradiance = self["esun"] - esd = self["earth_sun_distance_anomaly_in_AU"].astype(np.float32) + esd = self["earth_sun_distance_anomaly_in_AU"] factor = np.pi * esd * esd / solar_irradiance From 144778b3e53a8fa3256f6500b112daaaa8b9ed3e Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 2 Nov 2023 08:52:24 +0200 Subject: [PATCH 372/702] Update satpy/tests/compositor_tests/test_spectral.py Co-authored-by: David Hoese --- satpy/tests/compositor_tests/test_spectral.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 3f9d65a78d..2773b5d4a5 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -127,7 +127,10 @@ def test_nonliniear_scaling(self): prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") - res = comp((self.c01, self.c02, self.c03)).compute() + res = comp((self.c01, self.c02, self.c03)) + res_np = res.data.compute() + assert res.dtype == res_np.dtype + assert res.dtype == np.float32 np.testing.assert_array_almost_equal(res.data, np.array([[0.2646, 0.3075], [0.2120, 0.3471]]), 
decimal=4) def test_invalid_strength(self): From 5b2cbc3225463892386b634b0ce1586272a95cfc Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 2 Nov 2023 08:54:43 +0200 Subject: [PATCH 373/702] Fix typo in test method name --- satpy/tests/compositor_tests/test_spectral.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 2773b5d4a5..2a3c92eca8 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -120,7 +120,7 @@ def test_ndvi_hybrid_green_dtype(self): res = comp((self.c01, self.c02, self.c03)).compute() assert res.data.dtype == np.float32 - def test_nonliniear_scaling(self): + def test_nonlinear_scaling(self): """Test non-linear scaling using `strength` term.""" with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), strength=2.0, From dd2c879bc2ea0c6af825b93bf1139062c1df63b2 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 2 Nov 2023 08:57:03 +0200 Subject: [PATCH 374/702] Remove copy-paste comment --- satpy/tests/compositor_tests/test_spectral.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 2a3c92eca8..e46cff4d0c 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -115,8 +115,6 @@ def test_ndvi_hybrid_green_dtype(self): with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") - - # Test General functionality with linear strength (=1.0) res = comp((self.c01, self.c02, self.c03)).compute() assert res.data.dtype == np.float32 From 006136ef5a8590e6c7d63b1c2db97f5a70693726 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 2 Nov 2023 14:00:42 -0500 Subject: [PATCH 375/702] Switch abi l1b reader tests to use reader-level interfaces --- satpy/tests/reader_tests/test_abi_l1b.py | 43 +++++++++--------------- 1 file changed, 16 insertions(+), 27 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index ec3a0334cc..a6acd7f027 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -31,16 +31,14 @@ import xarray as xr from pytest_lazyfixture import lazy_fixture -from satpy import DataQuery, Scene +from satpy import DataQuery from satpy.readers.abi_l1b import NC_ABI_L1B +from satpy.readers.yaml_reader import FileYAMLReader from satpy.utils import ignore_pyproj_proj_warnings RAD_SHAPE = { 500: (3000, 5000), # conus - 500m } -# RAD_SHAPE = { -# 500: (21696, 21696), # fldk - 500m -# } RAD_SHAPE[1000] = (RAD_SHAPE[500][0] // 2, RAD_SHAPE[500][1] // 2) RAD_SHAPE[2000] = (RAD_SHAPE[500][0] // 4, RAD_SHAPE[500][1] // 4) @@ -143,17 +141,15 @@ def c01_refl(tmp_path) -> xr.DataArray: # 226 on-disk chunk size # Square (**2) for 2D size with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): - scn = _create_scene_for_data(tmp_path, "C01", None, 1000) - scn.load(["C01"]) - return scn["C01"] + reader = _create_reader_for_data(tmp_path, "C01", None, 1000) + return reader.load(["C01"])["C01"] @pytest.fixture() def c01_rad(tmp_path) -> xr.DataArray: with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): - scn = 
_create_scene_for_data(tmp_path, "C01", None, 1000) - scn.load([DataQuery(name="C01", calibration="radiance")]) - return scn["C01"] + reader = _create_reader_for_data(tmp_path, "C01", None, 1000) + return reader.load([DataQuery(name="C01", calibration="radiance")])["C01"] @pytest.fixture() @@ -174,17 +170,15 @@ def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: }, ) with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): - scn = _create_scene_for_data(tmp_path, "C01", rad, 1000) - scn.load([DataQuery(name="C01", calibration="radiance")]) - return scn["C01"] + reader = _create_reader_for_data(tmp_path, "C01", rad, 1000) + return reader.load([DataQuery(name="C01", calibration="radiance")])["C01"] @pytest.fixture() def c01_counts(tmp_path) -> xr.DataArray: with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): - scn = _create_scene_for_data(tmp_path, "C01", None, 1000) - scn.load([DataQuery(name="C01", calibration="counts")]) - return scn["C01"] + reader = _create_reader_for_data(tmp_path, "C01", None, 1000) + return reader.load([DataQuery(name="C01", calibration="counts")])["C01"] @pytest.fixture() @@ -194,15 +188,14 @@ def _load_data_array( ): rad = _fake_c07_data() with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): - scn = _create_scene_for_data( + reader = _create_reader_for_data( tmp_path, "C07", rad, 2000, {"clip_negative_radiances": clip_negative_radiances}, ) - scn.load(["C07"]) - return scn["C07"] + return reader.load(["C07"])["C07"] return _load_data_array @@ -228,13 +221,13 @@ def _fake_c07_data() -> xr.DataArray: return rad -def _create_scene_for_data( +def _create_reader_for_data( tmp_path: Path, channel_name: str, rad: xr.DataArray | None, resolution: int, reader_kwargs: dict[str, Any] | None = None, -) -> Scene: +) -> FileYAMLReader: filename = generate_l1b_filename(channel_name) data_path = tmp_path / filename dataset = _create_fake_rad_dataset(rad=rad, resolution=resolution) @@ -244,12 +237,8 @@ def _create_scene_for_data( "Rad": {"chunksizes": [226, 226]}, }, ) - scn = Scene( - reader="abi_l1b", - filenames=[str(data_path)], - reader_kwargs=reader_kwargs, - ) - return scn + from satpy.readers import load_readers + return load_readers([str(data_path)], "abi_l1b", reader_kwargs=reader_kwargs)["abi_l1b"] def _get_and_check_array(data_arr: xr.DataArray, exp_dtype: npt.DTypeLike) -> npt.NDArray: From c35381897d5e3bd19597d96d0a6b1774edb8dc88 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 1 Nov 2023 10:04:23 -0500 Subject: [PATCH 376/702] Add dask chunk size checks to ABI l1b tests --- satpy/tests/writer_tests/test_awips_tiled.py | 7 ++++--- satpy/writers/awips_tiled.py | 5 ++++- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/satpy/tests/writer_tests/test_awips_tiled.py b/satpy/tests/writer_tests/test_awips_tiled.py index eab72e8f5b..63113a9f94 100644 --- a/satpy/tests/writer_tests/test_awips_tiled.py +++ b/satpy/tests/writer_tests/test_awips_tiled.py @@ -495,9 +495,10 @@ def test_multivar_numbered_tiles_glm(self, sector, extra_kwargs, tmp_path): "_FillValue": 1, }) - w.save_datasets([ds1, ds2, ds3, dqf], sector_id="TEST", source_name="TESTS", - tile_count=(3, 3), template="glm_l2_rad{}".format(sector.lower()), - **extra_kwargs) + with pytest.warns(UserWarning, match="Production location attribute "): + w.save_datasets([ds1, ds2, ds3, dqf], sector_id="TEST", source_name="TESTS", + tile_count=(3, 3), template="glm_l2_rad{}".format(sector.lower()), + **extra_kwargs) fn_glob = self._get_glm_glob_filename(extra_kwargs) all_files = 
glob(os.path.join(str(tmp_path), fn_glob)) assert len(all_files) == 9 diff --git a/satpy/writers/awips_tiled.py b/satpy/writers/awips_tiled.py index 5f10418e8a..9bab65fe35 100644 --- a/satpy/writers/awips_tiled.py +++ b/satpy/writers/awips_tiled.py @@ -620,9 +620,12 @@ def _get_factor_offset_fill(input_data_arr, vmin, vmax, encoding): # file data type to allow for extra fill values num_fills = 0 - if is_unsigned or unsigned_in_signed: + if is_unsigned: # max value fills = [2 ** file_bit_depth - 1] + elif unsigned_in_signed: + # max unsigned value is -1 as a signed int + fills = [-1] else: # max value fills = [2 ** (file_bit_depth - 1) - 1] From 2e53366d3666672ceeffb6f8e2b7d5218e7d393d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 1 Nov 2023 10:04:59 -0500 Subject: [PATCH 377/702] Remove use of pkg_resources (deprecated) in sphinx conf.py --- doc/source/conf.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 3bef218f89..f6606dc6c9 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -18,8 +18,6 @@ import sys from datetime import datetime -from pkg_resources import get_distribution - # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. @@ -28,12 +26,14 @@ from reader_table import generate_reader_table # noqa: E402 +import satpy # noqa: E402 + # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # get version using setuptools-scm -release = get_distribution("satpy").version +release = satpy.__version__ # The full version, including alpha/beta/rc tags. # for example take major/minor version = ".".join(release.split(".")[:2]) From 57369902601c3834a24ea14fc82d03d9f843093b Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 1 Nov 2023 10:40:35 -0500 Subject: [PATCH 378/702] Fix time string parsing with newer versions of xarray and numpy Something in a recent change makes xarray or numpy allow for `str + DataArray` returning a DataArray and this makes `datetime.strptime` mad. --- satpy/readers/viirs_atms_sdr_base.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/satpy/readers/viirs_atms_sdr_base.py b/satpy/readers/viirs_atms_sdr_base.py index be0a7a0d65..159a84a070 100644 --- a/satpy/readers/viirs_atms_sdr_base.py +++ b/satpy/readers/viirs_atms_sdr_base.py @@ -100,11 +100,11 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): super().__init__(filename, filename_info, filetype_info, **kwargs) def _parse_datetime(self, datestr, timestr): - try: - datetime_str = datestr + timestr - except TypeError: - datetime_str = (str(datestr.data.compute().astype(str)) + - str(timestr.data.compute().astype(str))) + if not isinstance(datestr, str): + datestr = str(datestr.data.compute().astype(str)) + if not isinstance(timestr, str): + timestr = str(timestr.data.compute().astype(str)) + datetime_str = datestr + timestr time_val = datetime.strptime(datetime_str, "%Y%m%d%H%M%S.%fZ") if abs(time_val - NO_DATE) < EPSILON_TIME: From c25c0bf34cc3cd8a62d254d5016e95374125de51 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 1 Nov 2023 13:37:14 -0500 Subject: [PATCH 379/702] Fix incorrect data type in AMI tests Tests used a 16-bit signed integer, but real world data is unsigned. 
Using signed results in not being able to mask with expected quality flags --- satpy/tests/reader_tests/test_ami_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_ami_l1b.py b/satpy/tests/reader_tests/test_ami_l1b.py index cdbc4468c9..a9909fd48b 100644 --- a/satpy/tests/reader_tests/test_ami_l1b.py +++ b/satpy/tests/reader_tests/test_ami_l1b.py @@ -65,7 +65,7 @@ def setUp(self, xr_, counts=None): if counts is None: rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50. rad_data = (rad_data + 1.) / 0.5 - rad_data = rad_data.astype(np.int16) + rad_data = rad_data.astype(np.uint16) counts = xr.DataArray( da.from_array(rad_data, chunks="auto"), dims=("y", "x"), From ea791e92f28219572e7bbefc9432d44760af994a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 1 Nov 2023 14:13:10 -0500 Subject: [PATCH 380/702] Fix LI L2 tests using signed values with an unsigned type --- satpy/tests/reader_tests/_li_test_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/_li_test_utils.py b/satpy/tests/reader_tests/_li_test_utils.py index d6a32253f5..32107006fc 100644 --- a/satpy/tests/reader_tests/_li_test_utils.py +++ b/satpy/tests/reader_tests/_li_test_utils.py @@ -370,13 +370,13 @@ def l2_lfl_schema(settings=None): "default_data": lambda: np.random.uniform(stime, etime, nobs) }, "l1b_geolocation_warning": { - "format": "u8", + "format": "i1", "shape": ("flashes",), "long_name": "L1b geolocation warning", "default_data": lambda: -127 }, "l1b_radiometric_warning": { - "format": "u8", + "format": "i1", "shape": ("flashes",), "long_name": "L1b radiometric warning", "default_data": lambda: -127 From e82379eff586ff1d76c623bbe2b08940b2ffed35 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 2 Nov 2023 09:50:07 -0500 Subject: [PATCH 381/702] Add workaround for dtype not being preserved in xr.where call --- satpy/readers/mersi_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index 905db0654f..7070131f51 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -141,7 +141,7 @@ def _mask_data(self, data, dataset_id, attrs): if dataset_id.get("calibration") == "counts": # preserve integer type of counts if possible attrs["_FillValue"] = fill_value - new_fill = fill_value + new_fill = data.dtype.type(fill_value) else: new_fill = np.nan if valid_range is not None: From bec297a05d32e8da0ad70d53011fe231204859b1 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 2 Nov 2023 10:03:46 -0500 Subject: [PATCH 382/702] Add workaround for xarray where with integer types --- satpy/readers/fci_l1c_nc.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index e42975b3a4..2d3e047c5e 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -334,11 +334,10 @@ def _get_dataset_measurand(self, key, info=None): vr = attrs.get("valid_range", [-np.inf, np.inf]) if key["calibration"] == "counts": attrs["_FillValue"] = fv - nfv = fv + nfv = data.dtype.type(fv) else: nfv = np.nan - data = data.where(data >= vr[0], nfv) - data = data.where(data <= vr[1], nfv) + data = data.where((data >= vr[0]) & (data <= vr[1]), nfv) res = self.calibrate(data, key) From 9f69ff5a439c0f8a8f45a1a123fbee0b58e041ea Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 2 Nov 2023 10:36:09 -0500 Subject: [PATCH 383/702] Add workaround for integer overflow in goes 
imager reader Numpy 2.0 complains about 1000 not fitting in a uint8, but I also cast other large numbers to be a little safe. --- satpy/readers/goes_imager_hrit.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/readers/goes_imager_hrit.py b/satpy/readers/goes_imager_hrit.py index 457d5d809c..9ac6fe4484 100644 --- a/satpy/readers/goes_imager_hrit.py +++ b/satpy/readers/goes_imager_hrit.py @@ -117,11 +117,11 @@ class CalibrationError(Exception): def make_sgs_time(sgs_time_array): """Make sgs time.""" - year = ((sgs_time_array["century"] >> 4) * 1000 + - (sgs_time_array["century"] & 15) * 100 + + year = ((sgs_time_array["century"] >> 4) * np.int64(1000) + + (sgs_time_array["century"] & 15) * np.int64(100) + (sgs_time_array["year"] >> 4) * 10 + (sgs_time_array["year"] & 15)) - doy = ((sgs_time_array["doy1"] >> 4) * 100 + + doy = ((sgs_time_array["doy1"] >> 4) * np.int64(100) + (sgs_time_array["doy1"] & 15) * 10 + (sgs_time_array["doy_hours"] >> 4)) hours = ((sgs_time_array["doy_hours"] & 15) * 10 + @@ -130,7 +130,7 @@ def make_sgs_time(sgs_time_array): (sgs_time_array["mins_secs"] >> 4)) secs = ((sgs_time_array["mins_secs"] & 15) * 10 + (sgs_time_array["secs_msecs"] >> 4)) - msecs = ((sgs_time_array["secs_msecs"] & 15) * 100 + + msecs = ((sgs_time_array["secs_msecs"] & 15) * np.int64(100) + (sgs_time_array["msecs"] >> 4) * 10 + (sgs_time_array["msecs"] & 15)) return (datetime(int(year), 1, 1) + From b55da03f125398a99f34cffcbec2e695b92977ad Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 2 Nov 2023 11:44:10 -0500 Subject: [PATCH 384/702] Fix dtype handling in modis readers --- satpy/readers/hdfeos_base.py | 3 ++- satpy/tests/reader_tests/modis_tests/test_modis_l1b.py | 4 ++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index f60040a46f..c4bd5ebd7b 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -285,7 +285,8 @@ def _get_good_data_mask(self, data_arr, is_category=False): if is_category and np.issubdtype(data_arr.dtype, np.integer): # no need to mask, the fill value is already what it needs to be return None, None - new_fill = np.nan + fill_type = data_arr.dtype.type if np.issubdtype(data_arr.dtype, np.floating) else np.float32 + new_fill = fill_type(np.nan) data_arr.attrs.pop("_FillValue", None) good_mask = data_arr != fill_value return good_mask, new_fill diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py index 11068b6577..d4998a67f9 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py @@ -77,6 +77,10 @@ def _load_and_check_geolocation(scene, resolution, exp_res, exp_shape, has_res, assert lat_arr.shape == exp_shape # compute lon/lat at the same time to avoid wasted computation lon_vals, lat_vals = dask.compute(lon_arr, lat_arr) + assert lon_arr.dtype == lat_arr.dtype + assert lon_arr.dtype == np.float32 + assert lon_vals.dtype == lon_arr.dtype + assert lat_vals.dtype == lat_arr.dtype np.testing.assert_array_less(lon_vals, 0) np.testing.assert_array_less(0, lat_vals) check_callback(lon_arr) From 6c20a67aa56d79090f054588ed836394fb4b3c5b Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 2 Nov 2023 12:35:56 -0500 Subject: [PATCH 385/702] Be more intentional with dtype casting in goes imager hrit reader --- satpy/readers/goes_imager_hrit.py | 41 +++++++++++++++++++------------ 1 file changed, 25 
insertions(+), 16 deletions(-) diff --git a/satpy/readers/goes_imager_hrit.py b/satpy/readers/goes_imager_hrit.py index 9ac6fe4484..1724ba214d 100644 --- a/satpy/readers/goes_imager_hrit.py +++ b/satpy/readers/goes_imager_hrit.py @@ -117,22 +117,31 @@ class CalibrationError(Exception): def make_sgs_time(sgs_time_array): """Make sgs time.""" - year = ((sgs_time_array["century"] >> 4) * np.int64(1000) + - (sgs_time_array["century"] & 15) * np.int64(100) + - (sgs_time_array["year"] >> 4) * 10 + - (sgs_time_array["year"] & 15)) - doy = ((sgs_time_array["doy1"] >> 4) * np.int64(100) + - (sgs_time_array["doy1"] & 15) * 10 + - (sgs_time_array["doy_hours"] >> 4)) - hours = ((sgs_time_array["doy_hours"] & 15) * 10 + - (sgs_time_array["hours_mins"] >> 4)) - mins = ((sgs_time_array["hours_mins"] & 15) * 10 + - (sgs_time_array["mins_secs"] >> 4)) - secs = ((sgs_time_array["mins_secs"] & 15) * 10 + - (sgs_time_array["secs_msecs"] >> 4)) - msecs = ((sgs_time_array["secs_msecs"] & 15) * np.int64(100) + - (sgs_time_array["msecs"] >> 4) * 10 + - (sgs_time_array["msecs"] & 15)) + century = sgs_time_array["century"].astype(np.int64) + year = sgs_time_array["year"].astype(np.int64) + doy1 = sgs_time_array["doy1"].astype(np.int64) + doy_hours = sgs_time_array["doy_hours"].astype(np.int64) + hours_mins = sgs_time_array["hours_mins"].astype(np.int64) + mins_secs = sgs_time_array["mins_secs"].astype(np.int64) + secs_msecs = sgs_time_array["secs_msecs"].astype(np.int64) + msecs = sgs_time_array["msecs"].astype(np.int64) + + year = ((century >> 4) * 1000 + + (century & 15) * 100 + + (year >> 4) * 10 + + (year & 15)) + doy = ((doy1 >> 4) * 100 + + (doy1 & 15) * 10 + + (doy_hours >> 4)) + hours = ((doy_hours & 15) * 10 + + (hours_mins >> 4)) + mins = ((hours_mins & 15) * 10 + + (mins_secs >> 4)) + secs = ((mins_secs & 15) * 10 + + (secs_msecs >> 4)) + msecs = ((secs_msecs & 15) * 100 + + (msecs >> 4) * 10 + + (msecs & 15)) return (datetime(int(year), 1, 1) + timedelta(days=int(doy - 1), hours=int(hours), From 07bb8204d19bed79370cec6ad913c3892173cc11 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 2 Nov 2023 12:41:17 -0500 Subject: [PATCH 386/702] Fix accidental dtype upcasting in seviri l1b calibration --- satpy/readers/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/utils.py b/satpy/readers/utils.py index 18c7193a43..c1bf7c7497 100644 --- a/satpy/readers/utils.py +++ b/satpy/readers/utils.py @@ -459,7 +459,7 @@ def apply_earthsun_distance_correction(reflectance, utc_date=None): reflectance.attrs["sun_earth_distance_correction_applied"] = True reflectance.attrs["sun_earth_distance_correction_factor"] = sun_earth_dist with xr.set_options(keep_attrs=True): - reflectance = reflectance * sun_earth_dist * sun_earth_dist + reflectance = reflectance * reflectance.dtype.type(sun_earth_dist * sun_earth_dist) return reflectance @@ -472,5 +472,5 @@ def remove_earthsun_distance_correction(reflectance, utc_date=None): reflectance.attrs["sun_earth_distance_correction_applied"] = False reflectance.attrs["sun_earth_distance_correction_factor"] = sun_earth_dist with xr.set_options(keep_attrs=True): - reflectance = reflectance / (sun_earth_dist * sun_earth_dist) + reflectance = reflectance / reflectance.dtype.type(sun_earth_dist * sun_earth_dist) return reflectance From 4424c5d150b6e113bd4cecf753b867d796a945ca Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 3 Nov 2023 15:55:03 -0500 Subject: [PATCH 387/702] Add xfail for unstable skyfield and h5py-based tests --- 
satpy/tests/modifier_tests/test_parallax.py | 4 +++ .../tests/reader_tests/test_gerb_l2_hr_h5.py | 2 ++ satpy/tests/test_utils.py | 2 ++ satpy/tests/utils.py | 28 +++++++++++++++++++ 4 files changed, 36 insertions(+) diff --git a/satpy/tests/modifier_tests/test_parallax.py b/satpy/tests/modifier_tests/test_parallax.py index f1385e9b18..e1b426dce2 100644 --- a/satpy/tests/modifier_tests/test_parallax.py +++ b/satpy/tests/modifier_tests/test_parallax.py @@ -30,6 +30,7 @@ from pyresample import create_area_def import satpy.resample +from satpy.tests.utils import xfail_skyfield_unstable_numpy2 from satpy.writers import get_enhanced_image # NOTE: @@ -438,6 +439,7 @@ def test_correct_area_cloudy_same_area(self, ): corrector = ParallaxCorrection(area) corrector(sc["CTH_constant"]) + @pytest.mark.xfail(xfail_skyfield_unstable_numpy2(), reason="Skyfield doesn't support numpy 2 yet") def test_correct_area_no_orbital_parameters(self, caplog, fake_tle): """Test ParallaxCorrection when CTH has no orbital parameters. @@ -761,6 +763,7 @@ def fake_scene(self, yaml_code): "area": area}) return sc + @pytest.mark.xfail(xfail_skyfield_unstable_numpy2(), reason="Skyfield doesn't support numpy 2 yet") def test_double_load(self, fake_scene, conf_file, fake_tle): """Test that loading corrected and uncorrected works correctly. @@ -790,6 +793,7 @@ def test_no_compute(self, fake_scene, conf_file): sccc.return_value = [os.fspath(conf_file)] fake_scene.load(["parallax_corrected_VIS006"]) + @pytest.mark.xfail(xfail_skyfield_unstable_numpy2(), reason="Skyfield doesn't support numpy 2 yet") def test_enhanced_image(self, fake_scene, conf_file, fake_tle): """Test that image enhancement is the same.""" with unittest.mock.patch( diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py index 0333f3df2b..d504b75d13 100644 --- a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py +++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py @@ -22,6 +22,7 @@ import pytest from satpy import Scene +from satpy.tests.utils import xfail_h5py_unstable_numpy2 FNAME = "G4_SEV4_L20_HR_SOL_TH_20190606_130000_V000.hdf" @@ -120,6 +121,7 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): return filename +@pytest.mark.xfail(xfail_h5py_unstable_numpy2(), reason="h5py doesn't include numpy 2 fix") @pytest.mark.parametrize("name", ["Solar Flux", "Thermal Flux", "Solar Radiance", "Thermal Radiance"]) def test_dataset_load(gerb_l2_hr_h5_dummy_file, name): """Test loading the solar flux component.""" diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 6f5db02087..f6633e159d 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -29,6 +29,7 @@ import pytest import xarray as xr +from satpy.tests.utils import xfail_skyfield_unstable_numpy2 from satpy.utils import ( angle2xyz, get_legacy_chunk_size, @@ -202,6 +203,7 @@ def test_get_satpos_fails_with_informative_error(self, attrs): with pytest.raises(KeyError, match="Unable to determine satellite position.*"): get_satpos(data_arr) + @pytest.mark.xfail(xfail_skyfield_unstable_numpy2(), reason="Skyfield does not support numpy 2 yet") def test_get_satpos_from_satname(self, caplog): """Test getting satellite position from satellite name only.""" import pyorbital.tlefile diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index ca958fce37..e2b70fe86c 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -407,3 +407,31 @@ def assert_attrs_equal(attrs, attrs_exp, tolerance=0): ) except TypeError: assert attrs[key] == 
attrs_exp[key], err_msg + + +def xfail_skyfield_unstable_numpy2(): + """Determine if skyfield-based tests should be xfail in the unstable numpy 2.x environment.""" + try: + import skyfield + except ImportError: + skyfield = None + + import os + is_unstable_ci = os.environ.get("UNSTABLE", "0") in ("1", "true") + is_np2 = np.__version__.startswith("2") + return skyfield is None and is_np2 and is_unstable_ci + + +def xfail_h5py_unstable_numpy2(): + """Determine if h5py-based tests should be xfail in the unstable numpy 2.x environment.""" + from packaging import version + try: + import h5py + is_broken_h5py = version.parse(h5py.__version__) < version.parse("3.10.0") + except ImportError: + is_broken_h5py = True + + import os + is_unstable_ci = os.environ.get("UNSTABLE", "0") in ("1", "true") + is_np2 = np.__version__.startswith("2") + return is_broken_h5py and is_np2 and is_unstable_ci From 65ec94fb88b09c167a0e886941c35778c09e9567 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 3 Nov 2023 19:55:01 -0500 Subject: [PATCH 388/702] Fix skyfield numpy compatibility check in xfail func --- satpy/tests/utils.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index e2b70fe86c..b543afc7b4 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -413,12 +413,15 @@ def xfail_skyfield_unstable_numpy2(): """Determine if skyfield-based tests should be xfail in the unstable numpy 2.x environment.""" try: import skyfield + + # known numpy incompatibility: + from skyfield import timelib # noqa except ImportError: skyfield = None import os is_unstable_ci = os.environ.get("UNSTABLE", "0") in ("1", "true") - is_np2 = np.__version__.startswith("2") + is_np2 = np.__version__.startswith("2.") return skyfield is None and is_np2 and is_unstable_ci From 432615b583f7e6ab9b8e87702a060d1be20bb5fd Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 3 Nov 2023 20:25:15 -0500 Subject: [PATCH 389/702] Fix h5py version check in xfail test func --- satpy/tests/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index b543afc7b4..7471bfc31c 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -430,11 +430,11 @@ def xfail_h5py_unstable_numpy2(): from packaging import version try: import h5py - is_broken_h5py = version.parse(h5py.__version__) < version.parse("3.10.0") + is_broken_h5py = version.parse(h5py.__version__) <= version.parse("3.10.0") except ImportError: is_broken_h5py = True import os is_unstable_ci = os.environ.get("UNSTABLE", "0") in ("1", "true") - is_np2 = np.__version__.startswith("2") + is_np2 = np.__version__.startswith("2.") return is_broken_h5py and is_np2 and is_unstable_ci From c8e7464607de02e917f82dcf1999de6237772498 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 3 Nov 2023 20:53:21 -0500 Subject: [PATCH 390/702] Fix dtype preservation in abi_l1b with numpy 2 --- satpy/readers/abi_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py index 29ed6f668c..07626f330d 100644 --- a/satpy/readers/abi_l1b.py +++ b/satpy/readers/abi_l1b.py @@ -163,7 +163,7 @@ def _ir_calibrate(self, data): if self.clip_negative_radiances: min_rad = self._get_minimum_radiance(data) - data = data.clip(min=min_rad) + data = data.clip(min=data.dtype.type(min_rad)) res = (fk2 / np.log(fk1 / data + 1) - bc1) / bc2 res.attrs = data.attrs From f08e0fd1e2b5c8339a15a39be55272d306889bfa Mon Sep 17 00:00:00 
2001 From: David Hoese Date: Fri, 3 Nov 2023 21:26:42 -0500 Subject: [PATCH 391/702] Fix h5py mock in docs build to handle xfail version checks --- doc/source/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index f6606dc6c9..df006727c0 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -44,7 +44,7 @@ class Mock(object): # noqa def __init__(self, *args, **kwargs): """Mask any arguments to mock object.""" - pass + self.__version__ = "0.0.0" def __call__(self, *args, **kwargs): """Mock a function and class object when accessed from mocked module.""" From 0096ad1b89174f9059becf523befc46086a7b042 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 6 Nov 2023 14:08:22 +0100 Subject: [PATCH 392/702] Replace usage of assertraises in test_demo.py --- satpy/tests/test_demo.py | 23 ++++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/satpy/tests/test_demo.py b/satpy/tests/test_demo.py index 32e8016f58..8f5c59bf3c 100644 --- a/satpy/tests/test_demo.py +++ b/satpy/tests/test_demo.py @@ -27,6 +27,8 @@ from collections import defaultdict from unittest import mock +import pytest + # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path @@ -89,9 +91,11 @@ def test_get_us_midlatitude_cyclone_abi(self, gcsfs_mod): gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] # expected 16 files, got 2 - self.assertRaises(RuntimeError, get_us_midlatitude_cyclone_abi) + with pytest.raises(RuntimeError): + get_us_midlatitude_cyclone_abi() # unknown access method - self.assertRaises(NotImplementedError, get_us_midlatitude_cyclone_abi, method="unknown") + with pytest.raises(NotImplementedError): + get_us_midlatitude_cyclone_abi(method="unknown") gcsfs_inst.glob.return_value = ["a.nc"] * 16 filenames = get_us_midlatitude_cyclone_abi() @@ -109,8 +113,10 @@ def test_get_hurricane_florence_abi(self, gcsfs_mod): # only return 5 results total gcsfs_inst.glob.side_effect = _GlobHelper([5, 0]) # expected 16 files * 10 frames, got 16 * 5 - self.assertRaises(RuntimeError, get_hurricane_florence_abi) - self.assertRaises(NotImplementedError, get_hurricane_florence_abi, method="unknown") + with pytest.raises(RuntimeError): + get_hurricane_florence_abi() + with pytest.raises(NotImplementedError): + get_hurricane_florence_abi(method="unknown") gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi() @@ -157,7 +163,8 @@ def test_get_bucket_files(self, gcsfs_mod): gcsfs_inst.glob.side_effect = None # reset mock side effect gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] - self.assertRaises(OSError, get_bucket_files, "*.nc", "does_not_exist") + with pytest.raises(OSError, match="Directory does not exist: does_not_exist"): + get_bucket_files("*.nc", "does_not_exist") open("a.nc", "w").close() # touch the file gcsfs_inst.get.reset_mock() @@ -176,13 +183,15 @@ def test_get_bucket_files(self, gcsfs_mod): # if we don't get any results then we expect an exception gcsfs_inst.get.reset_mock() gcsfs_inst.glob.return_value = [] - self.assertRaises(OSError, get_bucket_files, "*.nc", ".") + with pytest.raises(OSError, match="No files could be found or downloaded."): + get_bucket_files("*.nc", ".") @mock.patch("satpy.demo._google_cloud_platform.gcsfs", None) def test_no_gcsfs(self): """Test that 'gcsfs' is required.""" from satpy.demo._google_cloud_platform import get_bucket_files - 
self.assertRaises(RuntimeError, get_bucket_files, "*.nc", ".") + with pytest.raises(RuntimeError): + get_bucket_files("*.nc", ".") class TestAHIDemoDownload: From 4e485e93158efae51ef0a1e5178e0227cf838c3a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 6 Nov 2023 12:22:52 -0600 Subject: [PATCH 393/702] Fix ABI readers using wrong dtype for resolution-based chunks In-file data is 16-bit so our size has to be based on that --- satpy/readers/abi_base.py | 4 ++-- satpy/tests/reader_tests/test_abi_l1b.py | 26 ++++++++++++++---------- 2 files changed, 17 insertions(+), 13 deletions(-) diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 07a29e3043..107382d7ba 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -94,10 +94,10 @@ def _chunk_bytes_for_resolution(self) -> int: # this is true for all CSPP Geo GRB output (226 for all sectors) and full disk from other sources # 250 has been seen for AWS/CLASS CONUS, Mesoscale 1, and Mesoscale 2 files # we align this with 4 on-disk chunks at 500m, so it will be 2 on-disk chunks for 1km, and 1 for 2km - high_res_elems_disk_aligned = np.round(max(num_high_res_elems_per_dim / (4 * 226), 1)) * (4 * 226) + high_res_elems_disk_aligned = round(max(num_high_res_elems_per_dim / (4 * 226), 1)) * (4 * 226) low_res_factor = int(self.filetype_info.get("resolution", 2000) // 500) res_elems_per_dim = int(high_res_elems_disk_aligned / low_res_factor) - return (res_elems_per_dim ** 2) * 4 + return (res_elems_per_dim ** 2) * 2 # 16-bit integers on disk @staticmethod def _rename_dims(nc): diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index a6acd7f027..1c7d2c78ef 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -136,18 +136,14 @@ def generate_l1b_filename(chan_name: str) -> str: @pytest.fixture() def c01_refl(tmp_path) -> xr.DataArray: - # 4 bytes for 32-bit floats - # 4 on-disk chunks for 500 meter data - # 226 on-disk chunk size - # Square (**2) for 2D size - with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + with _apply_dask_chunk_size(): reader = _create_reader_for_data(tmp_path, "C01", None, 1000) return reader.load(["C01"])["C01"] @pytest.fixture() def c01_rad(tmp_path) -> xr.DataArray: - with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + with _apply_dask_chunk_size(): reader = _create_reader_for_data(tmp_path, "C01", None, 1000) return reader.load([DataQuery(name="C01", calibration="radiance")])["C01"] @@ -169,14 +165,14 @@ def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: "valid_range": (0, 4095), }, ) - with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + with _apply_dask_chunk_size(): reader = _create_reader_for_data(tmp_path, "C01", rad, 1000) return reader.load([DataQuery(name="C01", calibration="radiance")])["C01"] @pytest.fixture() def c01_counts(tmp_path) -> xr.DataArray: - with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + with _apply_dask_chunk_size(): reader = _create_reader_for_data(tmp_path, "C01", None, 1000) return reader.load([DataQuery(name="C01", calibration="counts")])["C01"] @@ -187,7 +183,7 @@ def _load_data_array( clip_negative_radiances: bool = False, ): rad = _fake_c07_data() - with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + with _apply_dask_chunk_size(): reader = _create_reader_for_data( tmp_path, "C07", @@ -241,14 +237,22 @@ def _create_reader_for_data( return load_readers([str(data_path)], "abi_l1b", 
reader_kwargs=reader_kwargs)["abi_l1b"] +def _apply_dask_chunk_size(): + # 226 on-disk chunk size + # 8 on-disk chunks for 500 meter data + # Square (**2) for 2D size + # 4 bytes for 32-bit floats + return dask.config.set({"array.chunk-size": ((226 * 8) ** 2) * 4}) + + def _get_and_check_array(data_arr: xr.DataArray, exp_dtype: npt.DTypeLike) -> npt.NDArray: data_np = data_arr.data.compute() assert isinstance(data_arr, xr.DataArray) assert isinstance(data_arr.data, da.Array) assert isinstance(data_np, np.ndarray) res = 1000 if RAD_SHAPE[1000][0] == data_np.shape[0] else 2000 - assert data_arr.chunks[0][0] == 226 * (4 / (res / 500)) - assert data_arr.chunks[1][0] == 226 * (4 / (res / 500)) + assert data_arr.chunks[0][0] == 226 * (8 / (res / 500)) + assert data_arr.chunks[1][0] == 226 * (8 / (res / 500)) assert data_np.dtype == data_arr.dtype assert data_np.dtype == exp_dtype From 78c11911769be180470eb8da8f131d8fea4ad333 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Tue, 7 Nov 2023 08:10:07 +0100 Subject: [PATCH 394/702] Update thresholds for high-level and low-level cloud layers abased on feedback from CIRA, i.e. the developers of the GeoColor composite blend (personal communication, 27.09.2023). --- satpy/composites/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 5924d7794c..0deecec642 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1077,7 +1077,7 @@ class HighCloudCompositor(CloudCompositor): of where abs(latitude). """ - def __init__(self, name, transition_min=(200., 220.), transition_max=280, latitude_min=(30., 60.), + def __init__(self, name, transition_min=(210., 230.), transition_max=300, latitude_min=(30., 60.), transition_gamma=1.0, **kwargs): """Collect custom configuration values. @@ -1149,7 +1149,7 @@ class LowCloudCompositor(CloudCompositor): """ def __init__(self, name, values_land=(1,), values_sea=(0,), - range_land=(1.0, 4.5), + range_land=(0.0, 4.0), range_sea=(0.0, 4.0), transition_gamma=1.0, **kwargs): """Init info. From 68f93047257267520f73d273b7eb057c63e00ab1 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Tue, 7 Nov 2023 08:11:47 +0100 Subject: [PATCH 395/702] Add TODOs for code consolidation and optimization. --- satpy/composites/__init__.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 0deecec642..dea3830b22 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1109,6 +1109,8 @@ def __call__(self, projectables, **kwargs): `projectables` is expected to be a list or tuple with a single element: - index 0: Brightness temperature of a thermal infrared window channel (e.g. 10.5 microns). """ + # TODO Optimize and make sure that there are no early unnecessary dask computations. Is there a way to avoid + # computation of the latitude array? if len(projectables) != 1: raise ValueError(f"Expected 1 dataset, got {len(projectables)}") @@ -1191,6 +1193,7 @@ def __call__(self, projectables, **kwargs): - index 1. Brightness temperature of the window channel (used to filter out noise-induced false alarms). - index 2: Land-Sea-Mask. 
""" + # TODO Optimize and make sure that there are no early unnecessary dask computations if len(projectables) != 3: raise ValueError(f"Expected 3 datasets, got {len(projectables)}") @@ -1200,6 +1203,8 @@ def __call__(self, projectables, **kwargs): lsm = lsm.round() # Make sure to have whole numbers in case of smearing from resampling # Avoid spurious false alarms caused by noise in the 3.9um channel that can occur for very cold cloud tops + # TODO Consolidate this. Should it really be set to zero and thus within the threshold range? What if the + # lower threshold would be changed to -1 btd = btd.where(bt_win >= 230, 0.0) # Call CloudCompositor for land surface pixels From 07d10c9915928cf6d43794db4a89bfbf43fc2d7b Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Tue, 7 Nov 2023 09:04:49 +0100 Subject: [PATCH 396/702] Add url to land water mask to use for GeoColor low-level cloud detection. --- satpy/etc/composites/abi.yaml | 4 ++-- satpy/etc/composites/ahi.yaml | 4 ++-- satpy/etc/composites/fci.yaml | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml index a686b829ae..e950ba027f 100644 --- a/satpy/etc/composites/abi.yaml +++ b/satpy/etc/composites/abi.yaml @@ -783,8 +783,8 @@ composites: - name: C13 - compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: land_sea_mask - # TODO Change filename - filename: "/tcenas/proj/optcalimg/strandgren/GeoColor/static_data/gshhs_land_sea_mask_3km_i.tif" + url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif" + known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569" geo_color_background_with_low_clouds: compositor: !!python/name:satpy.composites.BackgroundCompositor diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 4f6ae6932f..e088bcf1a6 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -533,8 +533,8 @@ composites: - name: B13 - compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: land_sea_mask - # TODO Change filename - filename: "/tcenas/proj/optcalimg/strandgren/GeoColor/static_data/gshhs_land_sea_mask_3km_i.tif" + url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif" + known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569" geo_color_background_with_low_clouds: compositor: !!python/name:satpy.composites.BackgroundCompositor diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 25ba032cac..c8a32910ca 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -130,8 +130,8 @@ composites: - name: ir_105 - compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: land_sea_mask - # TODO Change filename - filename: "/tcenas/proj/optcalimg/strandgren/GeoColor/static_data/gshhs_land_sea_mask_3km_i.tif" + url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif" + known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569" geo_color_background_with_low_clouds: compositor: !!python/name:satpy.composites.BackgroundCompositor From 39bf45e9dd701339fec9531b4b9fc50b7be5ade4 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Tue, 7 Nov 2023 09:07:46 +0100 Subject: [PATCH 397/702] Update doc strings. 
--- satpy/composites/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index dea3830b22..bc5a199aa0 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1063,7 +1063,8 @@ class HighCloudCompositor(CloudCompositor): temperature (cloud opacity). In contrast to the `CloudCompositor`, the brightness temperature threshold at the lower end, used to identify high opaque clouds, is made a function of the latitude in order to have tropopause level clouds appear opaque at both high and low latitudes. This follows the Geocolor - implementation of high clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`). + implementation of high clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`), but + with some adjustments to the thresholds based on recent developments and feedback from CIRA. The two brightness temperature thresholds in `transition_min` are used together with the corresponding latitude limits in `latitude_min` to compute a modified version of `transition_min` that is later used @@ -1144,7 +1145,8 @@ class LowCloudCompositor(CloudCompositor): function of the `BTD` value itself. Two sets of thresholds are used, one set for land surface types (`range_land`) and another one for sea/water surface types (`range_sea`), respectively. Hence, this compositor requires a land-sea-mask as a prerequisite input. This follows the GeoColor - implementation of night-time low-level clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`). + implementation of night-time low-level clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`), but + with some adjustments to the thresholds based on recent developments and feedback from CIRA. Please note that the spectral test and thus the output of the compositor (using the expected input data) is only applicable during night-time. @@ -1165,8 +1167,6 @@ def __init__(self, name, values_land=(1,), values_sea=(0,), difference over land surface types. range_sea (tuple): Threshold values used for masking low-level clouds from the brightness temperature difference over sea/water. - latitude_min (tuple): Latitude values defining the intervals for computing latitude-dependent - transition_min values. transition_gamma (float): Gamma correction to apply to the alpha channel within the brightness temperature difference range. """ From 9f9e34ddad87fb69ebfdf9b4b89cd1f5872938fb Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 10:20:18 +0000 Subject: [PATCH 398/702] Add a reader for MODIS Level 3 files in CMG format. 
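
The new reader derives the CMG grid resolution from the HDF-EOS grid name,
e.g. "MCD_CMG_BRDF_0.05Deg". A standalone sketch of that parsing step, kept
close to what ModisL3GriddedHDFFileHandler does below:

    def resolution_from_gridname(gridname):
        # "MCD_CMG_BRDF_0.05Deg" -> 0.05: the value sits between the last
        # underscore and the trailing "Deg".
        pos = gridname.rfind("_") + 1
        pos2 = gridname.rfind("Deg")
        return float(gridname[pos:pos2])

    assert resolution_from_gridname("MCD_CMG_BRDF_0.05Deg") == 0.05
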
--- satpy/readers/hdfeos_base.py | 5 +- satpy/readers/modis_l3.py | 111 +++++++++++++++++++++++++++++++++++ 2 files changed, 115 insertions(+), 1 deletion(-) create mode 100644 satpy/readers/modis_l3.py diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index f60040a46f..9964eeb2e1 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -148,7 +148,10 @@ def _read_mda(cls, lines, element=None): @classmethod def _split_line(cls, line, lines): - key, val = line.split("=") + try: + key, val = line.split("=") + except ValueError: + key, val = line.split("=", maxsplit=1) key = key.strip() val = val.strip() try: diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py new file mode 100644 index 0000000000..3c60efba87 --- /dev/null +++ b/satpy/readers/modis_l3.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Modis level 3 hdf-eos format reader. + +Introduction +------------ + +The ``modis_l3`` reader reads Modis L3 products in hdf-eos format. +Since there are a multitude of different level 3 datasets not all of theses are implemented (yet). + + +Currently the reader supports: + - mcd43c1: BRDF/Albedo Model Parameters dataset + - mcd43c3: BRDF/Albedo Albedo dataset + +To get a list of the available datasets for a given file refer to the "Load data" section in :doc:`../reading`. 
+ +""" +import logging + +from pyresample import geometry + +from satpy.readers.hdfeos_base import HDFEOSGeoReader +from satpy.utils import get_legacy_chunk_size + +logger = logging.getLogger(__name__) +CHUNK_SIZE = get_legacy_chunk_size() + + +class ModisL3GriddedHDFFileHandler(HDFEOSGeoReader): + """File handler for MODIS HDF-EOS Level 3 CMG gridded files.""" + + def __init__(self, filename, filename_info, filetype_info, **kwargs): + """Init the file handler.""" + super().__init__(filename, filename_info, filetype_info, **kwargs) + + # Initialise number of rows and columns + self.nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] + self.ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] + + # Get the grid name and other projection info + gridname = self.metadata["GridStructure"]["GRID_1"]["GridName"] + if "CMG" not in gridname: + raise ValueError("Only CMG grids are supported") + + # Get the grid resolution + pos = gridname.rfind("_") + 1 + pos2 = gridname.rfind("Deg") + self.resolution = float(gridname[pos:pos2]) + + upperleft = self.metadata["GridStructure"]["GRID_1"]["UpperLeftPointMtrs"] + lowerright = self.metadata["GridStructure"]["GRID_1"]["LowerRightMtrs"] + + self.area_extent = (upperleft[0], lowerright[1], lowerright[0], upperleft[1]) + + + def available_datasets(self, configured_datasets=None): + """Automatically determine datasets provided by this file.""" + logger.debug("Available_datasets begin...") + + ds_dict = self.sd.datasets() + + yield from super().available_datasets(configured_datasets) + common = {"file_type": "mcd43_cmg_hdf", "resolution": self.resolution} + for key in ds_dict.keys(): + if "/" in key: # not a dataset + continue + yield True, {"name": key} | common + + def get_dataset(self, dataset_id, dataset_info): + """Get DataArray for specified dataset.""" + dataset_name = dataset_id["name"] + dataset = self.load_dataset(dataset_name, dataset_info.pop("category", False)) + self._add_satpy_metadata(dataset_id, dataset) + + return dataset + + + def get_area_def(self, dsid): + """Get the area definition. + + This is fixed, but not defined in the file. So we must + generate it ourselves with some assumptions. + """ + proj_param = "EPSG:4326" + + area = geometry.AreaDefinition("gridded_modis", + "A gridded L3 MODIS area", + "longlat", + proj_param, + self.ncols, + self.nrows, + self.area_extent) + self.area = area + + return self.area From 7c7d5fc5670e76574f815396c239d552688b9dfd Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 10:40:04 +0000 Subject: [PATCH 399/702] Update reader for MODIS Level 3 files in CMG format. 
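
One quirk handled here: a few CMG products store the UpperLeftPointMtrs and
LowerRightMtrs corner coordinates as degrees multiplied by one million, and
some grid names carry no explicit resolution (the reader then falls back to
360 degrees divided by the number of columns). A minimal sketch of the corner
normalisation, with illustrative inputs rather than real file metadata:

    def cmg_area_extent(upperleft, lowerright):
        # Scale corners back to degrees if they look like millionths of degrees.
        if lowerright[0] > 1e6:
            upperleft = tuple(val / 1e6 for val in upperleft)
            lowerright = tuple(val / 1e6 for val in lowerright)
        # (lon_min, lat_min, lon_max, lat_max) ordering used for the area extent
        return (upperleft[0], lowerright[1], lowerright[0], upperleft[1])

    assert cmg_area_extent((-180000000., 90000000.),
                           (180000000., -90000000.)) == (-180., -90., 180., 90.)
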
--- satpy/etc/readers/modis_l3.yaml | 16 ++++++++++++++++ satpy/readers/modis_l3.py | 13 ++++++++++++- 2 files changed, 28 insertions(+), 1 deletion(-) create mode 100644 satpy/etc/readers/modis_l3.yaml diff --git a/satpy/etc/readers/modis_l3.yaml b/satpy/etc/readers/modis_l3.yaml new file mode 100644 index 0000000000..5ad2f32e04 --- /dev/null +++ b/satpy/etc/readers/modis_l3.yaml @@ -0,0 +1,16 @@ +reader: + name: modis_l3 + short_name: MODIS l3 + long_name: MODIS Level 3 (mcd43) data in HDF-EOS format + description: MODIS HDF-EOS L3 Reader + status: Beta + supports_fsspec: false + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [modis] + +file_types: + mcd43_cmg_hdf: + file_patterns: + - 'MCD43C{prod_type}.A{start_time:%Y%j}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' + - 'M{platform_indicator:1s}D09CMG.A{start_time:%Y%j}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' + file_reader: !!python/name:satpy.readers.modis_l3.ModisL3GriddedHDFFileHandler diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 3c60efba87..2b9387ed58 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -61,11 +61,22 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): # Get the grid resolution pos = gridname.rfind("_") + 1 pos2 = gridname.rfind("Deg") - self.resolution = float(gridname[pos:pos2]) + + # Some products don't have resolution listed. + if pos < 0 or pos2 < 0: + self.resolution = 360. / self.ncols + else: + self.resolution = float(gridname[pos:pos2]) upperleft = self.metadata["GridStructure"]["GRID_1"]["UpperLeftPointMtrs"] lowerright = self.metadata["GridStructure"]["GRID_1"]["LowerRightMtrs"] + # For some reason, a few of the CMG products multiply their + # decimal degree extents by one million. This fixes it. + if lowerright[0] > 1e6: + upperleft = tuple(val / 1e6 for val in upperleft) + lowerright = tuple(val / 1e6 for val in lowerright) + self.area_extent = (upperleft[0], lowerright[1], lowerright[0], upperleft[1]) From 0de217a4c7f7db8e343d6e1173ca0103cc37c57a Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 10:44:15 +0000 Subject: [PATCH 400/702] Update MODIS L3 docstring. --- satpy/readers/modis_l3.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 2b9387ed58..86560624a8 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -21,12 +21,13 @@ ------------ The ``modis_l3`` reader reads Modis L3 products in hdf-eos format. -Since there are a multitude of different level 3 datasets not all of theses are implemented (yet). +There are multiple level 3 products, including some on sinusoidal grids and some on the climate modeling grid (CMG). +This reader supports the CMG products at present, and the sinusoidal products will be added if there is demand. -Currently the reader supports: - - mcd43c1: BRDF/Albedo Model Parameters dataset - - mcd43c3: BRDF/Albedo Albedo dataset +The reader has been tested with: + - MCD43c*: BRDF/Albedo data, such as parameters, albedo and nbar + - MOD09CMG: Surface Reflectance on climate monitoring grid. To get a list of the available datasets for a given file refer to the "Load data" section in :doc:`../reading`. From b4cf2c43447a6c87ceb9b1a767b0ffc5b036e3d0 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 10:47:12 +0000 Subject: [PATCH 401/702] Restructure the L3 MODIS reader. 
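
For reference, once the reader and its YAML configuration are in place, usage
follows the normal Satpy pattern. The file and dataset names below are only
illustrative and depend on the product at hand:

    from satpy import Scene

    # Hypothetical MCD43C3 granule matching the pattern in modis_l3.yaml
    scn = Scene(reader="modis_l3", filenames=["MCD43C3.A2023193.061.2023204130943.hdf"])
    print(scn.available_dataset_names())   # names discovered from the HDF-EOS file
    scn.load(["Albedo_BSA_Band1"])         # example dataset name, product dependent
    print(scn["Albedo_BSA_Band1"].attrs["area"])  # the EPSG:4326 CMG area definition
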
--- satpy/readers/modis_l3.py | 31 +++++++++++++++++++------------ 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 86560624a8..bff33b190c 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -46,15 +46,8 @@ class ModisL3GriddedHDFFileHandler(HDFEOSGeoReader): """File handler for MODIS HDF-EOS Level 3 CMG gridded files.""" - def __init__(self, filename, filename_info, filetype_info, **kwargs): - """Init the file handler.""" - super().__init__(filename, filename_info, filetype_info, **kwargs) - - # Initialise number of rows and columns - self.nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] - self.ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] - - # Get the grid name and other projection info + def _sort_grid(self): + """Get the grid properties.""" gridname = self.metadata["GridStructure"]["GRID_1"]["GridName"] if "CMG" not in gridname: raise ValueError("Only CMG grids are supported") @@ -65,9 +58,9 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): # Some products don't have resolution listed. if pos < 0 or pos2 < 0: - self.resolution = 360. / self.ncols + resolution = 360. / self.ncols else: - self.resolution = float(gridname[pos:pos2]) + resolution = float(gridname[pos:pos2]) upperleft = self.metadata["GridStructure"]["GRID_1"]["UpperLeftPointMtrs"] lowerright = self.metadata["GridStructure"]["GRID_1"]["LowerRightMtrs"] @@ -78,7 +71,21 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): upperleft = tuple(val / 1e6 for val in upperleft) lowerright = tuple(val / 1e6 for val in lowerright) - self.area_extent = (upperleft[0], lowerright[1], lowerright[0], upperleft[1]) + area_extent = (upperleft[0], lowerright[1], lowerright[0], upperleft[1]) + + return resolution, area_extent + + + def __init__(self, filename, filename_info, filetype_info, **kwargs): + """Init the file handler.""" + super().__init__(filename, filename_info, filetype_info, **kwargs) + + # Initialise number of rows and columns + self.nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] + self.ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] + + # Get the grid name and other projection info + self.resolution, self.area_extent = self._sort_grid() def available_datasets(self, configured_datasets=None): From 1457ac67e7457ab1c05d2472a73c0b632864278a Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 11:44:39 +0000 Subject: [PATCH 402/702] Add tests for MODIS L3 reader. 
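
The tests added below compare the reader's output grid against a fixed
0.05-degree global area. That expectation can be reproduced in isolation with
pyresample, using the same constructor arguments as the test helper:

    from pyresample import geometry

    cmg_area = geometry.AreaDefinition(
        "gridded_modis", "A gridded L3 MODIS area", "longlat",
        "EPSG:4326",            # geographic lat/lon grid
        7200, 3600,             # columns, rows
        (-180, -90, 180, 90))   # lon/lat extent
    assert abs(cmg_area.pixel_size_x - 0.05) < 1e-9
    assert abs(cmg_area.pixel_size_y - 0.05) < 1e-9
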
--- .../modis_tests/_modis_fixtures.py | 80 ++++++++++++++++++- .../reader_tests/modis_tests/conftest.py | 2 + .../reader_tests/modis_tests/test_modis_l3.py | 78 ++++++++++++++++++ 3 files changed, 159 insertions(+), 1 deletion(-) create mode 100644 satpy/tests/reader_tests/modis_tests/test_modis_l3.py diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index 66221c613e..aff84de7be 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -51,6 +51,9 @@ def _shape_for_resolution(resolution: int) -> tuple[int, int]: + # Case of a CMG 0.05 degree file, for L3 tests + if resolution == -999: + return 3600, 7200 assert resolution in RES_TO_REPEAT_FACTOR factor = RES_TO_REPEAT_FACTOR[resolution] if factor == 1: @@ -252,7 +255,10 @@ def create_hdfeos_test_file(filename: str, if geo_resolution is None or file_shortname is None: raise ValueError("'geo_resolution' and 'file_shortname' are required when including metadata.") setattr(h, 'CoreMetadata.0', _create_core_metadata(file_shortname)) # noqa - setattr(h, 'StructMetadata.0', _create_struct_metadata(geo_resolution)) # noqa + if geo_resolution == -999 or geo_resolution == -9999: + setattr(h, 'StructMetadata.0', _create_struct_metadata_cmg(geo_resolution)) # noqa + else: + setattr(h, 'StructMetadata.0', _create_struct_metadata(geo_resolution)) # noqa setattr(h, 'ArchiveMetadata.0', _create_header_metadata()) # noqa for var_name, var_info in variable_infos.items(): @@ -326,6 +332,31 @@ def _create_struct_metadata(geo_resolution: int) -> str: return struct_metadata_header +def _create_struct_metadata_cmg(res) -> str: + # Case of a MOD09 file + gridline = 'GridName="MOD09CMG"\n' + upleft = "UpperLeftPointMtrs=(-180000000.000000,90000000.000000)\n" + upright = "LowerRightMtrs=(180000000.000000,-90000000.000000)\n" + if res == -9999: + # Case of a MCD43 file + gridline = 'GridName="MCD_CMG_BRDF_0.05Deg"\n' + upleft = "UpperLeftPointMtrs=(-180.000000,90.000000)\n" + upright = "LowerRightMtrs=(180.000000,-90.000000)\n" + + struct_metadata_header = ("GROUP=SwathStructure\n" + "END_GROUP=SwathStructure\n" + "GROUP=GridStructure\n" + "GROUP=GRID_1\n" + f"{gridline}\n" + "XDim=7200\n" + "YDim=3600\n" + f"{upleft}\n" + f"{upright}\n" + "END_GROUP=GRID_1\n" + "END_GROUP=GridStructure\nEND") + return struct_metadata_header + + def _create_header_metadata() -> str: archive_metadata_header = "GROUP = ARCHIVEDMETADATA\nEND_GROUP = ARCHIVEDMETADATA\nEND" return archive_metadata_header @@ -471,6 +502,28 @@ def _get_cloud_mask_variable_info(var_name: str, resolution: int) -> dict: } +def _get_l3_refl_variable_info(var_name: str) -> dict: + shape = (3600, 7200) + data = np.zeros((shape[0], shape[1]), dtype=np.int16) + row_dim_name = "XDim" + col_dim_name = "YDim" + return { + var_name: { + "data": data, + "type": SDC.INT16, + "fill_value": -28672, + "attrs": { + # dim_labels are just unique dimension names, may not match exactly with real world files + "dim_labels": [row_dim_name, + col_dim_name], + "valid_range": (-100, 16000), + "scale_factor": 1e-4, + "add_offset": 0., + }, + }, + } + + def _get_mask_byte1_variable_info() -> dict: shape = _shape_for_resolution(1000) data = np.zeros((shape[0], shape[1]), dtype=np.uint16) @@ -537,6 +590,31 @@ def modis_l2_nasa_mod35_file(tmpdir_factory) -> list[str]: return [full_path] +def generate_nasa_l3_filename(prefix: str) -> str: + """Generate a file name that follows MODIS 09 
L3 convention in a temporary directory.""" + now = datetime.now() + return f"{prefix}.A{now:%Y%j}.061.{now:%Y%j%H%M%S}.hdf" + + +@pytest.fixture(scope="session") +def modis_l3_nasa_mod09_file(tmpdir_factory) -> list[str]: + """Create a single MOD09 L3 HDF4 file with headers.""" + filename = generate_nasa_l3_filename("MOD09CMG") + full_path = str(tmpdir_factory.mktemp("modis_l3").join(filename)) + variable_infos = _get_l3_refl_variable_info("Coarse_Resolution_Surface_Reflectance_Band_2") + create_hdfeos_test_file(full_path, variable_infos, geo_resolution=-999, file_shortname="MOD09") + return [full_path] + +@pytest.fixture(scope="session") +def modis_l3_nasa_mod43_file(tmpdir_factory) -> list[str]: + """Create a single MOD09 L3 HDF4 file with headers.""" + filename = generate_nasa_l3_filename("MCD43C1") + full_path = str(tmpdir_factory.mktemp("modis_l3").join(filename)) + variable_infos = _get_l3_refl_variable_info("BRDF_Albedo_Parameter1_Band2") + create_hdfeos_test_file(full_path, variable_infos, geo_resolution=-9999, file_shortname="MCD43C1") + return [full_path] + + @pytest.fixture(scope="session") def modis_l2_nasa_mod35_mod03_files(modis_l2_nasa_mod35_file, modis_l1b_nasa_mod03_file) -> list[str]: """Create a MOD35 L2 HDF4 file and MOD03 L1b geolocation file.""" diff --git a/satpy/tests/reader_tests/modis_tests/conftest.py b/satpy/tests/reader_tests/modis_tests/conftest.py index e6a8432653..309b16321f 100644 --- a/satpy/tests/reader_tests/modis_tests/conftest.py +++ b/satpy/tests/reader_tests/modis_tests/conftest.py @@ -32,4 +32,6 @@ modis_l2_nasa_mod06_file, modis_l2_nasa_mod35_file, modis_l2_nasa_mod35_mod03_files, + modis_l3_nasa_mod09_file, + modis_l3_nasa_mod43_file, ) diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py new file mode 100644 index 0000000000..1203ecf205 --- /dev/null +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
+"""Unit tests for MODIS L3 HDF reader.""" + +from __future__ import annotations + +import dask.array as da +import pytest +from pyresample import geometry +from pytest_lazyfixture import lazy_fixture + +from satpy import Scene, available_readers + +from ._modis_fixtures import _shape_for_resolution + + +def _expected_area(): + proj_param = "EPSG:4326" + + return geometry.AreaDefinition("gridded_modis", + "A gridded L3 MODIS area", + "longlat", + proj_param, + 7200, + 3600, + (-180, -90, 180, 90)) + + +class TestModisL3: + """Test MODIS L3 reader.""" + + def test_available_reader(self): + """Test that MODIS L3 reader is available.""" + assert "modis_l3" in available_readers() + + @pytest.mark.parametrize( + ("loadable", "filename"), + [ + ("Coarse_Resolution_Surface_Reflectance_Band_2", lazy_fixture("modis_l3_nasa_mod09_file")), + ("BRDF_Albedo_Parameter1_Band2",lazy_fixture("modis_l3_nasa_mod43_file")), + ] + ) + def test_scene_available_datasets(self, loadable, filename): + """Test that datasets are available.""" + scene = Scene(reader="modis_l3", filenames=filename) + available_datasets = scene.all_dataset_names() + assert len(available_datasets) > 0 + assert loadable in available_datasets + + def test_load_l3_dataset(self, modis_l3_nasa_mod09_file): + """Load and check an L2 variable.""" + scene = Scene(reader="modis_l3", filenames=modis_l3_nasa_mod09_file) + + ds_name = "Coarse_Resolution_Surface_Reflectance_Band_2" + scene.load([ds_name]) + + data_arr = scene[ds_name] + assert isinstance(data_arr.data, da.Array) + data_arr = data_arr.compute() + + assert data_arr.shape == _shape_for_resolution(-999) + assert data_arr.attrs.get("resolution") == 0.05 + assert data_arr.attrs.get("area") == _expected_area() From 1f501168138ab74c143975fc160ab5fc1129119d Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 11:51:39 +0000 Subject: [PATCH 403/702] Simplify the MODIS L3 code + tests somewhat. --- satpy/readers/modis_l3.py | 25 ++++++++++++------- .../modis_tests/_modis_fixtures.py | 4 +-- 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index bff33b190c..dc4600790d 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -46,22 +46,31 @@ class ModisL3GriddedHDFFileHandler(HDFEOSGeoReader): """File handler for MODIS HDF-EOS Level 3 CMG gridded files.""" - def _sort_grid(self): - """Get the grid properties.""" + def _get_res(self): + """Compute the resolution from the file metadata.""" gridname = self.metadata["GridStructure"]["GRID_1"]["GridName"] if "CMG" not in gridname: raise ValueError("Only CMG grids are supported") - # Get the grid resolution + # Get the grid resolution from the grid name pos = gridname.rfind("_") + 1 pos2 = gridname.rfind("Deg") # Some products don't have resolution listed. if pos < 0 or pos2 < 0: - resolution = 360. / self.ncols + self.resolution = 360. 
/ self.ncols else: - resolution = float(gridname[pos:pos2]) + self.resolution = float(gridname[pos:pos2]) + + + def _sort_grid(self): + """Get the grid properties.""" + + # First, get the grid resolution + self._get_res() + + # Now compute the data extent upperleft = self.metadata["GridStructure"]["GRID_1"]["UpperLeftPointMtrs"] lowerright = self.metadata["GridStructure"]["GRID_1"]["LowerRightMtrs"] @@ -71,9 +80,7 @@ def _sort_grid(self): upperleft = tuple(val / 1e6 for val in upperleft) lowerright = tuple(val / 1e6 for val in lowerright) - area_extent = (upperleft[0], lowerright[1], lowerright[0], upperleft[1]) - - return resolution, area_extent + self.area_extent = (upperleft[0], lowerright[1], lowerright[0], upperleft[1]) def __init__(self, filename, filename_info, filetype_info, **kwargs): @@ -85,7 +92,7 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): self.ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] # Get the grid name and other projection info - self.resolution, self.area_extent = self._sort_grid() + self._sort_grid() def available_datasets(self, configured_datasets=None): diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index aff84de7be..f076e100ba 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -254,11 +254,11 @@ def create_hdfeos_test_file(filename: str, if include_metadata: if geo_resolution is None or file_shortname is None: raise ValueError("'geo_resolution' and 'file_shortname' are required when including metadata.") - setattr(h, 'CoreMetadata.0', _create_core_metadata(file_shortname)) # noqa - if geo_resolution == -999 or geo_resolution == -9999: + elif geo_resolution == -999 or geo_resolution == -9999: setattr(h, 'StructMetadata.0', _create_struct_metadata_cmg(geo_resolution)) # noqa else: setattr(h, 'StructMetadata.0', _create_struct_metadata(geo_resolution)) # noqa + setattr(h, 'CoreMetadata.0', _create_core_metadata(file_shortname)) # noqa setattr(h, 'ArchiveMetadata.0', _create_header_metadata()) # noqa for var_name, var_info in variable_infos.items(): From 7afa5773f4e03f478e4725a8bb091e5602fbdd8c Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 11:57:40 +0000 Subject: [PATCH 404/702] Further simplify the MODIS L3 tests. 
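
The fixture changes below factor the file creation into a single helper shared
by the session-scoped fixtures. The underlying pytest pattern, reduced to a
minimal standalone example (the names here are made up, not the Satpy
fixtures):

    import pytest

    @pytest.fixture(scope="session")
    def shared_test_file(tmpdir_factory):
        # Built once per test session and reused by every test that requests it.
        path = tmpdir_factory.mktemp("data").join("example.txt")
        path.write_text("fake payload", encoding="utf-8")
        return str(path)

    def test_reads_shared_file(shared_test_file):
        with open(shared_test_file) as fh:
            assert fh.read() == "fake payload"
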
--- .../modis_tests/_modis_fixtures.py | 32 ++++++++++++------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index f076e100ba..40e448e067 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -596,23 +596,33 @@ def generate_nasa_l3_filename(prefix: str) -> str: return f"{prefix}.A{now:%Y%j}.061.{now:%Y%j%H%M%S}.hdf" +def modis_l3_file(tmpdir_factory, f_prefix, var_name, geo_res, f_short): + """Create a MODIS L3 file of the desired type.""" + filename = generate_nasa_l3_filename(f_prefix) + full_path = str(tmpdir_factory.mktemp("modis_l3").join(filename)) + variable_infos = _get_l3_refl_variable_info(var_name) + create_hdfeos_test_file(full_path, variable_infos, geo_resolution=geo_res, file_shortname=f_short) + return [full_path] + + @pytest.fixture(scope="session") def modis_l3_nasa_mod09_file(tmpdir_factory) -> list[str]: """Create a single MOD09 L3 HDF4 file with headers.""" - filename = generate_nasa_l3_filename("MOD09CMG") - full_path = str(tmpdir_factory.mktemp("modis_l3").join(filename)) - variable_infos = _get_l3_refl_variable_info("Coarse_Resolution_Surface_Reflectance_Band_2") - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=-999, file_shortname="MOD09") - return [full_path] + return modis_l3_file(tmpdir_factory, + "MOD09CMG", + "Coarse_Resolution_Surface_Reflectance_Band_2", + -999, + "MOD09") + @pytest.fixture(scope="session") def modis_l3_nasa_mod43_file(tmpdir_factory) -> list[str]: - """Create a single MOD09 L3 HDF4 file with headers.""" - filename = generate_nasa_l3_filename("MCD43C1") - full_path = str(tmpdir_factory.mktemp("modis_l3").join(filename)) - variable_infos = _get_l3_refl_variable_info("BRDF_Albedo_Parameter1_Band2") - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=-9999, file_shortname="MCD43C1") - return [full_path] + """Create a single MVCD43 L3 HDF4 file with headers.""" + return modis_l3_file(tmpdir_factory, + "MCD43C1", + "BRDF_Albedo_Parameter1_Band2", + -9999, + "MCD43C1") @pytest.fixture(scope="session") From 3cbc7024ffd2f4d1734fd411c421e66360af1af1 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 14:03:49 +0000 Subject: [PATCH 405/702] Add reader for OSI SAF L3 products on EASE and polar stereographic projections. --- satpy/etc/readers/osisaf_nc.yaml | 107 +++++++++++++++++++++ satpy/readers/osisaf_l3_nc.py | 154 +++++++++++++++++++++++++++++++ 2 files changed, 261 insertions(+) create mode 100644 satpy/etc/readers/osisaf_nc.yaml create mode 100644 satpy/readers/osisaf_l3_nc.py diff --git a/satpy/etc/readers/osisaf_nc.yaml b/satpy/etc/readers/osisaf_nc.yaml new file mode 100644 index 0000000000..214345da3a --- /dev/null +++ b/satpy/etc/readers/osisaf_nc.yaml @@ -0,0 +1,107 @@ +reader: + name: osisaf_nc + short_name: OSI-SAF netCDF + long_name: OSI-SAF data in netCDF4 format + description: > + A reader for OSI-SAF data in netCDF4 format. 
+ References: + + - Dataset descriptions: https://osi-saf.eumetsat.int/documentation/products-documentation + + status: Beta + supports_fsspec: true + sensors: [osisaf] + default_channels: [] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + +file_types: + osi_sea_ice_conc: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_conc_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + osi_sea_ice_edge: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_edge_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + osi_sea_ice_emis: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_emis_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + osi_sea_ice_type: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_type_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + +datasets: + # Shared between various file types + status_flag: + name: status_flag + file_type: [osi_sea_ice_conc, osi_sea_ice_edge, osi_sea_ice_type] + + orbit_num_amsr: + name: orbit_num_amsr + file_type: [osi_sea_ice_edge, osi_sea_ice_type] + orbit_num_ascat: + name: orbit_num_ascat + file_type: [osi_sea_ice_edge, osi_sea_ice_type] + orbit_num_ssmis: + name: orbit_num_ssmis + file_type: [osi_sea_ice_edge, osi_sea_ice_type] + param_used: + name: param_used + file_type: [osi_sea_ice_edge, osi_sea_ice_type] + uncertainty: + name: uncertainty + file_type: [osi_sea_ice_edge, osi_sea_ice_type] + + # Sea ice concentration datasets + algorithm_uncertainty: + name: algorithm_uncertainty + file_type: osi_sea_ice_conc + confidence_level: + name: confidence_level + file_type: osi_sea_ice_conc + ice_conc: + name: ice_conc + file_type: osi_sea_ice_conc + ice_conc_unfiltered: + name: ice_conc_unfiltered + file_type: osi_sea_ice_conc + masks: + name: masks + file_type: osi_sea_ice_conc + smearing_uncertainty: + name: smearing_uncertainty + file_type: osi_sea_ice_conc + total_uncertainty: + name: total_uncertainty + file_type: osi_sea_ice_conc + + # Ice edge product + ice_edge: + name: ice_edge + file_type: osi_sea_ice_edge + + # Ice type product + ice_type: + name: ice_type + file_type: osi_sea_ice_type + + # Ice emis product + e: + name: e + file_type: osi_sea_ice_emis + ev: + name: ev + file_type: osi_sea_ice_emis + flag: + name: flag + file_type: osi_sea_ice_emis + R: + name: R + file_type: osi_sea_ice_emis + S: + name: S + file_type: osi_sea_ice_emis + teff: + name: teff + file_type: osi_sea_ice_emis + u: + name: u + file_type: osi_sea_ice_emis diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py new file mode 100644 index 0000000000..794119a300 --- /dev/null +++ b/satpy/readers/osisaf_l3_nc.py @@ -0,0 +1,154 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +# type: ignore +"""A reader for OSI-SAF level 3 products in netCDF format.""" + +import logging +from datetime import datetime + +import numpy as np + +from satpy.readers.netcdf_utils import NetCDF4FileHandler + +logger = logging.getLogger(__name__) + + +class OSISAFL3NCFileHandler(NetCDF4FileHandler): + """Reader for the OSISAF l3 netCDF format.""" + + + @staticmethod + def _parse_datetime(datestr): + return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S") + + def _get_ease_grid(self): + """Set up the EASE grid.""" + from pyresample import create_area_def + + proj4str = self["Lambert_Azimuthal_Grid/attr/proj4_string"] + x_size = self["/dimension/xc"] + y_size = self["/dimension/yc"] + p_lowerleft_lat = self["lat"].values[y_size - 1, 0] + p_lowerleft_lon = self["lon"].values[y_size - 1, 0] + p_upperright_lat = self["lat"].values[0, x_size - 1] + p_upperright_lon = self["lon"].values[0, x_size - 1] + area_extent = [p_lowerleft_lon, p_lowerleft_lat, p_upperright_lon, p_upperright_lat] + area_def = create_area_def(area_id="osisaf_lambert_azimuthal_equal_area", + description="osisaf_lambert_azimuthal_equal_area", + proj_id="osisaf_lambert_azimuthal_equal_area", + projection=proj4str, width=x_size, height=y_size, area_extent=area_extent, + units="deg") + return area_def + + def _get_polar_stereographic_grid(self): + """Set up the polar stereographic grid.""" + from pyresample import create_area_def + + proj4str = self["Polar_Stereographic_Grid/attr/proj4_string"] + x_size = self["/dimension/xc"] + y_size = self["/dimension/yc"] + p_lowerleft_lat = self["lat"].values[y_size - 1, 0] + p_lowerleft_lon = self["lon"].values[y_size - 1, 0] + p_upperright_lat = self["lat"].values[0, x_size - 1] + p_upperright_lon = self["lon"].values[0, x_size - 1] + area_extent = [p_lowerleft_lon, p_lowerleft_lat, p_upperright_lon, p_upperright_lat] + area_def = create_area_def(area_id="osisaf_polar_stereographic", + description="osisaf_polar_stereographic", + proj_id="osisaf_polar_stereographic", + projection=proj4str, width=x_size, height=y_size, area_extent=area_extent, + units="deg") + return area_def + + + def get_area_def(self, area_id): + """Override abstract baseclass method""" + + if self.filename_info["grid"] == "ease": + return self._get_ease_grid() + elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": + return self._get_polar_stereographic_grid() + else: + raise ValueError(f"Unknown grid type: {self.filename_info['grid']}") + + def _get_ds_attr(self, a_name): + """Get a dataset attribute and check it's valid.""" + try: + return self[a_name] + except KeyError: + return None + + def get_dataset(self, dataset_id, ds_info): + """Load a dataset.""" + logger.debug(f"Reading {dataset_id['name']} from {self.filename}") + var_path = ds_info.get("file_key", f"{dataset_id['name']}") + + shape = self[var_path + "/shape"] + if shape[0] == 1: + # Remove the time dimension from dataset + data = self[var_path][0] + else: + data = self[var_path] + + file_units = ds_info.get("file_units") + if file_units is None: + file_units = self._get_ds_attr(var_path + "/attr/units") + if file_units is None: + file_units = 1 + + # Try to get the valid limits for the data. + # Not all datasets have these, so fall back on assuming no limits. 
+ valid_min = self._get_ds_attr(var_path + "/attr/valid_min") + valid_max = self._get_ds_attr(var_path + "/attr/valid_max") + if valid_min is not None and valid_max is not None: + data = data.where(data >= valid_min, np.nan) + data = data.where(data <= valid_max, np.nan) + + + # Try to get the scale and offset for the data. + # As above, not all datasets have these, so fall back on assuming no limits. + scale_factor = self._get_ds_attr(var_path + "/attr/scale_factor") + scale_offset = self._get_ds_attr(var_path + "/attr/add_offset") + if scale_offset is not None and scale_factor is not None: + data = (data * scale_factor + scale_offset) + + # Try to get the fill value for the data. + # If there isn"t one, assume all remaining pixels are valid. + fill_value = self._get_ds_attr(var_path + "/attr/_FillValue") + if fill_value is not None: + data = data.where(data != fill_value, np.nan) + + # Set proper dimension names + data = data.rename({"xc": "x", "yc": "y"}) + + ds_info.update({ + "units": ds_info.get("units", file_units), + "platform_name": self["/attr/platform_name"], + "sensor": self["/attr/instrument_type"] + }) + ds_info.update(dataset_id.to_dict()) + data.attrs.update(ds_info) + return data + + @property + def start_time(self): + return self._parse_datetime(self["/attr/start_date"]) + # return self._parse_datetime(self["/attr/start_date"]) + + @property + def end_time(self): + return self._parse_datetime(self["/attr/stop_date"]) From 300a9a1cf09d62c6e2b64d2131486f089e95761e Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 14:06:50 +0000 Subject: [PATCH 406/702] Fix typos and tidy osi saf reader. --- satpy/readers/osisaf_l3_nc.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 794119a300..e61e752299 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -118,7 +118,6 @@ def get_dataset(self, dataset_id, ds_info): data = data.where(data >= valid_min, np.nan) data = data.where(data <= valid_max, np.nan) - # Try to get the scale and offset for the data. # As above, not all datasets have these, so fall back on assuming no limits. scale_factor = self._get_ds_attr(var_path + "/attr/scale_factor") @@ -127,7 +126,7 @@ def get_dataset(self, dataset_id, ds_info): data = (data * scale_factor + scale_offset) # Try to get the fill value for the data. - # If there isn"t one, assume all remaining pixels are valid. + # If there isn't one, assume all remaining pixels are valid. fill_value = self._get_ds_attr(var_path + "/attr/_FillValue") if fill_value is not None: data = data.where(data != fill_value, np.nan) From 13fffa673909606376014941f644785b09cfd1f4 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 15:26:50 +0000 Subject: [PATCH 407/702] Add tests for the OSI SAF L3 reader. 
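
The tests added below build a small fake ice concentration dataset and check
that get_dataset applies the valid range, the scale/offset and the fill value.
A standalone sketch of that decode chain, using the same attribute values as
the fake dataset:

    import numpy as np
    import xarray as xr

    def decode(data, scale_factor=0.01, add_offset=0., valid_min=0,
               valid_max=10000, fill_value=-999):
        # Mask values outside the valid range, apply scale/offset, then mask the
        # fill value (in practice the fill is usually caught by the range already).
        data = data.where((data >= valid_min) & (data <= valid_max))
        data = data * scale_factor + add_offset
        return data.where(data != fill_value)

    raw = xr.DataArray(np.array([[-999, 1215, 11056], [200, 1, 4215]]))
    print(decode(raw).values)  # [[nan 12.15 nan] [2. 0.01 42.15]]
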
--- satpy/readers/osisaf_l3_nc.py | 8 +- satpy/tests/reader_tests/test_osisaf_l3.py | 233 +++++++++++++++++++++ 2 files changed, 237 insertions(+), 4 deletions(-) create mode 100644 satpy/tests/reader_tests/test_osisaf_l3.py diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index e61e752299..00f1176b6f 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -31,7 +31,6 @@ class OSISAFL3NCFileHandler(NetCDF4FileHandler): """Reader for the OSISAF l3 netCDF format.""" - @staticmethod def _parse_datetime(datestr): return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S") @@ -77,11 +76,12 @@ def _get_polar_stereographic_grid(self): def get_area_def(self, area_id): """Override abstract baseclass method""" - if self.filename_info["grid"] == "ease": - return self._get_ease_grid() + self.area_def = self._get_ease_grid() + return self.area_def elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": - return self._get_polar_stereographic_grid() + self.area_def = self._get_polar_stereographic_grid() + return self.area_def else: raise ValueError(f"Unknown grid type: {self.filename_info['grid']}") diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py new file mode 100644 index 0000000000..40cf4539e1 --- /dev/null +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -0,0 +1,233 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
+"""Module for testing the satpy.readers.osisaf_l3 module.""" + +import os +from datetime import datetime + +import numpy as np +import pytest +import xarray as xr + +from satpy import DataQuery +from satpy.readers.osisaf_l3_nc import OSISAFL3NCFileHandler + +stere_ds = xr.DataArray( + -999, + attrs={"grid_mapping_name": "polar_stereographic", + "false_easting": 0.0, + "false_northing": 0.0, + "semi_major_axis": 6378273.0, + "semi_minor_axis": 6356889.44891, + "straight_vertical_longitude_from_pole": 0.0, + "latitude_of_projection_origin": -90.0, + "standard_parallel": -70.0, + "proj4_string": "+proj=stere +a=6378273 +b=6356889.44891 +lat_0=-90 +lat_ts=-70 +lon_0=0", +}) + +ease_ds = xr.DataArray( + -999, + attrs={"grid_mapping_name": "lambert_azimuthal_equal_area", + "false_easting": 0.0, + "false_northing": 0.0, + "semi_major_axis": 6371228.0, + "longitude_of_projection_origin": 0.0, + "latitude_of_projection_origin": -90.0, + "proj4_string": "+proj=laea +a=6371228.0 +lat_0=-90 +lon_0=0", + }) + + +class TestOSISAFL3Reader: + """Test OSI-SAF level 3 netCDF reader.""" + + def setup_method(self, proj_type): + """Create a fake dataset.""" + self.base_data = np.array(([-999, 1215, 1125, 11056, 9500], [200, 1, -999, 4215, 5756])) + self.base_data = np.expand_dims(self.base_data, axis=0) + self.unc_data = np.array(([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])) + self.yc_data = np.array(([-10, -5, 0, 5, 10], [-10, -5, 0, 5, 10])) + self.xc_data = np.array(([-5, -5, -5, -5, -5], [5, 5, 5, 5, 5])) + self.time_data = np.array([1.]) + + self.lat_data = np.array(([-68, -69, -70, -71, -72], [-68, -69, -70, -71, -72])) + self.lon_data = np.array(([-60, -60, -60, -60, -60], [-65, -65, -65, -65, -65])) + self.xc = xr.DataArray( + self.xc_data, + dims=("yc", "xc"), + attrs={"standard_name": "projection_x_coordinate", "units": "km"} + ) + self.yc = xr.DataArray( + self.yc_data, + dims=("yc", "xc"), + attrs={"standard_name": "projection_y_coordinate", "units": "km"} + ) + self.time = xr.DataArray( + self.time_data, + dims=("time"), + attrs={"standard_name": "projection_y_coordinate", "units": "km"} + ) + self.lat = xr.DataArray( + self.lat_data, + dims=("yc", "xc"), + attrs={"standard_name": "latitude", "units": "degrees_north"} + ) + self.lon = xr.DataArray( + self.lon_data, + dims=("yc", "xc"), + attrs={"standard_name": "longitude", "units": "degrees_east"} + ) + self.conc = xr.DataArray( + self.base_data, + dims=("time", "yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, "units": "%", + "valid_min": 0, "valid_max": 10000, "standard_name": "sea_ice_area_fraction"} + ) + self.uncert = xr.DataArray( + self.unc_data, + dims=("yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, + "valid_min": 0, "valid_max": 10000, "standard_name": "total_uncertainty"} + ) + + data_vars = { + "ice_conc": self.conc, + "total_uncertainty": self.uncert, + "xc": self.xc, + "yc": self.yc, + "time": self.time, + "lat": self.lat, + "lon": self.lon, + "Lambert_Azimuthal_Grid": ease_ds, + "Polar_Stereographic_Grid": stere_ds} + self.fake_dataset = xr.Dataset( + data_vars=data_vars, + attrs={ + "start_date": "2022-12-15 00:00:00", + "stop_date": "2022-12-16 00:00:00", + "platform_name": "Multi-sensor analysis", + "instrument_type": "Multi-sensor analysis"}, + ) + + def test_instantiate_single_netcdf_file(self, tmp_path): + """Test initialization of file handlers - given a single netCDF file.""" + filename_info = {} + filetype_info = {} + tmp_filepath = tmp_path / "fake_dataset.nc" + 
self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + + + def test_get_dataset(self, tmp_path): + """Test retrieval of datasets.""" + filename_info = {} + filetype_info = {} + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + + res = test.get_dataset(DataQuery(name="ice_conc"), {"standard_name": "sea_ice_area_fraction"}) + # Check we remove singleton dimension + assert res.shape[0] == 2 + assert res.shape[1] == 5 + + # Test values are correct + test_ds = self.fake_dataset["ice_conc"][0].values + test_ds = np.where(test_ds == -999, np.nan, test_ds) + test_ds = np.where(test_ds > 10000, np.nan, test_ds) + np.testing.assert_allclose(res.values, test_ds / 100) + + res = test.get_dataset(DataQuery(name="total_uncertainty"), {"standard_name": "sea_ice_area_fraction"}) + assert res.shape[0] == 2 + assert res.shape[1] == 5 + + with pytest.raises(KeyError): + test.get_dataset(DataQuery(name="erroneous dataset"), {"standard_name": "erroneous dataset"}) + + def test_get_start_and_end_times(self, tmp_path): + """Test retrieval of the sensor name from the netCDF file.""" + good_start_time = datetime(2022, 12, 15, 0, 0, 0) + good_stop_time = datetime(2022, 12, 16, 0, 0, 0) + + filename_info = {} + filetype_info = {} + + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + + assert test.start_time == good_start_time + assert test.end_time == good_stop_time + + + def test_get_area_def_ease(self, tmp_path): + """Test getting the area definition for the EASE grid.""" + filename_info = {"grid": "ease"} + filetype_info = {} + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + + area_def = test.get_area_def(None) + assert area_def.description == "osisaf_lambert_azimuthal_equal_area" + assert area_def.proj_dict["R"] == 6371228 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "laea" + + assert area_def.width == 5 + assert area_def.height == 2 + np.testing.assert_allclose(area_def.area_extent, + (-2203574.302335, 1027543.572492, -1726299.781982, 996679.643829)) + + + def test_get_area_def_stere(self, tmp_path): + """Test getting the area definition for the polar stereographic grid.""" + filename_info = {"grid": "stere"} + filetype_info = {} + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + + area_def = test.get_area_def(None) + assert area_def.description == "osisaf_polar_stereographic" + assert area_def.proj_dict["a"] == 6378273.0 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lat_ts"] == -70 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "stere" + + assert area_def.width == 5 + assert area_def.height == 2 + np.testing.assert_allclose(area_def.area_extent, + (-2185821.7955, 1019265.4426, -1702157.4538, 982741.0642)) + + def test_get_area_def_bad(self, tmp_path): + """Test getting the area definition for the polar stereographic grid.""" + filename_info = 
{"grid": "turnips"} + filetype_info = {} + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + with pytest.raises(ValueError, match="Unknown grid type: turnips"): + test.get_area_def(None) From 35434d3ae172d795c0716e1125077d285fcbdf3a Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 15:40:01 +0000 Subject: [PATCH 408/702] Update satpy/readers/modis_l3.py Co-authored-by: David Hoese --- satpy/readers/modis_l3.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index dc4600790d..b5313d89cc 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -61,9 +61,6 @@ def _get_res(self): self.resolution = 360. / self.ncols else: self.resolution = float(gridname[pos:pos2]) - - - def _sort_grid(self): """Get the grid properties.""" From fb59a77e279f2cf8d9447ab8429ca1075ebc6bcf Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 15:41:39 +0000 Subject: [PATCH 409/702] Update MODIS L3 reader for review comments. --- satpy/readers/modis_l3.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index b5313d89cc..f8f94372cc 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -37,10 +37,8 @@ from pyresample import geometry from satpy.readers.hdfeos_base import HDFEOSGeoReader -from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) -CHUNK_SIZE = get_legacy_chunk_size() class ModisL3GriddedHDFFileHandler(HDFEOSGeoReader): @@ -61,6 +59,7 @@ def _get_res(self): self.resolution = 360. / self.ncols else: self.resolution = float(gridname[pos:pos2]) + def _sort_grid(self): """Get the grid properties.""" @@ -77,7 +76,7 @@ def _sort_grid(self): upperleft = tuple(val / 1e6 for val in upperleft) lowerright = tuple(val / 1e6 for val in lowerright) - self.area_extent = (upperleft[0], lowerright[1], lowerright[0], upperleft[1]) + return (upperleft[0], lowerright[1], lowerright[0], upperleft[1]) def __init__(self, filename, filename_info, filetype_info, **kwargs): @@ -89,7 +88,7 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): self.ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] # Get the grid name and other projection info - self._sort_grid() + self.area_extent = self._sort_grid() def available_datasets(self, configured_datasets=None): From 261fca6e38cba8a3a0fe145f8a5078e90bb25723 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 15:46:13 +0000 Subject: [PATCH 410/702] Update MODIS test fixtures to simplify. 
--- .../modis_tests/_modis_fixtures.py | 35 +++++++++++-------- 1 file changed, 21 insertions(+), 14 deletions(-) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index 40e448e067..e4272373b3 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -229,6 +229,16 @@ def generate_imapp_filename(suffix): return f"t1.{now:%y%j.%H%M}.{suffix}.hdf" +def _add_geo_metadata(h, geo_res): + """Add the geoinfo metadata to the fake file.""" + if geo_res == -999 or geo_res == -9999: + setattr(h, 'StructMetadata.0', _create_struct_metadata_cmg(geo_res)) # noqa + else: + setattr(h, 'StructMetadata.0', _create_struct_metadata(geo_res)) # noqa + + return h + + def create_hdfeos_test_file(filename: str, variable_infos: dict, geo_resolution: Optional[int] = None, @@ -254,10 +264,7 @@ def create_hdfeos_test_file(filename: str, if include_metadata: if geo_resolution is None or file_shortname is None: raise ValueError("'geo_resolution' and 'file_shortname' are required when including metadata.") - elif geo_resolution == -999 or geo_resolution == -9999: - setattr(h, 'StructMetadata.0', _create_struct_metadata_cmg(geo_resolution)) # noqa - else: - setattr(h, 'StructMetadata.0', _create_struct_metadata(geo_resolution)) # noqa + h = _add_geo_metadata(h, geo_resolution) setattr(h, 'CoreMetadata.0', _create_core_metadata(file_shortname)) # noqa setattr(h, 'ArchiveMetadata.0', _create_header_metadata()) # noqa @@ -344,16 +351,16 @@ def _create_struct_metadata_cmg(res) -> str: upright = "LowerRightMtrs=(180.000000,-90.000000)\n" struct_metadata_header = ("GROUP=SwathStructure\n" - "END_GROUP=SwathStructure\n" - "GROUP=GridStructure\n" - "GROUP=GRID_1\n" - f"{gridline}\n" - "XDim=7200\n" - "YDim=3600\n" - f"{upleft}\n" - f"{upright}\n" - "END_GROUP=GRID_1\n" - "END_GROUP=GridStructure\nEND") + "END_GROUP=SwathStructure\n" + "GROUP=GridStructure\n" + "GROUP=GRID_1\n" + f"{gridline}\n" + "XDim=7200\n" + "YDim=3600\n" + f"{upleft}\n" + f"{upright}\n" + "END_GROUP=GRID_1\n" + "END_GROUP=GridStructure\nEND") return struct_metadata_header From bdcddab6ca033d26d2363513ddb24fc07ddb2679 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 15:46:40 +0000 Subject: [PATCH 411/702] Remove rogue blank line. --- satpy/readers/modis_l3.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index f8f94372cc..dfddc0732b 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -62,7 +62,6 @@ def _get_res(self): def _sort_grid(self): """Get the grid properties.""" - # First, get the grid resolution self._get_res() From ac08013b725f2d096e91250a71c2e1ae34eeb219 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 8 Nov 2023 10:35:20 -0600 Subject: [PATCH 412/702] Remove typing ignore from satpy/readers/osisaf_l3_nc.py --- satpy/readers/osisaf_l3_nc.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 00f1176b6f..293584ffa8 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -15,7 +15,6 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
-# type: ignore """A reader for OSI-SAF level 3 products in netCDF format.""" import logging From 092b452782a61b206af8be110cc84900c0c89f2a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 8 Nov 2023 10:41:32 -0600 Subject: [PATCH 413/702] Bump ruff hook version in .pre-commit-config.yaml --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index eb21aa6601..1094cf0355 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,9 +1,9 @@ exclude: '^$' fail_fast: false repos: - - repo: https://github.com/charliermarsh/ruff-pre-commit + - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.0.247' + rev: 'v0.1.4' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks From 9fd639475113feaf7fdb884bcd8087f409b7c0e1 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 16:47:23 +0000 Subject: [PATCH 414/702] Update MODIS L3 tests to check data types. --- .../tests/reader_tests/modis_tests/test_modis_l3.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py index 1203ecf205..23c1af6fc1 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py @@ -20,6 +20,7 @@ from __future__ import annotations import dask.array as da +import numpy as np import pytest from pyresample import geometry from pytest_lazyfixture import lazy_fixture @@ -71,8 +72,12 @@ def test_load_l3_dataset(self, modis_l3_nasa_mod09_file): data_arr = scene[ds_name] assert isinstance(data_arr.data, da.Array) - data_arr = data_arr.compute() + data_arr_comp = data_arr.compute() - assert data_arr.shape == _shape_for_resolution(-999) - assert data_arr.attrs.get("resolution") == 0.05 - assert data_arr.attrs.get("area") == _expected_area() + # Check types + assert data_arr_comp.dtype == data_arr.dtype + assert data_arr_comp.dtype == np.float32 + + assert data_arr_comp.shape == _shape_for_resolution(-999) + assert data_arr_comp.attrs.get("resolution") == 0.05 + assert data_arr_comp.attrs.get("area") == _expected_area() From 23c136a51df49e9433d6b65f8eb5006060f4e65f Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 17:49:05 +0000 Subject: [PATCH 415/702] Update INSAT-3D reader to get satellite location from file rather than hardcoded value. --- satpy/readers/insat3d_img_l1b_h5.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/readers/insat3d_img_l1b_h5.py b/satpy/readers/insat3d_img_l1b_h5.py index a7dcf371cc..7e444e8d34 100644 --- a/satpy/readers/insat3d_img_l1b_h5.py +++ b/satpy/readers/insat3d_img_l1b_h5.py @@ -182,6 +182,8 @@ def get_area_def(self, ds_id): a = 6378137.0 b = 6356752.314245 + nom_cen_pos = self.datatree.attrs["Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude"][1] + pdict = { "cfac": cfac, "lfac": lfac, @@ -193,7 +195,7 @@ def get_area_def(self, ds_id): "a": a, "b": b, "h": h, - "ssp_lon": 82.0, + "ssp_lon": nom_cen_pos, "a_name": "insat3d82", "a_desc": "insat3d82", "p_id": "geosmsg" From 23db54b18abf2f33450eaf5823231a8c35152cca Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 21:05:26 +0000 Subject: [PATCH 416/702] Add support for flux and sst products. 
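
Supporting the SST and radiative flux files means accepting several
time-string formats in the global attributes, which this patch handles with
nested try/except blocks in _parse_datetime. An equivalent loop-based sketch
(not the code used in the patch) showing the formats involved:

    from datetime import datetime

    OSISAF_TIME_FORMATS = ("%Y-%m-%d %H:%M:%S", "%Y%m%dT%H%M%SZ", "%Y-%m-%dT%H:%M:%SZ")

    def parse_osisaf_time(datestr):
        for fmt in OSISAF_TIME_FORMATS:
            try:
                return datetime.strptime(datestr, fmt)
            except ValueError:
                continue
        raise ValueError(f"Unrecognised OSI SAF time string: {datestr!r}")

    assert parse_osisaf_time("2022-12-15 00:00:00") == datetime(2022, 12, 15)
    assert parse_osisaf_time("20221216T120000Z") == datetime(2022, 12, 16, 12)
    assert parse_osisaf_time("2022-12-15T06:30:00Z") == datetime(2022, 12, 15, 6, 30)
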
--- satpy/etc/readers/osisaf_nc.yaml | 89 +++++++++++++++++++++++--- satpy/readers/osisaf_l3_nc.py | 105 ++++++++++++++++++++++++++----- 2 files changed, 170 insertions(+), 24 deletions(-) diff --git a/satpy/etc/readers/osisaf_nc.yaml b/satpy/etc/readers/osisaf_nc.yaml index 214345da3a..479b5a38db 100644 --- a/satpy/etc/readers/osisaf_nc.yaml +++ b/satpy/etc/readers/osisaf_nc.yaml @@ -16,17 +16,26 @@ reader: file_types: osi_sea_ice_conc: - file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler - file_patterns: ['ice_conc_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_conc_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] osi_sea_ice_edge: - file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler - file_patterns: ['ice_edge_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_edge_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] osi_sea_ice_emis: - file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler - file_patterns: ['ice_emis_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_emis_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] osi_sea_ice_type: - file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler - file_patterns: ['ice_type_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_type_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + osi_sst: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['{start_time:%Y%m%d%H%M%S}-{processing_center}-L3C_GHRSST-SSTskin-{sensor}_{platform_name}-v{version}.nc'] + osi_radflux_stere: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['osisaf_radiative_flux_24h_hl_{grid}-050_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + osi_radflux_grid: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['{start_time:%Y%m%d%H%M%S}-OSISAF-RADFLX-{time_period}-{platform_name}.nc'] datasets: # Shared between various file types @@ -105,3 +114,67 @@ datasets: u: name: u file_type: osi_sea_ice_emis + + # SST product + ist_dtime: + name: ist_dtime + file_type: osi_sst + ist_quality_level: + name: ist_quality_level + file_type: osi_sst + l2p_flags: + name: l2p_flags + file_type: osi_sst + landmask: + name: landmask + file_type: osi_sst + or_number_of_pixels: + name: or_number_of_pixels + file_type: osi_sst + or_number_of_pixels_ist: + name: or_number_of_pixels_ist + file_type: osi_sst + probability_of_ice: + name: probability_of_ice + file_type: osi_sst + probability_of_water: + name: probability_of_water + file_type: osi_sst + quality_level: + name: quality_level + file_type: osi_sst + sea_ice_fraction: + name: sea_ice_fraction + file_type: osi_sst + sea_surface_temperature: + name: sea_surface_temperature + file_type: osi_sst + sses_bias: + name: sses_bias + file_type: osi_sst + sses_standard_deviation: + name: sses_standard_deviation + file_type: osi_sst + 
sst_dtime: + name: sst_dtime + file_type: osi_sst + surface_temperature: + name: surface_temperature + file_type: osi_sst + tempflag: + name: tempflag + file_type: osi_sst + + # Radiative flux product + dli: + name: dli + file_type: [osi_radflux_stere, osi_radflux_grid] + dli_confidence_level: + name: dli_confidence_level + file_type: [osi_radflux_stere, osi_radflux_grid] + ssi: + name: ssi + file_type: [osi_radflux_stere, osi_radflux_grid] + ssi_confidence_level: + name: ssi_confidence_level + file_type: [osi_radflux_stere, osi_radflux_grid] diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 293584ffa8..b2e6ec6812 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -32,7 +32,14 @@ class OSISAFL3NCFileHandler(NetCDF4FileHandler): @staticmethod def _parse_datetime(datestr): - return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S") + try: + return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S") + except ValueError: + try: + return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") + except ValueError: + return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ") + def _get_ease_grid(self): """Set up the EASE grid.""" @@ -53,11 +60,37 @@ def _get_ease_grid(self): units="deg") return area_def + def _get_geographic_grid(self): + """Set up the EASE grid.""" + from pyresample import create_area_def + + x_size = self["/dimension/lon"] + y_size = self["/dimension/lat"] + lat_0 = self["lat"].min() + lon_0 = self["lon"].min() + lat_1 = self["lat"].max() + lon_1 = self["lon"].max() + area_extent = [lon_0, lat_1, lon_1, lat_0] + area_def = create_area_def(area_id="osisaf_geographic_area", + description="osisaf_geographic_area", + proj_id="osisaf_geographic_area", + projection="+proj=lonlat", width=x_size, height=y_size, area_extent=area_extent, + units="deg") + return area_def + def _get_polar_stereographic_grid(self): """Set up the polar stereographic grid.""" from pyresample import create_area_def - - proj4str = self["Polar_Stereographic_Grid/attr/proj4_string"] + try: + proj4str = self["Polar_Stereographic_Grid/attr/proj4_string"] + except KeyError: + # Some products don't have the proj str, so we construct it ourselves + sma = self["Polar_Stereographic_Grid/attr/semi_major_axis"] + smb = self["Polar_Stereographic_Grid/attr/semi_minor_axis"] + lon_0 = self["Polar_Stereographic_Grid/attr/straight_vertical_longitude_from_pole"] + lat_0 = self["Polar_Stereographic_Grid/attr/latitude_of_projection_origin"] + lat_ts = self["Polar_Stereographic_Grid/attr/standard_parallel"] + proj4str = f"+a={sma} +b={smb} +lat_ts={lat_ts} +lon_0={lon_0} +proj=stere +lat_0={lat_0}" x_size = self["/dimension/xc"] y_size = self["/dimension/yc"] p_lowerleft_lat = self["lat"].values[y_size - 1, 0] @@ -75,7 +108,13 @@ def _get_polar_stereographic_grid(self): def get_area_def(self, area_id): """Override abstract baseclass method""" - if self.filename_info["grid"] == "ease": + if self.filetype_info["file_type"] == "osi_radflux_grid": + self.area_def = self._get_geographic_grid() + return self.area_def + elif self.filetype_info["file_type"] == "osi_sst": + self.area_def = self._get_polar_stereographic_grid() + return self.area_def + elif self.filename_info["grid"] == "ease": self.area_def = self._get_ease_grid() return self.area_def elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": @@ -117,6 +156,12 @@ def get_dataset(self, dataset_id, ds_info): data = data.where(data >= valid_min, np.nan) data = data.where(data <= valid_max, np.nan) + # Try to 
get the fill value for the data. + # If there isn't one, assume all remaining pixels are valid. + fill_value = self._get_ds_attr(var_path + "/attr/_FillValue") + if fill_value is not None: + data = data.where(data != fill_value, np.nan) + # Try to get the scale and offset for the data. # As above, not all datasets have these, so fall back on assuming no limits. scale_factor = self._get_ds_attr(var_path + "/attr/scale_factor") @@ -124,29 +169,57 @@ def get_dataset(self, dataset_id, ds_info): if scale_offset is not None and scale_factor is not None: data = (data * scale_factor + scale_offset) - # Try to get the fill value for the data. - # If there isn't one, assume all remaining pixels are valid. - fill_value = self._get_ds_attr(var_path + "/attr/_FillValue") - if fill_value is not None: - data = data.where(data != fill_value, np.nan) - # Set proper dimension names - data = data.rename({"xc": "x", "yc": "y"}) + if self.filetype_info["file_type"] == "osi_radflux_grid": + data = data.rename({"lon": "x", "lat": "y"}) + else: + data = data.rename({"xc": "x", "yc": "y"}) ds_info.update({ "units": ds_info.get("units", file_units), - "platform_name": self["/attr/platform_name"], - "sensor": self["/attr/instrument_type"] + "platform_name": self._get_platname(), + "sensor": self._get_instname() }) ds_info.update(dataset_id.to_dict()) data.attrs.update(ds_info) return data + def _get_instname(self): + """Get instrument name.""" + try: + return self["/attr/instrument_name"] + except KeyError: + try: + return self["/attr/sensor"] + except KeyError: + return "unknown_sensor" + + def _get_platname(self): + """Get platform name.""" + try: + return self["/attr/platform_name"] + except KeyError: + return self["/attr/platform"] + + @property def start_time(self): - return self._parse_datetime(self["/attr/start_date"]) - # return self._parse_datetime(self["/attr/start_date"]) + start_t = self._get_ds_attr("/attr/start_date") + if start_t is None: + start_t = self._get_ds_attr("/attr/start_time") + if start_t is None: + start_t = self._get_ds_attr("/attr/time_coverage_start") + if start_t is None: + raise ValueError("Unknown start time attribute.") + return self._parse_datetime(start_t) @property def end_time(self): - return self._parse_datetime(self["/attr/stop_date"]) + end_t = self._get_ds_attr("/attr/stop_date") + if end_t is None: + end_t = self._get_ds_attr("/attr/stop_time") + if end_t is None: + end_t = self._get_ds_attr("/attr/time_coverage_end") + if end_t is None: + raise ValueError("Unknown stop time attribute.") + return self._parse_datetime(end_t) From aef8d90e610f1c8f62bd6c321f666b9fec038968 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 22:11:00 +0000 Subject: [PATCH 417/702] (wip) Update OSI SAF tests. 
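The reworked tests move the per-product expectations into small subclasses of a
shared test class, while the fake input is built with xarray and written to a
temporary netCDF file. A minimal sketch of that flow, with invented variable
names and values (an illustration of the pattern only, not the test code itself):

    import numpy as np
    import xarray as xr

    def write_fake_product(path):
        # Two-pixel stand-in for an OSI SAF grid product.
        ssi = xr.DataArray(np.array([[100.0, 200.0]]), dims=("yc", "xc"),
                           attrs={"units": "W m-2"})
        ds = xr.Dataset({"ssi": ssi},
                        attrs={"time_coverage_start": "2023-10-10T00:00:00Z"})
        ds.to_netcdf(path)

    # Each test then opens such a file through the handler, e.g.
    #   handler = OSISAFL3NCFileHandler(path, filename_info, filetype_info)
    # and asserts on handler.get_dataset(...) or handler.get_area_def(...);
    # the subclasses only override attributes such as varname and scl.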
--- satpy/tests/reader_tests/test_osisaf_l3.py | 193 ++++++++++++++------- 1 file changed, 131 insertions(+), 62 deletions(-) diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index 40cf4539e1..65a9efc9a1 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -30,15 +30,27 @@ stere_ds = xr.DataArray( -999, attrs={"grid_mapping_name": "polar_stereographic", - "false_easting": 0.0, - "false_northing": 0.0, - "semi_major_axis": 6378273.0, - "semi_minor_axis": 6356889.44891, - "straight_vertical_longitude_from_pole": 0.0, - "latitude_of_projection_origin": -90.0, - "standard_parallel": -70.0, - "proj4_string": "+proj=stere +a=6378273 +b=6356889.44891 +lat_0=-90 +lat_ts=-70 +lon_0=0", -}) + "false_easting": 0.0, + "false_northing": 0.0, + "semi_major_axis": 6378273.0, + "semi_minor_axis": 6356889.44891, + "straight_vertical_longitude_from_pole": 0.0, + "latitude_of_projection_origin": -90.0, + "standard_parallel": -70.0, + "proj4_string": "+proj=stere +a=6378273 +b=6356889.44891 +lat_0=-90 +lat_ts=-70 +lon_0=0", + }) + +stere_ds_noproj = xr.DataArray( + -999, + attrs={"grid_mapping_name": "polar_stereographic", + "false_easting": 0.0, + "false_northing": 0.0, + "semi_major_axis": 6378273.0, + "semi_minor_axis": 6356889.44891, + "straight_vertical_longitude_from_pole": 0.0, + "latitude_of_projection_origin": -90.0, + "standard_parallel": -70.0, + }) ease_ds = xr.DataArray( -999, @@ -51,14 +63,34 @@ "proj4_string": "+proj=laea +a=6371228.0 +lat_0=-90 +lon_0=0", }) +attrs_ice = { + "start_date": "2022-12-15 00:00:00", + "stop_date": "2022-12-16 00:00:00", + "platform_name": "Multi-sensor analysis", + "instrument_type": "Multi-sensor analysis"} + +attrs_flux = { + "time_coverage_start": "2023-10-10T00:00:00Z", + "time_coverage_end": "2023-10-10T23:59:59Z", + "platform": "NOAA-19, NOAA-20, Metop-B, Metop-C, SNPP", + "sensor": "AVHRR, VIIRS, AVHRR, AVHRR, VIIRS"} + +attrs_geo = { + "start_time": "20221228T183000Z", + "stop_time": "20221228T193000Z", + "platform": "MSG4"} -class TestOSISAFL3Reader: - """Test OSI-SAF level 3 netCDF reader.""" - def setup_method(self, proj_type): +class OSISAFL3ReaderTests: + """Test OSI-SAF level 3 netCDF reader ice files.""" + + def setup_method(self, tester="ice"): """Create a fake dataset.""" self.base_data = np.array(([-999, 1215, 1125, 11056, 9500], [200, 1, -999, 4215, 5756])) + self.base_data_ssi = np.array(([-999.99, 121.5, 11.25, 110.56, 950.0], [200, 1, -999.99, 42.15, 5.756])) + self.base_data_ssi_geo = np.array(([-32768, 121.5, 11.25, 110.56, 950.0], [200, 1, -32768, 42.15, 5.756])) self.base_data = np.expand_dims(self.base_data, axis=0) + self.base_data_ssi = np.expand_dims(self.base_data_ssi, axis=0) self.unc_data = np.array(([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])) self.yc_data = np.array(([-10, -5, 0, 5, 10], [-10, -5, 0, 5, 10])) self.xc_data = np.array(([-5, -5, -5, -5, -5], [5, 5, 5, 5, 5])) @@ -78,7 +110,7 @@ def setup_method(self, proj_type): ) self.time = xr.DataArray( self.time_data, - dims=("time"), + dims="time", attrs={"standard_name": "projection_y_coordinate", "units": "km"} ) self.lat = xr.DataArray( @@ -103,84 +135,93 @@ def setup_method(self, proj_type): attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, "valid_min": 0, "valid_max": 10000, "standard_name": "total_uncertainty"} ) + self.ssi_geo = xr.DataArray( + self.base_data_ssi_geo, + dims=("lat", "lon"), + attrs={"scale_factor": 0.1, "add_offset": 0., "_FillValue": 32768, 
"units": "W m-2", + "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"} + ) + self.ssi = xr.DataArray( + self.base_data_ssi, + dims=("time", "yc", "xc"), + attrs={"_FillValue": -999.99, "units": "W m-2", + "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"} + ) + self.uncert = xr.DataArray( + self.unc_data, + dims=("yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, + "valid_min": 0, "valid_max": 10000, "standard_name": "total_uncertainty"} + ) data_vars = { - "ice_conc": self.conc, - "total_uncertainty": self.uncert, - "xc": self.xc, - "yc": self.yc, - "time": self.time, - "lat": self.lat, - "lon": self.lon, - "Lambert_Azimuthal_Grid": ease_ds, - "Polar_Stereographic_Grid": stere_ds} - self.fake_dataset = xr.Dataset( - data_vars=data_vars, - attrs={ - "start_date": "2022-12-15 00:00:00", - "stop_date": "2022-12-16 00:00:00", - "platform_name": "Multi-sensor analysis", - "instrument_type": "Multi-sensor analysis"}, - ) + "xc": self.xc, + "yc": self.yc, + "time": self.time, + "lat": self.lat, + "lon": self.lon, } + if tester == "ice": + data_vars["Lambert_Azimuthal_Grid"] = ease_ds + data_vars["Polar_Stereographic_Grid"] = stere_ds + data_vars["ice_conc"] = self.conc + data_vars["total_uncertainty"] = self.uncert + elif tester == "flux_stere": + data_vars["Polar_Stereographic_Grid"] = stere_ds_noproj + data_vars["ssi"] = self.ssi + elif tester == "flux_geo": + data_vars["ssi"] = self.ssi_geo + + if tester == "ice": + self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) + elif tester == "flux_stere": + self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_flux) + elif tester == "flux_geo": + self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_geo) def test_instantiate_single_netcdf_file(self, tmp_path): """Test initialization of file handlers - given a single netCDF file.""" - filename_info = {} - filetype_info = {} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) - + OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) def test_get_dataset(self, tmp_path): """Test retrieval of datasets.""" - filename_info = {} - filetype_info = {} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) - res = test.get_dataset(DataQuery(name="ice_conc"), {"standard_name": "sea_ice_area_fraction"}) + res = test.get_dataset(DataQuery(name=self.varname), {"standard_name": self.stdname}) # Check we remove singleton dimension assert res.shape[0] == 2 assert res.shape[1] == 5 # Test values are correct - test_ds = self.fake_dataset["ice_conc"][0].values - test_ds = np.where(test_ds == -999, np.nan, test_ds) - test_ds = np.where(test_ds > 10000, np.nan, test_ds) - np.testing.assert_allclose(res.values, test_ds / 100) - - res = test.get_dataset(DataQuery(name="total_uncertainty"), {"standard_name": "sea_ice_area_fraction"}) - assert res.shape[0] == 2 - assert res.shape[1] == 5 + test_ds = self.fake_dataset[self.varname][0].values + test_ds = np.where(test_ds == self.fillv, np.nan, test_ds) + test_ds = np.where(test_ds > self.maxv, np.nan, test_ds) + test_ds = test_ds / self.scl + 
np.testing.assert_allclose(res.values, test_ds) with pytest.raises(KeyError): test.get_dataset(DataQuery(name="erroneous dataset"), {"standard_name": "erroneous dataset"}) def test_get_start_and_end_times(self, tmp_path): """Test retrieval of the sensor name from the netCDF file.""" - good_start_time = datetime(2022, 12, 15, 0, 0, 0) - good_stop_time = datetime(2022, 12, 16, 0, 0, 0) - - filename_info = {} - filetype_info = {} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) - - assert test.start_time == good_start_time - assert test.end_time == good_stop_time + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) + assert test.start_time == self.good_start_time + assert test.end_time == self.good_stop_time def test_get_area_def_ease(self, tmp_path): """Test getting the area definition for the EASE grid.""" filename_info = {"grid": "ease"} - filetype_info = {} + filetype_info = {"file_type": "osi_sea_ice_conc"} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) @@ -198,15 +239,12 @@ def test_get_area_def_ease(self, tmp_path): np.testing.assert_allclose(area_def.area_extent, (-2203574.302335, 1027543.572492, -1726299.781982, 996679.643829)) - def test_get_area_def_stere(self, tmp_path): """Test getting the area definition for the polar stereographic grid.""" - filename_info = {"grid": "stere"} - filetype_info = {} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" @@ -224,10 +262,41 @@ def test_get_area_def_stere(self, tmp_path): def test_get_area_def_bad(self, tmp_path): """Test getting the area definition for the polar stereographic grid.""" filename_info = {"grid": "turnips"} - filetype_info = {} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, self.filetype_info) with pytest.raises(ValueError, match="Unknown grid type: turnips"): test.get_area_def(None) + + +class TestOSISAFL3ReaderICE(OSISAFL3ReaderTests): + """Test OSI-SAF level 3 netCDF reader ice files.""" + + def setup_method(self): + super().setup_method(tester="ice") + self.filename_info = {"grid": "ease"} + self.filetype_info = {"file_type": "osi_sea_ice_conc"} + self.good_start_time = datetime(2023, 10, 10, 0, 0, 0) + self.good_stop_time = datetime(2023, 10, 10, 23, 59, 59) + self.varname = "ice_conc" + self.stdname = "sea_ice_area_fraction" + self.fillv = -999 + self.maxv = 10000 + self.scl = 100 + + +class TestOSISAFL3ReaderFlux(OSISAFL3ReaderTests): + """Test OSI-SAF level 3 netCDF reader flux files.""" + + def setup_method(self): + super().setup_method(tester="flux_stere") + self.filename_info = {} + self.filetype_info = {"file_type": "osi_radflux_stere"} + self.good_start_time = datetime(2023, 10, 10, 0, 0, 0) + self.good_stop_time = datetime(2023, 10, 10, 23, 59, 59) + self.varname = "ssi" + self.stdname = "surface_downwelling_shortwave_flux_in_air" + self.fillv = -999.99 + self.maxv = 
1000 + self.scl = 1 From 3bbf8923e868acc9357df4c1cea0133ab59ed3ec Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 22:13:05 +0000 Subject: [PATCH 418/702] Update Insat-3D area def to use hardcoded field of view. --- satpy/readers/insat3d_img_l1b_h5.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/satpy/readers/insat3d_img_l1b_h5.py b/satpy/readers/insat3d_img_l1b_h5.py index 7e444e8d34..9f2224ef82 100644 --- a/satpy/readers/insat3d_img_l1b_h5.py +++ b/satpy/readers/insat3d_img_l1b_h5.py @@ -173,7 +173,12 @@ def get_area_def(self, ds_id): lines = shape[-2] cols = shape[-1] - fov = self.datatree.attrs["Field_of_View(degrees)"] + # From empirical analysis, hardcoding the view of view to 18 degrees + # produces better geolocation results. + # Uncommenting the line below will use the fov from the file instead, + # this line is kept for reference. + #fov = self.datatree.attrs["Field_of_View(degrees)"] + fov = 18 cfac = 2 ** 16 / (fov / cols) lfac = 2 ** 16 / (fov / lines) From 89d6b64e2f0bd2050957b6c32440767004345ed5 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 9 Nov 2023 09:56:05 +0000 Subject: [PATCH 419/702] Update OSI-SAF reader and finish the tests. --- satpy/readers/osisaf_l3_nc.py | 28 ++-- satpy/tests/reader_tests/test_osisaf_l3.py | 186 ++++++++++++++++----- 2 files changed, 157 insertions(+), 57 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index b2e6ec6812..8cdd35020c 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -108,20 +108,22 @@ def _get_polar_stereographic_grid(self): def get_area_def(self, area_id): """Override abstract baseclass method""" - if self.filetype_info["file_type"] == "osi_radflux_grid": - self.area_def = self._get_geographic_grid() - return self.area_def - elif self.filetype_info["file_type"] == "osi_sst": - self.area_def = self._get_polar_stereographic_grid() - return self.area_def - elif self.filename_info["grid"] == "ease": - self.area_def = self._get_ease_grid() - return self.area_def - elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": - self.area_def = self._get_polar_stereographic_grid() - return self.area_def + if "grid" in self.filename_info: + if self.filename_info["grid"] == "ease": + self.area_def = self._get_ease_grid() + return self.area_def + elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": + self.area_def = self._get_polar_stereographic_grid() + return self.area_def + else: + raise ValueError(f"Unknown grid type: {self.filename_info['grid']}") else: - raise ValueError(f"Unknown grid type: {self.filename_info['grid']}") + if self.filetype_info["file_type"] == "osi_radflux_grid": + self.area_def = self._get_geographic_grid() + return self.area_def + elif self.filetype_info["file_type"] == "osi_sst": + self.area_def = self._get_polar_stereographic_grid() + return self.area_def def _get_ds_attr(self, a_name): """Get a dataset attribute and check it's valid.""" diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index 65a9efc9a1..fd035ccbac 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -88,13 +88,17 @@ def setup_method(self, tester="ice"): """Create a fake dataset.""" self.base_data = np.array(([-999, 1215, 1125, 11056, 9500], [200, 1, -999, 4215, 5756])) self.base_data_ssi = np.array(([-999.99, 121.5, 11.25, 110.56, 950.0], [200, 1, -999.99, 
42.15, 5.756])) + self.base_data_sst = np.array(([-32768, 273.2, 194.2, 220.78, 301.], [-32768, -32768, 273.22, 254.34, 204.21])) self.base_data_ssi_geo = np.array(([-32768, 121.5, 11.25, 110.56, 950.0], [200, 1, -32768, 42.15, 5.756])) self.base_data = np.expand_dims(self.base_data, axis=0) self.base_data_ssi = np.expand_dims(self.base_data_ssi, axis=0) + self.base_data_sst = np.expand_dims(self.base_data_sst, axis=0) self.unc_data = np.array(([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])) self.yc_data = np.array(([-10, -5, 0, 5, 10], [-10, -5, 0, 5, 10])) self.xc_data = np.array(([-5, -5, -5, -5, -5], [5, 5, 5, 5, 5])) self.time_data = np.array([1.]) + self.scl = 1. + self.add = 0. self.lat_data = np.array(([-68, -69, -70, -71, -72], [-68, -69, -70, -71, -72])) self.lon_data = np.array(([-60, -60, -60, -60, -60], [-65, -65, -65, -65, -65])) @@ -138,7 +142,7 @@ def setup_method(self, tester="ice"): self.ssi_geo = xr.DataArray( self.base_data_ssi_geo, dims=("lat", "lon"), - attrs={"scale_factor": 0.1, "add_offset": 0., "_FillValue": 32768, "units": "W m-2", + attrs={"scale_factor": 0.1, "add_offset": 0., "_FillValue": 32768, "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"} ) self.ssi = xr.DataArray( @@ -147,13 +151,12 @@ def setup_method(self, tester="ice"): attrs={"_FillValue": -999.99, "units": "W m-2", "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"} ) - self.uncert = xr.DataArray( - self.unc_data, - dims=("yc", "xc"), - attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, - "valid_min": 0, "valid_max": 10000, "standard_name": "total_uncertainty"} + self.sst = xr.DataArray( + self.base_data_sst, + dims=("time", "yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 273.15, "_FillValue": -32768, "units": "K", + "valid_min": -8000., "valid_max": 5000., "standard_name": "sea_ice_surface_temperature"} ) - data_vars = { "xc": self.xc, "yc": self.yc, @@ -165,13 +168,15 @@ def setup_method(self, tester="ice"): data_vars["Polar_Stereographic_Grid"] = stere_ds data_vars["ice_conc"] = self.conc data_vars["total_uncertainty"] = self.uncert + elif tester == "sst": + data_vars["Polar_Stereographic_Grid"] = stere_ds + data_vars["surface_temperature"] = self.sst elif tester == "flux_stere": data_vars["Polar_Stereographic_Grid"] = stere_ds_noproj data_vars["ssi"] = self.ssi elif tester == "flux_geo": data_vars["ssi"] = self.ssi_geo - - if tester == "ice": + if tester == "ice" or tester == "sst": self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) elif tester == "flux_stere": self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_flux) @@ -198,10 +203,10 @@ def test_get_dataset(self, tmp_path): assert res.shape[1] == 5 # Test values are correct - test_ds = self.fake_dataset[self.varname][0].values + test_ds = self.fake_dataset[self.varname].values.squeeze() test_ds = np.where(test_ds == self.fillv, np.nan, test_ds) test_ds = np.where(test_ds > self.maxv, np.nan, test_ds) - test_ds = test_ds / self.scl + test_ds = test_ds / self.scl + self.add np.testing.assert_allclose(res.values, test_ds) with pytest.raises(KeyError): @@ -218,14 +223,60 @@ def test_get_start_and_end_times(self, tmp_path): assert test.start_time == self.good_start_time assert test.end_time == self.good_stop_time + def test_get_area_def_bad(self, tmp_path): + """Test getting the area definition for the polar stereographic grid.""" + filename_info = {"grid": "turnips"} + tmp_filepath = tmp_path / 
"fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, self.filetype_info) + with pytest.raises(ValueError, match="Unknown grid type: turnips"): + test.get_area_def(None) + + +class TestOSISAFL3ReaderICE(OSISAFL3ReaderTests): + """Test OSI-SAF level 3 netCDF reader ice files.""" + + def setup_method(self): + super().setup_method(tester="ice") + self.filename_info = {"grid": "ease"} + self.filetype_info = {"file_type": "osi_sea_ice_conc"} + self.good_start_time = datetime(2022, 12, 15, 0, 0, 0) + self.good_stop_time = datetime(2022, 12, 16, 0, 0, 0) + self.varname = "ice_conc" + self.stdname = "sea_ice_area_fraction" + self.fillv = -999 + self.maxv = 10000 + self.scl = 100 + + def test_get_area_def_stere(self, tmp_path): + """Test getting the area definition for the polar stereographic grid.""" + self.filename_info = {"grid": "stere"} + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) + + area_def = test.get_area_def(None) + assert area_def.description == "osisaf_polar_stereographic" + assert area_def.proj_dict["a"] == 6378273.0 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lat_ts"] == -70 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "stere" + + assert area_def.width == 5 + assert area_def.height == 2 + np.testing.assert_allclose(area_def.area_extent, + (-2185821.7955, 1019265.4426, -1702157.4538, 982741.0642)) + + def test_get_area_def_ease(self, tmp_path): """Test getting the area definition for the EASE grid.""" - filename_info = {"grid": "ease"} - filetype_info = {"file_type": "osi_sea_ice_conc"} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), {"grid": "ease"}, self.filetype_info) area_def = test.get_area_def(None) assert area_def.description == "osisaf_lambert_azimuthal_equal_area" @@ -239,6 +290,22 @@ def test_get_area_def_ease(self, tmp_path): np.testing.assert_allclose(area_def.area_extent, (-2203574.302335, 1027543.572492, -1726299.781982, 996679.643829)) + +class TestOSISAFL3ReaderFluxStere(OSISAFL3ReaderTests): + """Test OSI-SAF level 3 netCDF reader flux files on stereographic grid.""" + + def setup_method(self): + super().setup_method(tester="flux_stere") + self.filename_info = {"grid": "polstere"} + self.filetype_info = {"file_type": "osi_radflux_stere"} + self.good_start_time = datetime(2023, 10, 10, 0, 0, 0) + self.good_stop_time = datetime(2023, 10, 10, 23, 59, 59) + self.varname = "ssi" + self.stdname = "surface_downwelling_shortwave_flux_in_air" + self.fillv = -999.99 + self.maxv = 1000 + self.scl = 1 + def test_get_area_def_stere(self, tmp_path): """Test getting the area definition for the polar stereographic grid.""" tmp_filepath = tmp_path / "fake_dataset.nc" @@ -259,44 +326,75 @@ def test_get_area_def_stere(self, tmp_path): np.testing.assert_allclose(area_def.area_extent, (-2185821.7955, 1019265.4426, -1702157.4538, 982741.0642)) - def test_get_area_def_bad(self, tmp_path): - """Test getting the area definition for the polar stereographic grid.""" - filename_info = {"grid": "turnips"} + +class TestOSISAFL3ReaderFluxGeo(OSISAFL3ReaderTests): + """Test OSI-SAF level 3 netCDF reader flux 
files on lat/lon grid (GEO sensors).""" + + def setup_method(self): + super().setup_method(tester="flux_geo") + self.filename_info = {} + self.filetype_info = {"file_type": "osi_radflux_grid"} + self.good_start_time = datetime(2022, 12, 28, 18, 30, 0) + self.good_stop_time = datetime(2022, 12, 28, 19, 30, 0) + self.varname = "ssi" + self.stdname = "surface_downwelling_shortwave_flux_in_air" + self.fillv = -32768 + self.maxv = 1000 + self.scl = 10 + + + def test_get_area_def_grid(self, tmp_path): + """Test getting the area definition for the lat/lon grid.""" tmp_filepath = tmp_path / "fake_dataset.nc" + self.filename_info = {} + self.filetype_info = {"file_type": "osi_radflux_grid"} self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, self.filetype_info) - with pytest.raises(ValueError, match="Unknown grid type: turnips"): - test.get_area_def(None) - + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) -class TestOSISAFL3ReaderICE(OSISAFL3ReaderTests): - """Test OSI-SAF level 3 netCDF reader ice files.""" + area_def = test.get_area_def(None) + assert area_def.description == "osisaf_geographic_area" + assert area_def.proj_dict["datum"] == "WGS84" + assert area_def.proj_dict["proj"] == "longlat" - def setup_method(self): - super().setup_method(tester="ice") - self.filename_info = {"grid": "ease"} - self.filetype_info = {"file_type": "osi_sea_ice_conc"} - self.good_start_time = datetime(2023, 10, 10, 0, 0, 0) - self.good_stop_time = datetime(2023, 10, 10, 23, 59, 59) - self.varname = "ice_conc" - self.stdname = "sea_ice_area_fraction" - self.fillv = -999 - self.maxv = 10000 - self.scl = 100 + assert area_def.width == 5 + assert area_def.height == 2 + np.testing.assert_allclose(area_def.area_extent, + (-65, -68, -60, -72)) -class TestOSISAFL3ReaderFlux(OSISAFL3ReaderTests): - """Test OSI-SAF level 3 netCDF reader flux files.""" +class TestOSISAFL3ReaderSST(OSISAFL3ReaderTests): + """Test OSI-SAF level 3 netCDF reader surface temperature files.""" def setup_method(self): - super().setup_method(tester="flux_stere") + super().setup_method(tester="sst") self.filename_info = {} - self.filetype_info = {"file_type": "osi_radflux_stere"} - self.good_start_time = datetime(2023, 10, 10, 0, 0, 0) - self.good_stop_time = datetime(2023, 10, 10, 23, 59, 59) - self.varname = "ssi" - self.stdname = "surface_downwelling_shortwave_flux_in_air" - self.fillv = -999.99 + self.filetype_info = {"file_type": "osi_sst"} + self.good_start_time = datetime(2022, 12, 15, 0, 0, 0) + self.good_stop_time = datetime(2022, 12, 16, 0, 0, 0) + self.varname = "surface_temperature" + self.stdname = "sea_ice_surface_temperature" + self.fillv = -32768 self.maxv = 1000 - self.scl = 1 + self.scl = 100 + self.add = 273.15 + + def test_get_area_def_stere(self, tmp_path): + """Test getting the area definition for the polar stereographic grid.""" + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) + + area_def = test.get_area_def(None) + assert area_def.description == "osisaf_polar_stereographic" + assert area_def.proj_dict["a"] == 6378273.0 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lat_ts"] == -70 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "stere" + + assert area_def.width == 5 + assert area_def.height == 2 + 
np.testing.assert_allclose(area_def.area_extent, + (-2185821.7955, 1019265.4426, -1702157.4538, 982741.0642)) From 35196b684f60b6f950b91113472cd0019db3f228 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 9 Nov 2023 10:47:14 +0000 Subject: [PATCH 420/702] Simplify OSI SAF code. --- satpy/readers/osisaf_l3_nc.py | 58 ++++++++++++++-------- satpy/tests/reader_tests/test_osisaf_l3.py | 8 ++- 2 files changed, 39 insertions(+), 27 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 8cdd35020c..1574b6ba74 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -106,24 +106,33 @@ def _get_polar_stereographic_grid(self): return area_def + def _get_finfo_grid(self): + """Get grid in case of filename info being used.""" + if self.filename_info["grid"] == "ease": + self.area_def = self._get_ease_grid() + return self.area_def + elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": + self.area_def = self._get_polar_stereographic_grid() + return self.area_def + else: + raise ValueError(f"Unknown grid type: {self.filename_info['grid']}") + + def _get_ftype_grid(self): + """Get grid in case of filetype info being used.""" + if self.filetype_info["file_type"] == "osi_radflux_grid": + self.area_def = self._get_geographic_grid() + return self.area_def + elif self.filetype_info["file_type"] == "osi_sst": + self.area_def = self._get_polar_stereographic_grid() + return self.area_def + def get_area_def(self, area_id): """Override abstract baseclass method""" if "grid" in self.filename_info: - if self.filename_info["grid"] == "ease": - self.area_def = self._get_ease_grid() - return self.area_def - elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": - self.area_def = self._get_polar_stereographic_grid() - return self.area_def - else: - raise ValueError(f"Unknown grid type: {self.filename_info['grid']}") + return self._get_finfo_grid() else: - if self.filetype_info["file_type"] == "osi_radflux_grid": - self.area_def = self._get_geographic_grid() - return self.area_def - elif self.filetype_info["file_type"] == "osi_sst": - self.area_def = self._get_polar_stereographic_grid() - return self.area_def + return self._get_ftype_grid() + def _get_ds_attr(self, a_name): """Get a dataset attribute and check it's valid.""" @@ -132,23 +141,28 @@ def _get_ds_attr(self, a_name): except KeyError: return None + def _get_ds_units(self, ds_info, var_path): + """Find the units of the datasets.""" + + file_units = ds_info.get("file_units") + if file_units is None: + file_units = self._get_ds_attr(var_path + "/attr/units") + if file_units is None: + file_units = 1 + return file_units + def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" logger.debug(f"Reading {dataset_id['name']} from {self.filename}") var_path = ds_info.get("file_key", f"{dataset_id['name']}") shape = self[var_path + "/shape"] + data = self[var_path] if shape[0] == 1: # Remove the time dimension from dataset - data = self[var_path][0] - else: - data = self[var_path] + data = data[0] - file_units = ds_info.get("file_units") - if file_units is None: - file_units = self._get_ds_attr(var_path + "/attr/units") - if file_units is None: - file_units = 1 + file_units = self._get_ds_units(ds_info, var_path) # Try to get the valid limits for the data. # Not all datasets have these, so fall back on assuming no limits. 
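For context: after this simplification get_dataset still applies its masking and
scaling in a fixed order, namely valid range first, then the fill value, then
scale/offset. A small self-contained sketch of that order, using made-up numbers
rather than values from any real product file:

    import numpy as np
    import xarray as xr

    data = xr.DataArray(np.array([[-999.0, 5.0], [12000.0, 250.0]]), dims=("yc", "xc"))
    valid_min, valid_max, fill_value = 0, 10000, -999
    scale_factor, add_offset = 0.01, 0.0

    data = data.where(data >= valid_min)     # below valid_min -> NaN
    data = data.where(data <= valid_max)     # above valid_max -> NaN
    data = data.where(data != fill_value)    # fill values -> NaN
    data = data * scale_factor + add_offset  # scaling is applied last
    print(data.values)                       # masked pixels are NaN; 5 -> 0.05, 250 -> 2.5

Only the numbers are invented; the where/scale calls mirror the ones in the reader.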
diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index fd035ccbac..59550225b0 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -168,19 +168,17 @@ def setup_method(self, tester="ice"): data_vars["Polar_Stereographic_Grid"] = stere_ds data_vars["ice_conc"] = self.conc data_vars["total_uncertainty"] = self.uncert + self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) elif tester == "sst": data_vars["Polar_Stereographic_Grid"] = stere_ds data_vars["surface_temperature"] = self.sst + self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) elif tester == "flux_stere": data_vars["Polar_Stereographic_Grid"] = stere_ds_noproj data_vars["ssi"] = self.ssi - elif tester == "flux_geo": - data_vars["ssi"] = self.ssi_geo - if tester == "ice" or tester == "sst": - self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) - elif tester == "flux_stere": self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_flux) elif tester == "flux_geo": + data_vars["ssi"] = self.ssi_geo self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_geo) def test_instantiate_single_netcdf_file(self, tmp_path): From fd0ce951ada0a33d0d8313161faa3501d58bb1e9 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 9 Nov 2023 11:01:01 +0000 Subject: [PATCH 421/702] Tidy up OSI SAF tests. --- satpy/tests/reader_tests/test_osisaf_l3.py | 128 ++++++++------------- 1 file changed, 47 insertions(+), 81 deletions(-) diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index 59550225b0..e037884c04 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -86,99 +86,67 @@ class OSISAFL3ReaderTests: def setup_method(self, tester="ice"): """Create a fake dataset.""" - self.base_data = np.array(([-999, 1215, 1125, 11056, 9500], [200, 1, -999, 4215, 5756])) - self.base_data_ssi = np.array(([-999.99, 121.5, 11.25, 110.56, 950.0], [200, 1, -999.99, 42.15, 5.756])) - self.base_data_sst = np.array(([-32768, 273.2, 194.2, 220.78, 301.], [-32768, -32768, 273.22, 254.34, 204.21])) - self.base_data_ssi_geo = np.array(([-32768, 121.5, 11.25, 110.56, 950.0], [200, 1, -32768, 42.15, 5.756])) - self.base_data = np.expand_dims(self.base_data, axis=0) - self.base_data_ssi = np.expand_dims(self.base_data_ssi, axis=0) - self.base_data_sst = np.expand_dims(self.base_data_sst, axis=0) - self.unc_data = np.array(([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])) - self.yc_data = np.array(([-10, -5, 0, 5, 10], [-10, -5, 0, 5, 10])) - self.xc_data = np.array(([-5, -5, -5, -5, -5], [5, 5, 5, 5, 5])) - self.time_data = np.array([1.]) + base_data = np.array(([-999, 1215, 1125, 11056, 9500], [200, 1, -999, 4215, 5756])) + base_data_ssi = np.array(([-999.99, 121.5, 11.25, 110.56, 950.0], [200, 1, -999.99, 42.15, 5.756])) + base_data_sst = np.array(([-32768, 273.2, 194.2, 220.78, 301.], [-32768, -32768, 273.22, 254.34, 204.21])) + base_data_ssi_geo = np.array(([-32768, 121.5, 11.25, 110.56, 950.0], [200, 1, -32768, 42.15, 5.756])) + base_data = np.expand_dims(base_data, axis=0) + base_data_ssi = np.expand_dims(base_data_ssi, axis=0) + base_data_sst = np.expand_dims(base_data_sst, axis=0) + unc_data = np.array(([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])) + yc_data = np.array(([-10, -5, 0, 5, 10], [-10, -5, 0, 5, 10])) + xc_data = np.array(([-5, -5, -5, -5, -5], [5, 5, 5, 5, 5])) + time_data = np.array([1.]) self.scl = 1. self.add = 0. 
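        # Illustrative note: `scl` and `add` are the expected scaling used by the
        # shared get_dataset check, roughly
        #     expected = raw / self.scl + self.add
        # so the product-specific subclasses override them (together with varname,
        # fillv and maxv) to match each file type.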
- self.lat_data = np.array(([-68, -69, -70, -71, -72], [-68, -69, -70, -71, -72])) - self.lon_data = np.array(([-60, -60, -60, -60, -60], [-65, -65, -65, -65, -65])) - self.xc = xr.DataArray( - self.xc_data, - dims=("yc", "xc"), - attrs={"standard_name": "projection_x_coordinate", "units": "km"} - ) - self.yc = xr.DataArray( - self.yc_data, - dims=("yc", "xc"), - attrs={"standard_name": "projection_y_coordinate", "units": "km"} - ) - self.time = xr.DataArray( - self.time_data, - dims="time", - attrs={"standard_name": "projection_y_coordinate", "units": "km"} - ) - self.lat = xr.DataArray( - self.lat_data, - dims=("yc", "xc"), - attrs={"standard_name": "latitude", "units": "degrees_north"} - ) - self.lon = xr.DataArray( - self.lon_data, - dims=("yc", "xc"), - attrs={"standard_name": "longitude", "units": "degrees_east"} - ) - self.conc = xr.DataArray( - self.base_data, - dims=("time", "yc", "xc"), - attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, "units": "%", - "valid_min": 0, "valid_max": 10000, "standard_name": "sea_ice_area_fraction"} - ) - self.uncert = xr.DataArray( - self.unc_data, - dims=("yc", "xc"), - attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, - "valid_min": 0, "valid_max": 10000, "standard_name": "total_uncertainty"} - ) - self.ssi_geo = xr.DataArray( - self.base_data_ssi_geo, - dims=("lat", "lon"), - attrs={"scale_factor": 0.1, "add_offset": 0., "_FillValue": 32768, - "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"} - ) - self.ssi = xr.DataArray( - self.base_data_ssi, - dims=("time", "yc", "xc"), - attrs={"_FillValue": -999.99, "units": "W m-2", - "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"} - ) - self.sst = xr.DataArray( - self.base_data_sst, - dims=("time", "yc", "xc"), - attrs={"scale_factor": 0.01, "add_offset": 273.15, "_FillValue": -32768, "units": "K", - "valid_min": -8000., "valid_max": 5000., "standard_name": "sea_ice_surface_temperature"} - ) - data_vars = { - "xc": self.xc, - "yc": self.yc, - "time": self.time, - "lat": self.lat, - "lon": self.lon, } + lat_data = np.array(([-68, -69, -70, -71, -72], [-68, -69, -70, -71, -72])) + lon_data = np.array(([-60, -60, -60, -60, -60], [-65, -65, -65, -65, -65])) + + xc = xr.DataArray(xc_data, dims=("yc", "xc"), + attrs={"standard_name": "projection_x_coordinate", "units": "km"}) + yc = xr.DataArray(yc_data, dims=("yc", "xc"), + attrs={"standard_name": "projection_y_coordinate", "units": "km"}) + time = xr.DataArray(time_data, dims="time", + attrs={"standard_name": "projection_y_coordinate", "units": "km"}) + lat = xr.DataArray(lat_data, dims=("yc", "xc"), + attrs={"standard_name": "latitude", "units": "degrees_north"}) + lon = xr.DataArray(lon_data, dims=("yc", "xc"), + attrs={"standard_name": "longitude", "units": "degrees_east"}) + conc = xr.DataArray(base_data, dims=("time", "yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, "units": "%", + "valid_min": 0, "valid_max": 10000, "standard_name": "sea_ice_area_fraction"}) + uncert = xr.DataArray(unc_data, dims=("yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, "valid_min": 0, + "valid_max": 10000, "standard_name": "total_uncertainty"}) + ssi_geo = xr.DataArray(base_data_ssi_geo, dims=("lat", "lon"), + attrs={"scale_factor": 0.1, "add_offset": 0., "_FillValue": 32768, "valid_min": 0., + "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"}) + 
ssi = xr.DataArray(base_data_ssi, dims=("time", "yc", "xc"), + attrs={"_FillValue": -999.99, "units": "W m-2", "valid_min": 0., "valid_max": 1000., + "standard_name": "surface_downwelling_shortwave_flux_in_air"}) + sst = xr.DataArray(base_data_sst, dims=("time", "yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 273.15, "_FillValue": -32768, "units": "K", + "valid_min": -8000., "valid_max": 5000., + "standard_name": "sea_ice_surface_temperature"}) + data_vars = {"xc": xc, "yc": yc, "time": time, "lat": lat, "lon": lon} + if tester == "ice": data_vars["Lambert_Azimuthal_Grid"] = ease_ds data_vars["Polar_Stereographic_Grid"] = stere_ds - data_vars["ice_conc"] = self.conc - data_vars["total_uncertainty"] = self.uncert + data_vars["ice_conc"] = conc + data_vars["total_uncertainty"] = uncert self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) elif tester == "sst": data_vars["Polar_Stereographic_Grid"] = stere_ds - data_vars["surface_temperature"] = self.sst + data_vars["surface_temperature"] = sst self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) elif tester == "flux_stere": data_vars["Polar_Stereographic_Grid"] = stere_ds_noproj - data_vars["ssi"] = self.ssi + data_vars["ssi"] = ssi self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_flux) elif tester == "flux_geo": - data_vars["ssi"] = self.ssi_geo + data_vars["ssi"] = ssi_geo self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_geo) def test_instantiate_single_netcdf_file(self, tmp_path): @@ -268,7 +236,6 @@ def test_get_area_def_stere(self, tmp_path): np.testing.assert_allclose(area_def.area_extent, (-2185821.7955, 1019265.4426, -1702157.4538, 982741.0642)) - def test_get_area_def_ease(self, tmp_path): """Test getting the area definition for the EASE grid.""" tmp_filepath = tmp_path / "fake_dataset.nc" @@ -340,7 +307,6 @@ def setup_method(self): self.maxv = 1000 self.scl = 10 - def test_get_area_def_grid(self, tmp_path): """Test getting the area definition for the lat/lon grid.""" tmp_filepath = tmp_path / "fake_dataset.nc" From 0ef3e6b1ac15702db2d022af3b9678a0386ccceb Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 9 Nov 2023 13:01:18 -0600 Subject: [PATCH 422/702] Cleanup sgs time handling in geos_imager_hrit --- satpy/readers/goes_imager_hrit.py | 48 ++++++++++++++++--------------- 1 file changed, 25 insertions(+), 23 deletions(-) diff --git a/satpy/readers/goes_imager_hrit.py b/satpy/readers/goes_imager_hrit.py index 1724ba214d..d90ebb4a72 100644 --- a/satpy/readers/goes_imager_hrit.py +++ b/satpy/readers/goes_imager_hrit.py @@ -31,6 +31,7 @@ import numpy as np import xarray as xr +from satpy._compat import ArrayLike from satpy.readers._geos_area import get_area_definition, get_area_extent, get_geos_area_naming from satpy.readers.eum_base import recarray2dict, time_cds_short from satpy.readers.hrit_base import ( @@ -115,10 +116,21 @@ class CalibrationError(Exception): ("msecs", "u1")]) -def make_sgs_time(sgs_time_array): +def make_sgs_time(sgs_time_array: ArrayLike) -> datetime: """Make sgs time.""" + epoch_year = _epoch_year_from_sgs_time(sgs_time_array) + doy_offset = _epoch_doy_offset_from_sgs_time(sgs_time_array) + return epoch_year + doy_offset + + +def _epoch_year_from_sgs_time(sgs_time_array: ArrayLike) -> datetime: century = sgs_time_array["century"].astype(np.int64) year = sgs_time_array["year"].astype(np.int64) + year = ((century >> 4) * 1000 + (century & 15) * 100 + (year >> 4) * 10 + (year & 15)) + return datetime(int(year), 1, 1) + + +def 
_epoch_doy_offset_from_sgs_time(sgs_time_array: ArrayLike) -> timedelta: doy1 = sgs_time_array["doy1"].astype(np.int64) doy_hours = sgs_time_array["doy_hours"].astype(np.int64) hours_mins = sgs_time_array["hours_mins"].astype(np.int64) @@ -126,28 +138,18 @@ def make_sgs_time(sgs_time_array): secs_msecs = sgs_time_array["secs_msecs"].astype(np.int64) msecs = sgs_time_array["msecs"].astype(np.int64) - year = ((century >> 4) * 1000 + - (century & 15) * 100 + - (year >> 4) * 10 + - (year & 15)) - doy = ((doy1 >> 4) * 100 + - (doy1 & 15) * 10 + - (doy_hours >> 4)) - hours = ((doy_hours & 15) * 10 + - (hours_mins >> 4)) - mins = ((hours_mins & 15) * 10 + - (mins_secs >> 4)) - secs = ((mins_secs & 15) * 10 + - (secs_msecs >> 4)) - msecs = ((secs_msecs & 15) * 100 + - (msecs >> 4) * 10 + - (msecs & 15)) - return (datetime(int(year), 1, 1) + - timedelta(days=int(doy - 1), - hours=int(hours), - minutes=int(mins), - seconds=int(secs), - milliseconds=int(msecs))) + doy = ((doy1 >> 4) * 100 + (doy1 & 15) * 10 + (doy_hours >> 4)) + hours = ((doy_hours & 15) * 10 + (hours_mins >> 4)) + mins = ((hours_mins & 15) * 10 + (mins_secs >> 4)) + secs = ((mins_secs & 15) * 10 + (secs_msecs >> 4)) + msecs = ((secs_msecs & 15) * 100 + (msecs >> 4) * 10 + (msecs & 15)) + return timedelta( + days=int(doy - 1), + hours=int(hours), + minutes=int(mins), + seconds=int(secs), + milliseconds=int(msecs) + ) satellite_status = np.dtype([("TagType", " Date: Fri, 10 Nov 2023 09:59:32 +0200 Subject: [PATCH 423/702] Remove duplicate entries of required netcdf variables in FCI reader --- satpy/etc/readers/fci_l1c_nc.yaml | 8 -------- 1 file changed, 8 deletions(-) diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml index f89699ae3a..4462087a39 100644 --- a/satpy/etc/readers/fci_l1c_nc.yaml +++ b/satpy/etc/readers/fci_l1c_nc.yaml @@ -37,16 +37,12 @@ file_types: - data/{channel_name}/measured/index_map - data/mtg_geos_projection/attr/inverse_flattening - data/mtg_geos_projection/attr/longitude_of_projection_origin - - data/mtg_geos_projection/attr/longitude_of_projection_origin - - data/mtg_geos_projection/attr/perspective_point_height - - data/mtg_geos_projection/attr/perspective_point_height - data/mtg_geos_projection/attr/perspective_point_height - data/mtg_geos_projection/attr/semi_major_axis - data/swath_direction - data/swath_number - index - state/celestial/earth_sun_distance - - state/celestial/earth_sun_distance - state/celestial/subsolar_latitude - state/celestial/subsolar_longitude - state/celestial/sun_satellite_distance @@ -95,16 +91,12 @@ file_types: - data/{channel_name}/measured/index_map - data/mtg_geos_projection/attr/inverse_flattening - data/mtg_geos_projection/attr/longitude_of_projection_origin - - data/mtg_geos_projection/attr/longitude_of_projection_origin - - data/mtg_geos_projection/attr/perspective_point_height - - data/mtg_geos_projection/attr/perspective_point_height - data/mtg_geos_projection/attr/perspective_point_height - data/mtg_geos_projection/attr/semi_major_axis - data/swath_direction - data/swath_number - index - state/celestial/earth_sun_distance - - state/celestial/earth_sun_distance - state/celestial/subsolar_latitude - state/celestial/subsolar_longitude - state/celestial/sun_satellite_distance From f3c4c7945512b2e01821ea358b25a2751da8325f Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 10 Nov 2023 10:25:54 +0200 Subject: [PATCH 424/702] Remove attribute names that are read via their parent --- satpy/etc/readers/fci_l1c_nc.yaml | 12 ++---------- 1 file 
changed, 2 insertions(+), 10 deletions(-) diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml index 4462087a39..d241b3fa9e 100644 --- a/satpy/etc/readers/fci_l1c_nc.yaml +++ b/satpy/etc/readers/fci_l1c_nc.yaml @@ -20,7 +20,6 @@ file_types: expected_segments: 40 required_netcdf_variables: - attr/platform - - data/mtg_geos_projection - data/{channel_name}/measured/start_position_row - data/{channel_name}/measured/end_position_row - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber @@ -35,10 +34,7 @@ file_types: - data/{channel_name}/measured/y - data/{channel_name}/measured/pixel_quality - data/{channel_name}/measured/index_map - - data/mtg_geos_projection/attr/inverse_flattening - - data/mtg_geos_projection/attr/longitude_of_projection_origin - - data/mtg_geos_projection/attr/perspective_point_height - - data/mtg_geos_projection/attr/semi_major_axis + - data/mtg_geos_projection - data/swath_direction - data/swath_number - index @@ -74,7 +70,6 @@ file_types: expected_segments: 40 required_netcdf_variables: - attr/platform - - data/mtg_geos_projection - data/{channel_name}/measured/start_position_row - data/{channel_name}/measured/end_position_row - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber @@ -89,10 +84,7 @@ file_types: - data/{channel_name}/measured/y - data/{channel_name}/measured/pixel_quality - data/{channel_name}/measured/index_map - - data/mtg_geos_projection/attr/inverse_flattening - - data/mtg_geos_projection/attr/longitude_of_projection_origin - - data/mtg_geos_projection/attr/perspective_point_height - - data/mtg_geos_projection/attr/semi_major_axis + - data/mtg_geos_projection - data/swath_direction - data/swath_number - index From e43f3fa59ffc51a43d910ee4784165a8065ae5cb Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 10 Nov 2023 13:59:44 +0100 Subject: [PATCH 425/702] Fix nwcsaf_geo start time to be nominal time --- satpy/readers/nwcsaf_nc.py | 5 ++++- satpy/tests/reader_tests/test_nwcsaf_nc.py | 10 +++++++--- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/satpy/readers/nwcsaf_nc.py b/satpy/readers/nwcsaf_nc.py index 1b3d65cb96..5d8320f954 100644 --- a/satpy/readers/nwcsaf_nc.py +++ b/satpy/readers/nwcsaf_nc.py @@ -372,7 +372,10 @@ def __del__(self): @property def start_time(self): """Return the start time of the object.""" - return read_nwcsaf_time(self.nc.attrs["time_coverage_start"]) + try: + return read_nwcsaf_time(self.nc.attrs["nominal_product_time"]) + except KeyError: + return read_nwcsaf_time(self.nc.attrs["time_coverage_start"]) @property def end_time(self): diff --git a/satpy/tests/reader_tests/test_nwcsaf_nc.py b/satpy/tests/reader_tests/test_nwcsaf_nc.py index 2070e5187c..07d6cee174 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_nc.py +++ b/satpy/tests/reader_tests/test_nwcsaf_nc.py @@ -44,6 +44,7 @@ "pal_rgb": 3} NOMINAL_LONGITUDE = 0.0 +NOMINAL_TIME = "2023-01-18T10:30:00Z" START_TIME = "2023-01-18T10:39:17Z" END_TIME = "2023-01-18T10:42:22Z" START_TIME_PPS = "20230118T103917000Z" @@ -57,6 +58,9 @@ global_attrs.update(PROJ) +global_attrs_geo = global_attrs.copy() +global_attrs_geo["nominal_product_time"] = NOMINAL_TIME + CTTH_PALETTE_MEANINGS = ("0 500 1000 1500") COT_PALETTE_MEANINGS = ("0 2 5 8 10 13 16 19 23 26 29 33 36 40 43 47 51 55 59 63 68 72 77 81 86 91 96" @@ -90,7 +94,7 @@ def nwcsaf_geo_ct_filename(tmp_path_factory): return create_nwcsaf_geo_ct_file(tmp_path_factory.mktemp("data")) -def create_nwcsaf_geo_ct_file(directory, 
attrs=global_attrs): +def create_nwcsaf_geo_ct_file(directory, attrs=global_attrs_geo): """Create a CT file.""" filename = directory / "S_NWC_CT_MSG4_MSG-N-VISIR_20230118T103000Z_PLAX.nc" with h5netcdf.File(filename, mode="w") as nc_file: @@ -227,7 +231,7 @@ def nwcsaf_pps_cpp_filehandler(nwcsaf_pps_cpp_filename): @pytest.fixture(scope="session") def nwcsaf_old_geo_ct_filename(tmp_path_factory): """Create a CT file and return the filename.""" - attrs = global_attrs.copy() + attrs = global_attrs_geo.copy() attrs.update(PROJ_KM) attrs["time_coverage_start"] = np.array(["2023-01-18T10:39:17Z"], dtype="S20") return create_nwcsaf_geo_ct_file(tmp_path_factory.mktemp("data-old"), attrs=attrs) @@ -343,7 +347,7 @@ def test_times_are_in_dataset_attributes(self, nwcsaf_geo_ct_filehandler): def test_start_time(self, nwcsaf_geo_ct_filehandler): """Test the start time property.""" - assert nwcsaf_geo_ct_filehandler.start_time == read_nwcsaf_time(START_TIME) + assert nwcsaf_geo_ct_filehandler.start_time == read_nwcsaf_time(NOMINAL_TIME) def test_end_time(self, nwcsaf_geo_ct_filehandler): """Test the end time property.""" From bff527e3bb666f2f30683c678bb0e46051c437fb Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 13 Nov 2023 17:49:42 +0000 Subject: [PATCH 426/702] Update the OSI SAF L3 reader with some suggestions from review. --- satpy/readers/osisaf_l3_nc.py | 31 +++++++++------------- satpy/tests/reader_tests/test_osisaf_l3.py | 2 -- 2 files changed, 12 insertions(+), 21 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 1574b6ba74..fd0a5cfc20 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright (c) 2023 Satpy developers # # This file is part of satpy. @@ -20,8 +18,6 @@ import logging from datetime import datetime -import numpy as np - from satpy.readers.netcdf_utils import NetCDF4FileHandler logger = logging.getLogger(__name__) @@ -29,18 +25,6 @@ class OSISAFL3NCFileHandler(NetCDF4FileHandler): """Reader for the OSISAF l3 netCDF format.""" - - @staticmethod - def _parse_datetime(datestr): - try: - return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S") - except ValueError: - try: - return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") - except ValueError: - return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ") - - def _get_ease_grid(self): """Set up the EASE grid.""" from pyresample import create_area_def @@ -169,14 +153,14 @@ def get_dataset(self, dataset_id, ds_info): valid_min = self._get_ds_attr(var_path + "/attr/valid_min") valid_max = self._get_ds_attr(var_path + "/attr/valid_max") if valid_min is not None and valid_max is not None: - data = data.where(data >= valid_min, np.nan) - data = data.where(data <= valid_max, np.nan) + data = data.where(data >= valid_min) + data = data.where(data <= valid_max) # Try to get the fill value for the data. # If there isn't one, assume all remaining pixels are valid. fill_value = self._get_ds_attr(var_path + "/attr/_FillValue") if fill_value is not None: - data = data.where(data != fill_value, np.nan) + data = data.where(data != fill_value) # Try to get the scale and offset for the data. # As above, not all datasets have these, so fall back on assuming no limits. 
@@ -217,6 +201,15 @@ def _get_platname(self): except KeyError: return self["/attr/platform"] + @staticmethod + def _parse_datetime(datestr): + try: + return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S") + except ValueError: + try: + return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") + except ValueError: + return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ") @property def start_time(self): diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index e037884c04..a9a595202b 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright (c) 2023 Satpy developers # # This file is part of satpy. From 7f817731cb6ff125f4e7b3e60afeaa2680f0abe6 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 13 Nov 2023 17:52:48 +0000 Subject: [PATCH 427/702] Remove unneeded function in OSI-SAF L3 --- satpy/readers/osisaf_l3_nc.py | 31 ++++++++++++------------------- 1 file changed, 12 insertions(+), 19 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index fd0a5cfc20..e5e185bb51 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -118,19 +118,12 @@ def get_area_def(self, area_id): return self._get_ftype_grid() - def _get_ds_attr(self, a_name): - """Get a dataset attribute and check it's valid.""" - try: - return self[a_name] - except KeyError: - return None - def _get_ds_units(self, ds_info, var_path): """Find the units of the datasets.""" file_units = ds_info.get("file_units") if file_units is None: - file_units = self._get_ds_attr(var_path + "/attr/units") + file_units = self.get(var_path + "/attr/units") if file_units is None: file_units = 1 return file_units @@ -150,22 +143,22 @@ def get_dataset(self, dataset_id, ds_info): # Try to get the valid limits for the data. # Not all datasets have these, so fall back on assuming no limits. - valid_min = self._get_ds_attr(var_path + "/attr/valid_min") - valid_max = self._get_ds_attr(var_path + "/attr/valid_max") + valid_min = self.get(var_path + "/attr/valid_min") + valid_max = self.get(var_path + "/attr/valid_max") if valid_min is not None and valid_max is not None: data = data.where(data >= valid_min) data = data.where(data <= valid_max) # Try to get the fill value for the data. # If there isn't one, assume all remaining pixels are valid. - fill_value = self._get_ds_attr(var_path + "/attr/_FillValue") + fill_value = self.get(var_path + "/attr/_FillValue") if fill_value is not None: data = data.where(data != fill_value) # Try to get the scale and offset for the data. # As above, not all datasets have these, so fall back on assuming no limits. 
- scale_factor = self._get_ds_attr(var_path + "/attr/scale_factor") - scale_offset = self._get_ds_attr(var_path + "/attr/add_offset") + scale_factor = self.get(var_path + "/attr/scale_factor") + scale_offset = self.get(var_path + "/attr/add_offset") if scale_offset is not None and scale_factor is not None: data = (data * scale_factor + scale_offset) @@ -213,22 +206,22 @@ def _parse_datetime(datestr): @property def start_time(self): - start_t = self._get_ds_attr("/attr/start_date") + start_t = self.get("/attr/start_date") if start_t is None: - start_t = self._get_ds_attr("/attr/start_time") + start_t = self.get("/attr/start_time") if start_t is None: - start_t = self._get_ds_attr("/attr/time_coverage_start") + start_t = self.get("/attr/time_coverage_start") if start_t is None: raise ValueError("Unknown start time attribute.") return self._parse_datetime(start_t) @property def end_time(self): - end_t = self._get_ds_attr("/attr/stop_date") + end_t = self.get("/attr/stop_date") if end_t is None: - end_t = self._get_ds_attr("/attr/stop_time") + end_t = self.get("/attr/stop_time") if end_t is None: - end_t = self._get_ds_attr("/attr/time_coverage_end") + end_t = self.get("/attr/time_coverage_end") if end_t is None: raise ValueError("Unknown stop time attribute.") return self._parse_datetime(end_t) From c450ad502f0a024984ed1d1e326e6df1d3087055 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 13 Nov 2023 17:55:11 +0000 Subject: [PATCH 428/702] Update OSI SAF area def docstring. --- satpy/readers/osisaf_l3_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index e5e185bb51..fa93424518 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -111,7 +111,7 @@ def _get_ftype_grid(self): return self.area_def def get_area_def(self, area_id): - """Override abstract baseclass method""" + """Get the area definition, which varies depending on file type and structure.""" if "grid" in self.filename_info: return self._get_finfo_grid() else: From cd864e300701e98b539de0b02471d2e32a0b25cc Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 13 Nov 2023 18:00:02 +0000 Subject: [PATCH 429/702] Add support for NRT ice concentration files to OSI SAF L3 reader. 
--- satpy/etc/readers/osisaf_nc.yaml | 4 ++-- satpy/readers/osisaf_l3_nc.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/etc/readers/osisaf_nc.yaml b/satpy/etc/readers/osisaf_nc.yaml index 479b5a38db..d789ae414c 100644 --- a/satpy/etc/readers/osisaf_nc.yaml +++ b/satpy/etc/readers/osisaf_nc.yaml @@ -17,7 +17,8 @@ reader: file_types: osi_sea_ice_conc: file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler - file_patterns: ['ice_conc_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + file_patterns: ['ice_conc_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc', + 'S-OSI_-{product_centre}_-{sensor}-GL_{hemisphere:2s}_CONCn__-{start_time:%Y%m%d%H%M}Z.nc'] osi_sea_ice_edge: file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler file_patterns: ['ice_edge_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] @@ -42,7 +43,6 @@ datasets: status_flag: name: status_flag file_type: [osi_sea_ice_conc, osi_sea_ice_edge, osi_sea_ice_type] - orbit_num_amsr: name: orbit_num_amsr file_type: [osi_sea_ice_edge, osi_sea_ice_type] diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index fa93424518..0cc5e672b3 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -106,7 +106,7 @@ def _get_ftype_grid(self): if self.filetype_info["file_type"] == "osi_radflux_grid": self.area_def = self._get_geographic_grid() return self.area_def - elif self.filetype_info["file_type"] == "osi_sst": + elif self.filetype_info["file_type"] in ["osi_sst", "osi_sea_ice_conc"]: self.area_def = self._get_polar_stereographic_grid() return self.area_def From fd704fe6db0de046a2462efba497dd562904a734 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 13 Nov 2023 20:32:10 +0000 Subject: [PATCH 430/702] Simplify date parsing in OSI SAF reader. --- satpy/readers/osisaf_l3_nc.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 0cc5e672b3..1affb3a883 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -196,13 +196,12 @@ def _get_platname(self): @staticmethod def _parse_datetime(datestr): - try: - return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S") - except ValueError: + for dt_format in ("%Y-%m-%d %H:%M:%S","%Y%m%dT%H%M%SZ", "%Y-%m-%dT%H:%M:%SZ"): try: - return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") + return datetime.strptime(datestr, dt_format) except ValueError: - return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ") + continue + raise ValueError(f"Unsupported date format: {datestr}") @property def start_time(self): From 9ef2af9cbc3642c0dc90a55e8e4e097d475997f8 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 14 Nov 2023 11:54:48 +0100 Subject: [PATCH 431/702] Avoid modification to CHANGELOG --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 12c8e50194..799ae0a867 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1143,7 +1143,7 @@ In this release 6 issues were closed. 
* [PR 1606](https://github.com/pytroll/satpy/pull/1606) - Make FSFile hashable again ([1605](https://github.com/pytroll/satpy/issues/1605), [1604](https://github.com/pytroll/satpy/issues/1604)) * [PR 1603](https://github.com/pytroll/satpy/pull/1603) - Update slstr_l2.yaml * [PR 1600](https://github.com/pytroll/satpy/pull/1600) - When setting `upper_right_corner` make sure that all dataset coordinates are flipped -* [PR 1588](https://github.com/pytroll/satpy/pull/1588) - Bugfix of add_coordinates_attrs_coords ([1493](https://github.com/pytroll/satpy/issues/1493)) +* [PR 1588](https://github.com/pytroll/satpy/pull/1588) - Bugfix of link_coords ([1493](https://github.com/pytroll/satpy/issues/1493)) #### Features added From e96f0de1f935865146a9967e35db65c8f0063b2f Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 14 Nov 2023 12:02:42 +0100 Subject: [PATCH 432/702] Rename _encode* functions --- satpy/cf/attrs.py | 30 +++++++++++++++--------------- satpy/tests/cf_tests/test_attrs.py | 6 +++--- 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/satpy/cf/attrs.py b/satpy/cf/attrs.py index d4a41f2bfc..f4d6ece0d0 100644 --- a/satpy/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -65,7 +65,7 @@ def _encode(self, obj): return str(obj) -def __encode_nc(obj): +def _encode_object(obj): """Try to encode `obj` as a netCDF/Zarr compatible datatype which most closely resembles the object's nature. Raises: @@ -90,23 +90,15 @@ def __encode_nc(obj): raise ValueError('Unable to encode') -def _encode_nc(obj): - """Encode the given object as a netcdf compatible datatype.""" - try: - return obj.to_cf() - except AttributeError: - return _encode_python_objects(obj) - - def _encode_python_objects(obj): """Try to find the datatype which most closely resembles the object's nature. If on failure, encode as a string. Plain lists are encoded recursively. """ if isinstance(obj, (list, tuple)) and all([not isinstance(item, (list, tuple)) for item in obj]): - return [_encode_nc(item) for item in obj] + return [_encode_to_cf(item) for item in obj] try: - dump = __encode_nc(obj) + dump = _encode_object(obj) except ValueError: try: # Decode byte-strings @@ -117,7 +109,15 @@ def _encode_python_objects(obj): return dump -def _encode_attrs_nc(attrs): +def _encode_to_cf(obj): + """Encode the given object as a netcdf compatible datatype.""" + try: + return obj.to_cf() + except AttributeError: + return _encode_python_objects(obj) + + +def _encode_nc_attrs(attrs): """Encode dataset attributes in a netcdf compatible datatype. 
Args: @@ -130,7 +130,7 @@ def _encode_attrs_nc(attrs): encoded_attrs = [] for key, val in sorted(attrs.items()): if val is not None: - encoded_attrs.append((key, _encode_nc(val))) + encoded_attrs.append((key, _encode_to_cf(val))) return OrderedDict(encoded_attrs) @@ -193,7 +193,7 @@ def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs): if flatten_attrs: dataarray.attrs = flatten_dict(dataarray.attrs) - dataarray.attrs = _encode_attrs_nc(dataarray.attrs) + dataarray.attrs = _encode_nc_attrs(dataarray.attrs) return dataarray @@ -215,7 +215,7 @@ def preprocess_header_attrs(header_attrs, flatten_attrs=False): if header_attrs is not None: if flatten_attrs: header_attrs = flatten_dict(header_attrs) - header_attrs = _encode_attrs_nc(header_attrs) # OrderedDict + header_attrs = _encode_nc_attrs(header_attrs) # OrderedDict else: header_attrs = {} header_attrs = _add_history(header_attrs) diff --git a/satpy/tests/cf_tests/test_attrs.py b/satpy/tests/cf_tests/test_attrs.py index 787d1dc82d..9306ae9749 100644 --- a/satpy/tests/cf_tests/test_attrs.py +++ b/satpy/tests/cf_tests/test_attrs.py @@ -22,16 +22,16 @@ class TestCFAttributeEncoding: """Test case for CF attribute encodings.""" - def test__encode_attrs_nc(self): + def test__encode_nc_attrs(self): """Test attributes encoding.""" - from satpy.cf.attrs import _encode_attrs_nc + from satpy.cf.attrs import _encode_nc_attrs from satpy.tests.cf_tests._test_data import get_test_attrs from satpy.tests.utils import assert_dict_array_equality attrs, expected, _ = get_test_attrs() # Test encoding - encoded = _encode_attrs_nc(attrs) + encoded = _encode_nc_attrs(attrs) assert_dict_array_equality(expected, encoded) # Test decoding of json-encoded attributes From 48df162107648870be89f0af9ddbd4fa6d1d9aba Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 14 Nov 2023 12:04:24 +0100 Subject: [PATCH 433/702] Update xarray version --- satpy/tests/writer_tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index f50a8e99d3..fc1fa67343 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -570,5 +570,5 @@ def _should_use_compression_keyword(): versions = _get_backend_versions() return ( versions["libnetcdf"] >= Version("4.9.0") and - versions["xarray"] >= Version("2023.10") + versions["xarray"] >= Version("2023.11") ) From 14b1f066416190b464b630750d12519f00ef5fe4 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 14 Nov 2023 13:36:54 +0100 Subject: [PATCH 434/702] Set default epoch argument to None --- satpy/_scene_converters.py | 4 --- satpy/cf/coords.py | 2 +- satpy/cf/dataarray.py | 6 ++++- satpy/cf/datasets.py | 33 ++++++++++++++----------- satpy/scene.py | 2 +- satpy/tests/cf_tests/test_coords.py | 2 -- satpy/tests/cf_tests/test_dataaarray.py | 2 -- satpy/tests/test_writers.py | 3 +-- satpy/writers/cf_writer.py | 20 ++++++++------- 9 files changed, 37 insertions(+), 37 deletions(-) diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py index 54ccf85ac9..a890963a06 100644 --- a/satpy/_scene_converters.py +++ b/satpy/_scene_converters.py @@ -90,12 +90,8 @@ def to_xarray(scn, A CF-compliant xr.Dataset """ - from satpy.cf import EPOCH from satpy.cf.datasets import collect_cf_datasets - if epoch is None: - epoch = EPOCH - # Get list of DataArrays if datasets is None: datasets = list(scn.keys()) # list all loaded DataIDs diff --git a/satpy/cf/coords.py b/satpy/cf/coords.py index f486502a44..6e7a0892e9 
100644 --- a/satpy/cf/coords.py +++ b/satpy/cf/coords.py @@ -188,7 +188,7 @@ def check_unique_projection_coords(dict_dataarrays): token_x = tokenize(dataarray["x"].data) unique_x.add(token_x) if len(unique_x) > 1 or len(unique_y) > 1: - raise ValueError("Datasets to be saved in one file (or one group) must have identical projection coordinates. " + raise ValueError("Datasets to be saved in one file (or one group) must have identical projection coordinates." "Please group them by area or save them in separate files.") diff --git a/satpy/cf/dataarray.py b/satpy/cf/dataarray.py index 5a7779f4c1..a8ac78d9a3 100644 --- a/satpy/cf/dataarray.py +++ b/satpy/cf/dataarray.py @@ -57,7 +57,7 @@ def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name def make_cf_dataarray(dataarray, - epoch=EPOCH, + epoch=None, flatten_attrs=False, exclude_attrs=None, include_orig_name=True, @@ -70,6 +70,7 @@ def make_cf_dataarray(dataarray, The data array to be made CF-compliant. epoch : str, optional Reference time for encoding of time coordinates. + If None, the default reference time is retrieved using `from satpy.cf import EPOCH` flatten_attrs : bool, optional If True, flatten dict-type attributes. The default is False. @@ -89,6 +90,9 @@ def make_cf_dataarray(dataarray, CF-compliant xr.DataArray. """ + if epoch is None: + epoch = EPOCH + dataarray = _preprocess_dataarray_name(dataarray=dataarray, numeric_name_prefix=numeric_name_prefix, include_orig_name=include_orig_name) diff --git a/satpy/cf/datasets.py b/satpy/cf/datasets.py index e801be2fdf..d85a943fe7 100644 --- a/satpy/cf/datasets.py +++ b/satpy/cf/datasets.py @@ -61,13 +61,14 @@ def _get_groups(groups, list_datarrays): def _collect_cf_dataset(list_dataarrays, - epoch=EPOCH, - flatten_attrs=False, - exclude_attrs=None, - include_lonlats=True, - pretty=False, - include_orig_name=True, - numeric_name_prefix="CHANNEL_"): + epoch, + flatten_attrs, + exclude_attrs, + include_lonlats, + pretty, + include_orig_name, + numeric_name_prefix, + ): """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Dataset. Parameters @@ -77,19 +78,18 @@ def _collect_cf_dataset(list_dataarrays, epoch : str Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using `from satpy.cf import EPOCH` - flatten_attrs : bool, optional + flatten_attrs : bool If True, flatten dict-type attributes. - exclude_attrs : list, optional + exclude_attrs : list List of xr.DataArray attribute names to be excluded. - include_lonlats : bool, optional + include_lonlats : bool If True, it includes 'latitude' and 'longitude' coordinates also for satpy scene defined on an AreaDefinition. If the 'area' attribute is a SwathDefinition, it always include latitude and longitude coordinates. - pretty : bool, optional + pretty : bool Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. - include_orig_name : bool, optional + include_orig_name : bool Include the original dataset name as a variable attribute in the xr.Dataset. - numeric_name_prefix : str, optional + numeric_name_prefix : str Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. 
@@ -180,7 +180,7 @@ def collect_cf_datasets(list_dataarrays, flatten_attrs=False, pretty=True, include_lonlats=True, - epoch=EPOCH, + epoch=None, include_orig_name=True, numeric_name_prefix="CHANNEL_", groups=None): @@ -231,6 +231,9 @@ def collect_cf_datasets(list_dataarrays, from satpy.cf.attrs import preprocess_header_attrs from satpy.cf.coords import add_time_bounds_dimension + if epoch is None: + epoch = EPOCH + if not list_dataarrays: raise RuntimeError("None of the requested datasets have been " "generated or could not be loaded. Requested " diff --git a/satpy/scene.py b/satpy/scene.py index 587721a7be..bb8cf0ffab 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1128,7 +1128,7 @@ def to_xarray(self, epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using "from satpy.cf import EPOCH" + If None, the default reference time is defined using "from satpy.cf import EPOCH" flatten_attrs (bool): If True, flatten dict-type attributes. exclude_attrs (list): diff --git a/satpy/tests/cf_tests/test_coords.py b/satpy/tests/cf_tests/test_coords.py index 2462f59181..68ba319741 100644 --- a/satpy/tests/cf_tests/test_coords.py +++ b/satpy/tests/cf_tests/test_coords.py @@ -228,5 +228,3 @@ def test_has_projection_coords(self, datasets): assert has_projection_coords(datasets) datasets["lat"].attrs["standard_name"] = "dummy" assert not has_projection_coords(datasets) - - # add_xy_coords_attrs diff --git a/satpy/tests/cf_tests/test_dataaarray.py b/satpy/tests/cf_tests/test_dataaarray.py index 099013bbbc..d0154cd84f 100644 --- a/satpy/tests/cf_tests/test_dataaarray.py +++ b/satpy/tests/cf_tests/test_dataaarray.py @@ -114,5 +114,3 @@ def test_make_cf_dataarray_one_dimensional_array(self): arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=("y",), coords={"y": [0, 1, 2, 3], "acq_time": ("y", [0, 1, 2, 3])}) _ = make_cf_dataarray(arr) - - # _handle_dataarray_name diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py index c2d049dae1..c11066d3f6 100644 --- a/satpy/tests/test_writers.py +++ b/satpy/tests/test_writers.py @@ -874,9 +874,8 @@ def test_group_results_by_output_file(tmp_path): """ from pyresample import create_area_def + from satpy.tests.utils import make_fake_scene from satpy.writers import group_results_by_output_file - - from .utils import make_fake_scene x = 10 fake_area = create_area_def("sargasso", 4326, resolution=1, width=x, height=x, center=(0, 0)) fake_scene = make_fake_scene( diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index de3b445025..7076cc841d 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -162,7 +162,7 @@ import xarray as xr from packaging.version import Version -from satpy.cf import EPOCH +from satpy.cf import EPOCH # noqa: F401 (for backward compatibility) from satpy.writers import Writer logger = logging.getLogger(__name__) @@ -232,7 +232,7 @@ def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs): """Save the *dataset* to a given *filename*.""" return self.save_datasets([dataset], filename, **kwargs) - def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH, + def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=None, flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False, include_orig_name=True, numeric_name_prefix="CHANNEL_", 
**to_netcdf_kwargs): """Save the given datasets in one netCDF file. @@ -256,6 +256,7 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, preference for 'netcdf4'. epoch (str): Reference time for encoding of time coordinates. + If None, the default reference time is defined using `from satpy.cf import EPOCH` flatten_attrs (bool): If True, flatten dict-type attributes. exclude_attrs (list): @@ -326,23 +327,24 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, return written @staticmethod - def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, + def da2cf(dataarray, epoch=None, flatten_attrs=False, exclude_attrs=None, include_orig_name=True, numeric_name_prefix="CHANNEL_"): """Convert the dataarray to something cf-compatible. Args: dataarray (xr.DataArray): - The data array to be converted + The data array to be converted. epoch (str): - Reference time for encoding of time coordinates + Reference time for encoding of time coordinates. + If None, the default reference time is defined using `from satpy.cf import EPOCH` flatten_attrs (bool): - If True, flatten dict-type attributes + If True, flatten dict-type attributes. exclude_attrs (list): - List of dataset attributes to be excluded + List of dataset attributes to be excluded. include_orig_name (bool): - Include the original dataset name in the netcdf variable attributes + Include the original dataset name in the netcdf variable attributes. numeric_name_prefix (str): - Prepend dataset name with this if starting with a digit + Prepend dataset name with this if starting with a digit. """ from satpy.cf.dataarray import make_cf_dataarray warnings.warn("CFWriter.da2cf is deprecated." From ec5f8fc4cafb1a89a39a42a7311077c896ef59a1 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 14 Nov 2023 13:41:05 +0100 Subject: [PATCH 435/702] Reduce future risk of circular imports --- satpy/_scene_converters.py | 2 +- satpy/cf/dataarray.py | 5 +++-- satpy/cf/datasets.py | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py index a890963a06..c400a159f1 100644 --- a/satpy/_scene_converters.py +++ b/satpy/_scene_converters.py @@ -66,7 +66,7 @@ def to_xarray(scn, epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using "from satpy.cf import EPOCH" + If None, the default reference time is defined using "from satpy.cf import EPOCH" flatten_attrs (bool): If True, flatten dict-type attributes. exclude_attrs (list): diff --git a/satpy/cf/dataarray.py b/satpy/cf/dataarray.py index a8ac78d9a3..39b5d82dc9 100644 --- a/satpy/cf/dataarray.py +++ b/satpy/cf/dataarray.py @@ -19,7 +19,6 @@ import logging import warnings -from satpy.cf import EPOCH from satpy.cf.attrs import preprocess_datarray_attrs from satpy.cf.coords import add_xy_coords_attrs, set_cf_time_info @@ -70,7 +69,7 @@ def make_cf_dataarray(dataarray, The data array to be made CF-compliant. epoch : str, optional Reference time for encoding of time coordinates. - If None, the default reference time is retrieved using `from satpy.cf import EPOCH` + If None, the default reference time is defined using `from satpy.cf import EPOCH` flatten_attrs : bool, optional If True, flatten dict-type attributes. The default is False. @@ -90,6 +89,8 @@ def make_cf_dataarray(dataarray, CF-compliant xr.DataArray. 
""" + from satpy.cf import EPOCH + if epoch is None: epoch = EPOCH diff --git a/satpy/cf/datasets.py b/satpy/cf/datasets.py index d85a943fe7..43b85003de 100644 --- a/satpy/cf/datasets.py +++ b/satpy/cf/datasets.py @@ -22,7 +22,6 @@ import xarray as xr -from satpy.cf import EPOCH from satpy.writers.cf_writer import CF_DTYPES, CF_VERSION logger = logging.getLogger(__name__) @@ -228,6 +227,7 @@ def collect_cf_datasets(list_dataarrays, header_attrs : dict Global attributes to be attached to the xr.Dataset / netCDF4. """ + from satpy.cf import EPOCH from satpy.cf.attrs import preprocess_header_attrs from satpy.cf.coords import add_time_bounds_dimension From 1930845ba2cef1b2849b445a8a906193bb340963 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 14 Nov 2023 13:03:03 +0200 Subject: [PATCH 436/702] Keep all FCI radiance, reflectance and brightness temperatures as 32-bit floats --- satpy/readers/fci_l1c_nc.py | 28 +++++++-------- satpy/tests/reader_tests/test_fci_l1c_nc.py | 39 +++++++++++---------- 2 files changed, 35 insertions(+), 32 deletions(-) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index 2d3e047c5e..0c7b9fb8cc 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -330,13 +330,13 @@ def _get_dataset_measurand(self, key, info=None): fv = attrs.pop( "FillValue", - default_fillvals.get(data.dtype.str[1:], np.nan)) - vr = attrs.get("valid_range", [-np.inf, np.inf]) + default_fillvals.get(data.dtype.str[1:], np.float32(np.nan))) + vr = attrs.get("valid_range", [np.float32(-np.inf), np.float32(np.inf)]) if key["calibration"] == "counts": attrs["_FillValue"] = fv nfv = data.dtype.type(fv) else: - nfv = np.nan + nfv = np.float32(np.nan) data = data.where((data >= vr[0]) & (data <= vr[1]), nfv) res = self.calibrate(data, key) @@ -632,16 +632,15 @@ def calibrate_counts_to_rad(self, data, key): def calibrate_rad_to_bt(self, radiance, key): """IR channel calibration.""" # using the method from PUG section Converting from Effective Radiance to Brightness Temperature for IR Channels - measured = self.get_channel_measured_group_path(key["name"]) - vc = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_coefficient_wavenumber") + vc = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_coefficient_wavenumber").astype(np.float32) - a = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_coefficient_a") - b = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_coefficient_b") + a = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_coefficient_a").astype(np.float32) + b = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_coefficient_b").astype(np.float32) - c1 = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_constant_c1") - c2 = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_constant_c2") + c1 = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_constant_c1").astype(np.float32) + c2 = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_constant_c2").astype(np.float32) for v in (vc, a, b, c1, c2): if v == v.attrs.get("FillValue", @@ -652,26 +651,27 @@ def calibrate_rad_to_bt(self, radiance, key): v.attrs.get("long_name", "at least one necessary coefficient"), measured)) - return radiance * np.nan + return radiance * np.float32(np.nan) nom = c2 * vc - denom = a * np.log(1 + (c1 * vc ** 3) / radiance) + denom = a * np.log(1 + (c1 * vc ** np.float32(3.)) / radiance) res = nom / denom - b / a + return res def 
calibrate_rad_to_refl(self, radiance, key): """VIS channel calibration.""" measured = self.get_channel_measured_group_path(key["name"]) - cesi = self.get_and_cache_npxr(measured + "/channel_effective_solar_irradiance") + cesi = self.get_and_cache_npxr(measured + "/channel_effective_solar_irradiance").astype(np.float32) if cesi == cesi.attrs.get( "FillValue", default_fillvals.get(cesi.dtype.str[1:])): logger.error( "channel effective solar irradiance set to fill value, " "cannot produce reflectance for {:s}.".format(measured)) - return radiance * np.nan + return radiance * np.float32(np.nan) sun_earth_distance = np.mean( self.get_and_cache_npxr("state/celestial/earth_sun_distance")) / 149597870.7 # [AU] @@ -683,7 +683,7 @@ def calibrate_rad_to_refl(self, radiance, key): "".format(sun_earth_distance)) sun_earth_distance = 1 - res = 100 * radiance * np.pi * sun_earth_distance ** 2 / cesi + res = 100 * radiance * np.float32(np.pi) * np.float32(sun_earth_distance) ** np.float32(2) / cesi return res diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 78226fc7ed..792de90462 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -103,18 +103,21 @@ def _get_test_calib_for_channel_ir(data, meas_path): from pyspectral.blackbody import C_SPEED as c from pyspectral.blackbody import H_PLANCK as h from pyspectral.blackbody import K_BOLTZMANN as k - data[meas_path + "/radiance_to_bt_conversion_coefficient_wavenumber"] = FakeH5Variable(da.array(955)) - data[meas_path + "/radiance_to_bt_conversion_coefficient_a"] = FakeH5Variable(da.array(1)) - data[meas_path + "/radiance_to_bt_conversion_coefficient_b"] = FakeH5Variable(da.array(0.4)) - data[meas_path + "/radiance_to_bt_conversion_constant_c1"] = FakeH5Variable(da.array(1e11 * 2 * h * c ** 2)) - data[meas_path + "/radiance_to_bt_conversion_constant_c2"] = FakeH5Variable(da.array(1e2 * h * c / k)) + data[meas_path + "/radiance_to_bt_conversion_coefficient_wavenumber"] = FakeH5Variable( + da.array(955.0, dtype=np.float32)) + data[meas_path + "/radiance_to_bt_conversion_coefficient_a"] = FakeH5Variable(da.array(1.0, dtype=np.float32)) + data[meas_path + "/radiance_to_bt_conversion_coefficient_b"] = FakeH5Variable(da.array(0.4, dtype=np.float32)) + data[meas_path + "/radiance_to_bt_conversion_constant_c1"] = FakeH5Variable( + da.array(1e11 * 2 * h * c ** 2, dtype=np.float32)) + data[meas_path + "/radiance_to_bt_conversion_constant_c2"] = FakeH5Variable( + da.array(1e2 * h * c / k, dtype=np.float32)) return data def _get_test_calib_for_channel_vis(data, meas): data["state/celestial/earth_sun_distance"] = FakeH5Variable( da.repeat(da.array([149597870.7]), 6000), dims=("x")) - data[meas + "/channel_effective_solar_irradiance"] = FakeH5Variable(da.array(50)) + data[meas + "/channel_effective_solar_irradiance"] = FakeH5Variable(da.array((50.0), dtype=np.float32)) return data @@ -124,7 +127,7 @@ def _get_test_calib_data_for_channel(data, ch_str): _get_test_calib_for_channel_ir(data, meas_path) elif ch_str.startswith("vis") or ch_str.startswith("nir"): _get_test_calib_for_channel_vis(data, meas_path) - data[meas_path + "/radiance_unit_conversion_coefficient"] = xr.DataArray(da.array(1234.56)) + data[meas_path + "/radiance_unit_conversion_coefficient"] = xr.DataArray(da.array(1234.56, dtype=np.float32)) def _get_test_image_data_for_channel(data, ch_str, n_rows_cols): @@ -145,8 +148,8 @@ def _get_test_image_data_for_channel(data, ch_str, n_rows_cols): 
dims=("y", "x"), attrs={ "valid_range": [0, 8191], - "warm_scale_factor": 2, - "warm_add_offset": -300, + "warm_scale_factor": np.float32(2.0), + "warm_add_offset": np.float32(-300.0), **common_attrs } ) @@ -156,8 +159,8 @@ def _get_test_image_data_for_channel(data, ch_str, n_rows_cols): dims=("y", "x"), attrs={ "valid_range": [0, 4095], - "warm_scale_factor": 1, - "warm_add_offset": 0, + "warm_scale_factor": np.float32(1.0), + "warm_add_offset": np.float32(0.0), **common_attrs } ) @@ -521,10 +524,10 @@ def test_load_radiance(self, reader_configs, fh_param, fh_param["channels"]["terran_grid_type"]): assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - assert res[ch].dtype == np.float64 + assert res[ch].dtype == np.float32 assert res[ch].attrs["calibration"] == "radiance" assert res[ch].attrs["units"] == "mW m-2 sr-1 (cm-1)-1" - assert res[ch].attrs["radiance_unit_conversion_coefficient"] == 1234.56 + assert res[ch].attrs["radiance_unit_conversion_coefficient"].values == np.float32(1234.56) if ch == "ir_38": numpy.testing.assert_array_equal(res[ch][-1], 15) numpy.testing.assert_array_equal(res[ch][0], 9700) @@ -544,7 +547,7 @@ def test_load_reflectance(self, reader_configs, fh_param, for ch, grid_type in zip(fh_param["channels"]["solar"], fh_param["channels"]["solar_grid_type"]): assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - assert res[ch].dtype == np.float64 + assert res[ch].dtype == np.float32 assert res[ch].attrs["calibration"] == "reflectance" assert res[ch].attrs["units"] == "%" numpy.testing.assert_array_almost_equal(res[ch], 100 * 15 * 1 * np.pi / 50) @@ -564,15 +567,15 @@ def test_load_bt(self, reader_configs, caplog, fh_param, for ch, grid_type in zip(fh_param["channels"]["terran"], fh_param["channels"]["terran_grid_type"]): assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - assert res[ch].dtype == np.float64 + assert res[ch].dtype == np.float32 assert res[ch].attrs["calibration"] == "brightness_temperature" assert res[ch].attrs["units"] == "K" if ch == "ir_38": - numpy.testing.assert_array_almost_equal(res[ch][-1], 209.68274099) - numpy.testing.assert_array_almost_equal(res[ch][0], 1888.851296) + numpy.testing.assert_array_almost_equal(res[ch][-1], np.float32(209.68275)) + numpy.testing.assert_array_almost_equal(res[ch][0], np.float32(1888.8513)) else: - numpy.testing.assert_array_almost_equal(res[ch], 209.68274099) + numpy.testing.assert_array_almost_equal(res[ch], np.float32(209.68275)) @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) From 9a8810f4303630c561addd28eab90dfb1ed93d32 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 14 Nov 2023 14:04:55 +0100 Subject: [PATCH 437/702] Fix tests --- satpy/cf/coords.py | 5 +++++ satpy/cf/dataarray.py | 5 ----- satpy/cf/datasets.py | 26 ++++++++++++++------------ satpy/tests/cf_tests/test_datasets.py | 4 +++- 4 files changed, 22 insertions(+), 18 deletions(-) diff --git a/satpy/cf/coords.py b/satpy/cf/coords.py index 6e7a0892e9..05e8a792fd 100644 --- a/satpy/cf/coords.py +++ b/satpy/cf/coords.py @@ -100,6 +100,11 @@ def set_cf_time_info(dataarray, epoch): - the time coordinate has size 1 """ + from satpy.cf import EPOCH + + if epoch is None: + epoch = EPOCH + dataarray["time"].encoding["units"] = epoch 
dataarray["time"].attrs["standard_name"] = "time" dataarray["time"].attrs.pop("bounds", None) diff --git a/satpy/cf/dataarray.py b/satpy/cf/dataarray.py index 39b5d82dc9..078c53c462 100644 --- a/satpy/cf/dataarray.py +++ b/satpy/cf/dataarray.py @@ -89,11 +89,6 @@ def make_cf_dataarray(dataarray, CF-compliant xr.DataArray. """ - from satpy.cf import EPOCH - - if epoch is None: - epoch = EPOCH - dataarray = _preprocess_dataarray_name(dataarray=dataarray, numeric_name_prefix=numeric_name_prefix, include_orig_name=include_orig_name) diff --git a/satpy/cf/datasets.py b/satpy/cf/datasets.py index 43b85003de..3cb72af442 100644 --- a/satpy/cf/datasets.py +++ b/satpy/cf/datasets.py @@ -60,14 +60,13 @@ def _get_groups(groups, list_datarrays): def _collect_cf_dataset(list_dataarrays, - epoch, - flatten_attrs, - exclude_attrs, - include_lonlats, - pretty, - include_orig_name, - numeric_name_prefix, - ): + epoch=None, + flatten_attrs=False, + exclude_attrs=None, + include_lonlats=True, + pretty=False, + include_orig_name=True, + numeric_name_prefix="CHANNEL_"): """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Dataset. Parameters @@ -77,17 +76,24 @@ def _collect_cf_dataset(list_dataarrays, epoch : str Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". + If None, the default reference time is defined using `from satpy.cf import EPOCH` + flatten_attrs : bool, optional flatten_attrs : bool If True, flatten dict-type attributes. + exclude_attrs : list, optional exclude_attrs : list List of xr.DataArray attribute names to be excluded. + include_lonlats : bool, optional include_lonlats : bool If True, it includes 'latitude' and 'longitude' coordinates also for satpy scene defined on an AreaDefinition. If the 'area' attribute is a SwathDefinition, it always include latitude and longitude coordinates. + pretty : bool, optional pretty : bool Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. + include_orig_name : bool, optional include_orig_name : bool Include the original dataset name as a variable attribute in the xr.Dataset. + numeric_name_prefix : str, optional numeric_name_prefix : str Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. @@ -227,13 +233,9 @@ def collect_cf_datasets(list_dataarrays, header_attrs : dict Global attributes to be attached to the xr.Dataset / netCDF4. """ - from satpy.cf import EPOCH from satpy.cf.attrs import preprocess_header_attrs from satpy.cf.coords import add_time_bounds_dimension - if epoch is None: - epoch = EPOCH - if not list_dataarrays: raise RuntimeError("None of the requested datasets have been " "generated or could not be loaded. 
Requested " diff --git a/satpy/tests/cf_tests/test_datasets.py b/satpy/tests/cf_tests/test_datasets.py index b234a7c945..e943026509 100644 --- a/satpy/tests/cf_tests/test_datasets.py +++ b/satpy/tests/cf_tests/test_datasets.py @@ -52,7 +52,9 @@ def test_collect_cf_dataset(self): attrs={"name": "var2", "long_name": "variable 2"})] # Collect datasets - ds = _collect_cf_dataset(list_dataarrays, include_lonlats=True) + ds = _collect_cf_dataset(list_dataarrays, + include_lonlats=True, + ) # Test results assert len(ds.keys()) == 3 From 9d1dc6e4f7bbebb4ef3deca2f0a148850c17e1cd Mon Sep 17 00:00:00 2001 From: Florian Fichtner <12199342+fwfichtner@users.noreply.github.com> Date: Tue, 11 Jan 2022 14:26:36 +0100 Subject: [PATCH 438/702] Update AVHRR EPS reader to read cloud flags information --- satpy/etc/readers/avhrr_l1b_eps.yaml | 8 ++++++++ satpy/readers/eps_l1b.py | 3 +++ 2 files changed, 11 insertions(+) diff --git a/satpy/etc/readers/avhrr_l1b_eps.yaml b/satpy/etc/readers/avhrr_l1b_eps.yaml index 7bfa0e7160..e759d28d66 100644 --- a/satpy/etc/readers/avhrr_l1b_eps.yaml +++ b/satpy/etc/readers/avhrr_l1b_eps.yaml @@ -89,6 +89,7 @@ datasets: - latitude file_type: avhrr_eps + latitude: name: latitude resolution: 1050 @@ -131,6 +132,13 @@ datasets: coordinates: [longitude, latitude] file_type: avhrr_eps + cloud_flags: + name: cloud_flags + sensor: avhrr-3 + resolution: 1050 + coordinates: [longitude, latitude] + file_type: avhrr_eps + file_types: avhrr_eps: file_reader: !!python/name:satpy.readers.eps_l1b.EPSAVHRRFile diff --git a/satpy/readers/eps_l1b.py b/satpy/readers/eps_l1b.py index 23e4ca712d..0c35a2eaad 100644 --- a/satpy/readers/eps_l1b.py +++ b/satpy/readers/eps_l1b.py @@ -299,6 +299,9 @@ def get_dataset(self, key, info): dataset = self._get_angle_dataarray(key) elif key["name"] in ["1", "2", "3a", "3A", "3b", "3B", "4", "5"]: dataset = self._get_calibrated_dataarray(key) + elif key['name'] == "cloud_flags": + array = self["CLOUD_INFORMATION"] + dataset = create_xarray(array) else: logger.info("Can't load channel in eps_l1b: " + str(key["name"])) return From c3132760affc26f4b13466fe3a01146e3732a4d0 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 15 Nov 2023 12:57:15 +0200 Subject: [PATCH 439/702] Keep original dtype in DayNightCompositor --- satpy/composites/__init__.py | 17 ++++++----- satpy/modifiers/angles.py | 2 ++ satpy/tests/test_composites.py | 52 +++++++++++++++++++++------------- 3 files changed, 42 insertions(+), 29 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index d3a1e510cb..e3c9dc190a 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -713,9 +713,7 @@ def __call__( datasets = self.match_data_arrays(datasets) # At least one composite is requested. 
foreground_data = datasets[0] - weights = self._get_coszen_blending_weights(datasets) - # Apply enhancements to the foreground data foreground_data = enhance2dataset(foreground_data) @@ -759,7 +757,6 @@ def _get_coszen_blending_weights( # Calculate blending weights coszen -= np.min((lim_high, lim_low)) coszen /= np.abs(lim_low - lim_high) - return coszen.clip(0, 1) def _get_data_for_single_side_product( @@ -786,8 +783,8 @@ def _mask_weights(self, weights): def _get_day_night_data_for_single_side_product(self, foreground_data): if "day" in self.day_night: - return foreground_data, 0 - return 0, foreground_data + return foreground_data, foreground_data.dtype.type(0) + return foreground_data.dtype.type(0), foreground_data def _get_data_for_combined_product(self, day_data, night_data): # Apply enhancements also to night-side data @@ -848,15 +845,16 @@ def _weight_data( def _get_band_names(day_data, night_data): try: bands = day_data["bands"] - except TypeError: + except (IndexError, TypeError): bands = night_data["bands"] return bands def _get_single_band_data(data, band): - if isinstance(data, int): + try: + return data.sel(bands=band) + except AttributeError: return data - return data.sel(bands=band) def _get_single_channel(data: xr.DataArray) -> xr.DataArray: @@ -894,7 +892,8 @@ def add_alpha_bands(data): alpha = new_data[0].copy() alpha.data = da.ones((data.sizes["y"], data.sizes["x"]), - chunks=new_data[0].chunks) + chunks=new_data[0].chunks, + dtype=data.dtype) # Rename band to indicate it's alpha alpha["bands"] = "A" new_data.append(alpha) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 1d059e1f5a..847df7505e 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -395,6 +395,8 @@ def get_cos_sza(data_arr: xr.DataArray) -> xr.DataArray: """ chunks = _geo_chunks_from_data_arr(data_arr) lons, lats = _get_valid_lonlats(data_arr.attrs["area"], chunks) + lons = lons.astype(data_arr.dtype) + lats = lats.astype(data_arr.dtype) cos_sza = _get_cos_sza(data_arr.attrs["start_time"], lons, lats) return _geo_dask_to_data_array(cos_sza) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index bf8a9dfb9e..5bcbca0a1e 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -396,7 +396,7 @@ def setUp(self): start_time = datetime(2018, 1, 1, 18, 0, 0) # RGB - a = np.zeros((3, 2, 2), dtype=np.float64) + a = np.zeros((3, 2, 2), dtype=np.float32) a[:, 0, 0] = 0.1 a[:, 0, 1] = 0.2 a[:, 1, 0] = 0.3 @@ -404,7 +404,7 @@ def setUp(self): a = da.from_array(a, a.shape) self.data_a = xr.DataArray(a, attrs={"test": "a", "start_time": start_time}, coords={"bands": bands}, dims=("bands", "y", "x")) - b = np.zeros((3, 2, 2), dtype=np.float64) + b = np.zeros((3, 2, 2), dtype=np.float32) b[:, 0, 0] = np.nan b[:, 0, 1] = 0.25 b[:, 1, 0] = 0.50 @@ -413,7 +413,7 @@ def setUp(self): self.data_b = xr.DataArray(b, attrs={"test": "b", "start_time": start_time}, coords={"bands": bands}, dims=("bands", "y", "x")) - sza = np.array([[80., 86.], [94., 100.]]) + sza = np.array([[80., 86.], [94., 100.]], dtype=np.float32) sza = da.from_array(sza, sza.shape) self.sza = xr.DataArray(sza, dims=("y", "x")) @@ -437,8 +437,9 @@ def test_daynight_sza(self): comp = DayNightCompositor(name="dn_test", day_night="day_night") res = comp((self.data_a, self.data_b, self.sza)) res = res.compute() - expected = np.array([[0., 0.22122352], [0.5, 1.]]) - np.testing.assert_allclose(res.values[0], expected) + expected = np.array([[0., 0.22122352], [0.5, 1.]], 
dtype=np.float32) + assert res.dtype == np.float32 + np.testing.assert_allclose(res.values[0], expected, rtol=1e-6) def test_daynight_area(self): """Test compositor both day and night portions when SZA data is not provided.""" @@ -448,7 +449,8 @@ def test_daynight_area(self): comp = DayNightCompositor(name="dn_test", day_night="day_night") res = comp((self.data_a, self.data_b)) res = res.compute() - expected_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) + expected_channel = np.array([[0., 0.33164983], [0.66835017, 1.]], dtype=np.float32) + assert res.dtype == np.float32 for i in range(3): np.testing.assert_allclose(res.values[i], expected_channel) @@ -460,8 +462,9 @@ def test_night_only_sza_with_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=True) res = comp((self.data_b, self.sza)) res = res.compute() - expected_red_channel = np.array([[np.nan, 0.], [0.5, 1.]]) - expected_alpha = np.array([[0., 0.33296056], [1., 1.]]) + expected_red_channel = np.array([[np.nan, 0.], [0.49999994, 1.]], dtype=np.float32) + expected_alpha = np.array([[0., 0.3329599], [1., 1.]], dtype=np.float32) + assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_red_channel) np.testing.assert_allclose(res.values[-1], expected_alpha) @@ -473,7 +476,8 @@ def test_night_only_sza_without_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=False) res = comp((self.data_a, self.sza)) res = res.compute() - expected = np.array([[0., 0.11042631], [0.66835017, 1.]]) + expected = np.array([[0., 0.11042608], [0.6683501, 1.]], dtype=np.float32) + assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected) assert "A" not in res.bands @@ -485,8 +489,10 @@ def test_night_only_area_with_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=True) res = comp((self.data_b,)) res = res.compute() - expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]]) - expected_alpha = np.array([[np.nan, 0.], [0., 0.]]) + expected_l_channel = np.array([[np.nan, 0.], [0.49999994, 1.]], dtype=np.float32) + # FIXME: with the current changes the np.nan becomes 0.0 instead, why?! 
+ expected_alpha = np.array([[np.nan, 0.], [0., 0.]], dtype=np.float32) + assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_l_channel) np.testing.assert_allclose(res.values[-1], expected_alpha) @@ -498,7 +504,8 @@ def test_night_only_area_without_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=False) res = comp((self.data_b,)) res = res.compute() - expected = np.array([[np.nan, 0.], [0., 0.]]) + expected = np.array([[np.nan, 0.], [0., 0.]], dtype=np.float32) + assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected) assert "A" not in res.bands @@ -510,8 +517,9 @@ def test_day_only_sza_with_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_a, self.sza)) res = res.compute() - expected_red_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) - expected_alpha = np.array([[1., 0.66703944], [0., 0.]]) + expected_red_channel = np.array([[0., 0.33164983], [0.66835017, 1.]], dtype=np.float32) + expected_alpha = np.array([[1., 0.6670401], [0., 0.]], dtype=np.float32) + assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_red_channel) np.testing.assert_allclose(res.values[-1], expected_alpha) @@ -523,7 +531,8 @@ def test_day_only_sza_without_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=False) res = comp((self.data_a, self.sza)) res = res.compute() - expected_channel_data = np.array([[0., 0.22122352], [0., 0.]]) + expected_channel_data = np.array([[0., 0.22122373], [0., 0.]], dtype=np.float32) + assert res.dtype == np.float32 for i in range(3): np.testing.assert_allclose(res.values[i], expected_channel_data) assert "A" not in res.bands @@ -536,8 +545,9 @@ def test_day_only_area_with_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_a,)) res = res.compute() - expected_l_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) - expected_alpha = np.array([[1., 1.], [1., 1.]]) + expected_l_channel = np.array([[0., 0.33164983], [0.66835017, 1.]], dtype=np.float32) + expected_alpha = np.array([[1., 1.], [1., 1.]], dtype=np.float32) + assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_l_channel) np.testing.assert_allclose(res.values[-1], expected_alpha) @@ -549,8 +559,9 @@ def test_day_only_area_with_alpha_and_missing_data(self): comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_b,)) res = res.compute() - expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]]) - expected_alpha = np.array([[np.nan, 1.], [1., 1.]]) + expected_l_channel = np.array([[np.nan, 0.], [0.49999994, 1.]], dtype=np.float32) + expected_alpha = np.array([[np.nan, 1.], [1., 1.]], dtype=np.float32) + assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_l_channel) np.testing.assert_allclose(res.values[-1], expected_alpha) @@ -562,7 +573,8 @@ def test_day_only_area_without_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=False) res = comp((self.data_a,)) res = res.compute() - expected = np.array([[0., 0.33164983], [0.66835017, 1.]]) + expected = np.array([[0., 0.33164983], [0.66835017, 1.]], dtype=np.float32) + assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected) assert "A" not in res.bands From d8c594b53255d4f82b756de807c4d5f434280997 Mon 
Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 14:45:15 +0000 Subject: [PATCH 440/702] Add line to HDF-EOS tests to ensure that line splitting in attrs is handled correctly. --- satpy/tests/reader_tests/modis_tests/_modis_fixtures.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index e4272373b3..0b79e00854 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -365,7 +365,9 @@ def _create_struct_metadata_cmg(res) -> str: def _create_header_metadata() -> str: - archive_metadata_header = "GROUP = ARCHIVEDMETADATA\nEND_GROUP = ARCHIVEDMETADATA\nEND" + archive_metadata_header = ("GROUP = ARCHIVEDMETADATA\n" + 'TEST_URL = "http://modis.gsfc.nasa.gov/?some_val=100"\n' + "END_GROUP = ARCHIVEDMETADATA\nEND") return archive_metadata_header From 19d1b9969c9464d65735f5ce867deadac7e1bfc6 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 14:47:16 +0000 Subject: [PATCH 441/702] Fix docstring in MODIS L3 reader. --- satpy/readers/modis_l3.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index dfddc0732b..68c7b435ed 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -20,7 +20,7 @@ Introduction ------------ -The ``modis_l3`` reader reads Modis L3 products in hdf-eos format. +The ``modis_l3`` reader reads MODIS L3 products in HDF-EOS format. There are multiple level 3 products, including some on sinusoidal grids and some on the climate modeling grid (CMG). This reader supports the CMG products at present, and the sinusoidal products will be added if there is demand. From 59e22405c03b408e67f067272b6369aa9a66fa24 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 14:49:46 +0000 Subject: [PATCH 442/702] Re-order functions in the MODIS L3 reader. --- satpy/readers/modis_l3.py | 45 ++++++++++++++++++++------------------- 1 file changed, 23 insertions(+), 22 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 68c7b435ed..2055d041d9 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -44,21 +44,17 @@ class ModisL3GriddedHDFFileHandler(HDFEOSGeoReader): """File handler for MODIS HDF-EOS Level 3 CMG gridded files.""" - def _get_res(self): - """Compute the resolution from the file metadata.""" - gridname = self.metadata["GridStructure"]["GRID_1"]["GridName"] - if "CMG" not in gridname: - raise ValueError("Only CMG grids are supported") + def __init__(self, filename, filename_info, filetype_info, **kwargs): + """Init the file handler.""" + super().__init__(filename, filename_info, filetype_info, **kwargs) - # Get the grid resolution from the grid name - pos = gridname.rfind("_") + 1 - pos2 = gridname.rfind("Deg") + # Initialise number of rows and columns + self.nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] + self.ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] + + # Get the grid name and other projection info + self.area_extent = self._sort_grid() - # Some products don't have resolution listed. - if pos < 0 or pos2 < 0: - self.resolution = 360. 
/ self.ncols - else: - self.resolution = float(gridname[pos:pos2]) def _sort_grid(self): """Get the grid properties.""" @@ -75,19 +71,24 @@ def _sort_grid(self): upperleft = tuple(val / 1e6 for val in upperleft) lowerright = tuple(val / 1e6 for val in lowerright) - return (upperleft[0], lowerright[1], lowerright[0], upperleft[1]) + return upperleft[0], lowerright[1], lowerright[0], upperleft[1] - def __init__(self, filename, filename_info, filetype_info, **kwargs): - """Init the file handler.""" - super().__init__(filename, filename_info, filetype_info, **kwargs) + def _get_res(self): + """Compute the resolution from the file metadata.""" + gridname = self.metadata["GridStructure"]["GRID_1"]["GridName"] + if "CMG" not in gridname: + raise ValueError("Only CMG grids are supported") - # Initialise number of rows and columns - self.nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] - self.ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] + # Get the grid resolution from the grid name + pos = gridname.rfind("_") + 1 + pos2 = gridname.rfind("Deg") - # Get the grid name and other projection info - self.area_extent = self._sort_grid() + # Some products don't have resolution listed. + if pos < 0 or pos2 < 0: + self.resolution = 360. / self.ncols + else: + self.resolution = float(gridname[pos:pos2]) def available_datasets(self, configured_datasets=None): From 876d683d758986acc9f277b6266de774eaeca752 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 14:50:55 +0000 Subject: [PATCH 443/702] Re-order code in the `available_datasets` function for MODIS L3. --- satpy/readers/modis_l3.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 2055d041d9..ef0a55975e 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -93,12 +93,11 @@ def _get_res(self): def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" - logger.debug("Available_datasets begin...") - - ds_dict = self.sd.datasets() yield from super().available_datasets(configured_datasets) common = {"file_type": "mcd43_cmg_hdf", "resolution": self.resolution} + ds_dict = self.sd.datasets() + for key in ds_dict.keys(): if "/" in key: # not a dataset continue From 60a4a297427d37252a0a7dab766e7437549ace53 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 14:53:01 +0000 Subject: [PATCH 444/702] Remove `self.area` from the MODIS L3 code. --- satpy/readers/modis_l3.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index ef0a55975e..316fa5b9ae 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -127,6 +127,5 @@ def get_area_def(self, dsid): self.ncols, self.nrows, self.area_extent) - self.area = area - return self.area + return area From 01f09aa10ba029d7c05b6703b07f0963fc738a40 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 14:53:41 +0000 Subject: [PATCH 445/702] Add missing space in MODIS L3 tests. 
--- satpy/tests/reader_tests/modis_tests/test_modis_l3.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py index 23c1af6fc1..c7a0d3e9cf 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py @@ -53,7 +53,7 @@ def test_available_reader(self): ("loadable", "filename"), [ ("Coarse_Resolution_Surface_Reflectance_Band_2", lazy_fixture("modis_l3_nasa_mod09_file")), - ("BRDF_Albedo_Parameter1_Band2",lazy_fixture("modis_l3_nasa_mod43_file")), + ("BRDF_Albedo_Parameter1_Band2", lazy_fixture("modis_l3_nasa_mod43_file")), ] ) def test_scene_available_datasets(self, loadable, filename): From ef16a1a46105c23a2188586511d159dcd5472f24 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 15:07:54 +0000 Subject: [PATCH 446/702] Rename MODIS L3 file type to be more generic and refactor the dynamic dataset handler. --- satpy/etc/readers/modis_l3.yaml | 2 +- satpy/readers/modis_l3.py | 39 +++++++++++++++++++++++++++++---- 2 files changed, 36 insertions(+), 5 deletions(-) diff --git a/satpy/etc/readers/modis_l3.yaml b/satpy/etc/readers/modis_l3.yaml index 5ad2f32e04..608d15601a 100644 --- a/satpy/etc/readers/modis_l3.yaml +++ b/satpy/etc/readers/modis_l3.yaml @@ -9,7 +9,7 @@ reader: sensors: [modis] file_types: - mcd43_cmg_hdf: + modis_l3_cmg_hdf: file_patterns: - 'MCD43C{prod_type}.A{start_time:%Y%j}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D09CMG.A{start_time:%Y%j}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 316fa5b9ae..2c2f331c29 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -33,6 +33,7 @@ """ import logging +from typing import Iterable from pyresample import geometry @@ -94,14 +95,44 @@ def _get_res(self): def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" + # Initialise set of variable names to carry through code + handled_var_names = set() + yield from super().available_datasets(configured_datasets) - common = {"file_type": "mcd43_cmg_hdf", "resolution": self.resolution} + ds_dict = self.sd.datasets() - for key in ds_dict.keys(): - if "/" in key: # not a dataset + for is_avail, ds_info in (configured_datasets or []): + file_key = ds_info.get("file_key", ds_info["name"]) + # we must add all variables here even if another file handler has + # claimed the variable. It could be another instance of this file + # type and we don't want to add that variable dynamically if the + # other file handler defined it by the YAML definition. 
+ handled_var_names.add(file_key) + if is_avail is not None: + # some other file handler said it has this dataset + # we don't know any more information than the previous + # file handler so let's yield early + yield is_avail, ds_info + continue + if self.file_type_matches(ds_info["file_type"]) is None: + # this is not the file type for this dataset + yield None, ds_info + yield file_key in ds_dict.keys(), ds_info + + yield from self._dynamic_variables_from_file(handled_var_names) + + def _dynamic_variables_from_file(self, handled_var_names: set) -> Iterable[tuple[bool, dict]]: + + for var_name in self.sd.datasets().keys(): + if var_name in handled_var_names: + # skip variables that YAML had configured, but allow lon/lats + # to be reprocessed due to our dynamic coordinate naming continue - yield True, {"name": key} | common + common = {"file_type": "modis_l3_cmg_hdf", + "resolution": self.resolution, + "name": var_name} + yield True, common def get_dataset(self, dataset_id, dataset_info): """Get DataArray for specified dataset.""" From 9794f3df5a49546323d3ac7644cff308d8ac8204 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 15:18:38 +0000 Subject: [PATCH 447/702] Add additional check on MODIS L3 bounds to ensure values are scaled correctly. --- satpy/readers/modis_l3.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 2c2f331c29..4ff0bd1dc6 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -68,7 +68,7 @@ def _sort_grid(self): # For some reason, a few of the CMG products multiply their # decimal degree extents by one million. This fixes it. - if lowerright[0] > 1e6: + if lowerright[0] > 1e6 or upperleft[0] > 1e6: upperleft = tuple(val / 1e6 for val in upperleft) lowerright = tuple(val / 1e6 for val in lowerright) From 23f2e0f97bc477adfdfb5eae63bc94f1f1ac1b49 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 15:45:59 +0000 Subject: [PATCH 448/702] Remove rogue `yield from` in MODIS L3 reader. 
--- satpy/readers/modis_l3.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 4ff0bd1dc6..1a205545ac 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -98,8 +98,6 @@ def available_datasets(self, configured_datasets=None): # Initialise set of variable names to carry through code handled_var_names = set() - yield from super().available_datasets(configured_datasets) - ds_dict = self.sd.datasets() for is_avail, ds_info in (configured_datasets or []): From a0e8386b6e332e770bba2edb43ad5a192dc90aca Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 15:56:22 +0000 Subject: [PATCH 449/702] Properly handle incorrect datasets for the VIIRS EDR and MODIS --- satpy/readers/modis_l3.py | 1 + satpy/readers/viirs_edr.py | 1 + 2 files changed, 2 insertions(+) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 1a205545ac..2370609e7a 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -116,6 +116,7 @@ def available_datasets(self, configured_datasets=None): if self.file_type_matches(ds_info["file_type"]) is None: # this is not the file type for this dataset yield None, ds_info + continue yield file_key in ds_dict.keys(), ds_info yield from self._dynamic_variables_from_file(handled_var_names) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 646d7e0d17..b0eaf7b7ba 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -215,6 +215,7 @@ def available_datasets(self, configured_datasets=None): if self.file_type_matches(ds_info["file_type"]) is None: # this is not the file type for this dataset yield None, ds_info + continue yield file_key in self.nc, ds_info yield from self._dynamic_variables_from_file(handled_var_names) From b748479cf9d197b48d2bf830c6edc9cfbcfd030c Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 16:16:18 +0000 Subject: [PATCH 450/702] Simplify the MODIS L3 code and add tests for the dynamic dataset availability. --- satpy/readers/modis_l3.py | 62 +++++++++---------- .../reader_tests/modis_tests/test_modis_l3.py | 23 +++++++ 2 files changed, 52 insertions(+), 33 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 2370609e7a..485fc1031f 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -49,30 +49,8 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): """Init the file handler.""" super().__init__(filename, filename_info, filetype_info, **kwargs) - # Initialise number of rows and columns - self.nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] - self.ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] - - # Get the grid name and other projection info - self.area_extent = self._sort_grid() - - - def _sort_grid(self): - """Get the grid properties.""" - # First, get the grid resolution - self._get_res() - - # Now compute the data extent - upperleft = self.metadata["GridStructure"]["GRID_1"]["UpperLeftPointMtrs"] - lowerright = self.metadata["GridStructure"]["GRID_1"]["LowerRightMtrs"] - - # For some reason, a few of the CMG products multiply their - # decimal degree extents by one million. This fixes it. 
- if lowerright[0] > 1e6 or upperleft[0] > 1e6: - upperleft = tuple(val / 1e6 for val in upperleft) - lowerright = tuple(val / 1e6 for val in lowerright) - - return upperleft[0], lowerright[1], lowerright[0], upperleft[1] + # Get the grid resolution, name and other projection info + self.resolution = self._get_res() def _get_res(self): @@ -85,11 +63,12 @@ def _get_res(self): pos = gridname.rfind("_") + 1 pos2 = gridname.rfind("Deg") + # Initialise number of rows and columns # Some products don't have resolution listed. if pos < 0 or pos2 < 0: - self.resolution = 360. / self.ncols + return 360. / self.metadata["GridStructure"]["GRID_1"]["XDim"] else: - self.resolution = float(gridname[pos:pos2]) + return float(gridname[pos:pos2]) def available_datasets(self, configured_datasets=None): @@ -104,7 +83,7 @@ def available_datasets(self, configured_datasets=None): file_key = ds_info.get("file_key", ds_info["name"]) # we must add all variables here even if another file handler has # claimed the variable. It could be another instance of this file - # type and we don't want to add that variable dynamically if the + # type, and we don't want to add that variable dynamically if the # other file handler defined it by the YAML definition. handled_var_names.add(file_key) if is_avail is not None: @@ -122,11 +101,9 @@ def available_datasets(self, configured_datasets=None): yield from self._dynamic_variables_from_file(handled_var_names) def _dynamic_variables_from_file(self, handled_var_names: set) -> Iterable[tuple[bool, dict]]: - for var_name in self.sd.datasets().keys(): if var_name in handled_var_names: - # skip variables that YAML had configured, but allow lon/lats - # to be reprocessed due to our dynamic coordinate naming + # skip variables that YAML had configured continue common = {"file_type": "modis_l3_cmg_hdf", "resolution": self.resolution, @@ -141,6 +118,20 @@ def get_dataset(self, dataset_id, dataset_info): return dataset + def _get_area_extent(self): + """Get the grid properties.""" + + # Now compute the data extent + upperleft = self.metadata["GridStructure"]["GRID_1"]["UpperLeftPointMtrs"] + lowerright = self.metadata["GridStructure"]["GRID_1"]["LowerRightMtrs"] + + # For some reason, a few of the CMG products multiply their + # decimal degree extents by one million. This fixes it. + if lowerright[0] > 1e6 or upperleft[0] > 1e6: + upperleft = tuple(val / 1e6 for val in upperleft) + lowerright = tuple(val / 1e6 for val in lowerright) + + return upperleft[0], lowerright[1], lowerright[0], upperleft[1] def get_area_def(self, dsid): """Get the area definition. 
@@ -150,12 +141,17 @@ def get_area_def(self, dsid): """ proj_param = "EPSG:4326" + # Get the size of the dataset + nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] + ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] + + # Construct the area definition area = geometry.AreaDefinition("gridded_modis", "A gridded L3 MODIS area", "longlat", proj_param, - self.ncols, - self.nrows, - self.area_extent) + ncols, + nrows, + self._get_area_extent()) return area diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py index c7a0d3e9cf..3f6a9e8250 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py @@ -63,6 +63,29 @@ def test_scene_available_datasets(self, loadable, filename): assert len(available_datasets) > 0 assert loadable in available_datasets + from satpy.readers.modis_l3 import ModisL3GriddedHDFFileHandler + fh = ModisL3GriddedHDFFileHandler(filename[0], {}, {"file_type": "modis_l3_cmg_hdf"}) + configured_datasets = [[None, {"name": "none_ds", "file_type": "modis_l3_cmg_hdf"}], + [True, {"name": "true_ds", "file_type": "modis_l3_cmg_hdf"}], + [False, {"name": "false_ds", "file_type": "modis_l3_cmg_hdf"}], + [None, {"name": "other_ds", "file_type": "modis_l2_random"}]] + for status, mda in fh.available_datasets(configured_datasets): + if mda["name"] == "none_ds": + assert mda["file_type"] == "modis_l3_cmg_hdf" + assert status is False + elif mda["name"] == "true_ds": + assert mda["file_type"] == "modis_l3_cmg_hdf" + assert status + elif mda["name"] == "false_ds": + assert mda["file_type"] == "modis_l3_cmg_hdf" + assert status is False + elif mda["name"] == "other_ds": + assert mda["file_type"] == "modis_l2_random" + assert status is None + elif mda["name"] == loadable: + assert mda["file_type"] == "modis_l3_cmg_hdf" + assert status + def test_load_l3_dataset(self, modis_l3_nasa_mod09_file): """Load and check an L2 variable.""" scene = Scene(reader="modis_l3", filenames=modis_l3_nasa_mod09_file) From 3dab2c6587b92f98214ce4d204958074051f2a7b Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 17:20:46 +0000 Subject: [PATCH 451/702] Refactor MODIS tests. 
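
The main change is that create_hdfeos_test_file() now takes a single metadata
dictionary instead of separate geo_resolution/file_shortname keyword arguments,
so the L3 fixtures no longer need the -999/-9999 resolution sentinels. Usage as
it appears in the hunks below (reformatted here for quick reference):

    # L1/L2 fixtures
    create_hdfeos_test_file(full_path, variable_infos,
                            {"geo_resolution": 5000, "file_shortname": "MOD021KM"})
    # L3 fixtures only need the product type
    create_hdfeos_test_file(full_path, variable_infos, {"l3_type": "MOD09"})
    # files without global metadata
    create_hdfeos_test_file(full_path, variable_infos, {})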
--- .../modis_tests/_modis_fixtures.py | 80 +++++++++---------- .../reader_tests/modis_tests/test_modis_l3.py | 4 +- 2 files changed, 38 insertions(+), 46 deletions(-) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index 0b79e00854..c702752b28 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -51,9 +51,6 @@ def _shape_for_resolution(resolution: int) -> tuple[int, int]: - # Case of a CMG 0.05 degree file, for L3 tests - if resolution == -999: - return 3600, 7200 assert resolution in RES_TO_REPEAT_FACTOR factor = RES_TO_REPEAT_FACTOR[resolution] if factor == 1: @@ -229,21 +226,9 @@ def generate_imapp_filename(suffix): return f"t1.{now:%y%j.%H%M}.{suffix}.hdf" -def _add_geo_metadata(h, geo_res): - """Add the geoinfo metadata to the fake file.""" - if geo_res == -999 or geo_res == -9999: - setattr(h, 'StructMetadata.0', _create_struct_metadata_cmg(geo_res)) # noqa - else: - setattr(h, 'StructMetadata.0', _create_struct_metadata(geo_res)) # noqa - - return h - - def create_hdfeos_test_file(filename: str, variable_infos: dict, - geo_resolution: Optional[int] = None, - file_shortname: Optional[str] = None, - include_metadata: bool = True): + metadata_dict: Optional[dict] = {}): """Create a fake MODIS L1b HDF4 file with headers. Args: @@ -256,17 +241,27 @@ def create_hdfeos_test_file(filename: str, file_shortname: Short name of the file to be stored in global metadata attributes. Only used if ``include_metadata`` is ``True`` (default). - include_metadata: Include global metadata attributes (default: True). + metadata_dict: A dictionary of metadata to be added to the file. """ h = SD(filename, SDC.WRITE | SDC.CREATE) - if include_metadata: - if geo_resolution is None or file_shortname is None: - raise ValueError("'geo_resolution' and 'file_shortname' are required when including metadata.") - h = _add_geo_metadata(h, geo_resolution) - setattr(h, 'CoreMetadata.0', _create_core_metadata(file_shortname)) # noqa - setattr(h, 'ArchiveMetadata.0', _create_header_metadata()) # noqa + if metadata_dict is not None and metadata_dict != {}: + # Check if we're dealing with an L3 file + if "l3_type" not in metadata_dict.keys(): + if "file_shortname" not in metadata_dict["file_shortname"].keys(): + raise ValueError("'file_shortname' is required when including metadata.") + # For L1 and L2 files we need to know the resolution + if "geo_resolution" not in metadata_dict.keys(): + raise ValueError("'geo_resolution' is required when including L1/L2 metadata.") + setattr(h, "StructMetadata.0", _create_struct_metadata(metadata_dict["geo_resolution"])) + setattr(h, "CoreMetadata.0", _create_core_metadata(metadata_dict["file_shortname"])) # noqa + else: + # For an L3 file, we just call the relevant metadata creator + setattr(h, "StructMetadata.0", _create_struct_metadata_cmg(metadata_dict["l3_type"])) + setattr(h, "CoreMetadata.0", _create_core_metadata(metadata_dict["l3_type"])) # noqa + + setattr(h, "ArchiveMetadata.0", _create_header_metadata()) # noqa for var_name, var_info in variable_infos.items(): _add_variable_to_file(h, var_name, var_info) @@ -339,13 +334,14 @@ def _create_struct_metadata(geo_resolution: int) -> str: return struct_metadata_header -def _create_struct_metadata_cmg(res) -> str: +def _create_struct_metadata_cmg(ftype: str) -> str: # Case of a MOD09 file - gridline = 'GridName="MOD09CMG"\n' - upleft = 
"UpperLeftPointMtrs=(-180000000.000000,90000000.000000)\n" - upright = "LowerRightMtrs=(180000000.000000,-90000000.000000)\n" - if res == -9999: - # Case of a MCD43 file + if ftype == "MOD09": + gridline = 'GridName="MOD09CMG"\n' + upleft = "UpperLeftPointMtrs=(-180000000.000000,90000000.000000)\n" + upright = "LowerRightMtrs=(180000000.000000,-90000000.000000)\n" + # Case of a MCD43 file + elif ftype == "MCD43C1": gridline = 'GridName="MCD_CMG_BRDF_0.05Deg"\n' upleft = "UpperLeftPointMtrs=(-180.000000,90.000000)\n" upright = "LowerRightMtrs=(180.000000,-90.000000)\n" @@ -381,7 +377,7 @@ def modis_l1b_nasa_mod021km_file(tmpdir_factory) -> list[str]: variable_infos.update(_get_visible_variable_info("EV_500_Aggr1km_RefSB", 1000, AVAILABLE_HKM_PRODUCT_NAMES)) variable_infos.update(_get_visible_variable_info("EV_250_Aggr1km_RefSB", 1000, AVAILABLE_QKM_PRODUCT_NAMES)) variable_infos.update(_get_emissive_variable_info("EV_1KM_Emissive", 1000, AVAILABLE_1KM_IR_PRODUCT_NAMES)) - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=5000, file_shortname="MOD021KM") + create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 5000, "file_shortname": "MOD021KM"}) return [full_path] @@ -395,7 +391,7 @@ def modis_l1b_imapp_1000m_file(tmpdir_factory) -> list[str]: variable_infos.update(_get_visible_variable_info("EV_500_Aggr1km_RefSB", 1000, AVAILABLE_HKM_PRODUCT_NAMES)) variable_infos.update(_get_visible_variable_info("EV_250_Aggr1km_RefSB", 1000, AVAILABLE_QKM_PRODUCT_NAMES)) variable_infos.update(_get_emissive_variable_info("EV_1KM_Emissive", 1000, AVAILABLE_1KM_IR_PRODUCT_NAMES)) - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=5000, file_shortname="MOD021KM") + create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 5000, "file_shortname": "MOD021KM"}) return [full_path] @@ -406,7 +402,7 @@ def modis_l1b_nasa_mod02hkm_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=False) variable_infos.update(_get_visible_variable_info("EV_500_RefSB", 250, AVAILABLE_QKM_PRODUCT_NAMES)) - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=1000, file_shortname="MOD02HKM") + create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 1000, "file_shortname": "MOD02HKM"}) return [full_path] @@ -417,7 +413,7 @@ def modis_l1b_nasa_mod02qkm_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=False) variable_infos.update(_get_visible_variable_info("EV_250_RefSB", 250, AVAILABLE_QKM_PRODUCT_NAMES)) - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=1000, file_shortname="MOD02QKM") + create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 1000, "file_shortname": "MOD02QKM"}) return [full_path] @@ -427,7 +423,7 @@ def modis_l1b_nasa_mod03_file(tmpdir_factory) -> list[str]: filename = generate_nasa_l1b_filename("MOD03") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=True) - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=1000, file_shortname="MOD03") + create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 1000, "file_shortname": "MOD03"}) return [full_path] @@ -437,7 +433,7 @@ def modis_l1b_imapp_geo_file(tmpdir_factory) -> list[str]: 
filename = generate_imapp_filename("geo") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=True) - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=1000, file_shortname="MOD03") + create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 1000, "file_shortname": "MOD03"}) return [full_path] @@ -595,7 +591,7 @@ def modis_l2_nasa_mod35_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_cloud_mask_variable_info("Cloud_Mask", 1000)) - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=5000, file_shortname="MOD35") + create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 5000, "file_shortname": "MOD35"}) return [full_path] @@ -605,12 +601,12 @@ def generate_nasa_l3_filename(prefix: str) -> str: return f"{prefix}.A{now:%Y%j}.061.{now:%Y%j%H%M%S}.hdf" -def modis_l3_file(tmpdir_factory, f_prefix, var_name, geo_res, f_short): +def modis_l3_file(tmpdir_factory, f_prefix, var_name, f_short): """Create a MODIS L3 file of the desired type.""" filename = generate_nasa_l3_filename(f_prefix) full_path = str(tmpdir_factory.mktemp("modis_l3").join(filename)) variable_infos = _get_l3_refl_variable_info(var_name) - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=geo_res, file_shortname=f_short) + create_hdfeos_test_file(full_path, variable_infos, {"l3_type": f_short}) return [full_path] @@ -620,7 +616,6 @@ def modis_l3_nasa_mod09_file(tmpdir_factory) -> list[str]: return modis_l3_file(tmpdir_factory, "MOD09CMG", "Coarse_Resolution_Surface_Reflectance_Band_2", - -999, "MOD09") @@ -630,7 +625,6 @@ def modis_l3_nasa_mod43_file(tmpdir_factory) -> list[str]: return modis_l3_file(tmpdir_factory, "MCD43C1", "BRDF_Albedo_Parameter1_Band2", - -9999, "MCD43C1") @@ -647,7 +641,7 @@ def modis_l2_nasa_mod06_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_basic_variable_info("Surface_Pressure", 5000)) - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=5000, file_shortname="MOD06") + create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 5000, "file_shortname": "MOD06"}) return [full_path] @@ -658,7 +652,7 @@ def modis_l2_imapp_snowmask_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=False) variable_infos.update(_get_basic_variable_info("Snow_Mask", 1000)) - create_hdfeos_test_file(full_path, variable_infos, include_metadata=False) + create_hdfeos_test_file(full_path, variable_infos, {}) return [full_path] @@ -675,7 +669,7 @@ def modis_l2_imapp_mask_byte1_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=False) variable_infos.update(_get_mask_byte1_variable_info()) - create_hdfeos_test_file(full_path, variable_infos, include_metadata=False) + create_hdfeos_test_file(full_path, variable_infos, {}) return [full_path] diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py index 3f6a9e8250..de8ff682a1 
100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py @@ -27,8 +27,6 @@ from satpy import Scene, available_readers -from ._modis_fixtures import _shape_for_resolution - def _expected_area(): proj_param = "EPSG:4326" @@ -101,6 +99,6 @@ def test_load_l3_dataset(self, modis_l3_nasa_mod09_file): assert data_arr_comp.dtype == data_arr.dtype assert data_arr_comp.dtype == np.float32 - assert data_arr_comp.shape == _shape_for_resolution(-999) + assert data_arr_comp.shape == (3600, 7200) assert data_arr_comp.attrs.get("resolution") == 0.05 assert data_arr_comp.attrs.get("area") == _expected_area() From 1b136b9c19f15f121c4468b97cfcdf9ff4380662 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 17:21:58 +0000 Subject: [PATCH 452/702] Fix bug in MODIS tests. --- satpy/tests/reader_tests/modis_tests/_modis_fixtures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index c702752b28..7c20091d31 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -249,7 +249,7 @@ def create_hdfeos_test_file(filename: str, if metadata_dict is not None and metadata_dict != {}: # Check if we're dealing with an L3 file if "l3_type" not in metadata_dict.keys(): - if "file_shortname" not in metadata_dict["file_shortname"].keys(): + if "file_shortname" not in metadata_dict.keys(): raise ValueError("'file_shortname' is required when including metadata.") # For L1 and L2 files we need to know the resolution if "geo_resolution" not in metadata_dict.keys(): From d0774021726ab09b17c1fbdd4bde8e4650e9a034 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 17:25:08 +0000 Subject: [PATCH 453/702] Remove mutable argument from modis tests. Co-authored-by: David Hoese --- satpy/tests/reader_tests/modis_tests/_modis_fixtures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index 7c20091d31..84ac7fc5ae 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -228,7 +228,7 @@ def generate_imapp_filename(suffix): def create_hdfeos_test_file(filename: str, variable_infos: dict, - metadata_dict: Optional[dict] = {}): + metadata_dict: Optional[dict] = None): """Create a fake MODIS L1b HDF4 file with headers. Args: From fe554dd1763de12226d53a5ec643623ac380c1ae Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 17:59:46 +0000 Subject: [PATCH 454/702] Additional refactoring of MODIS tests. 
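
This drops the metadata_dict indirection again: the fixtures now build the
global-attribute strings themselves and hand them straight to the helper, which
simply attaches whatever it is given. Typical calls from the hunks below:

    # L1/L2 fixtures
    create_hdfeos_test_file(full_path,
                            variable_infos,
                            _create_struct_metadata(5000),
                            _create_core_metadata("MOD021KM"),
                            _create_header_metadata())
    # L3 fixtures
    create_hdfeos_test_file(full_path,
                            variable_infos,
                            _create_struct_metadata_cmg(f_short),
                            _create_core_metadata(f_short),
                            _create_header_metadata())
    # files without global metadata
    create_hdfeos_test_file(full_path, variable_infos)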
--- .../modis_tests/_modis_fixtures.py | 98 ++++++++++++------- 1 file changed, 61 insertions(+), 37 deletions(-) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index 84ac7fc5ae..e792b70d89 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -19,7 +19,6 @@ from __future__ import annotations from datetime import datetime, timedelta -from typing import Optional import numpy as np import pytest @@ -228,40 +227,29 @@ def generate_imapp_filename(suffix): def create_hdfeos_test_file(filename: str, variable_infos: dict, - metadata_dict: Optional[dict] = None): + struct_meta: str = "", + core_meta: str = "", + archive_meta: str = "", + ) -> None: """Create a fake MODIS L1b HDF4 file with headers. Args: filename: Full path of filename to be created. variable_infos: Dictionary mapping HDF4 variable names to dictionary of variable information (see ``_add_variable_to_file``). - geo_resolution: Resolution of geolocation datasets to be stored in the - metadata strings stored in the global metadata attributes. Only - used if ``include_metadata`` is ``True`` (default). - file_shortname: Short name of the file to be stored in global metadata - attributes. Only used if ``include_metadata`` is ``True`` - (default). - metadata_dict: A dictionary of metadata to be added to the file. + struct_meta: Contents of the 'StructMetadata.0' header. + core_meta: Contents of the 'CoreMetadata.0' header. + archive_meta:Contents of the 'ArchiveMetadata.0' header. """ h = SD(filename, SDC.WRITE | SDC.CREATE) - if metadata_dict is not None and metadata_dict != {}: - # Check if we're dealing with an L3 file - if "l3_type" not in metadata_dict.keys(): - if "file_shortname" not in metadata_dict.keys(): - raise ValueError("'file_shortname' is required when including metadata.") - # For L1 and L2 files we need to know the resolution - if "geo_resolution" not in metadata_dict.keys(): - raise ValueError("'geo_resolution' is required when including L1/L2 metadata.") - setattr(h, "StructMetadata.0", _create_struct_metadata(metadata_dict["geo_resolution"])) - setattr(h, "CoreMetadata.0", _create_core_metadata(metadata_dict["file_shortname"])) # noqa - else: - # For an L3 file, we just call the relevant metadata creator - setattr(h, "StructMetadata.0", _create_struct_metadata_cmg(metadata_dict["l3_type"])) - setattr(h, "CoreMetadata.0", _create_core_metadata(metadata_dict["l3_type"])) # noqa - - setattr(h, "ArchiveMetadata.0", _create_header_metadata()) # noqa + if struct_meta != "": + setattr(h, "StructMetadata.0", struct_meta) + if core_meta != "": + setattr(h, "CoreMetadata.0", core_meta) + if archive_meta != "": + setattr(h, "ArchiveMetadata.0", archive_meta) for var_name, var_info in variable_infos.items(): _add_variable_to_file(h, var_name, var_info) @@ -341,7 +329,7 @@ def _create_struct_metadata_cmg(ftype: str) -> str: upleft = "UpperLeftPointMtrs=(-180000000.000000,90000000.000000)\n" upright = "LowerRightMtrs=(180000000.000000,-90000000.000000)\n" # Case of a MCD43 file - elif ftype == "MCD43C1": + else: gridline = 'GridName="MCD_CMG_BRDF_0.05Deg"\n' upleft = "UpperLeftPointMtrs=(-180.000000,90.000000)\n" upright = "LowerRightMtrs=(180.000000,-90.000000)\n" @@ -377,7 +365,11 @@ def modis_l1b_nasa_mod021km_file(tmpdir_factory) -> list[str]: variable_infos.update(_get_visible_variable_info("EV_500_Aggr1km_RefSB", 1000, AVAILABLE_HKM_PRODUCT_NAMES)) 
variable_infos.update(_get_visible_variable_info("EV_250_Aggr1km_RefSB", 1000, AVAILABLE_QKM_PRODUCT_NAMES)) variable_infos.update(_get_emissive_variable_info("EV_1KM_Emissive", 1000, AVAILABLE_1KM_IR_PRODUCT_NAMES)) - create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 5000, "file_shortname": "MOD021KM"}) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata(5000), + _create_core_metadata("MOD021KM"), + _create_header_metadata()) return [full_path] @@ -391,7 +383,11 @@ def modis_l1b_imapp_1000m_file(tmpdir_factory) -> list[str]: variable_infos.update(_get_visible_variable_info("EV_500_Aggr1km_RefSB", 1000, AVAILABLE_HKM_PRODUCT_NAMES)) variable_infos.update(_get_visible_variable_info("EV_250_Aggr1km_RefSB", 1000, AVAILABLE_QKM_PRODUCT_NAMES)) variable_infos.update(_get_emissive_variable_info("EV_1KM_Emissive", 1000, AVAILABLE_1KM_IR_PRODUCT_NAMES)) - create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 5000, "file_shortname": "MOD021KM"}) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata(5000), + _create_core_metadata("MOD021KM"), + _create_header_metadata()) return [full_path] @@ -402,7 +398,11 @@ def modis_l1b_nasa_mod02hkm_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=False) variable_infos.update(_get_visible_variable_info("EV_500_RefSB", 250, AVAILABLE_QKM_PRODUCT_NAMES)) - create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 1000, "file_shortname": "MOD02HKM"}) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata(1000), + _create_core_metadata("MOD02HKM"), + _create_header_metadata()) return [full_path] @@ -413,7 +413,11 @@ def modis_l1b_nasa_mod02qkm_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=False) variable_infos.update(_get_visible_variable_info("EV_250_RefSB", 250, AVAILABLE_QKM_PRODUCT_NAMES)) - create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 1000, "file_shortname": "MOD02QKM"}) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata(1000), + _create_core_metadata("MOD02QKM"), + _create_header_metadata()) return [full_path] @@ -423,7 +427,11 @@ def modis_l1b_nasa_mod03_file(tmpdir_factory) -> list[str]: filename = generate_nasa_l1b_filename("MOD03") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=True) - create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 1000, "file_shortname": "MOD03"}) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata(1000), + _create_core_metadata("MOD03"), + _create_header_metadata()) return [full_path] @@ -433,7 +441,11 @@ def modis_l1b_imapp_geo_file(tmpdir_factory) -> list[str]: filename = generate_imapp_filename("geo") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=True) - create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 1000, "file_shortname": "MOD03"}) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata(1000), + _create_core_metadata("MOD03"), + _create_header_metadata()) return [full_path] @@ -591,7 +603,11 @@ def 
modis_l2_nasa_mod35_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_cloud_mask_variable_info("Cloud_Mask", 1000)) - create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 5000, "file_shortname": "MOD35"}) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata(5000), + _create_core_metadata("MOD35"), + _create_header_metadata()) return [full_path] @@ -606,7 +622,11 @@ def modis_l3_file(tmpdir_factory, f_prefix, var_name, f_short): filename = generate_nasa_l3_filename(f_prefix) full_path = str(tmpdir_factory.mktemp("modis_l3").join(filename)) variable_infos = _get_l3_refl_variable_info(var_name) - create_hdfeos_test_file(full_path, variable_infos, {"l3_type": f_short}) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata_cmg(f_short), + _create_core_metadata(f_short), + _create_header_metadata()) return [full_path] @@ -641,7 +661,11 @@ def modis_l2_nasa_mod06_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_basic_variable_info("Surface_Pressure", 5000)) - create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 5000, "file_shortname": "MOD06"}) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata(5000), + _create_core_metadata("MOD06"), + _create_header_metadata()) return [full_path] @@ -652,7 +676,7 @@ def modis_l2_imapp_snowmask_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=False) variable_infos.update(_get_basic_variable_info("Snow_Mask", 1000)) - create_hdfeos_test_file(full_path, variable_infos, {}) + create_hdfeos_test_file(full_path, variable_infos) return [full_path] @@ -669,7 +693,7 @@ def modis_l2_imapp_mask_byte1_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=False) variable_infos.update(_get_mask_byte1_variable_info()) - create_hdfeos_test_file(full_path, variable_infos, {}) + create_hdfeos_test_file(full_path, variable_infos) return [full_path] From a211edd9ba77c169b8f5989521a1be42d879b02f Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 19:34:15 +0000 Subject: [PATCH 455/702] Refactor MODIS L3 reader and improve tests. 
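
Resolution handling also moves out of __init__: _dynamic_variables_from_file()
now calls _get_res() lazily, which parses the CMG grid name and falls back to
360/XDim when the name carries no explicit resolution. A worked illustration of
that parsing, using the grid names from the test fixtures:

    gridname = "MCD_CMG_BRDF_0.05Deg"
    pos = gridname.rfind("_") + 1    # 13
    pos2 = gridname.rfind("Deg")     # 17
    float(gridname[pos:pos2])        # -> 0.05

    # "MOD09CMG" has no "...Deg" suffix, so the fallback applies:
    360. / 7200                      # -> 0.05 for the 7200-column CMG grid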
--- satpy/readers/modis_l3.py | 48 ++++++++----------- .../modis_tests/_modis_fixtures.py | 13 ++--- 2 files changed, 27 insertions(+), 34 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 485fc1031f..29e0247fdc 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -44,33 +44,6 @@ class ModisL3GriddedHDFFileHandler(HDFEOSGeoReader): """File handler for MODIS HDF-EOS Level 3 CMG gridded files.""" - - def __init__(self, filename, filename_info, filetype_info, **kwargs): - """Init the file handler.""" - super().__init__(filename, filename_info, filetype_info, **kwargs) - - # Get the grid resolution, name and other projection info - self.resolution = self._get_res() - - - def _get_res(self): - """Compute the resolution from the file metadata.""" - gridname = self.metadata["GridStructure"]["GRID_1"]["GridName"] - if "CMG" not in gridname: - raise ValueError("Only CMG grids are supported") - - # Get the grid resolution from the grid name - pos = gridname.rfind("_") + 1 - pos2 = gridname.rfind("Deg") - - # Initialise number of rows and columns - # Some products don't have resolution listed. - if pos < 0 or pos2 < 0: - return 360. / self.metadata["GridStructure"]["GRID_1"]["XDim"] - else: - return float(gridname[pos:pos2]) - - def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" @@ -101,15 +74,34 @@ def available_datasets(self, configured_datasets=None): yield from self._dynamic_variables_from_file(handled_var_names) def _dynamic_variables_from_file(self, handled_var_names: set) -> Iterable[tuple[bool, dict]]: + res = self._get_res() for var_name in self.sd.datasets().keys(): if var_name in handled_var_names: # skip variables that YAML had configured continue common = {"file_type": "modis_l3_cmg_hdf", - "resolution": self.resolution, + "resolution": res, "name": var_name} yield True, common + + def _get_res(self): + """Compute the resolution from the file metadata.""" + gridname = self.metadata["GridStructure"]["GRID_1"]["GridName"] + if "CMG" not in gridname: + raise ValueError("Only CMG grids are supported") + + # Get the grid resolution from the grid name + pos = gridname.rfind("_") + 1 + pos2 = gridname.rfind("Deg") + + # Initialise number of rows and columns + # Some products don't have resolution listed. + if pos < 0 or pos2 < 0: + return 360. / self.metadata["GridStructure"]["GRID_1"]["XDim"] + else: + return float(gridname[pos:pos2]) + def get_dataset(self, dataset_id, dataset_info): """Get DataArray for specified dataset.""" dataset_name = dataset_id["name"] diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index e792b70d89..6dc4bf2d05 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -19,6 +19,7 @@ from __future__ import annotations from datetime import datetime, timedelta +from typing import Optional import numpy as np import pytest @@ -227,9 +228,9 @@ def generate_imapp_filename(suffix): def create_hdfeos_test_file(filename: str, variable_infos: dict, - struct_meta: str = "", - core_meta: str = "", - archive_meta: str = "", + struct_meta: Optional[str] = None, + core_meta: Optional[str] = None, + archive_meta: Optional[str] = None, ) -> None: """Create a fake MODIS L1b HDF4 file with headers. 
@@ -244,11 +245,11 @@ def create_hdfeos_test_file(filename: str, """ h = SD(filename, SDC.WRITE | SDC.CREATE) - if struct_meta != "": + if struct_meta: setattr(h, "StructMetadata.0", struct_meta) - if core_meta != "": + if core_meta: setattr(h, "CoreMetadata.0", core_meta) - if archive_meta != "": + if archive_meta: setattr(h, "ArchiveMetadata.0", archive_meta) for var_name, var_info in variable_infos.items(): From 328816ec1dac80f1d3773ced4a0a7a2ab3b801cf Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 20:18:06 +0000 Subject: [PATCH 456/702] Update HDFEOS code to always use `maxsplit=1` when splitting attrs. --- satpy/readers/hdfeos_base.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 6e25fd40a8..37fe714435 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -148,10 +148,7 @@ def _read_mda(cls, lines, element=None): @classmethod def _split_line(cls, line, lines): - try: - key, val = line.split("=") - except ValueError: - key, val = line.split("=", maxsplit=1) + key, val = line.split("=", maxsplit=1) key = key.strip() val = val.strip() try: From 3080dbc9efd0d70ad78e029f56a7df17bda56332 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 15 Nov 2023 23:42:42 +0100 Subject: [PATCH 457/702] Add support for angles in sgli reader --- satpy/readers/sgli_l1b.py | 93 +++++++++++++---------- satpy/tests/reader_tests/test_sgli_l1b.py | 49 +++++++++++- 2 files changed, 97 insertions(+), 45 deletions(-) diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 25289b5de3..2dace6d2b3 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -49,26 +49,6 @@ "L": 1000} -def interpolate(arr, sampling, full_shape): - """Interpolate the angles and navigation.""" - # TODO: daskify this! - # TODO: do it in cartesian coordinates ! 
pbs at date line and poles - # possible - tie_x = np.arange(0, arr.shape[0] * sampling, sampling) - tie_y = np.arange(0, arr.shape[1] * sampling, sampling) - full_x = np.arange(0, full_shape[0]) - full_y = np.arange(0, full_shape[1]) - - - from scipy.interpolate import RectBivariateSpline - spl = RectBivariateSpline( - tie_x, tie_y, arr) - - values = spl(full_x, full_y) - - return da.from_array(values, chunks=(1000, 1000)) - - class HDF5SGLI(BaseFileHandler): """File handler for the SGLI l1b data.""" @@ -103,20 +83,13 @@ def get_dataset(self, key, info): file_key = info["file_key"] h5dataset = self.h5file[file_key] - # resampling_interval = h5dataset.attrs.get('Resampling_interval', 1) - # if resampling_interval != 1: - # logger.debug('Interpolating %s.', key["name"]) - # full_shape = (self.h5file['Image_data'].attrs['Number_of_lines'], - # self.h5file['Image_data'].attrs['Number_of_pixels']) - # dataset = interpolate(h5dataset, resampling_interval, full_shape) - # else: - # dataset = da.from_array(h5dataset[:].astype('= 0 + +def test_loading_solar_angles(sgli_file): + handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) + did = dict(name="solar_azimuth_angle", resolution=1000, polarization=None) + res = handler.get_dataset(did, {"file_key": "Geometry_data/Sensor_zenith"}) + assert res.shape == (1955, 1250) + assert res.chunks is not None + assert res.dtype == np.float32 + assert res.max() <= 180 From 09325c2fe97a1e966e281c638c32f47fec34b811 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 16 Nov 2023 12:25:55 +0100 Subject: [PATCH 458/702] Fix docstrings --- satpy/_scene_converters.py | 58 ++++++++----------- satpy/cf/__init__.py | 2 - satpy/cf/area.py | 2 - satpy/cf/attrs.py | 2 - satpy/cf/coords.py | 2 - satpy/cf/dataarray.py | 38 ++++-------- satpy/cf/datasets.py | 115 ++++++++++++++----------------------- satpy/cf/encoding.py | 2 - satpy/writers/cf_writer.py | 51 +++++++--------- 9 files changed, 98 insertions(+), 174 deletions(-) diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py index c400a159f1..fbc0a7a627 100644 --- a/satpy/_scene_converters.py +++ b/satpy/_scene_converters.py @@ -52,42 +52,32 @@ def to_xarray(scn, If Scene DataArrays are on different areas, currently it fails, although in future we might return a DataTree object, grouped by area. - Parameters - ---------- - scn: satpy.Scene - Satpy Scene. - datasets (iterable): - List of Satpy Scene datasets to include in the output xr.Dataset. - Elements can be string name, a wavelength as a number, a DataID, - or DataQuery object. - If None (the default), it include all loaded Scene datasets. - header_attrs: - Global attributes of the output xr.Dataset. - epoch (str): - Reference time for encoding the time coordinates (if available). - Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is defined using "from satpy.cf import EPOCH" - flatten_attrs (bool): - If True, flatten dict-type attributes. - exclude_attrs (list): - List of xr.DataArray attribute names to be excluded. - include_lonlats (bool): - If True, it includes 'latitude' and 'longitude' coordinates. - If the 'area' attribute is a SwathDefinition, it always includes - latitude and longitude coordinates. - pretty (bool): - Don't modify coordinate names, if possible. Makes the file prettier, - but possibly less consistent. - include_orig_name (bool). - Include the original dataset name as a variable attribute in the xr.Dataset. 
- numeric_name_prefix (str): - Prefix to add the each variable with name starting with a digit. - Use '' or None to leave this out. + Args: + scn (satpy.Scene): Satpy Scene. + datasets (iterable, optional): List of Satpy Scene datasets to include in + the output xr.Dataset. Elements can be string name, a wavelength as a + number, a DataID, or DataQuery object. If None (the default), it + includes all loaded Scene datasets. + header_attrs: Global attributes of the output xr.Dataset. + epoch (str, optional): Reference time for encoding the time coordinates + (if available). Format example: "seconds since 1970-01-01 00:00:00". + If None, the default reference time is retrieved using + "from satpy.cf_writer import EPOCH". + flatten_attrs (bool, optional): If True, flatten dict-type attributes. + exclude_attrs (list, optional): List of xr.DataArray attribute names to + be excluded. + include_lonlats (bool, optional): If True, includes 'latitude' and + 'longitude' coordinates. If the 'area' attribute is a SwathDefinition, + it always includes latitude and longitude coordinates. + pretty (bool, optional): Don't modify coordinate names, if possible. Makes + the file prettier, but possibly less consistent. + include_orig_name (bool, optional): Include the original dataset name as a + variable attribute in the xr.Dataset. + numeric_name_prefix (str, optional): Prefix to add to each variable with + name starting with a digit. Use '' or None to leave this out. Returns: - ------- - ds, xr.Dataset - A CF-compliant xr.Dataset + xr.Dataset: A CF-compliant xr.Dataset """ from satpy.cf.datasets import collect_cf_datasets diff --git a/satpy/cf/__init__.py b/satpy/cf/__init__.py index c48acebcf9..63fac5261c 100644 --- a/satpy/cf/__init__.py +++ b/satpy/cf/__init__.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- """Code for generation of CF-compliant datasets.""" EPOCH = u"seconds since 1970-01-01 00:00:00" diff --git a/satpy/cf/area.py b/satpy/cf/area.py index 5ce9f1e0c3..041338efd8 100644 --- a/satpy/cf/area.py +++ b/satpy/cf/area.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. diff --git a/satpy/cf/attrs.py b/satpy/cf/attrs.py index af1977b1f4..2cf9ffa528 100644 --- a/satpy/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. diff --git a/satpy/cf/coords.py b/satpy/cf/coords.py index 05e8a792fd..af11a62e43 100644 --- a/satpy/cf/coords.py +++ b/satpy/cf/coords.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- """Set CF-compliant spatial and temporal coordinates.""" import logging diff --git a/satpy/cf/dataarray.py b/satpy/cf/dataarray.py index 078c53c462..3c97a70336 100644 --- a/satpy/cf/dataarray.py +++ b/satpy/cf/dataarray.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. @@ -63,31 +61,19 @@ def make_cf_dataarray(dataarray, numeric_name_prefix="CHANNEL_"): """Make the xr.DataArray CF-compliant. - Parameters - ---------- - dataarray : xr.DataArray - The data array to be made CF-compliant. - epoch : str, optional - Reference time for encoding of time coordinates. - If None, the default reference time is defined using `from satpy.cf import EPOCH` - flatten_attrs : bool, optional - If True, flatten dict-type attributes. - The default is False. 
- exclude_attrs : list, optional - List of dataset attributes to be excluded. - The default is None. - include_orig_name : bool, optional - Include the original dataset name in the netcdf variable attributes. - The default is True. - numeric_name_prefix : TYPE, optional - Prepend dataset name with this if starting with a digit. - The default is ``"CHANNEL_"``. - - Returns - ------- - new_data : xr.DataArray - CF-compliant xr.DataArray. + Args: + dataarray (xr.DataArray): The data array to be made CF-compliant. + epoch (str, optional): Reference time for encoding of time coordinates. + If None, the default reference time is defined using `from satpy.cf import EPOCH`. + flatten_attrs (bool, optional): If True, flatten dict-type attributes. Defaults to False. + exclude_attrs (list, optional): List of dataset attributes to be excluded. Defaults to None. + include_orig_name (bool, optional): Include the original dataset name in the netcdf variable attributes. + Defaults to True. + numeric_name_prefix (str, optional): Prepend dataset name with this if starting with a digit. + Defaults to "CHANNEL_". + Returns: + xr.DataArray: A CF-compliant xr.DataArray. """ dataarray = _preprocess_dataarray_name(dataarray=dataarray, numeric_name_prefix=numeric_name_prefix, diff --git a/satpy/cf/datasets.py b/satpy/cf/datasets.py index 3cb72af442..70ac3fb014 100644 --- a/satpy/cf/datasets.py +++ b/satpy/cf/datasets.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. @@ -69,39 +67,24 @@ def _collect_cf_dataset(list_dataarrays, numeric_name_prefix="CHANNEL_"): """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Dataset. - Parameters - ---------- - list_dataarrays : list - List of DataArrays to make CF compliant and merge into a xr.Dataset. - epoch : str - Reference time for encoding the time coordinates (if available). - Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is defined using `from satpy.cf import EPOCH` - flatten_attrs : bool, optional - flatten_attrs : bool - If True, flatten dict-type attributes. - exclude_attrs : list, optional - exclude_attrs : list - List of xr.DataArray attribute names to be excluded. - include_lonlats : bool, optional - include_lonlats : bool - If True, it includes 'latitude' and 'longitude' coordinates also for satpy scene defined on an AreaDefinition. - If the 'area' attribute is a SwathDefinition, it always include latitude and longitude coordinates. - pretty : bool, optional - pretty : bool - Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. - include_orig_name : bool, optional - include_orig_name : bool - Include the original dataset name as a variable attribute in the xr.Dataset. - numeric_name_prefix : str, optional - numeric_name_prefix : str - Prefix to add the each variable with name starting with a digit. - Use '' or None to leave this out. - - Returns - ------- - ds : xr.Dataset - A partially CF-compliant xr.Dataset + Args: + list_dataarrays (list): List of DataArrays to make CF compliant and merge into an xr.Dataset. + epoch (str, optional): Reference time for encoding the time coordinates. + Example format: "seconds since 1970-01-01 00:00:00". + If None, the default reference time is defined using `from satpy.cf import EPOCH`. + flatten_attrs (bool, optional): If True, flatten dict-type attributes. 
+ exclude_attrs (list, optional): List of xr.DataArray attribute names to be excluded. + include_lonlats (bool, optional): If True, includes 'latitude' and 'longitude' coordinates also for a + satpy.Scene defined on an AreaDefinition. + If the 'area' attribute is a SwathDefinition, it always includes latitude and longitude coordinates. + pretty (bool, optional): Don't modify coordinate names, if possible. + Makes the file prettier, but possibly less consistent. + include_orig_name (bool, optional): Include the original dataset name as a variable attribute in the xr.Dataset. + numeric_name_prefix (str, optional): Prefix to add to each variable with a name starting with a digit. + Use '' or None to leave this out. + + Returns: + xr.Dataset: A partially CF-compliant xr.Dataset. """ from satpy.cf.area import area2cf from satpy.cf.coords import ( @@ -194,44 +177,30 @@ def collect_cf_datasets(list_dataarrays, If the xr.DataArrays does not share the same dimensions, it creates a collection of xr.Datasets sharing the same dimensions. - Parameters - ---------- - list_dataarrays (list): - List of DataArrays to make CF compliant and merge into groups of xr.Datasets. - header_attrs: (dict): - Global attributes of the output xr.Dataset. - epoch (str): - Reference time for encoding the time coordinates (if available). - Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using `from satpy.cf import EPOCH` - flatten_attrs (bool): - If True, flatten dict-type attributes. - exclude_attrs (list): - List of xr.DataArray attribute names to be excluded. - include_lonlats (bool): - If True, it includes 'latitude' and 'longitude' coordinates also for satpy scene defined on an AreaDefinition. - If the 'area' attribute is a SwathDefinition, it always include latitude and longitude coordinates. - pretty (bool): - Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. - include_orig_name (bool). - Include the original dataset name as a variable attribute in the xr.Dataset. - numeric_name_prefix (str): - Prefix to add the each variable with name starting with a digit. - Use '' or None to leave this out. - groups (dict): - Group datasets according to the given assignment: - - `{'': ['dataset_name1', 'dataset_name2', ...]}` - - It is used to create grouped netCDFs using the CF_Writer. - If None (the default), no groups will be created. - - Returns - ------- - grouped_datasets : dict - A dictionary of CF-compliant xr.Dataset: {group_name: xr.Dataset} - header_attrs : dict - Global attributes to be attached to the xr.Dataset / netCDF4. + Args: + list_dataarrays (list): List of DataArrays to make CF compliant and merge into groups of xr.Datasets. + header_attrs (dict): Global attributes of the output xr.Dataset. + epoch (str, optional): Reference time for encoding the time coordinates. + Example format: "seconds since 1970-01-01 00:00:00". + If None, the default reference time is retrieved using `from satpy.cf import EPOCH`. + flatten_attrs (bool, optional): If True, flatten dict-type attributes. + exclude_attrs (list, optional): List of xr.DataArray attribute names to be excluded. + include_lonlats (bool, optional): If True, includes 'latitude' and 'longitude' coordinates also + for a satpy.Scene defined on an AreaDefinition. + If the 'area' attribute is a SwathDefinition, it always includes latitude and longitude coordinates. + pretty (bool, optional): Don't modify coordinate names, if possible. 
+ Makes the file prettier, but possibly less consistent. + include_orig_name (bool, optional): Include the original dataset name as a variable attribute in the xr.Dataset. + numeric_name_prefix (str, optional): Prefix to add to each variable with a name starting with a digit. + Use '' or None to leave this out. + groups (dict, optional): Group datasets according to the given assignment: + `{'': ['dataset_name1', 'dataset_name2', ...]}`. + Used to create grouped netCDFs using the CF_Writer. If None, no groups will be created. + + Returns: + tuple: A tuple containing: + - grouped_datasets (dict): A dictionary of CF-compliant xr.Dataset: {group_name: xr.Dataset}. + - header_attrs (dict): Global attributes to be attached to the xr.Dataset / netCDF4. """ from satpy.cf.attrs import preprocess_header_attrs from satpy.cf.coords import add_time_bounds_dimension diff --git a/satpy/cf/encoding.py b/satpy/cf/encoding.py index 3cdf1fdf1d..5c77b6d69f 100644 --- a/satpy/cf/encoding.py +++ b/satpy/cf/encoding.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 7076cc841d..1204754bd0 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. @@ -240,36 +238,27 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, Note that all datasets (if grouping: in one group) must have the same projection coordinates. Args: - datasets (list): - List of xr.DataArray to be saved. - filename (str): - Output file - groups (dict): - Group datasets according to the given assignment: `{'group_name': ['dataset1', 'dataset2', ...]}`. - Group name `None` corresponds to the root of the file, i.e. no group will be created. + datasets (list): List of xr.DataArray to be saved. + filename (str): Output file. + groups (dict): Group datasets according to the given assignment: + `{'group_name': ['dataset1', 'dataset2', ...]}`. + The group name `None` corresponds to the root of the file, i.e., no group will be created. Warning: The results will not be fully CF compliant! - header_attrs: - Global attributes to be included. - engine (str): - Module to be used for writing netCDF files. Follows xarray's - :meth:`~xarray.Dataset.to_netcdf` engine choices with a - preference for 'netcdf4'. - epoch (str): - Reference time for encoding of time coordinates. - If None, the default reference time is defined using `from satpy.cf import EPOCH` - flatten_attrs (bool): - If True, flatten dict-type attributes. - exclude_attrs (list): - List of dataset attributes to be excluded. - include_lonlats (bool): - Always include latitude and longitude coordinates, even for datasets with area definition. - pretty (bool): - Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. - include_orig_name (bool). - Include the original dataset name as a variable attribute in the final netCDF. - numeric_name_prefix (str): - Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. - + header_attrs: Global attributes to be included. + engine (str, optional): Module to be used for writing netCDF files. Follows xarray's + :meth:`~xarray.Dataset.to_netcdf` engine choices with a preference for 'netcdf4'. 
+ epoch (str, optional): Reference time for encoding of time coordinates. + If None, the default reference time is defined using `from satpy.cf import EPOCH`. + flatten_attrs (bool, optional): If True, flatten dict-type attributes. + exclude_attrs (list, optional): List of dataset attributes to be excluded. + include_lonlats (bool, optional): Always include latitude and longitude coordinates, + even for datasets with area definition. + pretty (bool, optional): Don't modify coordinate names, if possible. + Makes the file prettier, but possibly less consistent. + include_orig_name (bool, optional): Include the original dataset name as a variable + attribute in the final netCDF. + numeric_name_prefix (str, optional): Prefix to add to each variable with a name starting with a digit. + Use '' or None to leave this out. """ from satpy.cf.datasets import collect_cf_datasets from satpy.cf.encoding import update_encoding From c42d1edbde9b48667ef2d3be1092ed0da329073b Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 16 Nov 2023 12:30:43 +0100 Subject: [PATCH 459/702] Move EPOCH to satpy.cf.coords --- satpy/cf/__init__.py | 2 -- satpy/cf/coords.py | 5 +++-- satpy/cf/dataarray.py | 2 +- satpy/cf/datasets.py | 4 ++-- satpy/scene.py | 2 +- satpy/writers/cf_writer.py | 6 +++--- 6 files changed, 10 insertions(+), 11 deletions(-) diff --git a/satpy/cf/__init__.py b/satpy/cf/__init__.py index 63fac5261c..f8f662a93b 100644 --- a/satpy/cf/__init__.py +++ b/satpy/cf/__init__.py @@ -1,3 +1 @@ """Code for generation of CF-compliant datasets.""" - -EPOCH = u"seconds since 1970-01-01 00:00:00" diff --git a/satpy/cf/coords.py b/satpy/cf/coords.py index af11a62e43..80ce22de39 100644 --- a/satpy/cf/coords.py +++ b/satpy/cf/coords.py @@ -13,6 +13,9 @@ logger = logging.getLogger(__name__) +EPOCH = u"seconds since 1970-01-01 00:00:00" + + def add_xy_coords_attrs(dataarray): """Add relevant attributes to x, y coordinates.""" # If there are no coords, return dataarray @@ -98,8 +101,6 @@ def set_cf_time_info(dataarray, epoch): - the time coordinate has size 1 """ - from satpy.cf import EPOCH - if epoch is None: epoch = EPOCH diff --git a/satpy/cf/dataarray.py b/satpy/cf/dataarray.py index 3c97a70336..5df68da887 100644 --- a/satpy/cf/dataarray.py +++ b/satpy/cf/dataarray.py @@ -64,7 +64,7 @@ def make_cf_dataarray(dataarray, Args: dataarray (xr.DataArray): The data array to be made CF-compliant. epoch (str, optional): Reference time for encoding of time coordinates. - If None, the default reference time is defined using `from satpy.cf import EPOCH`. + If None, the default reference time is defined using `from satpy.cf.coords import EPOCH`. flatten_attrs (bool, optional): If True, flatten dict-type attributes. Defaults to False. exclude_attrs (list, optional): List of dataset attributes to be excluded. Defaults to None. include_orig_name (bool, optional): Include the original dataset name in the netcdf variable attributes. diff --git a/satpy/cf/datasets.py b/satpy/cf/datasets.py index 70ac3fb014..c6ea6fd351 100644 --- a/satpy/cf/datasets.py +++ b/satpy/cf/datasets.py @@ -71,7 +71,7 @@ def _collect_cf_dataset(list_dataarrays, list_dataarrays (list): List of DataArrays to make CF compliant and merge into an xr.Dataset. epoch (str, optional): Reference time for encoding the time coordinates. Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is defined using `from satpy.cf import EPOCH`. + If None, the default reference time is defined using `from satpy.cf.coords import EPOCH`. 
flatten_attrs (bool, optional): If True, flatten dict-type attributes. exclude_attrs (list, optional): List of xr.DataArray attribute names to be excluded. include_lonlats (bool, optional): If True, includes 'latitude' and 'longitude' coordinates also for a @@ -182,7 +182,7 @@ def collect_cf_datasets(list_dataarrays, header_attrs (dict): Global attributes of the output xr.Dataset. epoch (str, optional): Reference time for encoding the time coordinates. Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using `from satpy.cf import EPOCH`. + If None, the default reference time is retrieved using `from satpy.cf.coords import EPOCH`. flatten_attrs (bool, optional): If True, flatten dict-type attributes. exclude_attrs (list, optional): List of xr.DataArray attribute names to be excluded. include_lonlats (bool, optional): If True, includes 'latitude' and 'longitude' coordinates also diff --git a/satpy/scene.py b/satpy/scene.py index bb8cf0ffab..4722a47533 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1128,7 +1128,7 @@ def to_xarray(self, epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is defined using "from satpy.cf import EPOCH" + If None, the default reference time is defined using "from satpy.cf.coords import EPOCH" flatten_attrs (bool): If True, flatten dict-type attributes. exclude_attrs (list): diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 1204754bd0..b64a288213 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -160,7 +160,7 @@ import xarray as xr from packaging.version import Version -from satpy.cf import EPOCH # noqa: F401 (for backward compatibility) +from satpy.cf.coords import EPOCH # noqa: F401 (for backward compatibility) from satpy.writers import Writer logger = logging.getLogger(__name__) @@ -248,7 +248,7 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine (str, optional): Module to be used for writing netCDF files. Follows xarray's :meth:`~xarray.Dataset.to_netcdf` engine choices with a preference for 'netcdf4'. epoch (str, optional): Reference time for encoding of time coordinates. - If None, the default reference time is defined using `from satpy.cf import EPOCH`. + If None, the default reference time is defined using `from satpy.cf.coords import EPOCH`. flatten_attrs (bool, optional): If True, flatten dict-type attributes. exclude_attrs (list, optional): List of dataset attributes to be excluded. include_lonlats (bool, optional): Always include latitude and longitude coordinates, @@ -325,7 +325,7 @@ def da2cf(dataarray, epoch=None, flatten_attrs=False, exclude_attrs=None, The data array to be converted. epoch (str): Reference time for encoding of time coordinates. - If None, the default reference time is defined using `from satpy.cf import EPOCH` + If None, the default reference time is defined using `from satpy.cf.coords import EPOCH` flatten_attrs (bool): If True, flatten dict-type attributes. 
exclude_attrs (list): From 73871f2a38ee229733bf286b3a13010e37a51beb Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 16 Nov 2023 13:33:11 +0200 Subject: [PATCH 460/702] Fix test value --- satpy/tests/test_composites.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 5bcbca0a1e..cf9f2fa6d2 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -489,7 +489,7 @@ def test_night_only_area_with_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=True) res = comp((self.data_b,)) res = res.compute() - expected_l_channel = np.array([[np.nan, 0.], [0.49999994, 1.]], dtype=np.float32) + expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]], dtype=np.float32) # FIXME: with the current changes the np.nan becomes 0.0 instead, why?! expected_alpha = np.array([[np.nan, 0.], [0., 0.]], dtype=np.float32) assert res.dtype == np.float32 From 6127a917ca44c7aa60ebcd276147f9c64f0820b5 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 16 Nov 2023 14:46:14 +0200 Subject: [PATCH 461/702] Fix getting mask for single side product --- satpy/composites/__init__.py | 2 +- satpy/tests/test_composites.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index e3c9dc190a..d0afdda950 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -869,7 +869,7 @@ def _get_single_channel(data: xr.DataArray) -> xr.DataArray: def _get_weight_mask_for_single_side_product(data_a, data_b): - if isinstance(data_a, int): + if data_b.shape: return ~da.isnan(data_b) return ~da.isnan(data_a) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index cf9f2fa6d2..c77c17aa37 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -490,7 +490,6 @@ def test_night_only_area_with_alpha(self): res = comp((self.data_b,)) res = res.compute() expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]], dtype=np.float32) - # FIXME: with the current changes the np.nan becomes 0.0 instead, why?! 
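A standalone sketch of the corrected weight-mask logic from the composites change above (plain dask arrays rather than the satpy compositor itself; data_a stands in for the scalar placeholder used when only one side of the composite is provided):

import dask.array as da
import numpy as np

def weight_mask_for_single_side(data_a, data_b):
    # Prefer the operand that actually carries array data: a non-empty
    # shape means data_b holds the single-side product (mirrors the
    # patched check that replaced the isinstance(data_a, int) test).
    if data_b.shape:
        return ~da.isnan(data_b)
    return ~da.isnan(data_a)

data_a = da.from_array(np.array(0.0))                          # scalar placeholder
data_b = da.from_array(np.array([[np.nan, 0.5], [1.0, 2.0]]))
print(weight_mask_for_single_side(data_a, data_b).compute())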
expected_alpha = np.array([[np.nan, 0.], [0., 0.]], dtype=np.float32) assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_l_channel) From 735c3f460b6c86291f4fb8b4002dd812e6a34297 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 16 Nov 2023 14:56:51 +0200 Subject: [PATCH 462/702] Fix/revert expected test values --- satpy/tests/test_composites.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index c77c17aa37..a564003e81 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -437,7 +437,7 @@ def test_daynight_sza(self): comp = DayNightCompositor(name="dn_test", day_night="day_night") res = comp((self.data_a, self.data_b, self.sza)) res = res.compute() - expected = np.array([[0., 0.22122352], [0.5, 1.]], dtype=np.float32) + expected = np.array([[0., 0.22122374], [0.5, 1.]], dtype=np.float32) assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected, rtol=1e-6) @@ -462,7 +462,7 @@ def test_night_only_sza_with_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=True) res = comp((self.data_b, self.sza)) res = res.compute() - expected_red_channel = np.array([[np.nan, 0.], [0.49999994, 1.]], dtype=np.float32) + expected_red_channel = np.array([[np.nan, 0.], [0.5, 1.]], dtype=np.float32) expected_alpha = np.array([[0., 0.3329599], [1., 1.]], dtype=np.float32) assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_red_channel) @@ -476,7 +476,7 @@ def test_night_only_sza_without_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=False) res = comp((self.data_a, self.sza)) res = res.compute() - expected = np.array([[0., 0.11042608], [0.6683501, 1.]], dtype=np.float32) + expected = np.array([[0., 0.11042609], [0.6683502, 1.]], dtype=np.float32) assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected) assert "A" not in res.bands @@ -558,7 +558,7 @@ def test_day_only_area_with_alpha_and_missing_data(self): comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_b,)) res = res.compute() - expected_l_channel = np.array([[np.nan, 0.], [0.49999994, 1.]], dtype=np.float32) + expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]], dtype=np.float32) expected_alpha = np.array([[np.nan, 1.], [1., 1.]], dtype=np.float32) assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_l_channel) From 0a18d2da91893472fd802175161405157c3854e6 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 16 Nov 2023 09:29:53 -0600 Subject: [PATCH 463/702] Add test for VIIRS EDR available datasets fix --- satpy/tests/reader_tests/test_viirs_edr.py | 27 ++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 9b13f384e2..a6932520c0 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -285,6 +285,33 @@ def _create_fake_dataset(vars_dict: dict[str, xr.DataArray]) -> xr.Dataset: return ds +def test_available_datasets(aod_file): + """Test that available datasets doesn't claim non-filetype datasets. + + For example, if a YAML-configured dataset's file type is not loaded + then the available status is `None` and should remain `None`. This + means no file type knows what to do with this dataset. 
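The availability convention described above can be summarised with a small self-contained sketch (a generic pattern with made-up names, not the actual VIIRSJRRFileHandler code):

def available_datasets(configured_datasets, handled_file_types, variables_in_file):
    """Yield (availability, ds_info) pairs following the reader convention."""
    for is_avail, ds_info in configured_datasets:
        if ds_info.get("file_type") not in handled_file_types:
            # Unknown file type: pass the status through unchanged (None stays None).
            yield is_avail, ds_info
        else:
            # Known file type: availability reflects presence in the file.
            yield ds_info["file_key"] in variables_in_file, ds_info

configured = [(None, {"file_key": "fake", "file_type": "fake_file", "name": "fake"})]
print(list(available_datasets(configured, {"jrr_aod"}, {"AOD550"})))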
If it is + `False` then that means that a file type knows of the dataset, but + that the variable is not available in the file. In the below test + this isn't the case so the YAML-configured dataset should be + provided once and have a `None` availability. + + """ + from satpy.readers.viirs_edr import VIIRSJRRFileHandler + file_handler = VIIRSJRRFileHandler( + aod_file, + {"platform_shortname": "npp"}, + {"file_type": "jrr_aod"}, + ) + fake_yaml_datasets = [ + (None, {"file_key": "fake", "file_type": "fake_file", "name": "fake"}), + ] + available_datasets = list(file_handler.available_datasets(configured_datasets=fake_yaml_datasets)) + fake_availables = [avail_tuple for avail_tuple in available_datasets if avail_tuple[1]["name"] == "fake"] + assert len(fake_availables) == 1 + assert fake_availables[0][0] is None + + class TestVIIRSJRRReader: """Test the VIIRS JRR L2 reader.""" From ba09f1859368a4faec05e4079301026fd83c9e9c Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 16 Nov 2023 18:32:32 +0100 Subject: [PATCH 464/702] Simplify functions for CodeScene happiness --- satpy/cf/area.py | 6 ++- satpy/cf/attrs.py | 45 ++++++++++------ satpy/cf/coords.py | 122 ++++++++++++++++++++++++++++-------------- satpy/cf/dataarray.py | 5 +- satpy/cf/datasets.py | 19 ++++--- 5 files changed, 127 insertions(+), 70 deletions(-) diff --git a/satpy/cf/area.py b/satpy/cf/area.py index 041338efd8..93c8b28eed 100644 --- a/satpy/cf/area.py +++ b/satpy/cf/area.py @@ -67,9 +67,11 @@ def _add_grid_mapping(dataarray): def area2cf(dataarray, include_lonlats=False, got_lonlats=False): """Convert an area to at CF grid mapping or lon and lats.""" res = [] - if not got_lonlats and (isinstance(dataarray.attrs["area"], SwathDefinition) or include_lonlats): + include_lonlats = include_lonlats or isinstance(dataarray.attrs["area"], SwathDefinition) + is_area_def = isinstance(dataarray.attrs["area"], AreaDefinition) + if not got_lonlats and include_lonlats: dataarray = _add_lonlat_coords(dataarray) - if isinstance(dataarray.attrs["area"], AreaDefinition): + if is_area_def: dataarray, gmapping = _add_grid_mapping(dataarray) res.append(gmapping) res.append(dataarray) diff --git a/satpy/cf/attrs.py b/satpy/cf/attrs.py index 2cf9ffa528..eb562c1c93 100644 --- a/satpy/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -59,35 +59,50 @@ def _encode(self, obj): return tuple(obj) elif isinstance(obj, np.ndarray): return obj.tolist() - return str(obj) +def _encode_numpy_array(obj): + """Encode numpy array as a netCDF4 serializable datatype.""" + from satpy.writers.cf_writer import NC4_DTYPES + + # Only plain 1-d arrays are supported. Skip record arrays and multi-dimensional arrays. + is_plain_1d = not obj.dtype.fields and len(obj.shape) <= 1 + if is_plain_1d: + if obj.dtype in NC4_DTYPES: + return obj + elif obj.dtype == np.bool_: + # Boolean arrays are not supported, convert to array of strings. + return [s.lower() for s in obj.astype(str)] + return obj.tolist() + else: + raise ValueError("Only a 1D numpy array can be encoded as netCDF attribute.") + + def _encode_object(obj): """Try to encode `obj` as a netCDF/Zarr compatible datatype which most closely resembles the object's nature. Raises: ValueError if no such datatype could be found """ - from satpy.writers.cf_writer import NC4_DTYPES - if isinstance(obj, int) and not isinstance(obj, (bool, np.bool_)): return obj elif isinstance(obj, (float, str, np.integer, np.floating)): return obj elif isinstance(obj, np.ndarray): - # Only plain 1-d arrays are supported. 
Skip record arrays and multi-dimensional arrays. - is_plain_1d = not obj.dtype.fields and len(obj.shape) <= 1 - if is_plain_1d: - if obj.dtype in NC4_DTYPES: - return obj - elif obj.dtype == np.bool_: - # Boolean arrays are not supported, convert to array of strings. - return [s.lower() for s in obj.astype(str)] - return obj.tolist() + return _encode_numpy_array(obj) raise ValueError("Unable to encode") +def _try_decode_object(obj): + """Try to decode byte string""" + try: + decoded = obj.decode() + except AttributeError: + decoded = obj + return decoded + + def _encode_python_objects(obj): """Try to find the datatype which most closely resembles the object's nature. @@ -98,11 +113,7 @@ def _encode_python_objects(obj): try: dump = _encode_object(obj) except ValueError: - try: - # Decode byte-strings - decoded = obj.decode() - except AttributeError: - decoded = obj + decoded = _try_decode_object(obj) dump = json.dumps(decoded, cls=AttributeEncoder).strip('"') return dump diff --git a/satpy/cf/coords.py b/satpy/cf/coords.py index 80ce22de39..ba1d195663 100644 --- a/satpy/cf/coords.py +++ b/satpy/cf/coords.py @@ -46,12 +46,26 @@ def _is_projected(dataarray): return True +def _is_area(dataarray): + if isinstance(dataarray.attrs["area"], AreaDefinition): + return True + else: + return False + + +def _is_swath(dataarray): + if isinstance(dataarray.attrs["area"], SwathDefinition): + return True + else: + return False + + def _try_to_get_crs(dataarray): """Try to get a CRS from attributes.""" if "area" in dataarray.attrs: - if isinstance(dataarray.attrs["area"], AreaDefinition): + if _is_area(dataarray): return dataarray.attrs["area"].crs - if not isinstance(dataarray.attrs["area"], SwathDefinition): + if not _is_swath(dataarray): logger.warning( f"Could not tell CRS from area of type {type(dataarray.attrs['area']).__name__:s}. 
" "Assuming projected CRS.") @@ -116,9 +130,7 @@ def set_cf_time_info(dataarray, epoch): def _is_lon_or_lat_dataarray(dataarray): """Check if the DataArray represents the latitude or longitude coordinate.""" - if "standard_name" in dataarray.attrs and dataarray.attrs["standard_name"] in ["longitude", "latitude"]: - return True - return False + return dataarray.attrs.get("standard_name", "") in ("longitude", "latitude") def has_projection_coords(dict_datarrays): @@ -129,6 +141,35 @@ def has_projection_coords(dict_datarrays): return False +def _get_is_nondimensional_coords_dict(dict_dataarrays): + tokens = defaultdict(set) + for dataarray in dict_dataarrays.values(): + for coord_name in dataarray.coords: + if not _is_lon_or_lat_dataarray(dataarray[coord_name]) and coord_name not in dataarray.dims: + tokens[coord_name].add(tokenize(dataarray[coord_name].data)) + coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) + return coords_unique + + +def _warn_if_pretty_but_not_unique(pretty, coord_name): + """Warn if coordinates cannot be pretty-formatted due to non-uniqueness.""" + if pretty: + warnings.warn( + f'Cannot pretty-format "{coord_name}" coordinates because they are ' + 'not identical among the given datasets', + stacklevel=2 + ) + + +def _rename_coords(dict_dataarrays, coord_name): + """Rename coordinates in the datasets.""" + for name, dataarray in dict_dataarrays.items(): + if coord_name in dataarray.coords: + rename = {coord_name: f"{name}_{coord_name}"} + dict_dataarrays[name] = dataarray.rename(rename) + return dict_dataarrays + + def ensure_unique_nondimensional_coords(dict_dataarrays, pretty=False): """Make non-dimensional coordinates unique among all datasets. @@ -155,28 +196,14 @@ def ensure_unique_nondimensional_coords(dict_dataarrays, pretty=False): """ # Determine which non-dimensional coordinates are unique # - coords_unique has structure: {coord_name: True/False} - tokens = defaultdict(set) - for dataarray in dict_dataarrays.values(): - for coord_name in dataarray.coords: - if not _is_lon_or_lat_dataarray(dataarray[coord_name]) and coord_name not in dataarray.dims: - tokens[coord_name].add(tokenize(dataarray[coord_name].data)) - coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) + is_coords_unique_dict = _get_is_nondimensional_coords_dict(dict_dataarrays) # Prepend dataset name, if not unique or no pretty-format desired new_dict_dataarrays = dict_dataarrays.copy() - for coord_name, unique in coords_unique.items(): + for coord_name, unique in is_coords_unique_dict.items(): if not pretty or not unique: - if pretty: - warnings.warn( - 'Cannot pretty-format "{}" coordinates because they are ' - 'not identical among the given datasets'.format(coord_name), - stacklevel=2 - ) - for name, dataarray in dict_dataarrays.items(): - if coord_name in dataarray.coords: - rename = {coord_name: "{}_{}".format(name, coord_name)} - new_dict_dataarrays[name] = new_dict_dataarrays[name].rename(rename) - + _warn_if_pretty_but_not_unique(pretty, coord_name) + new_dict_dataarrays = _rename_coords(new_dict_dataarrays, coord_name) return new_dict_dataarrays @@ -196,6 +223,7 @@ def check_unique_projection_coords(dict_dataarrays): "Please group them by area or save them in separate files.") + def add_coordinates_attrs_coords(dict_dataarrays): """Add to DataArrays the coordinates specified in the 'coordinates' attribute. 
@@ -208,23 +236,39 @@ def add_coordinates_attrs_coords(dict_dataarrays): In the final call to `xr.Dataset.to_netcdf()` all coordinate relations will be resolved and the `coordinates` attributes be set automatically. """ - for da_name, dataarray in dict_dataarrays.items(): - declared_coordinates = _get_coordinates_list(dataarray) - for coord in declared_coordinates: - if coord not in dataarray.coords: - try: - dimensions_not_in_data = list(set(dict_dataarrays[coord].dims) - set(dataarray.dims)) - dataarray[coord] = dict_dataarrays[coord].squeeze(dimensions_not_in_data, drop=True) - except KeyError: - warnings.warn( - 'Coordinate "{}" referenced by dataarray {} does not ' - 'exist, dropping reference.'.format(coord, da_name), - stacklevel=2 - ) - continue - + for dataarray_name in dict_dataarrays.keys(): + dict_dataarrays = _add_declared_coordinates(dict_dataarrays, + dataarray_name=dataarray_name) # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() - dataarray.attrs.pop("coordinates", None) + dict_dataarrays[dataarray_name].attrs.pop("coordinates", None) + return dict_dataarrays + + +def _add_declared_coordinates(dict_dataarrays, dataarray_name): + """Add declared coordinates to the dataarray if they exist.""" + dataarray = dict_dataarrays[dataarray_name] + declared_coordinates = _get_coordinates_list(dataarray) + for coord in declared_coordinates: + if coord not in dataarray.coords: + dict_dataarrays = _try_add_coordinate(dict_dataarrays, + dataarray_name=dataarray_name, + coord=coord) + return dict_dataarrays + + +def _try_add_coordinate(dict_dataarrays, dataarray_name, coord): + """Try to add a coordinate to the dataarray, warn if not possible.""" + try: + dataarray_dims = set(dict_dataarrays[dataarray_name].dims) + coordinate_dims = set(dict_dataarrays[coord].dims) + dimensions_to_squeeze = list(coordinate_dims - dataarray_dims) + dict_dataarrays[dataarray_name][coord] = dict_dataarrays[coord].squeeze(dimensions_to_squeeze, drop=True) + except KeyError: + warnings.warn( + f'Coordinate "{coord}" referenced by dataarray {dataarray_name} does not ' + 'exist, dropping reference.', + stacklevel=2 + ) return dict_dataarrays diff --git a/satpy/cf/dataarray.py b/satpy/cf/dataarray.py index 5df68da887..dc2ae7d6c1 100644 --- a/satpy/cf/dataarray.py +++ b/satpy/cf/dataarray.py @@ -41,15 +41,16 @@ def _handle_dataarray_name(original_name, numeric_name_prefix): def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name): """Change the DataArray name by prepending numeric_name_prefix if the name is a digit.""" original_name = None + named_has_changed = False dataarray = dataarray.copy() if "name" in dataarray.attrs: original_name = dataarray.attrs.pop("name") original_name, new_name = _handle_dataarray_name(original_name, numeric_name_prefix) dataarray = dataarray.rename(new_name) + named_has_changed = original_name != new_name - if include_orig_name and numeric_name_prefix and original_name and original_name != new_name: + if named_has_changed and include_orig_name: dataarray.attrs["original_name"] = original_name - return dataarray diff --git a/satpy/cf/datasets.py b/satpy/cf/datasets.py index c6ea6fd351..cab71de58c 100644 --- a/satpy/cf/datasets.py +++ b/satpy/cf/datasets.py @@ -16,7 +16,6 @@ """Utility to generate a CF-compliant Datasets.""" import logging import warnings -from collections import defaultdict import xarray as xr @@ -39,6 +38,11 @@ def _get_extra_ds(dataarray, keys=None): return dict_datarrays +def 
_get_group_dataarrays(group_members, list_dataarrays): + """Yield DataArrays that are part of a specific group.""" + return [da for da in list_dataarrays if da.attrs["name"] in group_members] + + def _get_groups(groups, list_datarrays): """Return a dictionary with the list of xr.DataArray associated to each group. @@ -46,15 +50,10 @@ def _get_groups(groups, list_datarrays): Else, collect the DataArrays associated to each group. """ if groups is None: - grouped_dataarrays = {None: list_datarrays} - else: - grouped_dataarrays = defaultdict(list) - for datarray in list_datarrays: - for group_name, group_members in groups.items(): - if datarray.attrs["name"] in group_members: - grouped_dataarrays[group_name].append(datarray) - break - return grouped_dataarrays + return {None: list_datarrays} + + return {group_name: _get_group_dataarrays(group_members, list_datarrays) + for group_name, group_members in groups.items()} def _collect_cf_dataset(list_dataarrays, From 83e815d3b67404fa0b6f9cec458f56f63b430f12 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 16 Nov 2023 12:09:38 -0600 Subject: [PATCH 465/702] Cleanup CF attrs functions --- satpy/cf/attrs.py | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/satpy/cf/attrs.py b/satpy/cf/attrs.py index eb562c1c93..987aeec6cb 100644 --- a/satpy/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -68,15 +68,14 @@ def _encode_numpy_array(obj): # Only plain 1-d arrays are supported. Skip record arrays and multi-dimensional arrays. is_plain_1d = not obj.dtype.fields and len(obj.shape) <= 1 - if is_plain_1d: - if obj.dtype in NC4_DTYPES: - return obj - elif obj.dtype == np.bool_: - # Boolean arrays are not supported, convert to array of strings. - return [s.lower() for s in obj.astype(str)] - return obj.tolist() - else: + if not is_plain_1d: raise ValueError("Only a 1D numpy array can be encoded as netCDF attribute.") + if obj.dtype in NC4_DTYPES: + return obj + if obj.dtype == np.bool_: + # Boolean arrays are not supported, convert to array of strings. 
+ return [s.lower() for s in obj.astype(str)] + return obj.tolist() def _encode_object(obj): @@ -85,9 +84,9 @@ def _encode_object(obj): Raises: ValueError if no such datatype could be found """ - if isinstance(obj, int) and not isinstance(obj, (bool, np.bool_)): - return obj - elif isinstance(obj, (float, str, np.integer, np.floating)): + is_nonbool_int = isinstance(obj, int) and not isinstance(obj, (bool, np.bool_)) + is_encode_type = isinstance(obj, (float, str, np.integer, np.floating)) + if is_nonbool_int or is_encode_type: return obj elif isinstance(obj, np.ndarray): return _encode_numpy_array(obj) @@ -194,10 +193,10 @@ def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs): dataarray = _drop_exclude_attrs(dataarray, exclude_attrs) dataarray = _format_prerequisites_attrs(dataarray) dataarray = _remove_none_attrs(dataarray) - _ = dataarray.attrs.pop("area", None) + dataarray.attrs.pop("area", None) if "long_name" not in dataarray.attrs and "standard_name" not in dataarray.attrs: - dataarray.attrs["long_name"] = dataarray.name + dataarray.attrs["long_name"] = dataarray.attrs["name"] if flatten_attrs: dataarray.attrs = flatten_dict(dataarray.attrs) From cadcfef1129c2a74d5c4e47cb6f4deadb4b5b358 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 16 Nov 2023 12:36:54 -0600 Subject: [PATCH 466/702] Remove commented out tests --- satpy/tests/test_writers.py | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py index c11066d3f6..c40b51fa01 100644 --- a/satpy/tests/test_writers.py +++ b/satpy/tests/test_writers.py @@ -594,26 +594,6 @@ def test_geotiff(self): compute_writer_results([res]) assert os.path.isfile(fname) -# FIXME: This reader needs more information than exist at the moment -# def test_mitiff(self): -# """Test writing to mitiff file""" -# fname = os.path.join(self.base_dir, 'mitiff.tif') -# res = self.scn.save_datasets(filename=fname, -# datasets=['test'], -# writer='mitiff') -# compute_writer_results([res]) -# self.assertTrue(os.path.isfile(fname)) - -# FIXME: This reader needs more information than exist at the moment -# def test_cf(self): -# """Test writing to NetCDF4 file""" -# fname = os.path.join(self.base_dir, 'cf.nc') -# res = self.scn.save_datasets(filename=fname, -# datasets=['test'], -# writer='cf') -# compute_writer_results([res]) -# self.assertTrue(os.path.isfile(fname)) - def test_multiple_geotiff(self): """Test writing to mitiff file.""" from satpy.writers import compute_writer_results From ceabff9532d2d7aaac0803fd00e2ab3251278a96 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 16 Nov 2023 15:29:03 -0600 Subject: [PATCH 467/702] Refactor attribute handling --- satpy/cf/attrs.py | 97 ++++++++++++++++++++++++----------------------- 1 file changed, 49 insertions(+), 48 deletions(-) diff --git a/satpy/cf/attrs.py b/satpy/cf/attrs.py index 987aeec6cb..3b355748b8 100644 --- a/satpy/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -14,12 +14,15 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""CF processing of attributes.""" +from __future__ import annotations + import datetime import json import logging from collections import OrderedDict import numpy as np +import xarray as xr from satpy.writers.utils import flatten_dict @@ -142,68 +145,66 @@ def _encode_nc_attrs(attrs): return OrderedDict(encoded_attrs) -def _add_ancillary_variables_attrs(dataarray): - """Replace ancillary_variables DataArray with a list of their name.""" - list_ancillary_variable_names = [da_ancillary.attrs["name"] - for da_ancillary in dataarray.attrs.get("ancillary_variables", [])] - if list_ancillary_variable_names: - dataarray.attrs["ancillary_variables"] = " ".join(list_ancillary_variable_names) - else: - dataarray.attrs.pop("ancillary_variables", None) - return dataarray +def preprocess_datarray_attrs( + dataarray: xr.DataArray, + flatten_attrs: bool, + exclude_attrs: list[str] | None +) -> xr.DataArray: + """Preprocess DataArray attributes to be written into CF-compliant netCDF/Zarr.""" + _drop_attrs(dataarray, exclude_attrs) + _add_ancillary_variables_attrs(dataarray) + _format_prerequisites_attrs(dataarray) + if "long_name" not in dataarray.attrs and "standard_name" not in dataarray.attrs: + dataarray.attrs["long_name"] = dataarray.name -def _drop_exclude_attrs(dataarray, exclude_attrs): - """Remove user-specified list of attributes.""" - if exclude_attrs is None: - exclude_attrs = [] - for key in exclude_attrs: - dataarray.attrs.pop(key, None) - return dataarray + if flatten_attrs: + dataarray.attrs = flatten_dict(dataarray.attrs) + dataarray.attrs = _encode_nc_attrs(dataarray.attrs) -def _remove_satpy_attrs(new_data): - """Remove _satpy attribute.""" - satpy_attrs = [key for key in new_data.attrs if key.startswith("_satpy")] - for satpy_attr in satpy_attrs: - new_data.attrs.pop(satpy_attr) - new_data.attrs.pop("_last_resampler", None) - return new_data + return dataarray -def _format_prerequisites_attrs(dataarray): - """Reformat prerequisites attribute value to string.""" - if "prerequisites" in dataarray.attrs: - dataarray.attrs["prerequisites"] = [np.bytes_(str(prereq)) for prereq in dataarray.attrs["prerequisites"]] - return dataarray +def _drop_attrs( + dataarray: xr.DataArray, + user_excluded_attrs: list[str] | None +) -> None: + """Remove undesirable attributes.""" + attrs_to_drop = ( + (user_excluded_attrs or []) + + _get_satpy_attrs(dataarray) + + _get_none_attrs(dataarray) + + ["area"] + ) + for key in attrs_to_drop: + dataarray.attrs.pop(key, None) -def _remove_none_attrs(dataarray): - """Remove attribute keys with None value.""" - for key, val in dataarray.attrs.copy().items(): - if val is None: - dataarray.attrs.pop(key) - return dataarray +def _get_satpy_attrs(new_data): + """Remove _satpy attribute.""" + return [key for key in new_data.attrs if key.startswith("_satpy")] + ["_last_resampler"] -def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs): - """Preprocess DataArray attributes to be written into CF-compliant netCDF/Zarr.""" - dataarray = _remove_satpy_attrs(dataarray) - dataarray = _add_ancillary_variables_attrs(dataarray) - dataarray = _drop_exclude_attrs(dataarray, exclude_attrs) - dataarray = _format_prerequisites_attrs(dataarray) - dataarray = _remove_none_attrs(dataarray) - dataarray.attrs.pop("area", None) +def _get_none_attrs(dataarray): + """Remove attribute keys with None value.""" + return [attr_name for attr_name, attr_val in dataarray.attrs.items() if attr_val is None] - if "long_name" not in dataarray.attrs and "standard_name" not in 
dataarray.attrs: - dataarray.attrs["long_name"] = dataarray.attrs["name"] - if flatten_attrs: - dataarray.attrs = flatten_dict(dataarray.attrs) +def _add_ancillary_variables_attrs(dataarray: xr.DataArray) -> None: + """Replace ancillary_variables DataArray with a list of their name.""" + list_ancillary_variable_names = [da_ancillary.attrs["name"] + for da_ancillary in dataarray.attrs.get("ancillary_variables", [])] + if list_ancillary_variable_names: + dataarray.attrs["ancillary_variables"] = " ".join(list_ancillary_variable_names) + else: + dataarray.attrs.pop("ancillary_variables", None) - dataarray.attrs = _encode_nc_attrs(dataarray.attrs) - return dataarray +def _format_prerequisites_attrs(dataarray: xr.DataArray) -> None: + """Reformat prerequisites attribute value to string.""" + if "prerequisites" in dataarray.attrs: + dataarray.attrs["prerequisites"] = [np.bytes_(str(prereq)) for prereq in dataarray.attrs["prerequisites"]] def _add_history(attrs): From bf681f2f88204aab02334dd9c12ddfcb6a2f9fc1 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 16 Nov 2023 15:54:07 -0600 Subject: [PATCH 468/702] Rename dataarray to data_arr and add type annotations --- satpy/cf/area.py | 46 ++--- satpy/cf/attrs.py | 54 +++--- satpy/cf/coords.py | 207 +++++++++++------------ satpy/cf/{dataarray.py => data_array.py} | 32 ++-- satpy/cf/datasets.py | 14 +- satpy/tests/cf_tests/test_dataaarray.py | 24 +-- satpy/writers/cf_writer.py | 16 +- 7 files changed, 196 insertions(+), 197 deletions(-) rename satpy/cf/{dataarray.py => data_array.py} (74%) diff --git a/satpy/cf/area.py b/satpy/cf/area.py index 93c8b28eed..88a12a3c52 100644 --- a/satpy/cf/area.py +++ b/satpy/cf/area.py @@ -23,24 +23,24 @@ logger = logging.getLogger(__name__) -def _add_lonlat_coords(dataarray): +def _add_lonlat_coords(data_arr: xr.DataArray) -> xr.DataArray: """Add 'longitude' and 'latitude' coordinates to DataArray.""" - dataarray = dataarray.copy() - area = dataarray.attrs["area"] - ignore_dims = {dim: 0 for dim in dataarray.dims if dim not in ["x", "y"]} - chunks = getattr(dataarray.isel(**ignore_dims), "chunks", None) + data_arr = data_arr.copy() + area = data_arr.attrs["area"] + ignore_dims = {dim: 0 for dim in data_arr.dims if dim not in ["x", "y"]} + chunks = getattr(data_arr.isel(**ignore_dims), "chunks", None) lons, lats = area.get_lonlats(chunks=chunks) - dataarray["longitude"] = xr.DataArray(lons, dims=["y", "x"], - attrs={"name": "longitude", + data_arr["longitude"] = xr.DataArray(lons, dims=["y", "x"], + attrs={"name": "longitude", "standard_name": "longitude", "units": "degrees_east"}, - name="longitude") - dataarray["latitude"] = xr.DataArray(lats, dims=["y", "x"], - attrs={"name": "latitude", + name="longitude") + data_arr["latitude"] = xr.DataArray(lats, dims=["y", "x"], + attrs={"name": "latitude", "standard_name": "latitude", "units": "degrees_north"}, - name="latitude") - return dataarray + name="latitude") + return data_arr def _create_grid_mapping(area): @@ -55,24 +55,24 @@ def _create_grid_mapping(area): return area.area_id, grid_mapping -def _add_grid_mapping(dataarray): +def _add_grid_mapping(data_arr: xr.DataArray) -> tuple[xr.DataArray, xr.DataArray]: """Convert an area to at CF grid mapping.""" - dataarray = dataarray.copy() - area = dataarray.attrs["area"] + data_arr = data_arr.copy() + area = data_arr.attrs["area"] gmapping_var_name, attrs = _create_grid_mapping(area) - dataarray.attrs["grid_mapping"] = gmapping_var_name - return dataarray, xr.DataArray(0, attrs=attrs, name=gmapping_var_name) + 
data_arr.attrs["grid_mapping"] = gmapping_var_name + return data_arr, xr.DataArray(0, attrs=attrs, name=gmapping_var_name) -def area2cf(dataarray, include_lonlats=False, got_lonlats=False): +def area2cf(data_arr: xr.DataArray, include_lonlats: bool = False, got_lonlats: bool = False) -> list[xr.DataArray]: """Convert an area to at CF grid mapping or lon and lats.""" res = [] - include_lonlats = include_lonlats or isinstance(dataarray.attrs["area"], SwathDefinition) - is_area_def = isinstance(dataarray.attrs["area"], AreaDefinition) + include_lonlats = include_lonlats or isinstance(data_arr.attrs["area"], SwathDefinition) + is_area_def = isinstance(data_arr.attrs["area"], AreaDefinition) if not got_lonlats and include_lonlats: - dataarray = _add_lonlat_coords(dataarray) + data_arr = _add_lonlat_coords(data_arr) if is_area_def: - dataarray, gmapping = _add_grid_mapping(dataarray) + data_arr, gmapping = _add_grid_mapping(data_arr) res.append(gmapping) - res.append(dataarray) + res.append(data_arr) return res diff --git a/satpy/cf/attrs.py b/satpy/cf/attrs.py index 3b355748b8..f9d49416c8 100644 --- a/satpy/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -145,66 +145,66 @@ def _encode_nc_attrs(attrs): return OrderedDict(encoded_attrs) -def preprocess_datarray_attrs( - dataarray: xr.DataArray, +def preprocess_attrs( + data_arr: xr.DataArray, flatten_attrs: bool, exclude_attrs: list[str] | None ) -> xr.DataArray: """Preprocess DataArray attributes to be written into CF-compliant netCDF/Zarr.""" - _drop_attrs(dataarray, exclude_attrs) - _add_ancillary_variables_attrs(dataarray) - _format_prerequisites_attrs(dataarray) + _drop_attrs(data_arr, exclude_attrs) + _add_ancillary_variables_attrs(data_arr) + _format_prerequisites_attrs(data_arr) - if "long_name" not in dataarray.attrs and "standard_name" not in dataarray.attrs: - dataarray.attrs["long_name"] = dataarray.name + if "long_name" not in data_arr.attrs and "standard_name" not in data_arr.attrs: + data_arr.attrs["long_name"] = data_arr.name if flatten_attrs: - dataarray.attrs = flatten_dict(dataarray.attrs) + data_arr.attrs = flatten_dict(data_arr.attrs) - dataarray.attrs = _encode_nc_attrs(dataarray.attrs) + data_arr.attrs = _encode_nc_attrs(data_arr.attrs) - return dataarray + return data_arr def _drop_attrs( - dataarray: xr.DataArray, + data_arr: xr.DataArray, user_excluded_attrs: list[str] | None ) -> None: """Remove undesirable attributes.""" attrs_to_drop = ( - (user_excluded_attrs or []) + - _get_satpy_attrs(dataarray) + - _get_none_attrs(dataarray) + - ["area"] + (user_excluded_attrs or []) + + _get_satpy_attrs(data_arr) + + _get_none_attrs(data_arr) + + ["area"] ) for key in attrs_to_drop: - dataarray.attrs.pop(key, None) + data_arr.attrs.pop(key, None) -def _get_satpy_attrs(new_data): +def _get_satpy_attrs(data_arr: xr.DataArray) -> list[str]: """Remove _satpy attribute.""" - return [key for key in new_data.attrs if key.startswith("_satpy")] + ["_last_resampler"] + return [key for key in data_arr.attrs if key.startswith("_satpy")] + ["_last_resampler"] -def _get_none_attrs(dataarray): +def _get_none_attrs(data_arr: xr.DataArray) -> list[str]: """Remove attribute keys with None value.""" - return [attr_name for attr_name, attr_val in dataarray.attrs.items() if attr_val is None] + return [attr_name for attr_name, attr_val in data_arr.attrs.items() if attr_val is None] -def _add_ancillary_variables_attrs(dataarray: xr.DataArray) -> None: +def _add_ancillary_variables_attrs(data_arr: xr.DataArray) -> None: """Replace ancillary_variables DataArray 
with a list of their name.""" list_ancillary_variable_names = [da_ancillary.attrs["name"] - for da_ancillary in dataarray.attrs.get("ancillary_variables", [])] + for da_ancillary in data_arr.attrs.get("ancillary_variables", [])] if list_ancillary_variable_names: - dataarray.attrs["ancillary_variables"] = " ".join(list_ancillary_variable_names) + data_arr.attrs["ancillary_variables"] = " ".join(list_ancillary_variable_names) else: - dataarray.attrs.pop("ancillary_variables", None) + data_arr.attrs.pop("ancillary_variables", None) -def _format_prerequisites_attrs(dataarray: xr.DataArray) -> None: +def _format_prerequisites_attrs(data_arr: xr.DataArray) -> None: """Reformat prerequisites attribute value to string.""" - if "prerequisites" in dataarray.attrs: - dataarray.attrs["prerequisites"] = [np.bytes_(str(prereq)) for prereq in dataarray.attrs["prerequisites"]] + if "prerequisites" in data_arr.attrs: + data_arr.attrs["prerequisites"] = [np.bytes_(str(prereq)) for prereq in data_arr.attrs["prerequisites"]] def _add_history(attrs): diff --git a/satpy/cf/coords.py b/satpy/cf/coords.py index ba1d195663..48a0748509 100644 --- a/satpy/cf/coords.py +++ b/satpy/cf/coords.py @@ -1,4 +1,5 @@ """Set CF-compliant spatial and temporal coordinates.""" +from __future__ import annotations import logging import warnings @@ -8,6 +9,7 @@ import numpy as np import xarray as xr from dask.base import tokenize +from pyproj import CRS from pyresample.geometry import AreaDefinition, SwathDefinition logger = logging.getLogger(__name__) @@ -16,27 +18,27 @@ EPOCH = u"seconds since 1970-01-01 00:00:00" -def add_xy_coords_attrs(dataarray): +def add_xy_coords_attrs(data_arr: xr.DataArray) -> xr.DataArray: """Add relevant attributes to x, y coordinates.""" # If there are no coords, return dataarray - if not dataarray.coords.keys() & {"x", "y", "crs"}: - return dataarray + if not data_arr.coords.keys() & {"x", "y", "crs"}: + return data_arr # If projected area - if _is_projected(dataarray): - dataarray = _add_xy_projected_coords_attrs(dataarray) + if _is_projected(data_arr): + data_arr = _add_xy_projected_coords_attrs(data_arr) else: - dataarray = _add_xy_geographic_coords_attrs(dataarray) - if "crs" in dataarray.coords: - dataarray = dataarray.drop_vars("crs") - return dataarray + data_arr = _add_xy_geographic_coords_attrs(data_arr) + if "crs" in data_arr.coords: + data_arr = data_arr.drop_vars("crs") + return data_arr -def _is_projected(dataarray): +def _is_projected(data_arr: xr.DataArray) -> bool: """Guess whether data are projected or not.""" - crs = _try_to_get_crs(dataarray) + crs = _try_to_get_crs(data_arr) if crs: return crs.is_projected - units = _try_get_units_from_coords(dataarray) + units = _try_get_units_from_coords(data_arr) if units: if units.endswith("m"): return True @@ -46,65 +48,60 @@ def _is_projected(dataarray): return True -def _is_area(dataarray): - if isinstance(dataarray.attrs["area"], AreaDefinition): - return True - else: - return False +def _is_area(data_arr: xr.DataArray) -> bool: + return isinstance(data_arr.attrs["area"], AreaDefinition) -def _is_swath(dataarray): - if isinstance(dataarray.attrs["area"], SwathDefinition): - return True - else: - return False +def _is_swath(data_arr: xr.DataArray) -> bool: + return isinstance(data_arr.attrs["area"], SwathDefinition) -def _try_to_get_crs(dataarray): +def _try_to_get_crs(data_arr: xr.DataArray) -> CRS: """Try to get a CRS from attributes.""" - if "area" in dataarray.attrs: - if _is_area(dataarray): - return dataarray.attrs["area"].crs - if not 
_is_swath(dataarray): + if "area" in data_arr.attrs: + if _is_area(data_arr): + return data_arr.attrs["area"].crs + if not _is_swath(data_arr): logger.warning( - f"Could not tell CRS from area of type {type(dataarray.attrs['area']).__name__:s}. " + f"Could not tell CRS from area of type {type(data_arr.attrs['area']).__name__:s}. " "Assuming projected CRS.") - if "crs" in dataarray.coords: - return dataarray.coords["crs"].item() + if "crs" in data_arr.coords: + return data_arr.coords["crs"].item() -def _try_get_units_from_coords(dataarray): +def _try_get_units_from_coords(data_arr: xr.DataArray) -> str | None: """Try to retrieve coordinate x/y units.""" for c in ["x", "y"]: with suppress(KeyError): # If the data has only 1 dimension, it has only one of x or y coords - if "units" in dataarray.coords[c].attrs: - return dataarray.coords[c].attrs["units"] + if "units" in data_arr.coords[c].attrs: + return data_arr.coords[c].attrs["units"] + return None -def _add_xy_projected_coords_attrs(dataarray, x="x", y="y"): +def _add_xy_projected_coords_attrs(data_arr: xr.DataArray, x: str = "x", y: str = "y") -> xr.DataArray: """Add relevant attributes to x, y coordinates of a projected CRS.""" - if x in dataarray.coords: - dataarray[x].attrs["standard_name"] = "projection_x_coordinate" - dataarray[x].attrs["units"] = "m" - if y in dataarray.coords: - dataarray[y].attrs["standard_name"] = "projection_y_coordinate" - dataarray[y].attrs["units"] = "m" - return dataarray + if x in data_arr.coords: + data_arr[x].attrs["standard_name"] = "projection_x_coordinate" + data_arr[x].attrs["units"] = "m" + if y in data_arr.coords: + data_arr[y].attrs["standard_name"] = "projection_y_coordinate" + data_arr[y].attrs["units"] = "m" + return data_arr -def _add_xy_geographic_coords_attrs(dataarray, x="x", y="y"): +def _add_xy_geographic_coords_attrs(data_arr: xr.DataArray, x: str = "x", y: str = "y") -> xr.DataArray: """Add relevant attributes to x, y coordinates of a geographic CRS.""" - if x in dataarray.coords: - dataarray[x].attrs["standard_name"] = "longitude" - dataarray[x].attrs["units"] = "degrees_east" - if y in dataarray.coords: - dataarray[y].attrs["standard_name"] = "latitude" - dataarray[y].attrs["units"] = "degrees_north" - return dataarray + if x in data_arr.coords: + data_arr[x].attrs["standard_name"] = "longitude" + data_arr[x].attrs["units"] = "degrees_east" + if y in data_arr.coords: + data_arr[y].attrs["standard_name"] = "latitude" + data_arr[y].attrs["units"] = "degrees_north" + return data_arr -def set_cf_time_info(dataarray, epoch): +def set_cf_time_info(data_arr: xr.DataArray, epoch: str | None) -> xr.DataArray: """Set CF time attributes and encoding. It expand the DataArray with a time dimension if does not yet exists. 
@@ -118,37 +115,33 @@ def set_cf_time_info(dataarray, epoch): if epoch is None: epoch = EPOCH - dataarray["time"].encoding["units"] = epoch - dataarray["time"].attrs["standard_name"] = "time" - dataarray["time"].attrs.pop("bounds", None) + data_arr["time"].encoding["units"] = epoch + data_arr["time"].attrs["standard_name"] = "time" + data_arr["time"].attrs.pop("bounds", None) - if "time" not in dataarray.dims and dataarray["time"].size not in dataarray.shape: - dataarray = dataarray.expand_dims("time") + if "time" not in data_arr.dims and data_arr["time"].size not in data_arr.shape: + data_arr = data_arr.expand_dims("time") - return dataarray + return data_arr -def _is_lon_or_lat_dataarray(dataarray): - """Check if the DataArray represents the latitude or longitude coordinate.""" - return dataarray.attrs.get("standard_name", "") in ("longitude", "latitude") +def has_projection_coords(data_arrays: dict[str, xr.DataArray]) -> bool: + """Check if DataArray collection has a "longitude" or "latitude" DataArray.""" + return any(_is_lon_or_lat_dataarray(data_arr) for data_arr in data_arrays.values()) -def has_projection_coords(dict_datarrays): - """Check if DataArray collection has a "longitude" or "latitude" DataArray.""" - for dataarray in dict_datarrays.values(): - if _is_lon_or_lat_dataarray(dataarray): - return True - return False +def _is_lon_or_lat_dataarray(data_arr: xr.DataArray) -> bool: + """Check if the DataArray represents the latitude or longitude coordinate.""" + return data_arr.attrs.get("standard_name", "") in ("longitude", "latitude") -def _get_is_nondimensional_coords_dict(dict_dataarrays): +def _get_is_nondimensional_coords_dict(data_arrays: dict[str, xr.DataArray]) -> dict[str, bool]: tokens = defaultdict(set) - for dataarray in dict_dataarrays.values(): - for coord_name in dataarray.coords: - if not _is_lon_or_lat_dataarray(dataarray[coord_name]) and coord_name not in dataarray.dims: - tokens[coord_name].add(tokenize(dataarray[coord_name].data)) - coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) - return coords_unique + for data_arr in data_arrays.values(): + for coord_name in data_arr.coords: + if not _is_lon_or_lat_dataarray(data_arr[coord_name]) and coord_name not in data_arr.dims: + tokens[coord_name].add(tokenize(data_arr[coord_name].data)) + return dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) def _warn_if_pretty_but_not_unique(pretty, coord_name): @@ -161,16 +154,19 @@ def _warn_if_pretty_but_not_unique(pretty, coord_name): ) -def _rename_coords(dict_dataarrays, coord_name): +def _rename_coords(data_arrays: dict[str, xr.DataArray], coord_name: str) -> dict[str, xr.DataArray]: """Rename coordinates in the datasets.""" - for name, dataarray in dict_dataarrays.items(): + for name, dataarray in data_arrays.items(): if coord_name in dataarray.coords: rename = {coord_name: f"{name}_{coord_name}"} - dict_dataarrays[name] = dataarray.rename(rename) - return dict_dataarrays + data_arrays[name] = dataarray.rename(rename) + return data_arrays -def ensure_unique_nondimensional_coords(dict_dataarrays, pretty=False): +def ensure_unique_nondimensional_coords( + data_arrays: dict[str, xr.DataArray], + pretty: bool = False +) -> dict[str, xr.DataArray]: """Make non-dimensional coordinates unique among all datasets. 
Non-dimensional coordinates, such as scanline timestamps, @@ -185,9 +181,9 @@ def ensure_unique_nondimensional_coords(dict_dataarrays, pretty=False): this is not applied to latitude and longitude. Args: - datas (dict): + datas: Dictionary of (dataset name, dataset) - pretty (bool): + pretty: Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. Returns: @@ -196,10 +192,10 @@ def ensure_unique_nondimensional_coords(dict_dataarrays, pretty=False): """ # Determine which non-dimensional coordinates are unique # - coords_unique has structure: {coord_name: True/False} - is_coords_unique_dict = _get_is_nondimensional_coords_dict(dict_dataarrays) + is_coords_unique_dict = _get_is_nondimensional_coords_dict(data_arrays) # Prepend dataset name, if not unique or no pretty-format desired - new_dict_dataarrays = dict_dataarrays.copy() + new_dict_dataarrays = data_arrays.copy() for coord_name, unique in is_coords_unique_dict.items(): if not pretty or not unique: _warn_if_pretty_but_not_unique(pretty, coord_name) @@ -207,11 +203,11 @@ def ensure_unique_nondimensional_coords(dict_dataarrays, pretty=False): return new_dict_dataarrays -def check_unique_projection_coords(dict_dataarrays): +def check_unique_projection_coords(data_arrays: dict[str, xr.DataArray]) -> None: """Check that all datasets share the same projection coordinates x/y.""" unique_x = set() unique_y = set() - for dataarray in dict_dataarrays.values(): + for dataarray in data_arrays.values(): if "y" in dataarray.dims: token_y = tokenize(dataarray["y"].data) unique_y.add(token_y) @@ -223,8 +219,7 @@ def check_unique_projection_coords(dict_dataarrays): "Please group them by area or save them in separate files.") - -def add_coordinates_attrs_coords(dict_dataarrays): +def add_coordinates_attrs_coords(data_arrays: dict[str, xr.DataArray]) -> dict[str, xr.DataArray]: """Add to DataArrays the coordinates specified in the 'coordinates' attribute. It deal with the 'coordinates' attributes indicating lat/lon coords @@ -236,51 +231,55 @@ def add_coordinates_attrs_coords(dict_dataarrays): In the final call to `xr.Dataset.to_netcdf()` all coordinate relations will be resolved and the `coordinates` attributes be set automatically. 
""" - for dataarray_name in dict_dataarrays.keys(): - dict_dataarrays = _add_declared_coordinates(dict_dataarrays, - dataarray_name=dataarray_name) + for dataarray_name in data_arrays.keys(): + data_arrays = _add_declared_coordinates(data_arrays, + dataarray_name=dataarray_name) # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() - dict_dataarrays[dataarray_name].attrs.pop("coordinates", None) - return dict_dataarrays + data_arrays[dataarray_name].attrs.pop("coordinates", None) + return data_arrays -def _add_declared_coordinates(dict_dataarrays, dataarray_name): +def _add_declared_coordinates(data_arrays: dict[str, xr.DataArray], dataarray_name: str) -> dict[str, xr.DataArray]: """Add declared coordinates to the dataarray if they exist.""" - dataarray = dict_dataarrays[dataarray_name] + dataarray = data_arrays[dataarray_name] declared_coordinates = _get_coordinates_list(dataarray) for coord in declared_coordinates: if coord not in dataarray.coords: - dict_dataarrays = _try_add_coordinate(dict_dataarrays, - dataarray_name=dataarray_name, - coord=coord) - return dict_dataarrays + data_arrays = _try_add_coordinate(data_arrays, + dataarray_name=dataarray_name, + coord=coord) + return data_arrays -def _try_add_coordinate(dict_dataarrays, dataarray_name, coord): +def _try_add_coordinate( + data_arrays: dict[str, xr.DataArray], + dataarray_name: str, + coord: str +) -> dict[str, xr.DataArray]: """Try to add a coordinate to the dataarray, warn if not possible.""" try: - dataarray_dims = set(dict_dataarrays[dataarray_name].dims) - coordinate_dims = set(dict_dataarrays[coord].dims) + dataarray_dims = set(data_arrays[dataarray_name].dims) + coordinate_dims = set(data_arrays[coord].dims) dimensions_to_squeeze = list(coordinate_dims - dataarray_dims) - dict_dataarrays[dataarray_name][coord] = dict_dataarrays[coord].squeeze(dimensions_to_squeeze, drop=True) + data_arrays[dataarray_name][coord] = data_arrays[coord].squeeze(dimensions_to_squeeze, drop=True) except KeyError: warnings.warn( f'Coordinate "{coord}" referenced by dataarray {dataarray_name} does not ' 'exist, dropping reference.', stacklevel=2 ) - return dict_dataarrays + return data_arrays -def _get_coordinates_list(dataarray): +def _get_coordinates_list(data_arr: xr.DataArray) -> list[str]: """Return a list with the coordinates names specified in the 'coordinates' attribute.""" - declared_coordinates = dataarray.attrs.get("coordinates", []) + declared_coordinates = data_arr.attrs.get("coordinates", []) if isinstance(declared_coordinates, str): declared_coordinates = declared_coordinates.split(" ") return declared_coordinates -def add_time_bounds_dimension(ds, time="time"): +def add_time_bounds_dimension(ds: xr.Dataset, time: str = "time") -> xr.Dataset: """Add time bound dimension to xr.Dataset.""" start_times = [] end_times = [] diff --git a/satpy/cf/dataarray.py b/satpy/cf/data_array.py similarity index 74% rename from satpy/cf/dataarray.py rename to satpy/cf/data_array.py index dc2ae7d6c1..ef86953f84 100644 --- a/satpy/cf/dataarray.py +++ b/satpy/cf/data_array.py @@ -17,13 +17,13 @@ import logging import warnings -from satpy.cf.attrs import preprocess_datarray_attrs +from satpy.cf.attrs import preprocess_attrs from satpy.cf.coords import add_xy_coords_attrs, set_cf_time_info logger = logging.getLogger(__name__) -def _handle_dataarray_name(original_name, numeric_name_prefix): +def _handle_data_array_name(original_name, numeric_name_prefix): if original_name[0].isdigit(): if numeric_name_prefix: new_name = 
numeric_name_prefix + original_name @@ -38,14 +38,14 @@ def _handle_dataarray_name(original_name, numeric_name_prefix): return original_name, new_name -def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name): +def _preprocess_data_array_name(dataarray, numeric_name_prefix, include_orig_name): """Change the DataArray name by prepending numeric_name_prefix if the name is a digit.""" original_name = None named_has_changed = False dataarray = dataarray.copy() if "name" in dataarray.attrs: original_name = dataarray.attrs.pop("name") - original_name, new_name = _handle_dataarray_name(original_name, numeric_name_prefix) + original_name, new_name = _handle_data_array_name(original_name, numeric_name_prefix) dataarray = dataarray.rename(new_name) named_has_changed = original_name != new_name @@ -54,12 +54,12 @@ def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name return dataarray -def make_cf_dataarray(dataarray, - epoch=None, - flatten_attrs=False, - exclude_attrs=None, - include_orig_name=True, - numeric_name_prefix="CHANNEL_"): +def make_cf_data_array(dataarray, + epoch=None, + flatten_attrs=False, + exclude_attrs=None, + include_orig_name=True, + numeric_name_prefix="CHANNEL_"): """Make the xr.DataArray CF-compliant. Args: @@ -76,12 +76,12 @@ def make_cf_dataarray(dataarray, Returns: xr.DataArray: A CF-compliant xr.DataArray. """ - dataarray = _preprocess_dataarray_name(dataarray=dataarray, - numeric_name_prefix=numeric_name_prefix, - include_orig_name=include_orig_name) - dataarray = preprocess_datarray_attrs(dataarray=dataarray, - flatten_attrs=flatten_attrs, - exclude_attrs=exclude_attrs) + dataarray = _preprocess_data_array_name(dataarray=dataarray, + numeric_name_prefix=numeric_name_prefix, + include_orig_name=include_orig_name) + dataarray = preprocess_attrs(data_arr=dataarray, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs) dataarray = add_xy_coords_attrs(dataarray) if "time" in dataarray.coords: dataarray = set_cf_time_info(dataarray, epoch=epoch) diff --git a/satpy/cf/datasets.py b/satpy/cf/datasets.py index cab71de58c..2c5080ee42 100644 --- a/satpy/cf/datasets.py +++ b/satpy/cf/datasets.py @@ -92,7 +92,7 @@ def _collect_cf_dataset(list_dataarrays, ensure_unique_nondimensional_coords, has_projection_coords, ) - from satpy.cf.dataarray import make_cf_dataarray + from satpy.cf.data_array import make_cf_data_array # Create dictionary of input datarrays # --> Since keys=None, it doesn't never retrieve ancillary variables !!! @@ -134,12 +134,12 @@ def _collect_cf_dataset(list_dataarrays, # --> NOTE: If the input list_dataarrays have different pyresample areas with the same name # area information can be lost here !!! 
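For context, a short sketch of using the renamed helper on its own, mirroring the pattern in the updated tests further down (assumes the post-rename satpy.cf.data_array module and a pyresample lon/lat area):

import numpy as np
import xarray as xr
from pyresample import create_area_def

from satpy.cf.data_array import make_cf_data_array
from satpy.resample import add_crs_xy_coords

area = create_area_def("mavas", 4326, shape=(5, 5),
                       center=(0, 0), resolution=(1, 1))
da_in = xr.DataArray(np.arange(25, dtype=np.float32).reshape(5, 5),
                     dims=("y", "x"), attrs={"area": area, "name": "example"})
da_in = add_crs_xy_coords(da_in, area)
da_cf = make_cf_data_array(da_in)
print(da_cf["x"].attrs["units"], da_cf["y"].attrs["units"])  # degrees_east degrees_north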
for new_dataarray in list_new_dataarrays: - new_dataarray = make_cf_dataarray(new_dataarray, - epoch=epoch, - flatten_attrs=flatten_attrs, - exclude_attrs=exclude_attrs, - include_orig_name=include_orig_name, - numeric_name_prefix=numeric_name_prefix) + new_dataarray = make_cf_data_array(new_dataarray, + epoch=epoch, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix) dict_cf_dataarrays[new_dataarray.name] = new_dataarray # Check all DataArrays have same projection coordinates diff --git a/satpy/tests/cf_tests/test_dataaarray.py b/satpy/tests/cf_tests/test_dataaarray.py index d0154cd84f..50e5b54424 100644 --- a/satpy/tests/cf_tests/test_dataaarray.py +++ b/satpy/tests/cf_tests/test_dataaarray.py @@ -25,23 +25,23 @@ def test_preprocess_dataarray_name(): """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix include orig name.""" from satpy import Scene - from satpy.cf.dataarray import _preprocess_dataarray_name + from satpy.cf.data_array import _preprocess_data_array_name scn = Scene() scn["1"] = xr.DataArray([1, 2, 3]) dataarray = scn["1"] # If numeric_name_prefix is a string, test add the original_name attributes - out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="TEST", include_orig_name=True) + out_da = _preprocess_data_array_name(dataarray, numeric_name_prefix="TEST", include_orig_name=True) assert out_da.attrs["original_name"] == "1" # If numeric_name_prefix is empty string, False or None, test do not add original_name attributes - out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="", include_orig_name=True) + out_da = _preprocess_data_array_name(dataarray, numeric_name_prefix="", include_orig_name=True) assert "original_name" not in out_da.attrs - out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix=False, include_orig_name=True) + out_da = _preprocess_data_array_name(dataarray, numeric_name_prefix=False, include_orig_name=True) assert "original_name" not in out_da.attrs - out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix=None, include_orig_name=True) + out_da = _preprocess_data_array_name(dataarray, numeric_name_prefix=None, include_orig_name=True) assert "original_name" not in out_da.attrs @@ -49,7 +49,7 @@ def test_make_cf_dataarray_lonlat(): """Test correct CF encoding for area with lon/lat units.""" from pyresample import create_area_def - from satpy.cf.dataarray import make_cf_dataarray + from satpy.cf.data_array import make_cf_data_array from satpy.resample import add_crs_xy_coords area = create_area_def("mavas", 4326, shape=(5, 5), @@ -59,7 +59,7 @@ def test_make_cf_dataarray_lonlat(): dims=("y", "x"), attrs={"area": area}) da = add_crs_xy_coords(da, area) - new_da = make_cf_dataarray(da) + new_da = make_cf_data_array(da) assert new_da["x"].attrs["units"] == "degrees_east" assert new_da["y"].attrs["units"] == "degrees_north" @@ -69,7 +69,7 @@ class TestCfDataArray: def test_make_cf_dataarray(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" - from satpy.cf.dataarray import make_cf_dataarray + from satpy.cf.data_array import make_cf_data_array from satpy.tests.cf_tests._test_data import get_test_attrs from satpy.tests.utils import assert_dict_array_equality @@ -94,7 +94,7 @@ def test_make_cf_dataarray(self): coords={"y": [0, 1], "x": [1, 2], "acq_time": ("y", [3, 4])}) # Test conversion to something cf-compliant - res = make_cf_dataarray(arr) + res = 
make_cf_data_array(arr) np.testing.assert_array_equal(res["x"], arr["x"]) np.testing.assert_array_equal(res["y"], arr["y"]) np.testing.assert_array_equal(res["acq_time"], arr["acq_time"]) @@ -103,14 +103,14 @@ def test_make_cf_dataarray(self): assert_dict_array_equality(res.attrs, attrs_expected) # Test attribute kwargs - res_flat = make_cf_dataarray(arr, flatten_attrs=True, exclude_attrs=["int"]) + res_flat = make_cf_data_array(arr, flatten_attrs=True, exclude_attrs=["int"]) attrs_expected_flat.pop("int") assert_dict_array_equality(res_flat.attrs, attrs_expected_flat) def test_make_cf_dataarray_one_dimensional_array(self): """Test the conversion of an 1d DataArray to a CF-compatible DataArray.""" - from satpy.cf.dataarray import make_cf_dataarray + from satpy.cf.data_array import make_cf_data_array arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=("y",), coords={"y": [0, 1, 2, 3], "acq_time": ("y", [0, 1, 2, 3])}) - _ = make_cf_dataarray(arr) + _ = make_cf_data_array(arr) diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index b64a288213..4f67215bd1 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -335,16 +335,16 @@ def da2cf(dataarray, epoch=None, flatten_attrs=False, exclude_attrs=None, numeric_name_prefix (str): Prepend dataset name with this if starting with a digit. """ - from satpy.cf.dataarray import make_cf_dataarray + from satpy.cf.data_array import make_cf_data_array warnings.warn("CFWriter.da2cf is deprecated." - "Use satpy.cf.dataarray.make_cf_dataarray instead.", + "Use satpy.cf.dataarray.make_cf_data_array instead.", DeprecationWarning, stacklevel=3) - return make_cf_dataarray(dataarray=dataarray, - epoch=epoch, - flatten_attrs=flatten_attrs, - exclude_attrs=exclude_attrs, - include_orig_name=include_orig_name, - numeric_name_prefix=numeric_name_prefix) + return make_cf_data_array(dataarray=dataarray, + epoch=epoch, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix) @staticmethod def update_encoding(dataset, to_netcdf_kwargs): From 055cbef95d2d13b3957206f94d3c29b775ed4e8d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 16 Nov 2023 21:22:03 -0600 Subject: [PATCH 469/702] Reduce code complexity --- satpy/tests/cf_tests/test_area.py | 42 +++++++++++-------------------- satpy/tests/utils.py | 24 ++++++++++++------ 2 files changed, 30 insertions(+), 36 deletions(-) diff --git a/satpy/tests/cf_tests/test_area.py b/satpy/tests/cf_tests/test_area.py index cf8548d568..a00df3925e 100644 --- a/satpy/tests/cf_tests/test_area.py +++ b/satpy/tests/cf_tests/test_area.py @@ -18,6 +18,7 @@ """Tests for the CF Area.""" import dask.array as da import numpy as np +import pytest import xarray as xr from pyresample import AreaDefinition, SwathDefinition @@ -71,7 +72,7 @@ def test_area2cf(self): assert "latitude" in res[0].coords assert "grid_mapping" not in res[0].attrs - def test__add_grid_mapping(self): + def test_add_grid_mapping(self): """Test the conversion from pyresample area object to CF grid mapping.""" from satpy.cf.area import _add_grid_mapping @@ -255,7 +256,8 @@ def _gm_matches(gmapping, expected): assert new_ds.attrs["grid_mapping"] == "geos" _gm_matches(grid_mapping, geos_expected) - def test__add_lonlat_coords(self): + @pytest.mark.parametrize("dims", [("y", "x"), ("bands", "y", "x")]) + def test_add_lonlat_coords(self, dims): """Test the conversion from areas to lon/lat.""" from satpy.cf.area import _add_lonlat_coords @@ -268,35 
+270,19 @@ def test__add_lonlat_coords(self): [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] ) lons_ref, lats_ref = area.get_lonlats() - dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=("y", "x"), attrs={"area": area}) + if len(dims) == 2: + data_arr = xr.DataArray(data=[[1, 2], [3, 4]], dims=dims, attrs={"area": area}) + else: + data_arr = xr.DataArray( + data=da.from_array(np.arange(3 * 10 * 10).reshape(3, 10, 10), chunks=(1, 5, 5)), + dims=("bands", "y", "x"), + attrs={"area": area}, + ) - res = _add_lonlat_coords(dataarray) + res = _add_lonlat_coords(data_arr) # original should be unmodified - assert "longitude" not in dataarray.coords - assert set(res.coords) == {"longitude", "latitude"} - lat = res["latitude"] - lon = res["longitude"] - np.testing.assert_array_equal(lat.data, lats_ref) - np.testing.assert_array_equal(lon.data, lons_ref) - assert {"name": "latitude", "standard_name": "latitude", "units": "degrees_north"}.items() <= lat.attrs.items() - assert {"name": "longitude", "standard_name": "longitude", "units": "degrees_east"}.items() <= lon.attrs.items() - - area = AreaDefinition( - "seviri", - "Native SEVIRI grid", - "geos", - "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", - 10, 10, - [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] - ) - lons_ref, lats_ref = area.get_lonlats() - dataarray = xr.DataArray(data=da.from_array(np.arange(3 * 10 * 10).reshape(3, 10, 10), chunks=(1, 5, 5)), - dims=("bands", "y", "x"), attrs={"area": area}) - res = _add_lonlat_coords(dataarray) - - # original should be unmodified - assert "longitude" not in dataarray.coords + assert "longitude" not in data_arr.coords assert set(res.coords) == {"longitude", "latitude"} lat = res["latitude"] lon = res["longitude"] diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index 1fb736d427..a6ebf8753e 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -18,6 +18,7 @@ from contextlib import contextmanager from datetime import datetime +from typing import Any from unittest import mock import dask.array as da @@ -414,14 +415,21 @@ def assert_dict_array_equality(d1, d2): assert set(d1.keys()) == set(d2.keys()) for key, val1 in d1.items(): val2 = d2[key] - if isinstance(val1, np.ndarray): - np.testing.assert_array_equal(val1, val2) - assert val1.dtype == val2.dtype - else: - assert val1 == val2 - if isinstance(val1, (np.floating, np.integer, np.bool_)): - assert isinstance(val2, np.generic) - assert val1.dtype == val2.dtype + compare_func = _compare_numpy_array if isinstance(val1, np.ndarray) else _compare_nonarray + compare_func(val1, val2) + + +def _compare_numpy_array(val1: np.ndarray, val2: np.ndarray) -> None: + np.testing.assert_array_equal(val1, val2) + assert val1.dtype == val2.dtype + + +def _compare_nonarray(val1: Any, val2: Any) -> None: + assert val1 == val2 + if isinstance(val1, (np.floating, np.integer, np.bool_)): + assert isinstance(val2, np.generic) + assert val1.dtype == val2.dtype + def xfail_skyfield_unstable_numpy2(): """Determine if skyfield-based tests should be xfail in the unstable numpy 2.x environment.""" From 2a65eea467d6bb0526fce172a121aa4f95c146cb Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 16 Nov 2023 21:48:34 -0600 Subject: [PATCH 470/702] Refactor CF area tests --- satpy/tests/cf_tests/test_area.py | 70 +++++++++++++++---------------- 1 file changed, 34 insertions(+), 36 deletions(-) diff --git a/satpy/tests/cf_tests/test_area.py b/satpy/tests/cf_tests/test_area.py index 
a00df3925e..0539ebeb86 100644 --- a/satpy/tests/cf_tests/test_area.py +++ b/satpy/tests/cf_tests/test_area.py @@ -22,18 +22,25 @@ import xarray as xr from pyresample import AreaDefinition, SwathDefinition +from satpy.cf.area import area2cf + + +@pytest.fixture() +def input_data_arr() -> xr.DataArray: + return xr.DataArray( + data=[[1, 2], [3, 4]], + dims=("y", "x"), + coords={"y": [1, 2], "x": [3, 4]}, + attrs={"name": "var1"}, + ) + class TestCFArea: """Test case for CF Area.""" - def test_area2cf(self): + @pytest.mark.parametrize("include_lonlats", [False, True]) + def test_area2cf_geos_area_nolonlats(self, input_data_arr, include_lonlats): """Test the conversion of an area to CF standards.""" - from satpy.cf.area import area2cf - - ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}, - attrs={"name": "var1"}) - - # a) Area Definition and strict=False geos = AreaDefinition( area_id="geos", description="geos", @@ -41,32 +48,21 @@ def test_area2cf(self): projection={"proj": "geos", "h": 35785831., "a": 6378169., "b": 6356583.8}, width=2, height=2, area_extent=[-1, -1, 1, 1]) - ds = ds_base.copy(deep=True) - ds.attrs["area"] = geos + input_data_arr.attrs["area"] = geos - res = area2cf(ds, include_lonlats=False) + res = area2cf(input_data_arr, include_lonlats=include_lonlats) assert len(res) == 2 assert res[0].size == 1 # grid mapping variable assert res[0].name == res[1].attrs["grid_mapping"] + if include_lonlats: + assert "longitude" in res[1].coords + assert "latitude" in res[1].coords - # b) Area Definition and include_lonlats=False - ds = ds_base.copy(deep=True) - ds.attrs["area"] = geos - res = area2cf(ds, include_lonlats=True) - # same as above - assert len(res) == 2 - assert res[0].size == 1 # grid mapping variable - assert res[0].name == res[1].attrs["grid_mapping"] - # but now also have the lon/lats - assert "longitude" in res[1].coords - assert "latitude" in res[1].coords - - # c) Swath Definition + def test_area2cf_swath(self, input_data_arr): swath = SwathDefinition(lons=[[1, 1], [2, 2]], lats=[[1, 2], [1, 2]]) - ds = ds_base.copy(deep=True) - ds.attrs["area"] = swath + input_data_arr.attrs["area"] = swath - res = area2cf(ds, include_lonlats=False) + res = area2cf(input_data_arr, include_lonlats=False) assert len(res) == 1 assert "longitude" in res[0].coords assert "latitude" in res[0].coords @@ -76,15 +72,6 @@ def test_add_grid_mapping(self): """Test the conversion from pyresample area object to CF grid mapping.""" from satpy.cf.area import _add_grid_mapping - def _gm_matches(gmapping, expected): - """Assert that all keys in ``expected`` match the values in ``gmapping``.""" - for attr_key, attr_val in expected.attrs.items(): - test_val = gmapping.attrs[attr_key] - if attr_val is None or isinstance(attr_val, str): - assert test_val == attr_val - else: - np.testing.assert_almost_equal(test_val, attr_val, decimal=3) - ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}, attrs={"name": "var1"}) @@ -261,12 +248,13 @@ def test_add_lonlat_coords(self, dims): """Test the conversion from areas to lon/lat.""" from satpy.cf.area import _add_lonlat_coords + width, height = (2, 2) if len(dims) == 2 else (10, 10) area = AreaDefinition( "seviri", "Native SEVIRI grid", "geos", "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", - 2, 2, + width, height, [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] ) lons_ref, lats_ref = area.get_lonlats() @@ -290,3 +278,13 @@ def 
test_add_lonlat_coords(self, dims): np.testing.assert_array_equal(lon.data, lons_ref) assert {"name": "latitude", "standard_name": "latitude", "units": "degrees_north"}.items() <= lat.attrs.items() assert {"name": "longitude", "standard_name": "longitude", "units": "degrees_east"}.items() <= lon.attrs.items() + + +def _gm_matches(gmapping, expected): + """Assert that all keys in ``expected`` match the values in ``gmapping``.""" + for attr_key, attr_val in expected.attrs.items(): + test_val = gmapping.attrs[attr_key] + if attr_val is None or isinstance(attr_val, str): + assert test_val == attr_val + else: + np.testing.assert_almost_equal(test_val, attr_val, decimal=3) From 63e8407f6e23c68a0a5afdfbf9fe3e3e2f17edc6 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 16 Nov 2023 21:59:59 -0600 Subject: [PATCH 471/702] Refactor CF area tests a little more --- satpy/tests/cf_tests/test_area.py | 94 ++++++++++++++----------------- 1 file changed, 41 insertions(+), 53 deletions(-) diff --git a/satpy/tests/cf_tests/test_area.py b/satpy/tests/cf_tests/test_area.py index 0539ebeb86..31b51b6cd9 100644 --- a/satpy/tests/cf_tests/test_area.py +++ b/satpy/tests/cf_tests/test_area.py @@ -22,7 +22,7 @@ import xarray as xr from pyresample import AreaDefinition, SwathDefinition -from satpy.cf.area import area2cf +from satpy.cf.area import _add_grid_mapping, area2cf @pytest.fixture() @@ -68,14 +68,12 @@ def test_area2cf_swath(self, input_data_arr): assert "latitude" in res[0].coords assert "grid_mapping" not in res[0].attrs - def test_add_grid_mapping(self): - """Test the conversion from pyresample area object to CF grid mapping.""" - from satpy.cf.area import _add_grid_mapping + def test_add_grid_mapping_cf_repr(self, input_data_arr): + """Test the conversion from pyresample area object to CF grid mapping. - ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}, - attrs={"name": "var1"}) + Projection has a corresponding CF representation (e.g. geos). - # a) Projection has a corresponding CF representation (e.g. geos) + """ a = 6378169. b = 6356583.8 h = 35785831. @@ -97,9 +95,8 @@ def test_add_grid_mapping(self): # 'sweep_angle_axis': None, }) - ds = ds_base.copy() - ds.attrs["area"] = geos - new_ds, grid_mapping = _add_grid_mapping(ds) + input_data_arr.attrs["area"] = geos + new_ds, grid_mapping = _add_grid_mapping(input_data_arr) if "sweep_angle_axis" in grid_mapping.attrs: # older versions of pyproj might not include this assert grid_mapping.attrs["sweep_angle_axis"] == "y" @@ -107,9 +104,14 @@ def test_add_grid_mapping(self): assert new_ds.attrs["grid_mapping"] == "geos" _gm_matches(grid_mapping, geos_expected) # should not have been modified - assert "grid_mapping" not in ds.attrs + assert "grid_mapping" not in input_data_arr.attrs + + def test_add_grid_mapping_no_cf_repr(self, input_data_arr): + """Test the conversion from pyresample area object to CF grid mapping. + + Projection does not have a corresponding CF representation (e.g. COSMO). 
- # b) Projection does not have a corresponding CF representation (COSMO) + """ cosmo7 = AreaDefinition( area_id="cosmo7", description="cosmo7", @@ -119,11 +121,9 @@ def test_add_grid_mapping(self): width=597, height=510, area_extent=[-1812933, -1003565, 814056, 1243448] ) + input_data_arr.attrs["area"] = cosmo7 - ds = ds_base.copy() - ds.attrs["area"] = cosmo7 - - new_ds, grid_mapping = _add_grid_mapping(ds) + new_ds, grid_mapping = _add_grid_mapping(input_data_arr) assert "crs_wkt" in grid_mapping.attrs wkt = grid_mapping.attrs["crs_wkt"] assert 'ELLIPSOID["WGS 84"' in wkt @@ -133,7 +133,12 @@ def test_add_grid_mapping(self): assert 'PARAMETER["o_lon_p",-5.465' in wkt assert new_ds.attrs["grid_mapping"] == "cosmo7" - # c) Projection Transverse Mercator + def test_add_grid_mapping_transverse_mercator(self, input_data_arr): + """Test the conversion from pyresample area object to CF grid mapping. + + Projection is transverse mercator. + + """ lat_0 = 36.5 lon_0 = 15.0 @@ -154,13 +159,17 @@ def test_add_grid_mapping(self): "false_northing": 0., }) - ds = ds_base.copy() - ds.attrs["area"] = tmerc - new_ds, grid_mapping = _add_grid_mapping(ds) + input_data_arr.attrs["area"] = tmerc + new_ds, grid_mapping = _add_grid_mapping(input_data_arr) assert new_ds.attrs["grid_mapping"] == "tmerc" _gm_matches(grid_mapping, tmerc_expected) - # d) Projection that has a representation but no explicit a/b + def test_add_grid_mapping_cf_repr_no_ab(self, input_data_arr): + """Test the conversion from pyresample area object to CF grid mapping. + + Projection has a corresponding CF representation but no explicit a/b. + + """ h = 35785831. geos = AreaDefinition( area_id="geos", @@ -175,19 +184,24 @@ def test_add_grid_mapping(self): "latitude_of_projection_origin": 0, "longitude_of_projection_origin": 0, "grid_mapping_name": "geostationary", + "reference_ellipsoid_name": "WGS 84", # 'semi_major_axis': 6378137.0, # 'semi_minor_axis': 6356752.314, # 'sweep_angle_axis': None, }) - ds = ds_base.copy() - ds.attrs["area"] = geos - new_ds, grid_mapping = _add_grid_mapping(ds) + input_data_arr.attrs["area"] = geos + new_ds, grid_mapping = _add_grid_mapping(input_data_arr) assert new_ds.attrs["grid_mapping"] == "geos" _gm_matches(grid_mapping, geos_expected) - # e) oblique Mercator + def test_add_grid_mapping_oblique_mercator(self, input_data_arr): + """Test the conversion from pyresample area object to CF grid mapping. + + Projection is oblique mercator. + + """ area = AreaDefinition( area_id="omerc_otf", description="On-the-fly omerc area", @@ -211,38 +225,12 @@ def test_add_grid_mapping(self): "reference_ellipsoid_name": "WGS 84"} omerc_expected = xr.DataArray(data=0, attrs=omerc_dict) - ds = ds_base.copy() - ds.attrs["area"] = area - new_ds, grid_mapping = _add_grid_mapping(ds) + input_data_arr.attrs["area"] = area + new_ds, grid_mapping = _add_grid_mapping(input_data_arr) assert new_ds.attrs["grid_mapping"] == "omerc_otf" _gm_matches(grid_mapping, omerc_expected) - # f) Projection that has a representation but no explicit a/b - h = 35785831. 
- geos = AreaDefinition( - area_id="geos", - description="geos", - proj_id="geos", - projection={"proj": "geos", "h": h, "datum": "WGS84", "ellps": "GRS80", - "lat_0": 0, "lon_0": 0}, - width=2, height=2, - area_extent=[-1, -1, 1, 1]) - geos_expected = xr.DataArray(data=0, - attrs={"perspective_point_height": h, - "latitude_of_projection_origin": 0, - "longitude_of_projection_origin": 0, - "grid_mapping_name": "geostationary", - "reference_ellipsoid_name": "WGS 84", - }) - - ds = ds_base.copy() - ds.attrs["area"] = geos - new_ds, grid_mapping = _add_grid_mapping(ds) - - assert new_ds.attrs["grid_mapping"] == "geos" - _gm_matches(grid_mapping, geos_expected) - @pytest.mark.parametrize("dims", [("y", "x"), ("bands", "y", "x")]) def test_add_lonlat_coords(self, dims): """Test the conversion from areas to lon/lat.""" From 5706549920ae25c7b0053e1fa2e6a431f8213604 Mon Sep 17 00:00:00 2001 From: Florian Fichtner <12199342+fwfichtner@users.noreply.github.com> Date: Fri, 17 Nov 2023 10:36:46 +0100 Subject: [PATCH 472/702] add unittest cloud_flags --- satpy/tests/reader_tests/test_eps_l1b.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/satpy/tests/reader_tests/test_eps_l1b.py b/satpy/tests/reader_tests/test_eps_l1b.py index 22035b81e7..d9a484face 100644 --- a/satpy/tests/reader_tests/test_eps_l1b.py +++ b/satpy/tests/reader_tests/test_eps_l1b.py @@ -152,6 +152,15 @@ def test_angles(self): assert res.attrs["sensor"] == "avhrr-3" assert res.attrs["name"] == "solar_zenith_angle" + def test_clould_flags(self): + """Test getting the cloud flags.""" + did = make_dataid(name="cloud_flags") + res = self.fh.get_dataset(did, {}) + assert isinstance(res, xr.DataArray) + assert res.attrs["platform_name"] == "Metop-C" + assert res.attrs["sensor"] == "avhrr-3" + assert res.attrs["name"] == "cloud_flags" + @mock.patch("satpy.readers.eps_l1b.EPSAVHRRFile.__getitem__") def test_get_full_angles_twice(self, mock__getitem__): """Test get full angles twice.""" From 7691b9c4dcd20d6b9e10100d964b5d39a36d8588 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 17 Nov 2023 16:58:36 +0000 Subject: [PATCH 473/702] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/etc/readers/avhrr_l1b_eps.yaml | 6 +++--- satpy/readers/eps_l1b.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/etc/readers/avhrr_l1b_eps.yaml b/satpy/etc/readers/avhrr_l1b_eps.yaml index e759d28d66..fbb4b4ec18 100644 --- a/satpy/etc/readers/avhrr_l1b_eps.yaml +++ b/satpy/etc/readers/avhrr_l1b_eps.yaml @@ -89,7 +89,7 @@ datasets: - latitude file_type: avhrr_eps - + latitude: name: latitude resolution: 1050 @@ -137,8 +137,8 @@ datasets: sensor: avhrr-3 resolution: 1050 coordinates: [longitude, latitude] - file_type: avhrr_eps - + file_type: avhrr_eps + file_types: avhrr_eps: file_reader: !!python/name:satpy.readers.eps_l1b.EPSAVHRRFile diff --git a/satpy/readers/eps_l1b.py b/satpy/readers/eps_l1b.py index 0c35a2eaad..25a050d00a 100644 --- a/satpy/readers/eps_l1b.py +++ b/satpy/readers/eps_l1b.py @@ -301,7 +301,7 @@ def get_dataset(self, key, info): dataset = self._get_calibrated_dataarray(key) elif key['name'] == "cloud_flags": array = self["CLOUD_INFORMATION"] - dataset = create_xarray(array) + dataset = create_xarray(array) else: logger.info("Can't load channel in eps_l1b: " + str(key["name"])) return From c1c2240af91723da74b49ced23fe7e6f49b9fb69 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 
17 Nov 2023 11:01:43 -0600 Subject: [PATCH 474/702] Refactor angle loading in eps_l1b.py --- satpy/readers/eps_l1b.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/satpy/readers/eps_l1b.py b/satpy/readers/eps_l1b.py index 25a050d00a..15a345de2b 100644 --- a/satpy/readers/eps_l1b.py +++ b/satpy/readers/eps_l1b.py @@ -316,16 +316,14 @@ def get_dataset(self, key, info): def _get_angle_dataarray(self, key): """Get an angle dataarray.""" - sun_azi, sun_zen, sat_azi, sat_zen = self.get_full_angles() - if key["name"] == "solar_zenith_angle": - dataset = create_xarray(sun_zen) - elif key["name"] == "solar_azimuth_angle": - dataset = create_xarray(sun_azi) - if key["name"] == "satellite_zenith_angle": - dataset = create_xarray(sat_zen) - elif key["name"] == "satellite_azimuth_angle": - dataset = create_xarray(sat_azi) - return dataset + arr_index = { + "solar_azimuth_angle": 0, + "solar_zenith_angle": 1, + "satellite_azimuth_angle": 2, + "satellite_zenith_angle": 3, + }[key["name"] + data = self.get_full_angles()[arr_index] + return create_xarray(data) @cached_property def three_a_mask(self): From 41815049b33f29db3bc70dcb21b8223dd64350c6 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 17 Nov 2023 11:06:48 -0600 Subject: [PATCH 475/702] More refactoring in eps_l1b.py --- satpy/readers/eps_l1b.py | 37 ++++++++++++++++++++----------------- 1 file changed, 20 insertions(+), 17 deletions(-) diff --git a/satpy/readers/eps_l1b.py b/satpy/readers/eps_l1b.py index 15a345de2b..bd03f40820 100644 --- a/satpy/readers/eps_l1b.py +++ b/satpy/readers/eps_l1b.py @@ -287,22 +287,9 @@ def get_dataset(self, key, info): if self.sections is None: self._read_all() - if key["name"] in ["longitude", "latitude"]: - lons, lats = self.get_full_lonlats() - if key["name"] == "longitude": - dataset = create_xarray(lons) - else: - dataset = create_xarray(lats) - - elif key["name"] in ["solar_zenith_angle", "solar_azimuth_angle", - "satellite_zenith_angle", "satellite_azimuth_angle"]: - dataset = self._get_angle_dataarray(key) - elif key["name"] in ["1", "2", "3a", "3A", "3b", "3B", "4", "5"]: - dataset = self._get_calibrated_dataarray(key) - elif key['name'] == "cloud_flags": - array = self["CLOUD_INFORMATION"] - dataset = create_xarray(array) - else: + try: + dataset = self._get_data_array(key) + except KeyError: logger.info("Can't load channel in eps_l1b: " + str(key["name"])) return @@ -314,6 +301,22 @@ def get_dataset(self, key, info): dataset.attrs.update(key.to_dict()) return dataset + def _get_data_array(self, key): + name = key["name"] + if name in ["longitude", "latitude"]: + data = self.get_full_lonlats()[int(name == "latitude")] + dataset = create_xarray(data) + elif name in ["solar_zenith_angle", "solar_azimuth_angle", "satellite_zenith_angle", "satellite_azimuth_angle"]: + dataset = self._get_angle_dataarray(key) + elif name in ["1", "2", "3a", "3A", "3b", "3B", "4", "5"]: + dataset = self._get_calibrated_dataarray(key) + elif name == "cloud_flags": + array = self["CLOUD_INFORMATION"] + dataset = create_xarray(array) + else: + raise KeyError(f"Unknown channel: {name}") + return dataset + def _get_angle_dataarray(self, key): """Get an angle dataarray.""" arr_index = { @@ -321,7 +324,7 @@ def _get_angle_dataarray(self, key): "solar_zenith_angle": 1, "satellite_azimuth_angle": 2, "satellite_zenith_angle": 3, - }[key["name"] + }[key["name"]] data = self.get_full_angles()[arr_index] return create_xarray(data) From 504220210195f671bf7a5e91d1ecb6ea2bbd6c0a Mon Sep 17 00:00:00 
2001 From: David Hoese Date: Fri, 17 Nov 2023 14:17:04 -0600 Subject: [PATCH 476/702] Fix sphinx docstring error in make_cf_data_array --- satpy/cf/data_array.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/cf/data_array.py b/satpy/cf/data_array.py index ef86953f84..e0b26867c7 100644 --- a/satpy/cf/data_array.py +++ b/satpy/cf/data_array.py @@ -71,7 +71,7 @@ def make_cf_data_array(dataarray, include_orig_name (bool, optional): Include the original dataset name in the netcdf variable attributes. Defaults to True. numeric_name_prefix (str, optional): Prepend dataset name with this if starting with a digit. - Defaults to "CHANNEL_". + Defaults to ``"CHANNEL_"``. Returns: xr.DataArray: A CF-compliant xr.DataArray. From cc366c0d11799374f46301b2c70c1560483dbd95 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 17 Nov 2023 14:17:26 -0600 Subject: [PATCH 477/702] Add py.typed file so users get type information in their IDE --- MANIFEST.in | 1 + satpy/py.typed | 0 2 files changed, 1 insertion(+) create mode 100644 satpy/py.typed diff --git a/MANIFEST.in b/MANIFEST.in index 3a7cdb0b43..05c921b367 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -13,4 +13,5 @@ include satpy/version.py include pyproject.toml include setup.py include setup.cfg +include satpy/py.typed global-exclude *.py[cod] diff --git a/satpy/py.typed b/satpy/py.typed new file mode 100644 index 0000000000..e69de29bb2 From e98eb3ee54831dd2449f7fc73a72c5bdff6b4324 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 20 Nov 2023 11:18:47 +0200 Subject: [PATCH 478/702] Cast lons/lats to data dtype only if dtypes don't match and data are floats --- satpy/modifiers/angles.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 2a032b20b3..1471ba3669 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -403,8 +403,9 @@ def get_cos_sza(data_arr: xr.DataArray) -> xr.DataArray: """ chunks = _geo_chunks_from_data_arr(data_arr) lons, lats = _get_valid_lonlats(data_arr.attrs["area"], chunks) - lons = lons.astype(data_arr.dtype) - lats = lats.astype(data_arr.dtype) + if lons.dtype != data_arr.dtype and np.issubdtype(data_arr.dtype, np.floating): + lons = lons.astype(data_arr.dtype) + lats = lats.astype(data_arr.dtype) cos_sza = _get_cos_sza(data_arr.attrs["start_time"], lons, lats) return _geo_dask_to_data_array(cos_sza) From 5f385c67f103615cf75096b7db55a78d86093acf Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 20 Nov 2023 16:23:55 +0100 Subject: [PATCH 479/702] Add support for ir and pol --- satpy/etc/composites/sgli.yaml | 55 ---- satpy/etc/readers/sgli_l1b.yaml | 368 +--------------------- satpy/readers/sgli_l1b.py | 97 +++--- satpy/tests/reader_tests/test_sgli_l1b.py | 280 ++++++++++++---- 4 files changed, 275 insertions(+), 525 deletions(-) diff --git a/satpy/etc/composites/sgli.yaml b/satpy/etc/composites/sgli.yaml index 58f52a1124..451c60d8e6 100644 --- a/satpy/etc/composites/sgli.yaml +++ b/satpy/etc/composites/sgli.yaml @@ -89,50 +89,6 @@ composites: modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: true_color - true_color_land: - compositor: !!python/name:satpy.composites.GenericCompositor - prerequisites: - - name: 'Oa08' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] - - name: 'Oa06' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] - - name: 'Oa03' - modifiers: [effective_solar_pathlength_corrected, 
rayleigh_corrected_land] - standard_name: true_color - - true_color_desert: - compositor: !!python/name:satpy.composites.GenericCompositor - prerequisites: - - name: 'Oa08' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - - name: 'Oa06' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - - name: 'Oa03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - standard_name: true_color - - true_color_marine_clean: - compositor: !!python/name:satpy.composites.GenericCompositor - prerequisites: - - name: 'Oa08' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - - name: 'Oa06' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - - name: 'Oa03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - standard_name: true_color - - true_color_marine_tropical: - compositor: !!python/name:satpy.composites.GenericCompositor - prerequisites: - - name: 'Oa08' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - - name: 'Oa06' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - - name: 'Oa03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - standard_name: true_color - true_color_raw: compositor: !!python/name:satpy.composites.FillingCompositor prerequisites: @@ -145,14 +101,3 @@ composites: - name: 'VN3' modifiers: [effective_solar_pathlength_corrected] standard_name: true_color - - ocean_color: - compositor: !!python/name:satpy.composites.GenericCompositor - prerequisites: - - name: 'Oa08' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - - name: 'Oa06' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - - name: 'Oa03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - standard_name: ocean_color diff --git a/satpy/etc/readers/sgli_l1b.yaml b/satpy/etc/readers/sgli_l1b.yaml index cbf5c4989d..9f8108510f 100644 --- a/satpy/etc/readers/sgli_l1b.yaml +++ b/satpy/etc/readers/sgli_l1b.yaml @@ -111,6 +111,8 @@ datasets: solar_zenith_angle: name: solar_zenith_angle sensor: sgli + units: degree + standard_name: solar_zenith_angle resolution: [250, 1000] coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v @@ -119,6 +121,8 @@ datasets: solar_azimuth_angle: name: solar_azimuth_angle sensor: sgli + units: degree + standard_name: solar_azimuth_angle resolution: [250, 1000] coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v @@ -127,6 +131,8 @@ datasets: satellite_zenith_angle: name: satellite_zenith_angle sensor: sgli + units: degree + standard_name: satellite_zenith_angle resolution: [250, 1000] coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v @@ -135,6 +141,8 @@ datasets: satellite_azimuth_angle: name: satellite_azimuth_angle sensor: sgli + units: degree + standard_name: satellite_azimuth_angle resolution: [250, 1000] coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v @@ -334,7 +342,7 @@ datasets: units: "%" coordinates: [longitude_p, latitude_p] file_type: gcom-c_l1b_p - file_key: Image_data/Lt_P1_{pol} + file_key: Image_data/Lt_P1_{polarization} P2: name: P2 @@ -351,7 +359,7 @@ datasets: units: "%" coordinates: [longitude_p, latitude_p] file_type: gcom-c_l1b_p - file_key: Image_data/Lt_P2_{pol} + file_key: Image_data/Lt_P2_{polarization} SW1: name: SW1 @@ -444,359 +452,3 @@ datasets: coordinates: 
[longitude_ir, latitude_ir] file_type: gcom-c_l1b_ir file_key: Image_data/Lt_TI02 - - # Oa02: - # name: Oa02 - # sensor: olci - # wavelength: [0.4075, 0.4125, 0.4175] - # resolution: 300 - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # coordinates: [longitude, latitude] - # file_type: esa_l1b - - # Oa03: - # name: Oa03 - # sensor: olci - # wavelength: [0.4375,0.4425,0.4475] - # resolution: 300 - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # coordinates: [longitude, latitude] - # file_type: esa_l1b - - # Oa04: - # name: Oa04 - # sensor: olci - # wavelength: [0.485,0.49,0.495] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa05: - # name: Oa05 - # sensor: olci - # wavelength: [0.505,0.51,0.515] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa06: - # name: Oa06 - # sensor: olci - # wavelength: [0.555,0.56,0.565] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa07: - # name: Oa07 - # sensor: olci - # wavelength: [0.615,0.62,0.625] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa08: - # name: Oa08 - # sensor: olci - # wavelength: [0.66,0.665,0.67] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa09: - # name: Oa09 - # sensor: olci - # wavelength: [0.67,0.67375,0.6775] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa10: - # name: Oa10 - # sensor: olci - # wavelength: [0.6775,0.68125,0.685] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa11: - # name: Oa11 - # sensor: olci - # wavelength: [0.70375,0.70875,0.71375] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: 
toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa12: - # name: Oa12 - # sensor: olci - # wavelength: [0.75,0.75375,0.7575] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa13: - # name: Oa13 - # sensor: olci - # wavelength: [0.76,0.76125,0.7625] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa14: - # name: Oa14 - # sensor: olci - # wavelength: [0.760625, 0.764375, 0.768125] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa15: - # name: Oa15 - # sensor: olci - # wavelength: [0.76625, 0.7675, 0.76875] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa16: - # name: Oa16 - # sensor: olci - # wavelength: [0.77125, 0.77875, 0.78625] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa17: - # name: Oa17 - # sensor: olci - # wavelength: [0.855, 0.865, 0.875] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa18: - # name: Oa18 - # sensor: olci - # wavelength: [0.88, 0.885, 0.89] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa19: - # name: Oa19 - # sensor: olci - # wavelength: [0.895, 0.9, 0.905] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa20: - # name: Oa20 - # sensor: olci - # wavelength: [0.93, 0.94, 0.95] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa21: - # name: Oa21 - # sensor: olci - # wavelength: [1.0, 1.02, 1.04] - # 
resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # solar_zenith_angle: - # name: solar_zenith_angle - # sensor: olci - # resolution: 300 - # coordinates: [longitude, latitude] - # file_type: esa_angles - - # solar_azimuth_angle: - # name: solar_azimuth_angle - # sensor: olci - # resolution: 300 - # coordinates: [longitude, latitude] - # file_type: esa_angles - - # satellite_zenith_angle: - # name: satellite_zenith_angle - # sensor: olci - # resolution: 300 - # coordinates: [longitude, latitude] - # file_type: esa_angles - - # satellite_azimuth_angle: - # name: satellite_azimuth_angle - # sensor: olci - # resolution: 300 - # coordinates: [longitude, latitude] - # file_type: esa_angles - - # humidity: - # name: humidity - # sensor: olci - # resolution: 300 - # coordinates: [longitude, latitude] - # file_type: esa_meteo - - # sea_level_pressure: - # name: sea_level_pressure - # sensor: olci - # resolution: 300 - # coordinates: [longitude, latitude] - # file_type: esa_meteo - - # total_columnar_water_vapour: - # name: total_columnar_water_vapour - # sensor: olci - # resolution: 300 - # coordinates: [longitude, latitude] - # file_type: esa_meteo - - # total_ozone: - # name: total_ozone - # sensor: olci - # resolution: 300 - # coordinates: [longitude, latitude] - # file_type: esa_meteo diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 2dace6d2b3..1a7e076ff9 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -48,6 +48,10 @@ "K": 1000, "L": 1000} +polarization_keys = {0: "0", + -60: "m60", + 60: "60"} + class HDF5SGLI(BaseFileHandler): """File handler for the SGLI l1b data.""" @@ -75,12 +79,9 @@ def get_dataset(self, key, info): if key["resolution"] != self.resolution: return - # if key["polarization"] is not None: - # pols = {0: '0', -60: 'm60', 60: 'p60'} - # file_key = info['file_key'].format(pol=pols[key["polarization"]]) - # else: - # file_key = info['file_key'] file_key = info["file_key"] + if key["name"].startswith("P"): + file_key = file_key.format(polarization=polarization_keys[key["polarization"]]) h5dataset = self.h5file[file_key] chunks = normalize_chunks(("auto", "auto"), h5dataset.shape, previous_chunks=h5dataset.chunks, dtype=np.float32) @@ -89,19 +90,19 @@ def get_dataset(self, key, info): dataset = xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) with xr.set_options(keep_attrs=True): - # TODO add ir and polarized channels - if key["name"][:2] in ["VN", "SW", "P1", "P2"]: - dataset = self.get_visible_dataset(key, h5dataset, dataset) - elif key["name"][:-2] in ["longitude", "latitude"]: + if key["name"].startswith(("VN", "SW", "P")): + dataset = self.get_visible_dataset(key, dataset) + elif key["name"].startswith("TI"): + dataset = self.get_ir_dataset(key, dataset) + elif key["name"].startswith(("longitude", "latitude")): resampling_interval = attrs["Resampling_interval"] if resampling_interval != 1: new_lons, new_lats = self.interpolate_lons_lats(resampling_interval) - if key["name"][:-2] == "longitude": + if key["name"].startswith("longitude"): dataset = new_lons else: dataset = new_lats - dataset = xr.DataArray(dataset, attrs=attrs) - return dataset + dataset = xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) elif key["name"] in ["satellite_azimuth_angle", "satellite_zenith_angle"]: 
resampling_interval = attrs["Resampling_interval"] if resampling_interval != 1: @@ -110,8 +111,7 @@ def get_dataset(self, key, info): dataset = new_azi else: dataset = new_zen - dataset = xr.DataArray(dataset, attrs=attrs) - return dataset + dataset = xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) elif key["name"] in ["solar_azimuth_angle", "solar_zenith_angle"]: resampling_interval = attrs["Resampling_interval"] if resampling_interval != 1: @@ -120,13 +120,14 @@ def get_dataset(self, key, info): dataset = new_azi else: dataset = new_zen - dataset = xr.DataArray(dataset, attrs=attrs) - return dataset + dataset = xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) else: raise NotImplementedError() dataset.attrs["platform_name"] = "GCOM-C1" - + dataset.attrs["sensor"] = "sgli" + dataset.attrs["units"] = info["units"] + dataset.attrs["standard_name"] = info["standard_name"] return dataset def interpolate_lons_lats(self, resampling_interval): @@ -145,12 +146,18 @@ def interpolate_solar_angles(self, resampling_interval): return self.interpolate_angles(azi, zen, resampling_interval) def interpolate_angles(self, azi, zen, resampling_interval): - azi = azi * azi.attrs["Slope"] + azi.attrs["Offset"] - zen = zen * zen.attrs["Slope"] + zen.attrs["Offset"] + azi = self.scale_array(azi) + zen = self.scale_array(zen) zen = zen[:] - 90 new_azi, new_zen = self.interpolate_spherical(azi, zen, resampling_interval) return new_azi, new_zen + 90 + def scale_array(self, array): + try: + return array * array.attrs["Slope"] + array.attrs["Offset"] + except KeyError: + return array + def interpolate_spherical(self, azimuthal_angle, polar_angle, resampling_interval): from geotiepoints.geointerpolator import GeoGridInterpolator @@ -165,10 +172,9 @@ def interpolate_spherical(self, azimuthal_angle, polar_angle, resampling_interva return new_azi, new_pol - def get_visible_dataset(self, key, h5dataset, dataset): - + def get_visible_dataset(self, key, dataset): dataset = self.mask_to_14_bits(dataset) - dataset = self.calibrate(dataset, key["calibration"]) + dataset = self.calibrate_vis(dataset, key["calibration"]) #dataset.attrs.update(info) #dataset = self._mask_and_scale(dataset, h5dataset, key) @@ -180,7 +186,7 @@ def mask_to_14_bits(self, dataset): return dataset & dataset.attrs["Mask"].item() - def calibrate(self, dataset, calibration): + def calibrate_vis(self, dataset, calibration): attrs = dataset.attrs if calibration == "counts": return dataset @@ -198,34 +204,23 @@ def get_missing_and_saturated(self, attrs): saturation = int(mask_vals[1].split(b":")[0].strip()) return missing, saturation - # def _mask_and_scale(self, dataset, h5dataset, key): - # with xr.set_options(keep_attrs=True): - # if 'Mask' in h5dataset.attrs: - # mask_value = h5dataset.attrs['Mask'].item() - # dataset = dataset & mask_value - # if 'Bit00(LSB)-13' in h5dataset.attrs: - # mask_info = h5dataset.attrs['Bit00(LSB)-13'].item() - # mask_vals = mask_info.split(b'\n')[1:] - # missing = int(mask_vals[0].split(b':')[0].strip()) - # saturation = int(mask_vals[1].split(b':')[0].strip()) - # dataset = dataset.where(dataset < min(missing, saturation)) - # if 'Maximum_valid_DN' in h5dataset.attrs: - # # dataset = dataset.where(dataset <= h5dataset.attrs['Maximum_valid_DN'].item()) - # pass - # if key["name"][:2] in ['VN', 'SW', 'P1', 'P2']: - # if key["calibration"] == 'counts': - # pass - # if key["calibration"] == 'radiance': - # dataset = dataset * h5dataset.attrs['Slope'] + h5dataset.attrs['Offset'] - # if key["calibration"] == 'reflectance': 
- # # dataset = dataset * h5dataset.attrs['Slope'] + h5dataset.attrs['Offset'] - # # dataset *= np.pi / h5dataset.attrs['Band_weighted_TOA_solar_irradiance'] * 100 - # # equivalent to the two lines above - # dataset = (dataset * h5dataset.attrs['Slope_reflectance'] - # + h5dataset.attrs['Offset_reflectance']) * 100 - # else: - # dataset = dataset * h5dataset.attrs['Slope'] + h5dataset.attrs['Offset'] - # return dataset + def get_ir_dataset(self, key, dataset): + dataset = self.mask_to_14_bits(dataset) + dataset = self.calibrate_ir(dataset, key["calibration"]) + return dataset + + def calibrate_ir(self, dataset, calibration): + attrs = dataset.attrs + if calibration == "counts": + return dataset + elif calibration in ["radiance", "brightness_temperature"]: + calibrated = dataset * attrs["Slope"] + attrs["Offset"] + if calibration == "brightness_temperature": + raise NotImplementedError("Cannot calibrate to brightness temperatures.") + # from pyspectral.radiance_tb_conversion import radiance2tb + # calibrated = radiance2tb(calibrated, attrs["Center_wavelength"] * 1e-9) + missing, _ = self.get_missing_and_saturated(attrs) + return calibrated.where(dataset < missing) class H5Array(BackendArray): diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index e153867aeb..19f6480ae7 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -20,10 +20,10 @@ ZEN_ARRAY = np.random.randint(0, 180 * 100, size=(197, 126), dtype=np.int16) -def test_open_dataset(sgli_file): +def test_open_dataset(sgli_vn_file): """Test open_dataset function.""" from satpy.readers.sgli_l1b import SGLIBackend - res = open_dataset(sgli_file, engine=SGLIBackend, chunks={}) + res = open_dataset(sgli_vn_file, engine=SGLIBackend, chunks={}) assert isinstance(res, Dataset) data_array = res["Lt_VN01"] assert isinstance(data_array, DataArray) @@ -32,8 +32,8 @@ def test_open_dataset(sgli_file): @pytest.fixture(scope="session") -def sgli_file(tmp_path_factory): - filename = tmp_path_factory.mktemp("data") / "test_file.h5" +def sgli_vn_file(tmp_path_factory): + filename = tmp_path_factory.mktemp("data") / "test_vn_file.h5" with h5py.File(filename, "w") as h5f: global_attributes = h5f.create_group("Global_attributes") global_attributes.attrs["Scene_start_time"] = np.array([START_TIME.strftime("%Y%m%d %H:%M:%S.%f")[:-3]], @@ -44,6 +44,7 @@ def sgli_file(tmp_path_factory): image_data = h5f.create_group("Image_data") image_data.attrs["Number_of_lines"] = 1955 image_data.attrs["Number_of_pixels"] = 1250 + vn01 = image_data.create_dataset("Lt_VN01", data=FULL_KM_ARRAY, chunks=(116, 157)) vn01.attrs["Slope_reflectance"] = np.array([5e-05], dtype=np.float32) vn01.attrs["Offset_reflectance"] = np.array([-0.05], dtype=np.float32) @@ -53,109 +54,266 @@ def sgli_file(tmp_path_factory): vn01.attrs["Bit00(LSB)-13"] = np.array([b"Digital Number\n16383 : Missing value\n16382 : Saturation value"], dtype="|S61") - geometry_data = h5f.create_group("Geometry_data") - longitude = geometry_data.create_dataset("Longitude", data=LON_LAT_ARRAY, chunks=(47, 63)) - longitude.attrs["Resampling_interval"] = 10 - latitude = geometry_data.create_dataset("Latitude", data=LON_LAT_ARRAY, chunks=(47, 63)) - latitude.attrs["Resampling_interval"] = 10 - - angles_slope = np.array([0.01], dtype=np.float32) - angles_offset = np.array([0], dtype=np.float32) - - azimuth = geometry_data.create_dataset("Sensor_azimuth", data=AZI_ARRAY, chunks=(47, 63)) - 
azimuth.attrs["Resampling_interval"] = 10 - azimuth.attrs["Slope"] = angles_slope - azimuth.attrs["Offset"] = angles_offset - zenith = geometry_data.create_dataset("Sensor_zenith", data=ZEN_ARRAY, chunks=(47, 63)) - zenith.attrs["Resampling_interval"] = 10 - zenith.attrs["Slope"] = angles_slope - zenith.attrs["Offset"] = angles_offset - - sazimuth = geometry_data.create_dataset("Solar_azimuth", data=AZI_ARRAY, chunks=(47, 63)) - sazimuth.attrs["Resampling_interval"] = 10 - sazimuth.attrs["Slope"] = angles_slope - sazimuth.attrs["Offset"] = angles_offset - szenith = geometry_data.create_dataset("Solar_zenith", data=ZEN_ARRAY, chunks=(47, 63)) - szenith.attrs["Resampling_interval"] = 10 - szenith.attrs["Slope"] = angles_slope - szenith.attrs["Offset"] = angles_offset + add_downsampled_geometry_data(h5f) + + return filename + +@pytest.fixture(scope="session") +def sgli_ir_file(tmp_path_factory): + filename = tmp_path_factory.mktemp("data") / "test_ir_file.h5" + with h5py.File(filename, "w") as h5f: + global_attributes = h5f.create_group("Global_attributes") + global_attributes.attrs["Scene_start_time"] = np.array([START_TIME.strftime("%Y%m%d %H:%M:%S.%f")[:-3]], + dtype="|S21") + global_attributes.attrs["Scene_end_time"] = np.array([END_TIME.strftime("%Y%m%d %H:%M:%S.%f")[:-3]], + dtype="|S21") + + image_data = h5f.create_group("Image_data") + image_data.attrs["Number_of_lines"] = 1854 + image_data.attrs["Number_of_pixels"] = 1250 + + sw01 = image_data.create_dataset("Lt_SW01", data=FULL_KM_ARRAY, chunks=(116, 157)) + sw01.attrs["Slope_reflectance"] = np.array([5e-05], dtype=np.float32) + sw01.attrs["Offset_reflectance"] = np.array([0.0], dtype=np.float32) + sw01.attrs["Slope"] = np.array([0.02], dtype=np.float32) + sw01.attrs["Offset"] = np.array([-25], dtype=np.float32) + sw01.attrs["Mask"] = np.array([16383], dtype=np.uint16) + sw01.attrs["Bit00(LSB)-13"] = np.array([b"Digital Number\n16383 : Missing value\n16382 : Saturation value"], + dtype="|S61") + + + ti01 = image_data.create_dataset("Lt_TI01", data=FULL_KM_ARRAY, chunks=(116, 157)) + ti01.attrs["Slope"] = np.array([0.0012], dtype=np.float32) + ti01.attrs["Offset"] = np.array([-1.65], dtype=np.float32) + ti01.attrs["Mask"] = np.array([16383], dtype=np.uint16) + ti01.attrs["Bit00(LSB)-13"] = np.array([b"Digital Number\n16383 : Missing value\n16382 : Saturation value"], + dtype="|S61") + ti01.attrs["Center_wavelength"] = np.array([12000], dtype=np.float32) + + add_downsampled_geometry_data(h5f) return filename -def test_start_time(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +@pytest.fixture(scope="session") +def sgli_pol_file(tmp_path_factory): + filename = tmp_path_factory.mktemp("data") / "test_pol_file.h5" + with h5py.File(filename, "w") as h5f: + global_attributes = h5f.create_group("Global_attributes") + global_attributes.attrs["Scene_start_time"] = np.array([START_TIME.strftime("%Y%m%d %H:%M:%S.%f")[:-3]], + dtype="|S21") + global_attributes.attrs["Scene_end_time"] = np.array([END_TIME.strftime("%Y%m%d %H:%M:%S.%f")[:-3]], + dtype="|S21") + + image_data = h5f.create_group("Image_data") + image_data.attrs["Number_of_lines"] = 1854 + image_data.attrs["Number_of_pixels"] = 1250 + + p1_0 = image_data.create_dataset("Lt_P1_0", data=FULL_KM_ARRAY, chunks=(116, 157)) + p1_0.attrs["Slope_reflectance"] = np.array([5e-05], dtype=np.float32) + p1_0.attrs["Offset_reflectance"] = np.array([0.0], dtype=np.float32) + p1_0.attrs["Slope"] = np.array([0.02], dtype=np.float32) + p1_0.attrs["Offset"] = np.array([-25], 
dtype=np.float32) + p1_0.attrs["Mask"] = np.array([16383], dtype=np.uint16) + p1_0.attrs["Bit00(LSB)-13"] = np.array([b"Digital Number\n16383 : Missing value\n16382 : Saturation value"], + dtype="|S61") + + + p1_m60 = image_data.create_dataset("Lt_P1_m60", data=FULL_KM_ARRAY, chunks=(116, 157)) + p1_m60.attrs["Slope_reflectance"] = np.array([5e-05], dtype=np.float32) + p1_m60.attrs["Offset_reflectance"] = np.array([-60.0], dtype=np.float32) + p1_m60.attrs["Slope"] = np.array([0.0012], dtype=np.float32) + p1_m60.attrs["Offset"] = np.array([-1.65], dtype=np.float32) + p1_m60.attrs["Mask"] = np.array([16383], dtype=np.uint16) + p1_m60.attrs["Bit00(LSB)-13"] = np.array([b"Digital Number\n16383 : Missing value\n16382 : Saturation value"], + dtype="|S61") + + p1_60 = image_data.create_dataset("Lt_P1_60", data=FULL_KM_ARRAY, chunks=(116, 157)) + p1_60.attrs["Slope_reflectance"] = np.array([5e-05], dtype=np.float32) + p1_60.attrs["Offset_reflectance"] = np.array([60.0], dtype=np.float32) + p1_60.attrs["Slope"] = np.array([0.0012], dtype=np.float32) + p1_60.attrs["Offset"] = np.array([-1.65], dtype=np.float32) + p1_60.attrs["Mask"] = np.array([16383], dtype=np.uint16) + p1_60.attrs["Bit00(LSB)-13"] = np.array([b"Digital Number\n16383 : Missing value\n16382 : Saturation value"], + dtype="|S61") + + geometry_data = h5f.create_group("Geometry_data") + longitude = geometry_data.create_dataset("Longitude", data=FULL_KM_ARRAY.astype(np.float32), chunks=(47, 63)) + longitude.attrs["Resampling_interval"] = 1 + latitude = geometry_data.create_dataset("Latitude", data=FULL_KM_ARRAY.astype(np.float32), chunks=(47, 63)) + latitude.attrs["Resampling_interval"] = 1 + + return filename + +def add_downsampled_geometry_data(h5f): + geometry_data = h5f.create_group("Geometry_data") + longitude = geometry_data.create_dataset("Longitude", data=LON_LAT_ARRAY, chunks=(47, 63)) + longitude.attrs["Resampling_interval"] = 10 + latitude = geometry_data.create_dataset("Latitude", data=LON_LAT_ARRAY, chunks=(47, 63)) + latitude.attrs["Resampling_interval"] = 10 + + angles_slope = np.array([0.01], dtype=np.float32) + angles_offset = np.array([0], dtype=np.float32) + + azimuth = geometry_data.create_dataset("Sensor_azimuth", data=AZI_ARRAY, chunks=(47, 63)) + azimuth.attrs["Resampling_interval"] = 10 + azimuth.attrs["Slope"] = angles_slope + azimuth.attrs["Offset"] = angles_offset + zenith = geometry_data.create_dataset("Sensor_zenith", data=ZEN_ARRAY, chunks=(47, 63)) + zenith.attrs["Resampling_interval"] = 10 + zenith.attrs["Slope"] = angles_slope + zenith.attrs["Offset"] = angles_offset + + sazimuth = geometry_data.create_dataset("Solar_azimuth", data=AZI_ARRAY, chunks=(47, 63)) + sazimuth.attrs["Resampling_interval"] = 10 + sazimuth.attrs["Slope"] = angles_slope + sazimuth.attrs["Offset"] = angles_offset + szenith = geometry_data.create_dataset("Solar_zenith", data=ZEN_ARRAY, chunks=(47, 63)) + szenith.attrs["Resampling_interval"] = 10 + szenith.attrs["Slope"] = angles_slope + szenith.attrs["Offset"] = angles_offset + + +def test_start_time(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) microseconds = START_TIME.microsecond % 1000 assert handler.start_time == START_TIME - timedelta(microseconds=microseconds) -def test_end_time(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +def test_end_time(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) microseconds = END_TIME.microsecond % 1000 assert handler.end_time == END_TIME - 
timedelta(microseconds=microseconds) -def test_get_dataset_counts(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +def test_get_dataset_counts(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="counts") - res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01"}) + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", + "standard_name": ""}) assert np.allclose(res, FULL_KM_ARRAY & MASK) assert res.dtype == np.uint16 assert res.attrs["platform_name"] == "GCOM-C1" + assert res.attrs["sensor"] == "sgli" -def test_get_dataset_reflectances(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +def test_get_vn_dataset_reflectances(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="reflectance") - res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01"}) + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", + "standard_name": ""}) assert np.allclose(res[0, :] / 100, FULL_KM_ARRAY[0, :] * 5e-5 - 0.05) assert res.dtype == np.float32 + assert res.dims == ("y", "x") -def test_get_dataset_radiance(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +def test_get_vn_dataset_radiance(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="radiance") - res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01"}) + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", + "standard_name": ""}) assert np.allclose(res[0, :], FULL_KM_ARRAY[0, :] * np.float32(0.02) - 25) assert res.dtype == np.float32 -def test_channel_is_masked(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +def test_channel_is_masked(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="counts") - res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01"}) + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", + "standard_name": ""}) assert res.max() == MASK -def test_missing_values_are_masked(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +def test_missing_values_are_masked(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="radiance") - res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01"}) + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", + "standard_name": ""}) assert np.isnan(res).sum() == 149 -def test_channel_is_chunked(sgli_file): +def test_channel_is_chunked(sgli_vn_file): with dask.config.set({"array.chunk-size": "1MiB"}): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="counts") - res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01"}) + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", + "standard_name": ""}) assert res.chunks[0][0] > 116 -def test_loading_lon_lat(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +def test_loading_lon_lat(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": 
"L"}, {}) did = dict(name="longitude_v", resolution=1000, polarization=None) - res = handler.get_dataset(did, {"file_key": "Geometry_data/Longitude"}) + res = handler.get_dataset(did, {"file_key": "Geometry_data/Longitude", "units": "", + "standard_name": ""}) assert res.shape == (1955, 1250) assert res.chunks is not None assert res.dtype == np.float32 + assert res.dims == ("y", "x") -def test_loading_sensor_angles(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +def test_loading_sensor_angles(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="satellite_zenith_angle", resolution=1000, polarization=None) - res = handler.get_dataset(did, {"file_key": "Geometry_data/Sensor_zenith"}) + res = handler.get_dataset(did, {"file_key": "Geometry_data/Sensor_zenith", "units": "", + "standard_name": ""}) assert res.shape == (1955, 1250) assert res.chunks is not None assert res.dtype == np.float32 assert res.min() >= 0 -def test_loading_solar_angles(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +def test_loading_solar_angles(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="solar_azimuth_angle", resolution=1000, polarization=None) - res = handler.get_dataset(did, {"file_key": "Geometry_data/Sensor_zenith"}) + res = handler.get_dataset(did, {"file_key": "Geometry_data/Sensor_zenith", "units": "", + "standard_name": ""}) assert res.shape == (1955, 1250) assert res.chunks is not None assert res.dtype == np.float32 assert res.max() <= 180 + +def test_get_sw_dataset_reflectances(sgli_ir_file): + handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) + did = dict(name="SW1", resolution=1000, polarization=None, calibration="reflectance") + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_SW01", "units": "", + "standard_name": ""}) + assert np.allclose(res[0, :] / 100, FULL_KM_ARRAY[0, :] * 5e-5) + assert res.dtype == np.float32 + +def test_get_ti_dataset_radiance(sgli_ir_file): + handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) + did = dict(name="TI1", resolution=1000, polarization=None, calibration="radiance") + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_TI01", "units": "", + "standard_name": ""}) + assert np.allclose(res[0, :], FULL_KM_ARRAY[0, :] * np.float32(0.0012) - 1.65) + assert res.dtype == np.float32 + +def test_get_ti_dataset_bt(sgli_ir_file): + handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) + did = dict(name="TI1", resolution=1000, polarization=None, calibration="brightness_temperature") + with pytest.raises(NotImplementedError): + _ = handler.get_dataset(did, {"file_key": "Image_data/Lt_TI01", "units": "K", + "standard_name": "toa_brightness_temperature"}) + +def test_get_ti_lon_lats(sgli_ir_file): + handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) + did = dict(name="longitude_ir", resolution=1000, polarization=None) + res = handler.get_dataset(did, {"file_key": "Geometry_data/Longitude", "units": "", + "standard_name": ""}) + assert res.shape == (1854, 1250) + assert res.chunks is not None + assert res.dtype == np.float32 + +@pytest.mark.parametrize("polarization", [0, -60, 60]) +def test_get_polarized_dataset_reflectance(sgli_pol_file, polarization): + """Test getting polarized reflectances.""" + handler = HDF5SGLI(sgli_pol_file, {"resolution": "L"}, {}) + did = dict(name="P1", resolution=1000, polarization=polarization, calibration="reflectance") + res = handler.get_dataset(did, {"file_key": 
"Image_data/Lt_P1_{polarization}", "units": "%", + "standard_name": "toa_bidirectional_reflectance"}) + assert res.dtype == np.float32 + expected = (FULL_KM_ARRAY[0, :] * np.float32(5e-5) + np.float32(polarization)) * 100 + np.testing.assert_allclose(res[0, :], expected) + assert res.attrs["units"] == "%" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" + +def test_get_polarized_longitudes(sgli_pol_file): + """Test getting polarized reflectances.""" + handler = HDF5SGLI(sgli_pol_file, {"resolution": "L"}, {}) + did = dict(name="longitude", resolution=1000, polarization=0) + res = handler.get_dataset(did, {"file_key": "Geometry_data/Longitude", "units": "", + "standard_name": ""}) + assert res.dtype == np.float32 + expected = FULL_KM_ARRAY.astype(np.float32) + np.testing.assert_allclose(res, expected) From c2ad3d0966ea4cf98021ba98f0f5724bdb544242 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 20 Nov 2023 20:47:55 +0100 Subject: [PATCH 480/702] Adress linting issues --- .pre-commit-config.yaml | 4 +- pyproject.toml | 2 +- satpy/readers/sgli_l1b.py | 159 ++++++++++++---------- satpy/tests/reader_tests/test_sgli_l1b.py | 22 ++- 4 files changed, 108 insertions(+), 79 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index eb21aa6601..f10beb1a7d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,9 +1,9 @@ exclude: '^$' fail_fast: false repos: - - repo: https://github.com/charliermarsh/ruff-pre-commit + - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.0.247' + rev: 'v0.1.6' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks diff --git a/pyproject.toml b/pyproject.toml index 1282120a59..300e738e60 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ # In the future, add "A", "B", "S", "N", "D" -select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] +select = ["D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] line-length = 120 [tool.ruff.per-file-ignores] diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 1a7e076ff9..61a4b61f9d 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -75,10 +75,9 @@ def end_time(self): return datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f") def get_dataset(self, key, info): - """Get the dataset.""" + """Get the dataset from the file.""" if key["resolution"] != self.resolution: return - file_key = info["file_key"] if key["name"].startswith("P"): file_key = file_key.format(polarization=polarization_keys[key["polarization"]]) @@ -95,32 +94,9 @@ def get_dataset(self, key, info): elif key["name"].startswith("TI"): dataset = self.get_ir_dataset(key, dataset) elif key["name"].startswith(("longitude", "latitude")): - resampling_interval = attrs["Resampling_interval"] - if resampling_interval != 1: - new_lons, new_lats = self.interpolate_lons_lats(resampling_interval) - if key["name"].startswith("longitude"): - dataset = new_lons - else: - dataset = new_lats - dataset = xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) - elif key["name"] in ["satellite_azimuth_angle", "satellite_zenith_angle"]: - resampling_interval = attrs["Resampling_interval"] - if resampling_interval != 1: - new_azi, new_zen = self.interpolate_sensor_angles(resampling_interval) - if "azimuth" in key["name"]: - dataset = new_azi - else: - dataset = new_zen - dataset = xr.DataArray(dataset, attrs=attrs, 
dims=["y", "x"]) - elif key["name"] in ["solar_azimuth_angle", "solar_zenith_angle"]: - resampling_interval = attrs["Resampling_interval"] - if resampling_interval != 1: - new_azi, new_zen = self.interpolate_solar_angles(resampling_interval) - if "azimuth" in key["name"]: - dataset = new_azi - else: - dataset = new_zen - dataset = xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) + dataset = self.get_lon_lats(key) + elif "angle" in key["name"]: + dataset = self.get_angles(key) else: raise NotImplementedError() @@ -130,63 +106,18 @@ def get_dataset(self, key, info): dataset.attrs["standard_name"] = info["standard_name"] return dataset - def interpolate_lons_lats(self, resampling_interval): - lons = self.h5file["Geometry_data/Longitude"] - lats = self.h5file["Geometry_data/Latitude"] - return self.interpolate_spherical(lons, lats, resampling_interval) - - def interpolate_sensor_angles(self, resampling_interval): - azi = self.h5file["Geometry_data/Sensor_azimuth"] - zen = self.h5file["Geometry_data/Sensor_zenith"] - return self.interpolate_angles(azi, zen, resampling_interval) - - def interpolate_solar_angles(self, resampling_interval): - azi = self.h5file["Geometry_data/Solar_azimuth"] - zen = self.h5file["Geometry_data/Solar_zenith"] - return self.interpolate_angles(azi, zen, resampling_interval) - - def interpolate_angles(self, azi, zen, resampling_interval): - azi = self.scale_array(azi) - zen = self.scale_array(zen) - zen = zen[:] - 90 - new_azi, new_zen = self.interpolate_spherical(azi, zen, resampling_interval) - return new_azi, new_zen + 90 - - def scale_array(self, array): - try: - return array * array.attrs["Slope"] + array.attrs["Offset"] - except KeyError: - return array - - def interpolate_spherical(self, azimuthal_angle, polar_angle, resampling_interval): - from geotiepoints.geointerpolator import GeoGridInterpolator - - full_shape = (self.h5file["Image_data"].attrs["Number_of_lines"], - self.h5file["Image_data"].attrs["Number_of_pixels"]) - - tie_lines = np.arange(0, polar_angle.shape[0] * resampling_interval, resampling_interval) - tie_cols = np.arange(0, polar_angle.shape[1] * resampling_interval, resampling_interval) - - interpolator = GeoGridInterpolator((tie_lines, tie_cols), azimuthal_angle, polar_angle, method="slinear") - new_azi, new_pol = interpolator.interpolate_to_shape(full_shape, chunks="auto") - return new_azi, new_pol - - def get_visible_dataset(self, key, dataset): + """Produce a DataArray with a visible channel data in it.""" dataset = self.mask_to_14_bits(dataset) dataset = self.calibrate_vis(dataset, key["calibration"]) - #dataset.attrs.update(info) - #dataset = self._mask_and_scale(dataset, h5dataset, key) - - # return dataset def mask_to_14_bits(self, dataset): """Mask data to 14 bits.""" return dataset & dataset.attrs["Mask"].item() - def calibrate_vis(self, dataset, calibration): + """Calibrate visible data.""" attrs = dataset.attrs if calibration == "counts": return dataset @@ -198,6 +129,7 @@ def calibrate_vis(self, dataset, calibration): return calibrated.where(dataset < missing) def get_missing_and_saturated(self, attrs): + """Get the missing and saturation values.""" missing_and_saturated = attrs["Bit00(LSB)-13"].item() mask_vals = missing_and_saturated.split(b"\n")[1:] missing = int(mask_vals[0].split(b":")[0].strip()) @@ -205,11 +137,13 @@ def get_missing_and_saturated(self, attrs): return missing, saturation def get_ir_dataset(self, key, dataset): + """Produce a DataArray with an IR channel data in it.""" dataset = self.mask_to_14_bits(dataset) 
dataset = self.calibrate_ir(dataset, key["calibration"]) return dataset def calibrate_ir(self, dataset, calibration): + """Calibrate IR channel.""" attrs = dataset.attrs if calibration == "counts": return dataset @@ -222,6 +156,81 @@ def calibrate_ir(self, dataset, calibration): missing, _ = self.get_missing_and_saturated(attrs) return calibrated.where(dataset < missing) + def get_lon_lats(self, key): + """Get lon/lats from the file.""" + lons = self.h5file["Geometry_data/Longitude"] + lats = self.h5file["Geometry_data/Latitude"] + attrs = lons.attrs + resampling_interval = attrs["Resampling_interval"] + if resampling_interval != 1: + lons, lats = self.interpolate_spherical(lons, lats, resampling_interval) + if key["name"].startswith("longitude"): + dataset = lons + else: + dataset = lats + return xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) + + def interpolate_spherical(self, azimuthal_angle, polar_angle, resampling_interval): + """Interpolate spherical coordinates.""" + from geotiepoints.geointerpolator import GeoGridInterpolator + + full_shape = (self.h5file["Image_data"].attrs["Number_of_lines"], + self.h5file["Image_data"].attrs["Number_of_pixels"]) + + tie_lines = np.arange(0, polar_angle.shape[0] * resampling_interval, resampling_interval) + tie_cols = np.arange(0, polar_angle.shape[1] * resampling_interval, resampling_interval) + + interpolator = GeoGridInterpolator((tie_lines, tie_cols), azimuthal_angle, polar_angle, method="slinear") + new_azi, new_pol = interpolator.interpolate_to_shape(full_shape, chunks="auto") + return new_azi, new_pol + + def get_angles(self, key): + """Get angles from the file.""" + if "solar" in key["name"]: + azi, zen, attrs = self.get_solar_angles() + elif "satellite" in key["name"]: + azi, zen, attrs = self.get_sensor_angles() + if "azimuth" in key["name"]: + dataset = azi + else: + dataset = zen + dataset = xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) + return dataset + + def get_solar_angles(self): + """Get the solar angles.""" + azi = self.h5file["Geometry_data/Solar_azimuth"] + zen = self.h5file["Geometry_data/Solar_zenith"] + attrs = zen.attrs + azi = self.scale_array(azi) + zen = self.scale_array(zen) + return *self.get_full_angles(azi, zen, attrs), attrs + + def get_sensor_angles(self): + """Get the solar angles.""" + azi = self.h5file["Geometry_data/Sensor_azimuth"] + zen = self.h5file["Geometry_data/Sensor_zenith"] + attrs = zen.attrs + azi = self.scale_array(azi) + zen = self.scale_array(zen) + return *self.get_full_angles(azi, zen, attrs), attrs + + def scale_array(self, array): + """Scale an array with its attributes `Slope` and `Offset` if available.""" + try: + return array * array.attrs["Slope"] + array.attrs["Offset"] + except KeyError: + return array + + def get_full_angles(self, azi, zen, attrs): + """Interpolate angle arrays.""" + resampling_interval = attrs["Resampling_interval"] + if resampling_interval != 1: + zen = zen[:] - 90 + new_azi, new_zen = self.interpolate_spherical(azi, zen, resampling_interval) + return new_azi, new_zen + 90 + return azi, zen + class H5Array(BackendArray): """An Hdf5-based array.""" diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index 19f6480ae7..9fa8caa8b1 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -33,6 +33,7 @@ def test_open_dataset(sgli_vn_file): @pytest.fixture(scope="session") def sgli_vn_file(tmp_path_factory): + """Create a stub VN file.""" filename = 
tmp_path_factory.mktemp("data") / "test_vn_file.h5" with h5py.File(filename, "w") as h5f: global_attributes = h5f.create_group("Global_attributes") @@ -60,6 +61,7 @@ def sgli_vn_file(tmp_path_factory): @pytest.fixture(scope="session") def sgli_ir_file(tmp_path_factory): + """Create a stub IR file.""" filename = tmp_path_factory.mktemp("data") / "test_ir_file.h5" with h5py.File(filename, "w") as h5f: global_attributes = h5f.create_group("Global_attributes") @@ -96,6 +98,7 @@ def sgli_ir_file(tmp_path_factory): @pytest.fixture(scope="session") def sgli_pol_file(tmp_path_factory): + """Create a POL stub file.""" filename = tmp_path_factory.mktemp("data") / "test_pol_file.h5" with h5py.File(filename, "w") as h5f: global_attributes = h5f.create_group("Global_attributes") @@ -145,6 +148,7 @@ def sgli_pol_file(tmp_path_factory): return filename def add_downsampled_geometry_data(h5f): + """Add downsampled geometry data to an h5py file instance.""" geometry_data = h5f.create_group("Geometry_data") longitude = geometry_data.create_dataset("Longitude", data=LON_LAT_ARRAY, chunks=(47, 63)) longitude.attrs["Resampling_interval"] = 10 @@ -174,17 +178,20 @@ def add_downsampled_geometry_data(h5f): def test_start_time(sgli_vn_file): + """Test that the start time is extracted.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) microseconds = START_TIME.microsecond % 1000 assert handler.start_time == START_TIME - timedelta(microseconds=microseconds) def test_end_time(sgli_vn_file): + """Test that the end time is extracted.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) microseconds = END_TIME.microsecond % 1000 assert handler.end_time == END_TIME - timedelta(microseconds=microseconds) def test_get_dataset_counts(sgli_vn_file): + """Test that counts can be extracted from a file.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="counts") res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", @@ -195,15 +202,18 @@ def test_get_dataset_counts(sgli_vn_file): assert res.attrs["sensor"] == "sgli" def test_get_vn_dataset_reflectances(sgli_vn_file): + """Test that the vn datasets can be calibrated to reflectances.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="reflectance") - res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "%", "standard_name": ""}) assert np.allclose(res[0, :] / 100, FULL_KM_ARRAY[0, :] * 5e-5 - 0.05) assert res.dtype == np.float32 assert res.dims == ("y", "x") + assert res.units == "%" def test_get_vn_dataset_radiance(sgli_vn_file): + """Test that datasets can be calibrated to radiance.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="radiance") res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", @@ -212,6 +222,7 @@ def test_get_vn_dataset_radiance(sgli_vn_file): assert res.dtype == np.float32 def test_channel_is_masked(sgli_vn_file): + """Test that channels are masked for no-data.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="counts") res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", @@ -219,6 +230,7 @@ def test_channel_is_masked(sgli_vn_file): assert 
res.max() == MASK def test_missing_values_are_masked(sgli_vn_file): + """Check that missing values are masked.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="radiance") res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", @@ -226,6 +238,7 @@ def test_missing_values_are_masked(sgli_vn_file): assert np.isnan(res).sum() == 149 def test_channel_is_chunked(sgli_vn_file): + """Test that the channel data is chunked.""" with dask.config.set({"array.chunk-size": "1MiB"}): handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="counts") @@ -234,6 +247,7 @@ def test_channel_is_chunked(sgli_vn_file): assert res.chunks[0][0] > 116 def test_loading_lon_lat(sgli_vn_file): + """Test that loading lons and lats works.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="longitude_v", resolution=1000, polarization=None) res = handler.get_dataset(did, {"file_key": "Geometry_data/Longitude", "units": "", @@ -244,6 +258,7 @@ def test_loading_lon_lat(sgli_vn_file): assert res.dims == ("y", "x") def test_loading_sensor_angles(sgli_vn_file): + """Test loading the satellite angles.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="satellite_zenith_angle", resolution=1000, polarization=None) res = handler.get_dataset(did, {"file_key": "Geometry_data/Sensor_zenith", "units": "", @@ -254,6 +269,7 @@ def test_loading_sensor_angles(sgli_vn_file): assert res.min() >= 0 def test_loading_solar_angles(sgli_vn_file): + """Test loading sun angles.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="solar_azimuth_angle", resolution=1000, polarization=None) res = handler.get_dataset(did, {"file_key": "Geometry_data/Sensor_zenith", "units": "", @@ -264,6 +280,7 @@ def test_loading_solar_angles(sgli_vn_file): assert res.max() <= 180 def test_get_sw_dataset_reflectances(sgli_ir_file): + """Test getting SW dataset reflectances.""" handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) did = dict(name="SW1", resolution=1000, polarization=None, calibration="reflectance") res = handler.get_dataset(did, {"file_key": "Image_data/Lt_SW01", "units": "", @@ -272,6 +289,7 @@ def test_get_sw_dataset_reflectances(sgli_ir_file): assert res.dtype == np.float32 def test_get_ti_dataset_radiance(sgli_ir_file): + """Test getting thermal IR radiances.""" handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) did = dict(name="TI1", resolution=1000, polarization=None, calibration="radiance") res = handler.get_dataset(did, {"file_key": "Image_data/Lt_TI01", "units": "", @@ -280,6 +298,7 @@ def test_get_ti_dataset_radiance(sgli_ir_file): assert res.dtype == np.float32 def test_get_ti_dataset_bt(sgli_ir_file): + """Test getting brightness temperatures for IR channels.""" handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) did = dict(name="TI1", resolution=1000, polarization=None, calibration="brightness_temperature") with pytest.raises(NotImplementedError): @@ -287,6 +306,7 @@ def test_get_ti_dataset_bt(sgli_ir_file): "standard_name": "toa_brightness_temperature"}) def test_get_ti_lon_lats(sgli_ir_file): + """Test getting the lons and lats for IR channels.""" handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) did = dict(name="longitude_ir", resolution=1000, polarization=None) res = handler.get_dataset(did, {"file_key": "Geometry_data/Longitude", "units": "", From 
2521d1a6f7c28fc181665884638e6f03f17960c7 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 20 Nov 2023 19:58:20 +0000 Subject: [PATCH 481/702] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - https://github.com/charliermarsh/ruff-pre-commit → https://github.com/astral-sh/ruff-pre-commit - [github.com/astral-sh/ruff-pre-commit: v0.0.247 → v0.1.6](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.247...v0.1.6) - [github.com/pre-commit/mirrors-mypy: v1.6.1 → v1.7.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.6.1...v1.7.0) --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index eb21aa6601..d84659c6f0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,9 +1,9 @@ exclude: '^$' fail_fast: false repos: - - repo: https://github.com/charliermarsh/ruff-pre-commit + - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.0.247' + rev: 'v0.1.6' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks @@ -19,7 +19,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.6.1' # Use the sha / tag you want to point at + rev: 'v1.7.0' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: From c1394e67883f2d4278997b819da49c1341b6933c Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 20 Nov 2023 22:04:00 +0100 Subject: [PATCH 482/702] Continue fixing assertraises --- satpy/tests/test_composites.py | 27 ++++++++++++++++----------- satpy/tests/test_dataset.py | 3 ++- 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index bf8a9dfb9e..70bc2abf25 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -78,7 +78,8 @@ def test_mult_ds_no_area(self): ds2 = self._get_test_ds() del ds2.attrs["area"] comp = CompositeBase("test_comp") - self.assertRaises(ValueError, comp.match_data_arrays, (ds1, ds2)) + with pytest.raises(ValueError, match="Missing 'area' attribute"): + comp.match_data_arrays((ds1, ds2)) def test_mult_ds_diff_area(self): """Test that datasets with different areas fail.""" @@ -94,7 +95,8 @@ def test_mult_ds_diff_area(self): 100, 50, (-30037508.34, -20018754.17, 10037508.34, 18754.17)) comp = CompositeBase("test_comp") - self.assertRaises(IncompatibleAreas, comp.match_data_arrays, (ds1, ds2)) + with pytest.raises(IncompatibleAreas): + comp.match_data_arrays((ds1, ds2)) def test_mult_ds_diff_dims(self): """Test that datasets with different dimensions still pass.""" @@ -118,7 +120,8 @@ def test_mult_ds_diff_size(self): ds1 = self._get_test_ds(shape=(50, 100), dims=("x", "y")) ds2 = self._get_test_ds(shape=(3, 50, 100), dims=("bands", "y", "x")) comp = CompositeBase("test_comp") - self.assertRaises(IncompatibleAreas, comp.match_data_arrays, (ds1, ds2)) + with pytest.raises(IncompatibleAreas): + comp.match_data_arrays((ds1, ds2)) def test_nondimensional_coords(self): """Test the removal of non-dimensional coordinates when compositing.""" @@ -351,9 +354,11 @@ def test_bad_areas_diff(self): from satpy.composites import DifferenceCompositor, IncompatibleAreas comp = DifferenceCompositor(name="diff") # too many arguments - self.assertRaises(ValueError, comp, (self.ds1, self.ds2, 
self.ds2_big)) + with pytest.raises(ValueError, match="Expected 2 datasets, got 3"): + comp((self.ds1, self.ds2, self.ds2_big)) # different resolution - self.assertRaises(IncompatibleAreas, comp, (self.ds1, self.ds2_big)) + with pytest.raises(IncompatibleAreas): + comp((self.ds1, self.ds2_big)) @pytest.fixture() @@ -1051,8 +1056,8 @@ def test_concat_datasets(self): assert res.shape[0] == num_bands assert res.bands[0] == "L" assert res.bands[1] == "A" - self.assertRaises(IncompatibleAreas, self.comp._concat_datasets, - [self.all_valid, self.wrong_shape], "LA") + with pytest.raises(IncompatibleAreas): + self.comp._concat_datasets([self.all_valid, self.wrong_shape], "LA") def test_get_sensors(self): """Test getting sensors from the dataset attributes.""" @@ -1099,8 +1104,8 @@ def test_call_with_mock(self, match_data_arrays, check_times, combine_metadata, match_data_arrays.reset_mock() # When areas are incompatible, masking shouldn't happen match_data_arrays.side_effect = IncompatibleAreas() - self.assertRaises(IncompatibleAreas, - self.comp, [self.all_valid, self.wrong_shape]) + with pytest.raises(IncompatibleAreas): + self.comp([self.all_valid, self.wrong_shape]) match_data_arrays.assert_called_once() def test_call(self): @@ -1217,7 +1222,7 @@ def test_init(self, get_area_def): from satpy.composites import StaticImageCompositor # No filename given raises ValueError - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="StaticImageCompositor needs a .*"): StaticImageCompositor("name") # No area defined @@ -1281,7 +1286,7 @@ def load(self, arg): # Non-georeferenced image, no area given img.attrs.pop("area") comp = StaticImageCompositor("name", filename="/foo.tif") - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): comp() # Non-georeferenced image, area given diff --git a/satpy/tests/test_dataset.py b/satpy/tests/test_dataset.py index 014a450e0c..1b827b8dcf 100644 --- a/satpy/tests/test_dataset.py +++ b/satpy/tests/test_dataset.py @@ -54,7 +54,8 @@ def test_init_bad_modifiers(self): """Test that modifiers are a tuple.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc - self.assertRaises(TypeError, DataID, dikc, name="a", modifiers="str") + with pytest.raises(TypeError): + DataID(dikc, name="a", modifiers="str") def test_compare_no_wl(self): """Compare fully qualified wavelength ID to no wavelength ID.""" From 1a35f2ec2b19f7c59d6eb055bd3b5289e1cebbc3 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 20 Nov 2023 22:46:34 +0100 Subject: [PATCH 483/702] Get rid of the last assertraises --- satpy/tests/multiscene_tests/test_save_animation.py | 4 +++- satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py | 3 ++- satpy/tests/reader_tests/test_abi_l1b.py | 4 ++-- satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py | 9 +++++---- satpy/tests/reader_tests/test_ami_l1b.py | 11 +++-------- satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py | 2 +- satpy/tests/reader_tests/test_clavrx.py | 4 +++- satpy/tests/reader_tests/test_electrol_hrit.py | 5 +++-- satpy/tests/reader_tests/test_goes_imager_nc_noaa.py | 6 ++++-- satpy/tests/reader_tests/test_hrit_base.py | 6 ++++-- satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py | 5 +++-- satpy/tests/reader_tests/test_netcdf_utils.py | 3 ++- satpy/tests/reader_tests/test_scmi.py | 4 +++- satpy/tests/reader_tests/test_seviri_base.py | 4 ++-- .../tests/reader_tests/test_seviri_l1b_calibration.py | 2 +- satpy/tests/reader_tests/test_seviri_l1b_icare.py | 7 
++++--- satpy/tests/reader_tests/test_utils.py | 8 +++++--- satpy/tests/reader_tests/test_vii_base_nc.py | 5 +++-- satpy/tests/reader_tests/test_vii_l1b_nc.py | 3 ++- 19 files changed, 55 insertions(+), 40 deletions(-) diff --git a/satpy/tests/multiscene_tests/test_save_animation.py b/satpy/tests/multiscene_tests/test_save_animation.py index 6807446bbb..7ec1a53df8 100644 --- a/satpy/tests/multiscene_tests/test_save_animation.py +++ b/satpy/tests/multiscene_tests/test_save_animation.py @@ -28,6 +28,8 @@ from datetime import datetime from unittest import mock +import pytest + from satpy.tests.multiscene_tests.test_utils import ( _create_test_area, _create_test_dataset, @@ -248,7 +250,7 @@ def test_save_datasets_distributed_source_target(self): with mock.patch("satpy.multiscene._multiscene.Scene.save_datasets") as save_datasets: save_datasets.return_value = [(source_mock, target_mock)] # some arbitrary return value # force order of datasets by specifying them - with self.assertRaises(NotImplementedError): + with pytest.raises(NotImplementedError): mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=["ds1", "ds2", "ds3"], writer="geotiff") diff --git a/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py b/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py index 7055a4df6d..a7cfa17ddb 100644 --- a/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py +++ b/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py @@ -24,6 +24,7 @@ import unittest import numpy as np +import pytest from satpy.readers.aapp_mhs_amsub_l1c import _HEADERTYPE, _SCANTYPE, HEADER_LENGTH, MHS_AMSUB_AAPPL1CFile from satpy.tests.utils import make_dataid @@ -396,7 +397,7 @@ def test_sensor_name(self): tmpfile.seek(HEADER_LENGTH, 0) self._data.tofile(tmpfile) - with self.assertRaises(IOError): + with pytest.raises(IOError, match="Sensor neither MHS nor AMSU-B!"): fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) def test_read(self): diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index ab2b1eec54..64720f7808 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -389,8 +389,8 @@ class FakeDataID(dict): def to_dict(self): return self - with self.assertRaises(ValueError, msg="Did not detect invalid cal"): - did = FakeDataID(name="C05", calibration="invalid", modifiers=()) + did = FakeDataID(name="C05", calibration="invalid", modifiers=()) + with pytest.raises(ValueError, match="Unknown calibration 'invalid'"): self.reader.get_dataset(did, {}) diff --git a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py index 05abef600b..33f9984ede 100644 --- a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py +++ b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py @@ -25,6 +25,7 @@ import dask.array as da import numpy as np +import pytest from pyresample.geometry import AreaDefinition from satpy.readers.ahi_l1b_gridded_bin import AHI_LUT_NAMES, AHIGriddedFileHandler @@ -90,9 +91,9 @@ def test_bad_area(self): """Ensure an error is raised for an usupported area.""" tmp_fh = self.make_fh("ext.01") tmp_fh.areaname = "scanning" - with self.assertRaises(NotImplementedError): + with pytest.raises(NotImplementedError): tmp_fh.get_area_def(None) - with self.assertRaises(NotImplementedError): + with pytest.raises(NotImplementedError): self.make_fh("ext.01", area="scanning") @@ -141,12 +142,12 @@ def test_calibrate(self, np_loadtxt, os_exist, get_luts): 
np.testing.assert_allclose(refl_out, out_data) # Check that exception is raised if bad calibration is passed - with self.assertRaises(NotImplementedError): + with pytest.raises(NotImplementedError): self.fh.calibrate(in_data, "lasers") # Check that exception is raised if no file is present np_loadtxt.side_effect = FileNotFoundError - with self.assertRaises(FileNotFoundError): + with pytest.raises(FileNotFoundError): self.fh.calibrate(in_data, "reflectance") diff --git a/satpy/tests/reader_tests/test_ami_l1b.py b/satpy/tests/reader_tests/test_ami_l1b.py index cdbc4468c9..f385a6080b 100644 --- a/satpy/tests/reader_tests/test_ami_l1b.py +++ b/satpy/tests/reader_tests/test_ami_l1b.py @@ -23,7 +23,7 @@ import dask.array as da import numpy as np import xarray as xr -from pytest import approx # noqa: PT013 +from pytest import approx, raises # noqa: PT013 class FakeDataset(object): @@ -198,13 +198,8 @@ def test_get_dataset(self): def test_bad_calibration(self): """Test that asking for a bad calibration fails.""" from satpy.tests.utils import make_dataid - with self.assertRaises(ValueError): - ds_id = make_dataid(name="VI006", calibration="_bad_") - ds_info = {"file_key": "image_pixel_values", - "standard_name": "toa_outgoing_radiance_per_unit_wavelength", - "units": "W m-2 um-1 sr-1", - } - self.reader.get_dataset(ds_id, ds_info) + with raises(ValueError, match="_bad_ invalid value for .*"): + _ = make_dataid(name="VI006", calibration="_bad_") @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def(self, adef): diff --git a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py index 2272a950bf..dfcaff4514 100644 --- a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py +++ b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py @@ -180,7 +180,7 @@ def test_read_raw_data(self): # Test exception if all data is masked reader.mask = [1] fh.reader = None - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="All data is masked out"): fh.read_raw_data() @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") diff --git a/satpy/tests/reader_tests/test_clavrx.py b/satpy/tests/reader_tests/test_clavrx.py index bc5e968b08..b4b1aef1a5 100644 --- a/satpy/tests/reader_tests/test_clavrx.py +++ b/satpy/tests/reader_tests/test_clavrx.py @@ -23,6 +23,7 @@ import dask.array as da import numpy as np +import pytest import xarray as xr from pyresample.geometry import AreaDefinition, SwathDefinition @@ -347,7 +348,8 @@ def test_no_nav_donor(self): "clavrx_H08_20180806_1800.level2.hdf", ]) r.create_filehandlers(loadables) - self.assertRaises(IOError, r.load, ["variable1", "variable2", "variable3"]) + with pytest.raises(IOError, match="Could not find navigation donor for"): + r.load(["variable1", "variable2", "variable3"]) def test_load_all_old_donor(self): """Test loading all test datasets with old donor.""" diff --git a/satpy/tests/reader_tests/test_electrol_hrit.py b/satpy/tests/reader_tests/test_electrol_hrit.py index b3e14c24d1..6a328275fd 100644 --- a/satpy/tests/reader_tests/test_electrol_hrit.py +++ b/satpy/tests/reader_tests/test_electrol_hrit.py @@ -23,6 +23,7 @@ import dask.array as da import numpy as np +import pytest from xarray import DataArray from satpy.readers.electrol_hrit import ( @@ -211,8 +212,8 @@ def test_calibrate(self, *mocks): dtype=np.uint16).reshape(5, 5)) # Test that calibration fails if given a silly mode - self.assertRaises(NotImplementedError, fh.calibrate, counts, - "nonsense") + 
with pytest.raises(NotImplementedError): + fh.calibrate(counts, "nonsense") # Test that 'counts' calibration returns identical values to input out = fh.calibrate(counts, "counts") diff --git a/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py b/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py index 59236290b8..1fd5e65cac 100644 --- a/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py +++ b/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py @@ -444,13 +444,15 @@ def test_get_dataset_invalid(self): args = dict(key=make_dataid(name="00_7", calibration="brightness_temperature"), info={}) - self.assertRaises(ValueError, self.reader.get_dataset, **args) + with pytest.raises(ValueError, match="Cannot calibrate VIS channel to 2"): + self.reader.get_dataset(**args) # IR -> Reflectance args = dict(key=make_dataid(name="10_7", calibration="reflectance"), info={}) - self.assertRaises(ValueError, self.reader.get_dataset, **args) + with pytest.raises(ValueError, match="Cannot calibrate IR channel to 1"): + self.reader.get_dataset(**args) # Unsupported calibration with pytest.raises(ValueError, match="invalid invalid value for "): diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index 7edbd02329..cb2dc6c3f4 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -44,10 +44,12 @@ def test_xrit_cmd(self): old_env = os.environ.get("XRIT_DECOMPRESS_PATH", None) os.environ["XRIT_DECOMPRESS_PATH"] = "/path/to/my/bin" - self.assertRaises(IOError, get_xritdecompress_cmd) + with pytest.raises(IOError, match=".* does not exist!"): + get_xritdecompress_cmd() os.environ["XRIT_DECOMPRESS_PATH"] = gettempdir() - self.assertRaises(IOError, get_xritdecompress_cmd) + with pytest.raises(IOError, match=".* is a directory!.*"): + get_xritdecompress_cmd() with NamedTemporaryFile() as fd: os.environ["XRIT_DECOMPRESS_PATH"] = fd.name diff --git a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py index 416d74d16e..9bf5f5f093 100644 --- a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py +++ b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py @@ -23,6 +23,7 @@ import dask.array as da import numpy as np +import pytest import xarray as xr from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler @@ -488,7 +489,7 @@ def test_reading_attrs(self): # Make sure we have some files res = reader.load(["wvc_lon"]) assert res["wvc_lon"].attrs["L2B_Number_WVC_cells"] == 10 - with self.assertRaises(KeyError): + with pytest.raises(KeyError): assert res["wvc_lon"].attrs["L2B_Expected_WVC_Cells"] == 10 def test_reading_attrs_nsoas(self): @@ -502,7 +503,7 @@ def test_reading_attrs_nsoas(self): reader.create_filehandlers(files) # Make sure we have some files res = reader.load(["wvc_lon"]) - with self.assertRaises(KeyError): + with pytest.raises(KeyError): assert res["wvc_lon"].attrs["L2B_Number_WVC_cells"] == 10 assert res["wvc_lon"].attrs["L2B_Expected_WVC_Cells"] == 10 diff --git a/satpy/tests/reader_tests/test_netcdf_utils.py b/satpy/tests/reader_tests/test_netcdf_utils.py index ea104ed086..2d29288784 100644 --- a/satpy/tests/reader_tests/test_netcdf_utils.py +++ b/satpy/tests/reader_tests/test_netcdf_utils.py @@ -21,6 +21,7 @@ import unittest import numpy as np +import pytest try: from satpy.readers.netcdf_utils import NetCDF4FileHandler @@ -232,7 +233,7 @@ def test_filenotfound(self): """Test that error is raised when file not found.""" from satpy.readers.netcdf_utils 
import NetCDF4FileHandler - with self.assertRaises(IOError): + with pytest.raises(IOError, match=".*No such file or directory.*"): NetCDF4FileHandler("/thisfiledoesnotexist.nc", {}, {}) def test_get_and_cache_npxr_is_xr(self): diff --git a/satpy/tests/reader_tests/test_scmi.py b/satpy/tests/reader_tests/test_scmi.py index 89eda0479a..13c74a7d5c 100644 --- a/satpy/tests/reader_tests/test_scmi.py +++ b/satpy/tests/reader_tests/test_scmi.py @@ -21,6 +21,7 @@ from unittest import mock import numpy as np +import pytest import xarray as xr @@ -273,4 +274,5 @@ def test_get_area_def_bad(self, adef): "grid_mapping_name": "fake", } ) - self.assertRaises(ValueError, reader.get_area_def, None) + with pytest.raises(ValueError, match="Can't handle projection 'fake'"): + reader.get_area_def(None) diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index ced24a77ea..c2d190e084 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -98,7 +98,7 @@ def test_pad_data_horizontally_bad_shape(self): east_bound = 5 west_bound = 10 final_size = (1, 20) - with self.assertRaises(IndexError): + with pytest.raises(IndexError): pad_data_horizontally(data, final_size, east_bound, west_bound) def test_pad_data_vertically_bad_shape(self): @@ -107,7 +107,7 @@ def test_pad_data_vertically_bad_shape(self): south_bound = 5 north_bound = 10 final_size = (20, 1) - with self.assertRaises(IndexError): + with pytest.raises(IndexError): pad_data_vertically(data, final_size, south_bound, north_bound) def observation_start_time(self): diff --git a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py index d46af5abd2..e6c2cdcf16 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py @@ -130,7 +130,7 @@ def test_ir_calibrate(self): CHANNEL_NAME, CAL_TYPE2) xr.testing.assert_allclose(result, TBS_OUTPUT2, rtol=1E-5) - with self.assertRaises(NotImplementedError): + with pytest.raises(NotImplementedError): self.algo.ir_calibrate(RADIANCES_OUTPUT, CHANNEL_NAME, CAL_TYPEBAD) def test_vis_calibrate(self): diff --git a/satpy/tests/reader_tests/test_seviri_l1b_icare.py b/satpy/tests/reader_tests/test_seviri_l1b_icare.py index 372611c87d..7c32001168 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_icare.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_icare.py @@ -22,6 +22,7 @@ import dask.array as da import numpy as np +import pytest from satpy.readers import load_reader from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler @@ -193,13 +194,13 @@ def _run_target(): plat, sens = _run_target() assert plat == sat - with self.assertRaises(NameError): - file_data["/attr/Sensors"] = "BADSAT/NOSENSE" + file_data["/attr/Sensors"] = "BADSAT/NOSENSE" + with pytest.raises(NameError): plat, sens = _run_target() def test_bad_bandname(self): """Check reader raises an error if a band bandname is passed.""" - with self.assertRaises(NameError): + with pytest.raises(NameError): self.p.target(mock.MagicMock(), mock.MagicMock(), mock.MagicMock())._get_dsname({"name": "badband"}) diff --git a/satpy/tests/reader_tests/test_utils.py b/satpy/tests/reader_tests/test_utils.py index 6471159449..67bdb41374 100644 --- a/satpy/tests/reader_tests/test_utils.py +++ b/satpy/tests/reader_tests/test_utils.py @@ -215,10 +215,12 @@ def test_np2str(self): # multi-element array npbytes = np.array([npbytes, npbytes]) 
- self.assertRaises(ValueError, hf.np2str, npbytes) + with pytest.raises(ValueError, match="Array is not a string type or is larger than 1"): + hf.np2str(npbytes) # non-array - self.assertRaises(ValueError, hf.np2str, 5) + with pytest.raises(ValueError, match="Array is not a string type or is larger than 1"): + hf.np2str(5) def test_get_earth_radius(self): """Test earth radius computation.""" @@ -419,7 +421,7 @@ def test_get_user_calibration_factors(self): assert offset == 0.0 # Check that incorrect dict keys throw an error - with self.assertRaises(KeyError): + with pytest.raises(KeyError): hf.get_user_calibration_factors("IR108", radcor_dict) diff --git a/satpy/tests/reader_tests/test_vii_base_nc.py b/satpy/tests/reader_tests/test_vii_base_nc.py index 82c0e6a4e1..60fe7dcdcb 100644 --- a/satpy/tests/reader_tests/test_vii_base_nc.py +++ b/satpy/tests/reader_tests/test_vii_base_nc.py @@ -25,6 +25,7 @@ from unittest import mock import numpy as np +import pytest import xarray as xr from netCDF4 import Dataset @@ -218,10 +219,10 @@ def test_file_reading(self): @mock.patch("satpy.readers.vii_base_nc.tie_points_geo_interpolation") def test_functions(self, tpgi_, tpi_): """Test the functions.""" - with self.assertRaises(NotImplementedError): + with pytest.raises(NotImplementedError): self.reader._perform_orthorectification(mock.Mock(), mock.Mock()) - with self.assertRaises(NotImplementedError): + with pytest.raises(NotImplementedError): self.reader._perform_calibration(mock.Mock(), mock.Mock()) # Checks that the _perform_interpolation function is correctly executed diff --git a/satpy/tests/reader_tests/test_vii_l1b_nc.py b/satpy/tests/reader_tests/test_vii_l1b_nc.py index d9ee714d09..22ab14e0a3 100644 --- a/satpy/tests/reader_tests/test_vii_l1b_nc.py +++ b/satpy/tests/reader_tests/test_vii_l1b_nc.py @@ -29,6 +29,7 @@ import dask.array as da import numpy as np +import pytest import xarray as xr from netCDF4 import Dataset @@ -147,7 +148,7 @@ def test_functions(self): assert np.all(return_variable == variable) # invalid calibration: raises a ValueError - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="Unknown calibration invalid for dataset test"): self.reader._perform_calibration(variable, {"calibration": "invalid", "name": "test"}) From e8388678a16d1e8270b836f8b89165323c9af03c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 20 Nov 2023 20:03:51 -0600 Subject: [PATCH 484/702] Bump expected xarray version in test_cf.py --- satpy/tests/writer_tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 18b5947eb6..5723e88ba5 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -1448,5 +1448,5 @@ def _should_use_compression_keyword(): versions = _get_backend_versions() return ( versions["libnetcdf"] >= Version("4.9.0") and - versions["xarray"] >= Version("2023.11") + versions["xarray"] >= Version("2023.12") ) From 7591239c5aeeb96b0e331cf1e08762484ffb03ea Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 21 Nov 2023 08:20:06 +0100 Subject: [PATCH 485/702] Add "D" checks to ruff config --- doc/source/doi_role.py | 4 ++++ pyproject.toml | 3 ++- satpy/readers/atms_sdr_hdf5.py | 3 +-- satpy/readers/generic_image.py | 3 +-- satpy/readers/gerb_l2_hr_h5.py | 3 +-- satpy/readers/ici_l1b_nc.py | 6 ++---- satpy/readers/modis_l3.py | 2 -- satpy/readers/scatsat1_l2b.py | 3 +++ satpy/tests/reader_tests/test_abi_l1b.py | 6 ++++++ 
satpy/tests/reader_tests/test_satpy_cf_nc.py | 3 +++ satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py | 1 + 11 files changed, 24 insertions(+), 13 deletions(-) diff --git a/doc/source/doi_role.py b/doc/source/doi_role.py index 115e7895c6..0bb48d9880 100644 --- a/doc/source/doi_role.py +++ b/doc/source/doi_role.py @@ -20,6 +20,7 @@ def doi_role(typ, rawtext, text, lineno, inliner, options=None, content=None): + """Create a doi role.""" if options is None: options = {} if content is None: @@ -34,6 +35,7 @@ def doi_role(typ, rawtext, text, lineno, inliner, options=None, content=None): def arxiv_role(typ, rawtext, text, lineno, inliner, options=None, content=None): + """Create an arxive role.""" if options is None: options = {} if content is None: @@ -48,6 +50,7 @@ def arxiv_role(typ, rawtext, text, lineno, inliner, options=None, content=None): def setup_link_role(app): + """Set up the role link.""" app.add_role("doi", doi_role, override=True) app.add_role("DOI", doi_role, override=True) app.add_role("arXiv", arxiv_role, override=True) @@ -55,5 +58,6 @@ def setup_link_role(app): def setup(app): + """Set up the app.""" app.connect("builder-inited", setup_link_role) return {"version": "0.1", "parallel_read_safe": True} diff --git a/pyproject.toml b/pyproject.toml index 1282120a59..fe5bc8dc59 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,8 +16,9 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ # In the future, add "A", "B", "S", "N", "D" -select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] +select = ["D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] line-length = 120 +ignore = ["D417"] [tool.ruff.per-file-ignores] "satpy/tests/*" = ["S101"] # assert allowed in tests diff --git a/satpy/readers/atms_sdr_hdf5.py b/satpy/readers/atms_sdr_hdf5.py index 7f2d43bd71..dc78399aca 100644 --- a/satpy/readers/atms_sdr_hdf5.py +++ b/satpy/readers/atms_sdr_hdf5.py @@ -15,8 +15,7 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -""" -Reader for the ATMS SDR format. +"""Reader for the ATMS SDR format. A reader for Advanced Technology Microwave Sounder (ATMS) SDR data as it e.g. comes out of the CSPP package for processing Direct Readout data. diff --git a/satpy/readers/generic_image.py b/satpy/readers/generic_image.py index 1ba160095f..f6c983e8d5 100644 --- a/satpy/readers/generic_image.py +++ b/satpy/readers/generic_image.py @@ -15,8 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . -""" -Reader for generic image (e.g. gif, png, jpg, tif, geotiff, ...). +"""Reader for generic image (e.g. gif, png, jpg, tif, geotiff, ...). Returns a dataset without calibration. Includes coordinates if available in the file (eg. geotiff). diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index 0bf918d68f..4f34c1fde8 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -34,8 +34,7 @@ def gerb_get_dataset(ds, ds_info): - """ - Load a GERB dataset in memory from a HDF5 file or HDF5FileHandler. + """Load a GERB dataset in memory from a HDF5 file or HDF5FileHandler. The routine takes into account the quantisation factor and fill values. 
""" diff --git a/satpy/readers/ici_l1b_nc.py b/satpy/readers/ici_l1b_nc.py index d6ebea0c56..b063c51c4f 100644 --- a/satpy/readers/ici_l1b_nc.py +++ b/satpy/readers/ici_l1b_nc.py @@ -176,8 +176,7 @@ def _interpolate_geo( latitude, n_samples, ): - """ - Perform the interpolation of geographic coordinates from tie points to pixel points. + """Perform the interpolation of geographic coordinates from tie points to pixel points. Args: longitude: xarray DataArray containing the longitude dataset to @@ -229,8 +228,7 @@ def _interpolate_viewing_angle( zenith, n_samples, ): - """ - Perform the interpolation of angular coordinates from tie points to pixel points. + """Perform the interpolation of angular coordinates from tie points to pixel points. Args: azimuth: xarray DataArray containing the azimuth angle dataset to diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 29e0247fdc..2862301168 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -46,7 +46,6 @@ class ModisL3GriddedHDFFileHandler(HDFEOSGeoReader): """File handler for MODIS HDF-EOS Level 3 CMG gridded files.""" def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" - # Initialise set of variable names to carry through code handled_var_names = set() @@ -112,7 +111,6 @@ def get_dataset(self, dataset_id, dataset_info): def _get_area_extent(self): """Get the grid properties.""" - # Now compute the data extent upperleft = self.metadata["GridStructure"]["GRID_1"]["UpperLeftPointMtrs"] lowerright = self.metadata["GridStructure"]["GRID_1"]["LowerRightMtrs"] diff --git a/satpy/readers/scatsat1_l2b.py b/satpy/readers/scatsat1_l2b.py index 9989bf3d86..886ce458b3 100644 --- a/satpy/readers/scatsat1_l2b.py +++ b/satpy/readers/scatsat1_l2b.py @@ -26,8 +26,10 @@ class SCATSAT1L2BFileHandler(BaseFileHandler): + """File handler for ScatSat level 2 files, as distributed by Eumetsat in HDF5 format.""" def __init__(self, filename, filename_info, filetype_info): + """Initialize the file handler.""" super(SCATSAT1L2BFileHandler, self).__init__(filename, filename_info, filetype_info) self.h5f = h5py.File(self.filename, "r") h5data = self.h5f["science_data"] @@ -44,6 +46,7 @@ def __init__(self, filename, filename_info, filetype_info): self.longitude_scale = float(h5data.attrs["Longitude Scale"]) def get_dataset(self, key, info): + """Get the dataset.""" h5data = self.h5f["science_data"] stdname = info.get("standard_name") diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 1c7d2c78ef..969c497410 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -131,11 +131,13 @@ def _create_fake_rad_dataset(rad: xr.DataArray, resolution: int) -> xr.Dataset: def generate_l1b_filename(chan_name: str) -> str: + """Generate a l1b filename.""" return f"OR_ABI-L1b-RadC-M4{chan_name}_G16_s20161811540362_e20161811545170_c20161811545230_suffix.nc" @pytest.fixture() def c01_refl(tmp_path) -> xr.DataArray: + """Load c01 reflectances.""" with _apply_dask_chunk_size(): reader = _create_reader_for_data(tmp_path, "C01", None, 1000) return reader.load(["C01"])["C01"] @@ -143,6 +145,7 @@ def c01_refl(tmp_path) -> xr.DataArray: @pytest.fixture() def c01_rad(tmp_path) -> xr.DataArray: + """Load c01 radiances.""" with _apply_dask_chunk_size(): reader = _create_reader_for_data(tmp_path, "C01", None, 1000) return reader.load([DataQuery(name="C01", calibration="radiance")])["C01"] @@ -150,6 
+153,7 @@ def c01_rad(tmp_path) -> xr.DataArray: @pytest.fixture() def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: + """Load c01 radiances through h5netcdf.""" shape = RAD_SHAPE[1000] rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.0) * 50.0 rad_data = (rad_data + 1.0) / 0.5 @@ -172,6 +176,7 @@ def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: @pytest.fixture() def c01_counts(tmp_path) -> xr.DataArray: + """Load c01 counts.""" with _apply_dask_chunk_size(): reader = _create_reader_for_data(tmp_path, "C01", None, 1000) return reader.load([DataQuery(name="C01", calibration="counts")])["C01"] @@ -179,6 +184,7 @@ def c01_counts(tmp_path) -> xr.DataArray: @pytest.fixture() def c07_bt_creator(tmp_path) -> Callable: + """Create a loader for c07 brightness temperatures.""" def _load_data_array( clip_negative_radiances: bool = False, ): diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index e71534fbd2..0c22f5b3f1 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -89,6 +89,7 @@ def _create_test_netcdf(filename, resolution=742): @pytest.fixture(scope="session") def cf_scene(): + """Create a cf scene.""" tstart = datetime(2019, 4, 1, 12, 0) tend = datetime(2019, 4, 1, 12, 15) data_visir = np.array([[1, 2], [3, 4]]) @@ -214,6 +215,7 @@ def cf_scene(): @pytest.fixture() def nc_filename(tmp_path): + """Create an nc filename for viirs m band.""" now = datetime.utcnow() filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-mband-20201007075915-20201007080744.nc" return str(tmp_path / filename) @@ -221,6 +223,7 @@ def nc_filename(tmp_path): @pytest.fixture() def nc_filename_i(tmp_path): + """Create an nc filename for viirs i band.""" now = datetime.utcnow() filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-iband-20201007075915-20201007080744.nc" return str(tmp_path / filename) diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index 49206962e5..7ec34fd9bf 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -31,6 +31,7 @@ @pytest.fixture() def nc_filename(tmp_path): + """Create an nc test data file and return its filename.""" now = datetime.datetime.utcnow() filename = f"VGAC_VJ10XMOD_A{now:%Y%j_%H%M}_n004946_K005.nc" filename_str = str(tmp_path / filename) From 8662786043afba31329a9fda964d4cdf6a2c1051 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 21 Nov 2023 08:48:10 +0100 Subject: [PATCH 486/702] Fix "A" ruff checks --- doc/source/conf.py | 2 +- pyproject.toml | 6 +- satpy/etc/eps_gomel1b_10.0.xml | 1785 ++++++++++++++++++++++++++++++++ satpy/readers/xmlformat.py | 18 +- satpy/tests/test_node.py | 2 +- satpy/tests/test_readers.py | 2 +- 6 files changed, 1800 insertions(+), 15 deletions(-) create mode 100644 satpy/etc/eps_gomel1b_10.0.xml diff --git a/doc/source/conf.py b/doc/source/conf.py index df006727c0..3aa810420e 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -117,7 +117,7 @@ def __getattr__(cls, name): # General information about the project. project = u"Satpy" -copyright = u"2009-{}, The PyTroll Team".format(datetime.utcnow().strftime("%Y")) +copyright = u"2009-{}, The PyTroll Team".format(datetime.utcnow().strftime("%Y")) # noqa: A001 # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/pyproject.toml b/pyproject.toml index fe5bc8dc59..61c08ba57b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,10 +15,10 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ -# In the future, add "A", "B", "S", "N", "D" -select = ["D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] +# In the future, add "B", "S", "N" +select = ["A", "D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] line-length = 120 -ignore = ["D417"] +ignore = ["D417", "A003"] [tool.ruff.per-file-ignores] "satpy/tests/*" = ["S101"] # assert allowed in tests diff --git a/satpy/etc/eps_gomel1b_10.0.xml b/satpy/etc/eps_gomel1b_10.0.xml new file mode 100644 index 0000000000..39942ee907 --- /dev/null +++ b/satpy/etc/eps_gomel1b_10.0.xml @@ -0,0 +1,1785 @@ + + + + + + + + + + 130 + + + 80 + + PFS April 2004 + april04 + spring04 + + + EPS GOME Level 1B Format + + + This GOME 1B description was generated using the GOME PFS Excel document Issue 8 Revision 0 (eps_gomel1_8.0_names_masks_v13.xls) and pfs2xml version 3.3 + + + + GOME_*1B_*Z* + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + eps-product + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
diff --git a/satpy/readers/xmlformat.py b/satpy/readers/xmlformat.py index 969c30113a..21200d4c8d 100644 --- a/satpy/readers/xmlformat.py +++ b/satpy/readers/xmlformat.py @@ -31,22 +31,22 @@ "uinteger4": ">u4", } -def process_delimiter(elt, ascii=False): +def process_delimiter(elt, text=False): """Process a 'delimiter' tag.""" - del elt, ascii + del elt, text -def process_field(elt, ascii=False): +def process_field(elt, text=False): """Process a 'field' tag.""" # NOTE: if there is a variable defined in this field and it is different # from the default, we could change the value and restart. scale = np.uint8(1) - if elt.get("type") == "bitfield" and not ascii: + if elt.get("type") == "bitfield" and not text: current_type = ">u" + str(int(elt.get("length")) // 8) scale = np.dtype(current_type).type(1) elif (elt.get("length") is not None): - if ascii: + if text: add = 33 else: add = 0 @@ -64,9 +64,9 @@ def process_field(elt, ascii=False): return ((elt.get("name"), current_type, scale)) -def process_array(elt, ascii=False): +def process_array(elt, text=False): """Process an 'array' tag.""" - del ascii + del text chld = list(elt) if len(chld) > 1: raise ValueError() @@ -147,10 +147,10 @@ def parse_format(xml_file): types_scales = {} for prod in tree.find("product"): - ascii = (prod.tag in ["mphr", "sphr"]) + text = (prod.tag in ["mphr", "sphr"]) res = [] for i in prod: - lres = CASES[i.tag](i, ascii) + lres = CASES[i.tag](i, text) if lres is not None: res.append(lres) types_scales[(prod.tag, int(prod.get("subclass")))] = res diff --git a/satpy/tests/test_node.py b/satpy/tests/test_node.py index 7475b04d24..35fd8a27bb 100644 --- a/satpy/tests/test_node.py +++ b/satpy/tests/test_node.py @@ -26,7 +26,7 @@ class FakeCompositor: """A fake compositor.""" - def __init__(self, id): + def __init__(self, id): # noqa: A002 """Set up the fake compositor.""" self.id = id diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index 378f3fdb5a..d91e2b6fed 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -637,7 +637,7 @@ def test_available_readers_base_loader(self, monkeypatch): from satpy import available_readers from satpy._config import glob_config - def patched_import_error(name, globals=None, locals=None, fromlist=(), level=0): + def patched_import_error(name, globals=None, locals=None, fromlist=(), level=0): # noqa: A002 if name in ("netcdf4", ): raise ImportError(f"Mocked import error {name}") return real_import(name, globals=globals, locals=locals, fromlist=fromlist, level=level) From 47f54674a4f646cd799399b8d007161c91e85a7c Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 21 Nov 2023 09:00:40 +0100 Subject: [PATCH 487/702] Add noqa on A003 errors --- pyproject.toml | 2 +- satpy/composites/__init__.py | 2 +- satpy/readers/__init__.py | 2 +- satpy/readers/avhrr_l1b_gaclac.py | 2 +- satpy/scene.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 61c08ba57b..0f3569280c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,7 @@ line_length = 120 # In the future, add "B", "S", "N" select = ["A", "D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] line-length = 120 -ignore = ["D417", "A003"] +ignore = ["D417"] [tool.ruff.per-file-ignores] "satpy/tests/*" = ["S101"] # assert allowed in tests diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index d3a1e510cb..52b7c1555d 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -112,7 
+112,7 @@ def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwar self.attrs = kwargs @property - def id(self): + def id(self): # noqa: A003 """Return the DataID of the object.""" try: return self.attrs["_satpy_id"] diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 81ebf2393b..385e1126e4 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -705,7 +705,7 @@ def __repr__(self): """Representation of the object.""" return '' - def open(self, *args, **kwargs): + def open(self, *args, **kwargs): # noqa: A003 """Open the file. This is read-only. diff --git a/satpy/readers/avhrr_l1b_gaclac.py b/satpy/readers/avhrr_l1b_gaclac.py index c566175b8c..22dfc857f5 100644 --- a/satpy/readers/avhrr_l1b_gaclac.py +++ b/satpy/readers/avhrr_l1b_gaclac.py @@ -196,7 +196,7 @@ def get_dataset(self, key, info): return res - def slice(self, data, times): + def slice(self, data, times): # noqa: A003 """Select user-defined scanlines and/or strip invalid coordinates. Furthermore, update scanline timestamps. diff --git a/satpy/scene.py b/satpy/scene.py index d96c81a0e4..27822e9ad5 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -660,7 +660,7 @@ def _slice_datasets(self, dataset_ids, slice_key, new_area, area_only=True): else: replace_anc(new_ds, pres) - def slice(self, key): + def slice(self, key): # noqa: A003 """Slice Scene by dataset index. .. note:: From 33f354f9d6d8c9e62ca8f79d4cb60809d20b6235 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 21 Nov 2023 09:04:22 +0100 Subject: [PATCH 488/702] Add noqa for D417 --- doc/source/reader_table.py | 2 +- pyproject.toml | 1 - satpy/composites/__init__.py | 12 +- satpy/composites/abi.py | 2 +- satpy/composites/agri.py | 2 +- satpy/composites/glm.py | 2 +- satpy/dataset/data_dict.py | 4 +- satpy/dependency_tree.py | 8 +- satpy/enhancements/__init__.py | 10 +- satpy/etc/eps_gomel1b_10.0.xml | 1785 ------------------- satpy/modifiers/_crefl.py | 2 +- satpy/modifiers/filters.py | 2 +- satpy/modifiers/geometry.py | 8 +- satpy/modifiers/parallax.py | 2 +- satpy/modifiers/spectral.py | 4 +- satpy/multiscene/_multiscene.py | 2 +- satpy/readers/__init__.py | 6 +- satpy/readers/avhrr_l1b_gaclac.py | 2 +- satpy/readers/file_handlers.py | 2 +- satpy/readers/mviri_l1b_fiduceo_nc.py | 2 +- satpy/readers/nucaps.py | 2 +- satpy/readers/viirs_edr_active_fires.py | 2 +- satpy/readers/viirs_sdr.py | 2 +- satpy/resample.py | 4 +- satpy/scene.py | 2 +- satpy/tests/modifier_tests/test_parallax.py | 2 +- satpy/writers/awips_tiled.py | 4 +- satpy/writers/cf_writer.py | 2 +- satpy/writers/geotiff.py | 2 +- satpy/writers/ninjogeotiff.py | 2 +- 30 files changed, 49 insertions(+), 1835 deletions(-) delete mode 100644 satpy/etc/eps_gomel1b_10.0.xml diff --git a/doc/source/reader_table.py b/doc/source/reader_table.py index 3ddec3444b..618cb2b96b 100644 --- a/doc/source/reader_table.py +++ b/doc/source/reader_table.py @@ -39,7 +39,7 @@ def rst_table_row(columns=None): return row -def rst_table_header(name=None, header=None, header_rows=1, widths="auto"): +def rst_table_header(name=None, header=None, header_rows=1, widths="auto"): # noqa: D417 """Create header for rst table. 
Args: diff --git a/pyproject.toml b/pyproject.toml index 0f3569280c..4de1e302f4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,6 @@ line_length = 120 # In the future, add "B", "S", "N" select = ["A", "D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] line-length = 120 -ignore = ["D417"] [tool.ruff.per-file-ignores] "satpy/tests/*" = ["S101"] # assert allowed in tests diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 52b7c1555d..fa43d4e689 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -343,7 +343,7 @@ class CategoricalDataCompositor(CompositeBase): res = [[20, 40, 30], [50, 30, 10]] """ - def __init__(self, name, lut=None, **kwargs): + def __init__(self, name, lut=None, **kwargs): # noqa: D417 """Get look-up-table used to recategorize data. Args: @@ -381,7 +381,7 @@ class GenericCompositor(CompositeBase): modes = {1: "L", 2: "LA", 3: "RGB", 4: "RGBA"} - def __init__(self, name, common_channel_mask=True, **kwargs): + def __init__(self, name, common_channel_mask=True, **kwargs): # noqa: D417 """Collect custom configuration values. Args: @@ -679,7 +679,7 @@ class DayNightCompositor(GenericCompositor): of the image (night or day). See the documentation below for more details. """ - def __init__(self, name, lim_low=85., lim_high=88., day_night="day_night", include_alpha=True, **kwargs): + def __init__(self, name, lim_low=85., lim_high=88., day_night="day_night", include_alpha=True, **kwargs): # noqa: D417 """Collect custom configuration values. Args: @@ -1014,7 +1014,7 @@ def __call__(self, projectables, *args, **kwargs): class CloudCompositor(GenericCompositor): """Detect clouds based on thresholding and use it as a mask for compositing.""" - def __init__(self, name, transition_min=258.15, transition_max=298.15, + def __init__(self, name, transition_min=258.15, transition_max=298.15, # noqa: D417 transition_gamma=3.0, **kwargs): """Collect custom configuration values. @@ -1357,7 +1357,7 @@ class StaticImageCompositor(GenericCompositor, DataDownloadMixin): """ - def __init__(self, name, filename=None, url=None, known_hash=None, area=None, + def __init__(self, name, filename=None, url=None, known_hash=None, area=None, # noqa: D417 **kwargs): """Collect custom configuration values. @@ -1735,7 +1735,7 @@ def _get_flag_value(mask, val): class LongitudeMaskingCompositor(SingleBandCompositor): """Masks areas outside defined longitudes.""" - def __init__(self, name, lon_min=None, lon_max=None, **kwargs): + def __init__(self, name, lon_min=None, lon_max=None, **kwargs): # noqa: D417 """Collect custom configuration values. Args: diff --git a/satpy/composites/abi.py b/satpy/composites/abi.py index 3ae5237906..88c0db1d8e 100644 --- a/satpy/composites/abi.py +++ b/satpy/composites/abi.py @@ -42,7 +42,7 @@ class SimulatedGreen(GenericCompositor): """ - def __init__(self, name, fractions=(0.465, 0.465, 0.07), **kwargs): + def __init__(self, name, fractions=(0.465, 0.465, 0.07), **kwargs): # noqa: D417 """Initialize fractions for input channels. Args: diff --git a/satpy/composites/agri.py b/satpy/composites/agri.py index 839706457e..20024282d2 100644 --- a/satpy/composites/agri.py +++ b/satpy/composites/agri.py @@ -42,7 +42,7 @@ class SimulatedRed(GenericCompositor): """ - def __init__(self, name, fractions=(1.0, 0.13, 0.87), **kwargs): + def __init__(self, name, fractions=(1.0, 0.13, 0.87), **kwargs): # noqa: D417 """Initialize fractions for input channels. 
Args: diff --git a/satpy/composites/glm.py b/satpy/composites/glm.py index e9b6ef275e..e1c9b676c6 100644 --- a/satpy/composites/glm.py +++ b/satpy/composites/glm.py @@ -43,7 +43,7 @@ class HighlightCompositor(GenericCompositor): """ - def __init__(self, name, min_highlight=0.0, max_highlight=10.0, + def __init__(self, name, min_highlight=0.0, max_highlight=10.0, # noqa: D417 max_factor=(0.8, 0.8, -0.8, 0), **kwargs): """Initialize composite with highlight factor options. diff --git a/satpy/dataset/data_dict.py b/satpy/dataset/data_dict.py index 790d688b24..783ddc4487 100644 --- a/satpy/dataset/data_dict.py +++ b/satpy/dataset/data_dict.py @@ -51,7 +51,7 @@ def get_best_dataset_key(key, choices): return [choice for choice, distance in zip(sorted_choices, distances) if distance == distances[0]] -def get_key(key, key_container, num_results=1, best=True, query=None, +def get_key(key, key_container, num_results=1, best=True, query=None, # noqa: D417 **kwargs): """Get the fully-specified key best matching the provided key. @@ -139,7 +139,7 @@ def keys(self, names=False, wavelengths=False): else: return keys - def get_key(self, match_key, num_results=1, best=True, **dfilter): + def get_key(self, match_key, num_results=1, best=True, **dfilter): # noqa: D417 """Get multiple fully-specified keys that match the provided query. Args: diff --git a/satpy/dependency_tree.py b/satpy/dependency_tree.py index d99fb536eb..7c2b65a6c5 100644 --- a/satpy/dependency_tree.py +++ b/satpy/dependency_tree.py @@ -327,7 +327,7 @@ def _create_subtree_from_reader(self, dataset_key, query): LOG.trace("Found reader provided dataset:\n\tRequested: {}\n\tFound: {}".format(dataset_key, node.name)) return node - def _find_reader_node(self, dataset_key, query): + def _find_reader_node(self, dataset_key, query): # noqa: D417 """Attempt to find a `DataID` in the available readers. Args: @@ -517,7 +517,7 @@ def get_modifier(self, comp_id): raise KeyError("Could not find modifier '{}'".format(modifier)) - def _create_required_subtrees(self, parent, prereqs, query=None): + def _create_required_subtrees(self, parent, prereqs, query=None): # noqa: D417 """Determine required prerequisite Nodes for a composite. Args: @@ -531,7 +531,7 @@ def _create_required_subtrees(self, parent, prereqs, query=None): raise MissingDependencies(unknown_datasets) return prereq_nodes - def _create_optional_subtrees(self, parent, prereqs, query=None): + def _create_optional_subtrees(self, parent, prereqs, query=None): # noqa: D417 """Determine optional prerequisite Nodes for a composite. Args: @@ -549,7 +549,7 @@ def _create_optional_subtrees(self, parent, prereqs, query=None): return prereq_nodes - def _create_prerequisite_subtrees(self, parent, prereqs, query=None): + def _create_prerequisite_subtrees(self, parent, prereqs, query=None): # noqa: D417 """Determine prerequisite Nodes for a composite. 
Args:
diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py
index e2dda9cf63..00a0f8dd4e 100644
--- a/satpy/enhancements/__init__.py
+++ b/satpy/enhancements/__init__.py
@@ -131,7 +131,7 @@ def wrapper(data, **kwargs):
     return on_dask_array(wrapper)
-def piecewise_linear_stretch(
+def piecewise_linear_stretch(  # noqa: D417
     img: XRImage,
     xp: ArrayLike,
     fp: ArrayLike,
@@ -229,7 +229,7 @@ def _cira_stretch(band_data):
     return band_data
-def reinhard_to_srgb(img, saturation=1.25, white=100, **kwargs):
+def reinhard_to_srgb(img, saturation=1.25, white=100, **kwargs):  # noqa: D417
     """Stretch method based on the Reinhard algorithm, using luminance.
     Args:
@@ -293,7 +293,7 @@ def _lookup_table(band_data, luts=None, index=-1):
     return lut[band_data]
-def colorize(img, **kwargs):
+def colorize(img, **kwargs):  # noqa: D417
     """Colorize the given image.
     Args:
@@ -365,7 +365,7 @@ def _merge_colormaps(kwargs, img=None):
     return full_cmap
-def create_colormap(palette, img=None):
+def create_colormap(palette, img=None):  # noqa: D417
     """Create colormap of the given numpy file, color vector, or colormap.
     Args:
@@ -525,7 +525,7 @@ def _three_d_effect_delayed(band_data, kernel, mode):
     return new_data.reshape((1, band_data.shape[0], band_data.shape[1]))
-def btemp_threshold(img, min_in, max_in, threshold, threshold_out=None, **kwargs):
+def btemp_threshold(img, min_in, max_in, threshold, threshold_out=None, **kwargs):  # noqa: D417
     """Scale data linearly in two separate regions.
     This enhancement scales the input data linearly by splitting the data
diff --git a/satpy/etc/eps_gomel1b_10.0.xml b/satpy/etc/eps_gomel1b_10.0.xml
deleted file mode 100644
index 39942ee907..0000000000
--- a/satpy/etc/eps_gomel1b_10.0.xml
+++ /dev/null
@@ -1,1785 +0,0 @@
- 130
- 80
- PFS April 2004
- april04
- spring04
- EPS GOME Level 1B Format
- This GOME 1B description was generated using the GOME PFS Excel document Issue 8 Revision 0 (eps_gomel1_8.0_names_masks_v13.xls) and pfs2xml version 3.3
- GOME_*1B_*Z*
- eps-product
[The remaining removed lines are the element markup of the same 1785-line GOME L1B XML format definition added earlier in this series; the markup did not survive extraction and is not reproduced here.]
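The "# noqa: D417" markers added throughout this commit suppress, per function, the pydocstyle rule about missing argument descriptions in docstrings, now that the blanket ignore = ["D417"] entry has been removed from pyproject.toml. A short sketch of what typically triggers the rule and what satisfies it; the example is illustrative and not taken from the patch:

    def scale(data, factor, offset=0.0):
        """Scale the data.

        Args:
            factor: Multiplicative factor.
        """
        # D417 would flag this docstring: "data" and "offset" are parameters of
        # the function but are not described in the Args section.
        return data * factor + offset

    def scale_documented(data, factor, offset=0.0):
        """Scale the data.

        Args:
            data: Input array.
            factor: Multiplicative factor.
            offset: Additive offset applied after scaling.
        """
        return data * factor + offset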
diff --git a/satpy/modifiers/_crefl.py b/satpy/modifiers/_crefl.py index bc42228f26..a68d9a460e 100644 --- a/satpy/modifiers/_crefl.py +++ b/satpy/modifiers/_crefl.py @@ -35,7 +35,7 @@ class ReflectanceCorrector(ModifierBase, DataDownloadMixin): Uses a python rewrite of the C CREFL code written for VIIRS and MODIS. """ - def __init__(self, *args, dem_filename=None, dem_sds="averaged elevation", + def __init__(self, *args, dem_filename=None, dem_sds="averaged elevation", # noqa: D417 url=None, known_hash=None, **kwargs): """Initialize the compositor with values from the user or from the configuration file. diff --git a/satpy/modifiers/filters.py b/satpy/modifiers/filters.py index 151082e723..0bfc3592b7 100644 --- a/satpy/modifiers/filters.py +++ b/satpy/modifiers/filters.py @@ -11,7 +11,7 @@ class Median(ModifierBase): """Apply a median filter to the band.""" - def __init__(self, median_filter_params, **kwargs): + def __init__(self, median_filter_params, **kwargs): # noqa: D417 """Create the instance. Args: diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py index 1194eb036a..cc903ad5e9 100644 --- a/satpy/modifiers/geometry.py +++ b/satpy/modifiers/geometry.py @@ -33,7 +33,7 @@ class SunZenithCorrectorBase(ModifierBase): """Base class for sun zenith correction modifiers.""" - def __init__(self, max_sza=95.0, **kwargs): + def __init__(self, max_sza=95.0, **kwargs): # noqa: D417 """Collect custom configuration values. Args: @@ -96,7 +96,7 @@ class SunZenithCorrector(SunZenithCorrectorBase): """ - def __init__(self, correction_limit=88., **kwargs): + def __init__(self, correction_limit=88., **kwargs): # noqa: D417 """Collect custom configuration values. Args: @@ -142,7 +142,7 @@ class EffectiveSolarPathLengthCorrector(SunZenithCorrectorBase): """ - def __init__(self, correction_limit=88., **kwargs): + def __init__(self, correction_limit=88., **kwargs): # noqa: D417 """Collect custom configuration values. Args: @@ -177,7 +177,7 @@ class SunZenithReducer(SunZenithCorrectorBase): """ - def __init__(self, correction_limit=55., max_sza=90, strength=1.5, **kwargs): + def __init__(self, correction_limit=55., max_sza=90, strength=1.5, **kwargs): # noqa: D417 """Collect custom configuration values. Args: diff --git a/satpy/modifiers/parallax.py b/satpy/modifiers/parallax.py index 8c5c138e5d..9d70aa12c3 100644 --- a/satpy/modifiers/parallax.py +++ b/satpy/modifiers/parallax.py @@ -265,7 +265,7 @@ def __init__(self, base_area, self.debug_mode = debug_mode self.diagnostics = {} - def __call__(self, cth_dataset, **kwargs): + def __call__(self, cth_dataset, **kwargs): # noqa: D417 """Apply parallax correction to dataset. Args: diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index e3ea3214b8..46466540c9 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -43,7 +43,7 @@ class NIRReflectance(ModifierBase): TERMINATOR_LIMIT = 85.0 MASKING_LIMIT = 88.0 - def __init__(self, sunz_threshold=TERMINATOR_LIMIT, + def __init__(self, sunz_threshold=TERMINATOR_LIMIT, # noqa: D417 masking_limit=MASKING_LIMIT, **kwargs): """Collect custom configuration values. @@ -139,7 +139,7 @@ def _init_reflectance_calculator(self, metadata): class NIREmissivePartFromReflectance(NIRReflectance): """Get the emissive part of NIR bands.""" - def __init__(self, sunz_threshold=None, **kwargs): + def __init__(self, sunz_threshold=None, **kwargs): # noqa: D417 """Collect custom configuration values. 
Args: diff --git a/satpy/multiscene/_multiscene.py b/satpy/multiscene/_multiscene.py index 976fbbbd2a..4440641d8f 100644 --- a/satpy/multiscene/_multiscene.py +++ b/satpy/multiscene/_multiscene.py @@ -192,7 +192,7 @@ def first_scene(self): return self._scene_gen.first @classmethod - def from_files( + def from_files( # noqa: D417 cls, files_to_sort: Collection[str], reader: str | Collection[str] | None = None, diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 385e1126e4..c8fc0a8b69 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -111,7 +111,7 @@ def group_files(files_to_sort, reader=None, time_threshold=10, return list(_filter_groups(groups, missing=missing)) -def _assign_files_to_readers(files_to_sort, reader_names, +def _assign_files_to_readers(files_to_sort, reader_names, # noqa: D417 reader_kwargs): """Assign files to readers. @@ -190,7 +190,7 @@ def _get_file_keys_for_reader_files(reader_files, group_keys=None): return file_keys -def _get_sorted_file_groups(all_file_keys, time_threshold): +def _get_sorted_file_groups(all_file_keys, time_threshold): # noqa: D417 """Get sorted file groups. Get a list of dictionaries, where each list item consists of a dictionary @@ -673,7 +673,7 @@ class FSFile(os.PathLike): """ - def __init__(self, file, fs=None): + def __init__(self, file, fs=None): # noqa: D417 """Initialise the FSFile instance. Args: diff --git a/satpy/readers/avhrr_l1b_gaclac.py b/satpy/readers/avhrr_l1b_gaclac.py index 22dfc857f5..cfc3e1283e 100644 --- a/satpy/readers/avhrr_l1b_gaclac.py +++ b/satpy/readers/avhrr_l1b_gaclac.py @@ -60,7 +60,7 @@ class GACLACFile(BaseFileHandler): """Reader for GAC and LAC data.""" - def __init__(self, filename, filename_info, filetype_info, + def __init__(self, filename, filename_info, filetype_info, # noqa: D417 start_line=None, end_line=None, strip_invalid_coords=True, interpolate_coords=True, **reader_kwargs): """Init the file handler. diff --git a/satpy/readers/file_handlers.py b/satpy/readers/file_handlers.py index 3fdeed1edc..66a028eb4c 100644 --- a/satpy/readers/file_handlers.py +++ b/satpy/readers/file_handlers.py @@ -25,7 +25,7 @@ from satpy.readers import open_file_or_filename -def open_dataset(filename, *args, **kwargs): +def open_dataset(filename, *args, **kwargs): # noqa: D417 """Open a file with xarray. Args: diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index 9a309a0bb8..fc5aea2c8e 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -540,7 +540,7 @@ class FiduceoMviriBase(BaseFileHandler): "IR": "count_ir" } - def __init__(self, filename, filename_info, filetype_info, + def __init__(self, filename, filename_info, filetype_info, # noqa: D417 mask_bad_quality=False): """Initialize the file handler. diff --git a/satpy/readers/nucaps.py b/satpy/readers/nucaps.py index 2c9e2ba39f..19db6f9976 100644 --- a/satpy/readers/nucaps.py +++ b/satpy/readers/nucaps.py @@ -237,7 +237,7 @@ def get_dataset(self, dataset_id, ds_info): class NUCAPSReader(FileYAMLReader): """Reader for NUCAPS NetCDF4 files.""" - def __init__(self, config_files, mask_surface=True, mask_quality=True, **kwargs): + def __init__(self, config_files, mask_surface=True, mask_quality=True, **kwargs): # noqa: D417 """Configure reader behavior. 
Args: diff --git a/satpy/readers/viirs_edr_active_fires.py b/satpy/readers/viirs_edr_active_fires.py index bd8f3f6d69..9fa5b5d59a 100644 --- a/satpy/readers/viirs_edr_active_fires.py +++ b/satpy/readers/viirs_edr_active_fires.py @@ -46,7 +46,7 @@ def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=auto_maskandscale, xarray_kwargs=xarray_kwargs) self.prefix = filetype_info.get("variable_prefix") - def get_dataset(self, dsid, dsinfo): + def get_dataset(self, dsid, dsinfo): # noqa: D417 """Get requested data as DataArray. Args: diff --git a/satpy/readers/viirs_sdr.py b/satpy/readers/viirs_sdr.py index db9ba9ba10..eef02f7777 100644 --- a/satpy/readers/viirs_sdr.py +++ b/satpy/readers/viirs_sdr.py @@ -185,7 +185,7 @@ def split_desired_other(fhs, prime_geo, second_geo): class VIIRSSDRReader(FileYAMLReader): """Custom file reader for finding VIIRS SDR geolocation at runtime.""" - def __init__(self, config_files, use_tc=None, **kwargs): + def __init__(self, config_files, use_tc=None, **kwargs): # noqa: D417 """Initialize file reader and adjust geolocation preferences. Args: diff --git a/satpy/resample.py b/satpy/resample.py index c8ed073ae5..ddab90be82 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -823,7 +823,7 @@ def compute(self, data, **kwargs): """Call the resampling.""" raise NotImplementedError("Use the sub-classes") - def resample(self, data, **kwargs): + def resample(self, data, **kwargs): # noqa: D417 """Resample `data` by calling `precompute` and `compute` methods. Args: @@ -899,7 +899,7 @@ class BucketAvg(BucketResamplerBase): """ - def compute(self, data, fill_value=np.nan, skipna=True, **kwargs): + def compute(self, data, fill_value=np.nan, skipna=True, **kwargs): # noqa: D417 """Call the resampling. Args: diff --git a/satpy/scene.py b/satpy/scene.py index 27822e9ad5..9d9057c907 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1384,7 +1384,7 @@ def unload(self, keepables=None): LOG.debug("Unloading dataset: %r", ds_id) del self._datasets[ds_id] - def load(self, wishlist, calibration="*", resolution="*", + def load(self, wishlist, calibration="*", resolution="*", # noqa: D417 polarization="*", level="*", modifiers="*", generate=True, unload=True, **kwargs): """Read and generate requested datasets. diff --git a/satpy/tests/modifier_tests/test_parallax.py b/satpy/tests/modifier_tests/test_parallax.py index e1b426dce2..b769e45608 100644 --- a/satpy/tests/modifier_tests/test_parallax.py +++ b/satpy/tests/modifier_tests/test_parallax.py @@ -49,7 +49,7 @@ def fake_tle(): line2="2 40732 0.2533 325.0106 0000976 118.8734 330.4058 1.00272123 23817") -def _get_fake_areas(center, sizes, resolution, code=4326): +def _get_fake_areas(center, sizes, resolution, code=4326): # noqa: D417 """Get multiple square areas with the same center. Returns multiple square areas centered at the same location diff --git a/satpy/writers/awips_tiled.py b/satpy/writers/awips_tiled.py index 9bab65fe35..15680e8091 100644 --- a/satpy/writers/awips_tiled.py +++ b/satpy/writers/awips_tiled.py @@ -422,7 +422,7 @@ def __call__(self): class LetteredTileGenerator(NumberedTileGenerator): """Helper class to generate per-tile metadata for lettered tiles.""" - def __init__(self, area_definition, extents, sector_crs, + def __init__(self, area_definition, extents, sector_crs, # noqa: D417 cell_size=(2000000, 2000000), num_subtiles=None, use_sector_reference=False): """Initialize tile information for later generation. 
@@ -1501,7 +1501,7 @@ def _get_tile_data_info(self, data_arrs, creation_time, source_name): return ds_info # TODO: Add additional untiled variable support - def save_datasets(self, datasets, sector_id=None, + def save_datasets(self, datasets, sector_id=None, # noqa: D417 source_name=None, tile_count=(1, 1), tile_size=None, lettered_grid=False, num_subtiles=None, diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 506a8bf561..301df399c8 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -1092,7 +1092,7 @@ def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs): """Save the *dataset* to a given *filename*.""" return self.save_datasets([dataset], filename, **kwargs) - def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH, + def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH, # noqa: D417 flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False, include_orig_name=True, numeric_name_prefix="CHANNEL_", **to_netcdf_kwargs): """Save the given datasets in one netCDF file. diff --git a/satpy/writers/geotiff.py b/satpy/writers/geotiff.py index 1a522ecd68..229cf777db 100644 --- a/satpy/writers/geotiff.py +++ b/satpy/writers/geotiff.py @@ -137,7 +137,7 @@ def separate_init_kwargs(cls, kwargs): return init_kwargs, kwargs - def save_image( + def save_image( # noqa: D417 self, img: XRImage, filename: Optional[str] = None, diff --git a/satpy/writers/ninjogeotiff.py b/satpy/writers/ninjogeotiff.py index a8f603861e..5f88cc52ed 100644 --- a/satpy/writers/ninjogeotiff.py +++ b/satpy/writers/ninjogeotiff.py @@ -103,7 +103,7 @@ class NinJoGeoTIFFWriter(GeoTIFFWriter): scale_offset_tag_names = ("ninjo_Gradient", "ninjo_AxisIntercept") - def save_image( + def save_image( # noqa: D417 self, image, filename=None, fill_value=None, compute=True, keep_palette=False, cmap=None, overviews=None, overviews_minsize=256, overviews_resampling=None, From 65d35510f9fa9d2da60b93ec209b746e44a57ea3 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 21 Nov 2023 10:29:53 +0200 Subject: [PATCH 489/702] Set dtype for get_lonlats() in NIR reflectance calculation --- satpy/modifiers/spectral.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index e3ea3214b8..979469876a 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -107,7 +107,7 @@ def _get_sun_zenith_from_provided_data(projectables, optional_datasets): if sun_zenith_angle is None: raise ImportError("Module pyorbital.astronomy needed to compute sun zenith angles.") _nir = projectables[0] - lons, lats = _nir.attrs["area"].get_lonlats(chunks=_nir.data.chunks) + lons, lats = _nir.attrs["area"].get_lonlats(chunks=_nir.data.chunks, dtype=_nir.dtype) sun_zenith = sun_zenith_angle(_nir.attrs["start_time"], lons, lats) return sun_zenith From e40fb6cdbb28483459c9c420788ce9c1b8b58d0b Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 21 Nov 2023 10:50:35 +0200 Subject: [PATCH 490/702] Check that NIR dtype is passed to get_lonlats() call --- satpy/tests/test_modifiers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 4aece73487..fccda40c13 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -273,7 +273,7 @@ def test_no_sunz_no_co2(self, calculator, apply_modifier_info, sza): # 
due to copying of DataArrays, self.get_lonlats is not the same as the one that was called # we must used the area from the final result DataArray - res.attrs["area"].get_lonlats.assert_called() + res.attrs["area"].get_lonlats.assert_called_with(chunks=((2,), (2,)), dtype=self.nir.dtype) sza.assert_called_with(self.start_time, self.lons, self.lats) self.refl_from_tbs.assert_called_with(self.da_sunz, self.nir.data, self.ir_.data, tb_ir_co2=None) assert np.allclose(res.data, self.refl * 100).compute() From 4c7b3301d42bab04ed202e34c7f8fa4bfefee005 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 21 Nov 2023 13:02:46 +0200 Subject: [PATCH 491/702] Move SZA dtype determination one level up --- satpy/modifiers/spectral.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index 979469876a..7e3ffe66df 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -75,7 +75,7 @@ def _get_reflectance_as_dataarray(self, projectables, optional_datasets): da_nir = _nir.data da_tb11 = _tb11.data da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) - da_sun_zenith = self._get_sun_zenith_from_provided_data(projectables, optional_datasets) + da_sun_zenith = self._get_sun_zenith_from_provided_data(_nir, optional_datasets, _nir.dtype) logger.info("Getting reflective part of %s", _nir.attrs["name"]) reflectance = self._get_reflectance_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, _nir.attrs) @@ -95,7 +95,7 @@ def _get_tb13_4_from_optionals(optional_datasets): return tb13_4 @staticmethod - def _get_sun_zenith_from_provided_data(projectables, optional_datasets): + def _get_sun_zenith_from_provided_data(_nir, optional_datasets, dtype): """Get the sunz from available data or compute it if unavailable.""" sun_zenith = None @@ -106,8 +106,7 @@ def _get_sun_zenith_from_provided_data(projectables, optional_datasets): if sun_zenith is None: if sun_zenith_angle is None: raise ImportError("Module pyorbital.astronomy needed to compute sun zenith angles.") - _nir = projectables[0] - lons, lats = _nir.attrs["area"].get_lonlats(chunks=_nir.data.chunks, dtype=_nir.dtype) + lons, lats = _nir.attrs["area"].get_lonlats(chunks=_nir.data.chunks, dtype=dtype) sun_zenith = sun_zenith_angle(_nir.attrs["start_time"], lons, lats) return sun_zenith From b992edb6e774c2a9e1dfa35426a54df9c7266492 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 21 Nov 2023 13:43:02 +0200 Subject: [PATCH 492/702] Rename _nir variable to nir --- satpy/modifiers/spectral.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index 7e3ffe66df..431b118c1c 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -71,16 +71,16 @@ def __call__(self, projectables, optional_datasets=None, **info): def _get_reflectance_as_dataarray(self, projectables, optional_datasets): """Get the reflectance as a dataarray.""" - _nir, _tb11 = projectables - da_nir = _nir.data + nir, _tb11 = projectables + da_nir = nir.data da_tb11 = _tb11.data da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) - da_sun_zenith = self._get_sun_zenith_from_provided_data(_nir, optional_datasets, _nir.dtype) + da_sun_zenith = self._get_sun_zenith_from_provided_data(nir, optional_datasets, nir.dtype) - logger.info("Getting reflective part of %s", _nir.attrs["name"]) - reflectance = self._get_reflectance_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, 
_nir.attrs) + logger.info("Getting reflective part of %s", nir.attrs["name"]) + reflectance = self._get_reflectance_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) - proj = self._create_modified_dataarray(reflectance, base_dataarray=_nir) + proj = self._create_modified_dataarray(reflectance, base_dataarray=nir) proj.attrs["units"] = "%" return proj @@ -95,7 +95,7 @@ def _get_tb13_4_from_optionals(optional_datasets): return tb13_4 @staticmethod - def _get_sun_zenith_from_provided_data(_nir, optional_datasets, dtype): + def _get_sun_zenith_from_provided_data(nir, optional_datasets, dtype): """Get the sunz from available data or compute it if unavailable.""" sun_zenith = None @@ -106,8 +106,8 @@ def _get_sun_zenith_from_provided_data(_nir, optional_datasets, dtype): if sun_zenith is None: if sun_zenith_angle is None: raise ImportError("Module pyorbital.astronomy needed to compute sun zenith angles.") - lons, lats = _nir.attrs["area"].get_lonlats(chunks=_nir.data.chunks, dtype=dtype) - sun_zenith = sun_zenith_angle(_nir.attrs["start_time"], lons, lats) + lons, lats = nir.attrs["area"].get_lonlats(chunks=nir.data.chunks, dtype=dtype) + sun_zenith = sun_zenith_angle(nir.attrs["start_time"], lons, lats) return sun_zenith def _create_modified_dataarray(self, reflectance, base_dataarray): From a0c2c23b0bea0fd0e8747e6e24b4921e94263edc Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 21 Nov 2023 14:23:45 +0200 Subject: [PATCH 493/702] Fix NIREmissive SZA calculation to use the same as NIRReflective --- satpy/modifiers/spectral.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index 431b118c1c..ea7cbc6bac 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -162,16 +162,16 @@ def __call__(self, projectables, optional_datasets=None, **info): def _get_emissivity_as_dataarray(self, projectables, optional_datasets): """Get the emissivity as a dataarray.""" - _nir, _tb11 = projectables - da_nir = _nir.data + nir, _tb11 = projectables + da_nir = nir.data da_tb11 = _tb11.data da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) - da_sun_zenith = self._get_sun_zenith_from_provided_data(projectables, optional_datasets) + da_sun_zenith = self._get_sun_zenith_from_provided_data(nir, optional_datasets, nir.dtype) - logger.info("Getting emissive part of %s", _nir.attrs["name"]) - emissivity = self._get_emissivity_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, _nir.attrs) + logger.info("Getting emissive part of %s", nir.attrs["name"]) + emissivity = self._get_emissivity_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) - proj = self._create_modified_dataarray(emissivity, base_dataarray=_nir) + proj = self._create_modified_dataarray(emissivity, base_dataarray=nir) proj.attrs["units"] = "K" return proj From a7a023619cd19951e3c5005a767b9d8c84b46a16 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 21 Nov 2023 17:42:12 +0200 Subject: [PATCH 494/702] Add pykdtree as unstable dependency --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 6e1fdfc781..de5409cfd3 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -84,6 +84,7 @@ jobs: git+https://github.com/rasterio/rasterio \ git+https://github.com/pydata/bottleneck \ git+https://github.com/pydata/xarray \ + git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/astropy/astropy; 
LD_PRELOAD=$(python -c "import sys; print(sys.prefix)")/lib/libstdc++.so echo "LD_PRELOAD=${LD_PRELOAD}" >> $GITHUB_ENV From 12561044220f621742744e2c75e186b4b32ffec2 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 10:26:58 -0600 Subject: [PATCH 495/702] Try building pykdtree with numpy 2 in ci.yaml --- .github/workflows/ci.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index de5409cfd3..bd39401623 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -64,6 +64,7 @@ jobs: - name: Install unstable dependencies if: matrix.experimental == true shell: bash -l {0} + # Install pykdtree with --no-build-isolation so it builds with numpy 2.0 # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels # may break the conda-forge libraries trying to use newer glibc versions run: | @@ -75,6 +76,7 @@ jobs: numpy \ pandas \ scipy; \ + python -m pip install --no-deps --upgrade --no-build-isolation git+https://github.com/storpipfugl/pykdtree; \ python -m pip install \ --no-deps --upgrade \ git+https://github.com/dask/dask \ @@ -84,7 +86,6 @@ jobs: git+https://github.com/rasterio/rasterio \ git+https://github.com/pydata/bottleneck \ git+https://github.com/pydata/xarray \ - git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/astropy/astropy; LD_PRELOAD=$(python -c "import sys; print(sys.prefix)")/lib/libstdc++.so echo "LD_PRELOAD=${LD_PRELOAD}" >> $GITHUB_ENV From efe647fb83df65b10e0f0acaa5584dc0ee96ec88 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 10:50:42 -0600 Subject: [PATCH 496/702] More debugging for unstable in ci.yaml --- .github/workflows/ci.yaml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index bd39401623..d976ca2251 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -75,8 +75,9 @@ jobs: matplotlib \ numpy \ pandas \ - scipy; \ - python -m pip install --no-deps --upgrade --no-build-isolation git+https://github.com/storpipfugl/pykdtree; \ + scipy + python -m pip install --no-deps --upgrade --no-build-isolation -vv git+https://github.com/storpipfugl/pykdtree + python -c "from pykdtree.kdtree import KDTree" python -m pip install \ --no-deps --upgrade \ git+https://github.com/dask/dask \ @@ -86,7 +87,7 @@ jobs: git+https://github.com/rasterio/rasterio \ git+https://github.com/pydata/bottleneck \ git+https://github.com/pydata/xarray \ - git+https://github.com/astropy/astropy; + git+https://github.com/astropy/astropy LD_PRELOAD=$(python -c "import sys; print(sys.prefix)")/lib/libstdc++.so echo "LD_PRELOAD=${LD_PRELOAD}" >> $GITHUB_ENV From 05111affd16b2d91ffc140b191c70024109f2a2c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 11:12:47 -0600 Subject: [PATCH 497/702] Try removing pykdtree first in unstable CI --- .github/workflows/ci.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index d976ca2251..0a03e003fa 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -76,7 +76,8 @@ jobs: numpy \ pandas \ scipy - python -m pip install --no-deps --upgrade --no-build-isolation -vv git+https://github.com/storpipfugl/pykdtree + mamba remove --force-remove -y pykdtree + python -m pip install --no-deps --upgrade --no-build-isolation -vvv git+https://github.com/storpipfugl/pykdtree python -c "from pykdtree.kdtree import KDTree" python -m 
pip install \ --no-deps --upgrade \ From 8a55d33e0d9ac2af710797536ec38cc5129e9a5a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 11:26:15 -0600 Subject: [PATCH 498/702] Build pyresample with numpy 2 in unstable CI --- .github/workflows/ci.yaml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0a03e003fa..b371893b8a 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -76,9 +76,10 @@ jobs: numpy \ pandas \ scipy - mamba remove --force-remove -y pykdtree - python -m pip install --no-deps --upgrade --no-build-isolation -vvv git+https://github.com/storpipfugl/pykdtree - python -c "from pykdtree.kdtree import KDTree" + mamba remove --force-remove -y pykdtree pyresample + python -m pip install --no-deps --upgrade --no-build-isolation -vvv \ + git+https://github.com/storpipfugl/pykdtree \ + git+https://github.com/pytroll/pyresample python -m pip install \ --no-deps --upgrade \ git+https://github.com/dask/dask \ From c6bb954ac6f58e4d4c8e520eef89c6ffdb79327a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 11:33:09 -0600 Subject: [PATCH 499/702] Add missing versioneer unstable CI dependency --- .github/workflows/ci.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index b371893b8a..36aed73828 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -79,7 +79,8 @@ jobs: mamba remove --force-remove -y pykdtree pyresample python -m pip install --no-deps --upgrade --no-build-isolation -vvv \ git+https://github.com/storpipfugl/pykdtree \ - git+https://github.com/pytroll/pyresample + git+https://github.com/pytroll/pyresample \ + versioneer python -m pip install \ --no-deps --upgrade \ git+https://github.com/dask/dask \ From 522156425acb1e3013093c0faaafe5c817521aca Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 11:45:45 -0600 Subject: [PATCH 500/702] Install versioneer in a different way --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 36aed73828..7e6e446568 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -68,6 +68,7 @@ jobs: # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels # may break the conda-forge libraries trying to use newer glibc versions run: | + python -m pip install versioneer python -m pip install \ --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \ --trusted-host pypi.anaconda.org \ @@ -79,8 +80,7 @@ jobs: mamba remove --force-remove -y pykdtree pyresample python -m pip install --no-deps --upgrade --no-build-isolation -vvv \ git+https://github.com/storpipfugl/pykdtree \ - git+https://github.com/pytroll/pyresample \ - versioneer + git+https://github.com/pytroll/pyresample python -m pip install \ --no-deps --upgrade \ git+https://github.com/dask/dask \ From 921ed0f5451272bd120fbc16ede325eb307d59dd Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 21 Nov 2023 19:54:25 +0200 Subject: [PATCH 501/702] Refactor _get_reflectance/emissivity_as_dataarray --- satpy/modifiers/spectral.py | 33 +++++++++++++++------------------ 1 file changed, 15 insertions(+), 18 deletions(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index ea7cbc6bac..28a3804da7 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -67,23 +67,24 @@ 
def __call__(self, projectables, optional_datasets=None, **info): Not supposed to be used for wavelength outside [3, 4] µm. """ projectables = self.match_data_arrays(projectables) - return self._get_reflectance_as_dataarray(projectables, optional_datasets) + inputs = self._get_nir_inputs(projectables, optional_datasets) + return self._get_reflectance_as_dataarray(*inputs) - def _get_reflectance_as_dataarray(self, projectables, optional_datasets): + def _get_reflectance_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith): """Get the reflectance as a dataarray.""" - nir, _tb11 = projectables - da_nir = nir.data - da_tb11 = _tb11.data - da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) - da_sun_zenith = self._get_sun_zenith_from_provided_data(nir, optional_datasets, nir.dtype) - logger.info("Getting reflective part of %s", nir.attrs["name"]) - reflectance = self._get_reflectance_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) - + reflectance = self._get_reflectance_as_dask(nir.data, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) proj = self._create_modified_dataarray(reflectance, base_dataarray=nir) proj.attrs["units"] = "%" return proj + def _get_nir_inputs(self, projectables, optional_datasets): + nir, _tb11 = projectables + da_tb11 = _tb11.data + da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) + da_sun_zenith = self._get_sun_zenith_from_provided_data(nir, optional_datasets, nir.dtype) + return (nir, da_tb11, da_tb13_4, da_sun_zenith) + @staticmethod def _get_tb13_4_from_optionals(optional_datasets): tb13_4 = None @@ -158,18 +159,14 @@ def __call__(self, projectables, optional_datasets=None, **info): """ projectables = self.match_data_arrays(projectables) - return self._get_emissivity_as_dataarray(projectables, optional_datasets) + inputs = self._get_nir_inputs(projectables, optional_datasets) + return self._get_emissivity_as_dataarray(*inputs) - def _get_emissivity_as_dataarray(self, projectables, optional_datasets): + def _get_emissivity_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith): """Get the emissivity as a dataarray.""" - nir, _tb11 = projectables - da_nir = nir.data - da_tb11 = _tb11.data - da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) - da_sun_zenith = self._get_sun_zenith_from_provided_data(nir, optional_datasets, nir.dtype) logger.info("Getting emissive part of %s", nir.attrs["name"]) - emissivity = self._get_emissivity_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) + emissivity = self._get_emissivity_as_dask(nir.data, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) proj = self._create_modified_dataarray(emissivity, base_dataarray=nir) proj.attrs["units"] = "K" From 05c126835e19ecb24714160e8d3ad04f6c4bcdf1 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 12:17:09 -0600 Subject: [PATCH 502/702] Try --no-build-isolation for all unstable deps --- .github/workflows/ci.yaml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 7e6e446568..3849514506 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -78,11 +78,9 @@ jobs: pandas \ scipy mamba remove --force-remove -y pykdtree pyresample - python -m pip install --no-deps --upgrade --no-build-isolation -vvv \ + python -m pip install --no-deps --upgrade --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ - git+https://github.com/pytroll/pyresample - python -m pip install \ - --no-deps --upgrade \ + 
git+https://github.com/pytroll/pyresample \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ git+https://github.com/zarr-developers/zarr \ From 21de5b04dfff6101a314ea21ab53d3d461931bab Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 13:24:46 -0600 Subject: [PATCH 503/702] Add extension-helpers to unstable build dependencies --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 3849514506..4ccddc8904 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -68,7 +68,7 @@ jobs: # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels # may break the conda-forge libraries trying to use newer glibc versions run: | - python -m pip install versioneer + python -m pip install versioneer extension-helpers python -m pip install \ --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \ --trusted-host pypi.anaconda.org \ From 3a97a1a02ea474507e2e2df956068d53c31acca9 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 13:35:50 -0600 Subject: [PATCH 504/702] Allow dependencies to be installed in unstable CI --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 4ccddc8904..8368c325bc 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -78,7 +78,7 @@ jobs: pandas \ scipy mamba remove --force-remove -y pykdtree pyresample - python -m pip install --no-deps --upgrade --no-build-isolation \ + python -m pip install --upgrade --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ git+https://github.com/dask/dask \ From 0c53817da1c8dd4cf62068c38ab54a0680e00085 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 13:46:08 -0600 Subject: [PATCH 505/702] More unstable CI reworking --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8368c325bc..52e6f3c387 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -68,7 +68,7 @@ jobs: # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels # may break the conda-forge libraries trying to use newer glibc versions run: | - python -m pip install versioneer extension-helpers + python -m pip install versioneer extension-helpers setuptools-scm configobj python -m pip install \ --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \ --trusted-host pypi.anaconda.org \ @@ -78,7 +78,7 @@ jobs: pandas \ scipy mamba remove --force-remove -y pykdtree pyresample - python -m pip install --upgrade --no-build-isolation \ + python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ git+https://github.com/dask/dask \ From 71e489ecf1043329a4a2752ed09998ddee449bcb Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 13:58:37 -0600 Subject: [PATCH 506/702] Add shapely to unstable CI build --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 52e6f3c387..ef11beaac8 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -88,6 +88,7 @@ jobs: git+https://github.com/rasterio/rasterio \ 
git+https://github.com/pydata/bottleneck \ git+https://github.com/pydata/xarray \ + git+https://github.com/shapely/shapely \ git+https://github.com/astropy/astropy LD_PRELOAD=$(python -c "import sys; print(sys.prefix)")/lib/libstdc++.so echo "LD_PRELOAD=${LD_PRELOAD}" >> $GITHUB_ENV From 8b4e05d5508bd0db31c26b5ccd049844cfd04b6f Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 14:10:42 -0600 Subject: [PATCH 507/702] Add trollimage to unstable dependencies --- .github/workflows/ci.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ef11beaac8..38b69cd477 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -77,10 +77,11 @@ jobs: numpy \ pandas \ scipy - mamba remove --force-remove -y pykdtree pyresample + mamba remove --force-remove -y pykdtree pyresample trollimage python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ + git+https://github.com/pytroll/trollimage \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ git+https://github.com/zarr-developers/zarr \ From a58e9c9af7e77e8eb9cc875430d455cb6cc64658 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 14:21:50 -0600 Subject: [PATCH 508/702] Try adding pyhdf to unstable deps --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 38b69cd477..eb08e0a0bb 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -82,6 +82,7 @@ jobs: git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ git+https://github.com/pytroll/trollimage \ + git+https://github.com/fhs/pyhdf \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ git+https://github.com/zarr-developers/zarr \ From 08159bab2a463b5d77ff3c99dd7420d606cd6a7e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 14:56:27 -0600 Subject: [PATCH 509/702] Try removing pyhdf from conda in unstable CI --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index eb08e0a0bb..36746696e7 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -77,7 +77,7 @@ jobs: numpy \ pandas \ scipy - mamba remove --force-remove -y pykdtree pyresample trollimage + mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ From 4b165c4c2cb1a0727c5cf56aee5000bc42b069c3 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 15:06:03 -0600 Subject: [PATCH 510/702] Add netcdf4-python to unstable CI --- .github/workflows/ci.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 36746696e7..e132c837bd 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -77,12 +77,13 @@ jobs: numpy \ pandas \ scipy - mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf + mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf netcdf4 python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ 
git+https://github.com/pytroll/trollimage \ git+https://github.com/fhs/pyhdf \ + git+https://github.com/Unidata/netcdf4-python \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ git+https://github.com/zarr-developers/zarr \ From f2cbbe55a5adf62fb714e773ad2a1255d662796d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 15:14:38 -0600 Subject: [PATCH 511/702] Add h5py to unstable CI --- .github/workflows/ci.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e132c837bd..dfc594e723 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -77,13 +77,14 @@ jobs: numpy \ pandas \ scipy - mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf netcdf4 + mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf netcdf4 h5py python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ git+https://github.com/pytroll/trollimage \ git+https://github.com/fhs/pyhdf \ git+https://github.com/Unidata/netcdf4-python \ + git+https://github.com/h5py/h5py \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ git+https://github.com/zarr-developers/zarr \ From fe67279d869d219c63448ac2900fdc7760e633c7 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 15:23:23 -0600 Subject: [PATCH 512/702] Add missing unstable deps --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index dfc594e723..e2659bfd98 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -68,7 +68,7 @@ jobs: # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels # may break the conda-forge libraries trying to use newer glibc versions run: | - python -m pip install versioneer extension-helpers setuptools-scm configobj + python -m pip install versioneer extension-helpers setuptools-scm configobj pkgconfig python -m pip install \ --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \ --trusted-host pypi.anaconda.org \ From c9a861f1ec71e060eb57a7066f40002d4a2b6fef Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 15:45:09 -0600 Subject: [PATCH 513/702] Install h5py in unstable CI without build isolation --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e2659bfd98..64c980a4e2 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -78,13 +78,13 @@ jobs: pandas \ scipy mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf netcdf4 h5py + python -m pip install --upgrade --no-deps --pre git+https://github.com/h5py/h5py python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ git+https://github.com/pytroll/trollimage \ git+https://github.com/fhs/pyhdf \ git+https://github.com/Unidata/netcdf4-python \ - git+https://github.com/h5py/h5py \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ git+https://github.com/zarr-developers/zarr \ From 88ae0a6eccb6122b6b016441b7d47a0ad3258133 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 15:59:34 -0600 Subject: [PATCH 514/702] Use h5py cython branch for unstable CI --- 
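The unstable-CI patches above swap conda-forge builds for nightly wheels and git checkouts, so a failing run is easiest to trace when the job also reports which development versions were actually installed. The short Python sketch below is not part of any patch here, and the package list is illustrative only.

# Report the versions picked up after the nightly/git installs performed in
# the CI step above (illustrative package list, not part of the workflow).
import importlib.metadata as md

for pkg in ("numpy", "pandas", "xarray", "dask", "h5py", "netCDF4"):
    try:
        print(f"{pkg}: {md.version(pkg)}")
    except md.PackageNotFoundError:
        print(f"{pkg}: not installed")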
.github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 64c980a4e2..026b9bc50e 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -78,7 +78,7 @@ jobs: pandas \ scipy mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf netcdf4 h5py - python -m pip install --upgrade --no-deps --pre git+https://github.com/h5py/h5py + python -m pip install --upgrade --no-deps --pre git+https://github.com/takluyver/h5py@cython-3 python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ From 9efa3cbac62ffc639e49cfcffd524f98038aca14 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 16:04:13 -0600 Subject: [PATCH 515/702] Fix cython dev h5py install --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 026b9bc50e..14e5e3ffcd 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -78,12 +78,12 @@ jobs: pandas \ scipy mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf netcdf4 h5py - python -m pip install --upgrade --no-deps --pre git+https://github.com/takluyver/h5py@cython-3 python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ git+https://github.com/pytroll/trollimage \ git+https://github.com/fhs/pyhdf \ + git+https://github.com/takluyver/h5py@cython-3 \ git+https://github.com/Unidata/netcdf4-python \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ From d250a9dfba72613107bc995a202196969efee45f Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 23 Nov 2023 09:40:54 +0200 Subject: [PATCH 516/702] Rename _tb11 variable to tb11 --- satpy/modifiers/spectral.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index 28a3804da7..029ee88cb8 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -79,8 +79,8 @@ def _get_reflectance_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith): return proj def _get_nir_inputs(self, projectables, optional_datasets): - nir, _tb11 = projectables - da_tb11 = _tb11.data + nir, tb11 = projectables + da_tb11 = tb11.data da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) da_sun_zenith = self._get_sun_zenith_from_provided_data(nir, optional_datasets, nir.dtype) return (nir, da_tb11, da_tb13_4, da_sun_zenith) From 2ff0b2acad2becd2a86b070e16dfdd73e2432b78 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 23 Nov 2023 16:25:32 +0200 Subject: [PATCH 517/702] Add tolerances to tests affected by Trollimage dtype handling --- satpy/tests/test_modifiers.py | 6 ++++-- satpy/tests/writer_tests/test_ninjogeotiff.py | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 4aece73487..23c22f529e 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -119,13 +119,15 @@ def test_basic_default_not_provided(self, sunz_ds1, as_32bit): sunz_ds1 = sunz_ds1.astype(np.float32) comp = SunZenithCorrector(name="sza_test", modifiers=tuple()) res = comp((sunz_ds1,), test_attr="test") - np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 
22.353533]])) + np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]]), + rtol=1e-6) assert "y" in res.coords assert "x" in res.coords ds1 = sunz_ds1.copy().drop_vars(("y", "x")) res = comp((ds1,), test_attr="test") res_np = res.compute() - np.testing.assert_allclose(res_np.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) + np.testing.assert_allclose(res_np.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]]), + rtol=1e-6) assert res.dtype == res_np.dtype assert "y" not in res.coords assert "x" not in res.coords diff --git a/satpy/tests/writer_tests/test_ninjogeotiff.py b/satpy/tests/writer_tests/test_ninjogeotiff.py index a9c60bdf90..e05150a571 100644 --- a/satpy/tests/writer_tests/test_ninjogeotiff.py +++ b/satpy/tests/writer_tests/test_ninjogeotiff.py @@ -630,7 +630,7 @@ def test_write_and_read_file_units( np.testing.assert_allclose(float(tgs["ninjo_Gradient"]), 0.467717, rtol=1e-5) np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]), - -79.86771) + -79.86771, rtol=1e-5) fn2 = os.fspath(tmp_path / "test2.tif") with caplog.at_level(logging.WARNING): ngtw.save_dataset( From 8dce4219f43ec3870e520f2dcdaa84b15895da9f Mon Sep 17 00:00:00 2001 From: Aaron Rainbolt Date: Thu, 23 Nov 2023 21:05:28 -0600 Subject: [PATCH 518/702] Use assert_called_once rather than called_once in tests --- satpy/tests/scene_tests/test_resampling.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/tests/scene_tests/test_resampling.py b/satpy/tests/scene_tests/test_resampling.py index 6b5f74ee59..cc812839ac 100644 --- a/satpy/tests/scene_tests/test_resampling.py +++ b/satpy/tests/scene_tests/test_resampling.py @@ -319,14 +319,14 @@ def test_resample_reduce_data_toggle(self, rs): assert not slice_data.called assert not get_area_slices.called scene.resample(target_area) - assert slice_data.called_once - assert get_area_slices.called_once + slice_data.assert_called_once + get_area_slices.assert_called_once scene.resample(target_area, reduce_data=True) # 2 times for each dataset # once for default (reduce_data=True) # once for kwarg forced to `True` assert slice_data.call_count == 2 * 3 - assert get_area_slices.called_once + get_area_slices.assert_called_once def test_resample_ancillary(self): """Test that the Scene reducing data does not affect final output.""" From 99c91209fd2e0d60f3680e7332043d491d3c6b4c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 23 Nov 2023 21:18:56 -0600 Subject: [PATCH 519/702] Fix call to assert_called_once in test_resampling.py --- satpy/tests/scene_tests/test_resampling.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/tests/scene_tests/test_resampling.py b/satpy/tests/scene_tests/test_resampling.py index cc812839ac..07e1cc2814 100644 --- a/satpy/tests/scene_tests/test_resampling.py +++ b/satpy/tests/scene_tests/test_resampling.py @@ -319,14 +319,14 @@ def test_resample_reduce_data_toggle(self, rs): assert not slice_data.called assert not get_area_slices.called scene.resample(target_area) - slice_data.assert_called_once - get_area_slices.assert_called_once + slice_data.assert_called_once() + get_area_slices.assert_called_once() scene.resample(target_area, reduce_data=True) # 2 times for each dataset # once for default (reduce_data=True) # once for kwarg forced to `True` assert slice_data.call_count == 2 * 3 - get_area_slices.assert_called_once + get_area_slices.assert_called_once() def test_resample_ancillary(self): """Test that the Scene 
reducing data does not affect final output.""" From f15a2f4b394a784c516992425b361908d96a7bc6 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sat, 25 Nov 2023 19:59:23 -0600 Subject: [PATCH 520/702] Fix expected reduce_data method calls --- satpy/tests/scene_tests/test_resampling.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/satpy/tests/scene_tests/test_resampling.py b/satpy/tests/scene_tests/test_resampling.py index 07e1cc2814..bd4ff6d49b 100644 --- a/satpy/tests/scene_tests/test_resampling.py +++ b/satpy/tests/scene_tests/test_resampling.py @@ -316,17 +316,18 @@ def test_resample_reduce_data_toggle(self, rs): ds_walker.return_value = test_order slice_data.side_effect = orig_slice_data scene.resample(target_area, reduce_data=False) - assert not slice_data.called - assert not get_area_slices.called + slice_data.assert_not_called() + get_area_slices.assert_not_called() scene.resample(target_area) - slice_data.assert_called_once() - get_area_slices.assert_called_once() + assert slice_data.call_count == 3 + assert get_area_slices.call_count == 2 scene.resample(target_area, reduce_data=True) # 2 times for each dataset # once for default (reduce_data=True) # once for kwarg forced to `True` assert slice_data.call_count == 2 * 3 - get_area_slices.assert_called_once() + # reductions are cached, no additional reductions in second call + assert get_area_slices.call_count == 2 def test_resample_ancillary(self): """Test that the Scene reducing data does not affect final output.""" From 43ab244b204287231c0e4145ad6a02dec890361d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 26 Nov 2023 14:20:16 -0600 Subject: [PATCH 521/702] Fix expected number of area slice calls in resample test --- satpy/tests/scene_tests/test_resampling.py | 6 ++++-- satpy/tests/writer_tests/test_ninjogeotiff.py | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/satpy/tests/scene_tests/test_resampling.py b/satpy/tests/scene_tests/test_resampling.py index bd4ff6d49b..d59019e3f7 100644 --- a/satpy/tests/scene_tests/test_resampling.py +++ b/satpy/tests/scene_tests/test_resampling.py @@ -320,14 +320,16 @@ def test_resample_reduce_data_toggle(self, rs): get_area_slices.assert_not_called() scene.resample(target_area) assert slice_data.call_count == 3 - assert get_area_slices.call_count == 2 + assert get_area_slices.call_count == 1 + assert get_area_slices_big.call_count == 1 scene.resample(target_area, reduce_data=True) # 2 times for each dataset # once for default (reduce_data=True) # once for kwarg forced to `True` assert slice_data.call_count == 2 * 3 - # reductions are cached, no additional reductions in second call + # get area slices is called again, once per area assert get_area_slices.call_count == 2 + assert get_area_slices_big.call_count == 2 def test_resample_ancillary(self): """Test that the Scene reducing data does not affect final output.""" diff --git a/satpy/tests/writer_tests/test_ninjogeotiff.py b/satpy/tests/writer_tests/test_ninjogeotiff.py index a9c60bdf90..e05150a571 100644 --- a/satpy/tests/writer_tests/test_ninjogeotiff.py +++ b/satpy/tests/writer_tests/test_ninjogeotiff.py @@ -630,7 +630,7 @@ def test_write_and_read_file_units( np.testing.assert_allclose(float(tgs["ninjo_Gradient"]), 0.467717, rtol=1e-5) np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]), - -79.86771) + -79.86771, rtol=1e-5) fn2 = os.fspath(tmp_path / "test2.tif") with caplog.at_level(logging.WARNING): ngtw.save_dataset( From 2d1ea402c79f83ac3c0ac36a34a4176aa1844d2f Mon Sep 17 
00:00:00 2001 From: Martin Raspaud Date: Mon, 27 Nov 2023 11:55:15 +0100 Subject: [PATCH 522/702] Ensure attributes are correct --- satpy/tests/reader_tests/test_sgli_l1b.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index 9fa8caa8b1..5ae305a068 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -210,16 +210,18 @@ def test_get_vn_dataset_reflectances(sgli_vn_file): assert np.allclose(res[0, :] / 100, FULL_KM_ARRAY[0, :] * 5e-5 - 0.05) assert res.dtype == np.float32 assert res.dims == ("y", "x") - assert res.units == "%" + assert res.attrs["units"] == "%" def test_get_vn_dataset_radiance(sgli_vn_file): """Test that datasets can be calibrated to radiance.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="radiance") - res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", - "standard_name": ""}) + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "W m-2 um-1 sr-1", + "standard_name": "toa_outgoing_radiance_per_unit_wavelength"}) assert np.allclose(res[0, :], FULL_KM_ARRAY[0, :] * np.float32(0.02) - 25) assert res.dtype == np.float32 + assert res.attrs["units"] == "W m-2 um-1 sr-1" + assert res.attrs["standard_name"] == "toa_outgoing_radiance_per_unit_wavelength" def test_channel_is_masked(sgli_vn_file): """Test that channels are masked for no-data.""" From fc41c70e08c27c1c8c0a64d69b2c5a5df4329116 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 27 Nov 2023 19:55:46 +0000 Subject: [PATCH 523/702] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.7.0 → v1.7.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.7.0...v1.7.1) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d84659c6f0..99e77cb56a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -19,7 +19,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.7.0' # Use the sha / tag you want to point at + rev: 'v1.7.1' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: From 0782f8112576ee428726451d5da4ab6385ede4a1 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 28 Nov 2023 08:49:00 +0200 Subject: [PATCH 524/702] Add file pattern for CRRPh of NWC SAF GEO v2021 --- satpy/etc/readers/nwcsaf-geo.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/etc/readers/nwcsaf-geo.yaml b/satpy/etc/readers/nwcsaf-geo.yaml index 29e3b5cc05..e22ae09fc1 100644 --- a/satpy/etc/readers/nwcsaf-geo.yaml +++ b/satpy/etc/readers/nwcsaf-geo.yaml @@ -41,7 +41,8 @@ file_types: nc_nwcsaf_crr-ph: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF - file_patterns: ['S_NWC_CRR-Ph_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] + file_patterns: ['S_NWC_CRR-Ph_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc', + 'S_NWC_CRRPh_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_ishai: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF From 47557a25fb32278384095b5c5e0548fd5c3896d0 Mon Sep 17 
00:00:00 2001 From: Panu Lahtinen Date: Tue, 28 Nov 2023 12:53:40 +0200 Subject: [PATCH 525/702] Update VIRR test to allow floating point differences --- satpy/tests/reader_tests/test_virr_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_virr_l1b.py b/satpy/tests/reader_tests/test_virr_l1b.py index e3fbd73272..f899cd537c 100644 --- a/satpy/tests/reader_tests/test_virr_l1b.py +++ b/satpy/tests/reader_tests/test_virr_l1b.py @@ -158,7 +158,7 @@ def _fy3_helper(self, platform_name, reader, Emissive_units): "solar_azimuth_angle", "sensor_azimuth_angle"] assert ["virr_geoxx", "virr_l1b"] == attributes["file_type"] assert ("longitude", "latitude") == attributes["coordinates"] - assert band_values[dataset["name"]] == round(float(np.array(ds[ds.shape[0] // 2][ds.shape[1] // 2])), 6) + np.testing.assert_allclose(band_values[dataset["name"]], ds[ds.shape[0] // 2][ds.shape[1] // 2], rtol=1e-6) assert "valid_range" not in ds.attrs def test_fy3b_file(self): From 1d86df949213ec92f4bc7dd5599d327916954ad9 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 28 Nov 2023 13:43:44 +0000 Subject: [PATCH 526/702] Rename `subsatellite_longitude` in INSAT-3D --- satpy/readers/insat3d_img_l1b_h5.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/insat3d_img_l1b_h5.py b/satpy/readers/insat3d_img_l1b_h5.py index 9f2224ef82..205f4d17b2 100644 --- a/satpy/readers/insat3d_img_l1b_h5.py +++ b/satpy/readers/insat3d_img_l1b_h5.py @@ -187,7 +187,7 @@ def get_area_def(self, ds_id): a = 6378137.0 b = 6356752.314245 - nom_cen_pos = self.datatree.attrs["Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude"][1] + subsatellite_longitude = self.datatree.attrs["Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude"][1] pdict = { "cfac": cfac, @@ -200,7 +200,7 @@ def get_area_def(self, ds_id): "a": a, "b": b, "h": h, - "ssp_lon": nom_cen_pos, + "ssp_lon": subsatellite_longitude, "a_name": "insat3d82", "a_desc": "insat3d82", "p_id": "geosmsg" From 9e89902a6de27401d86dccbf357e9e9521b393ff Mon Sep 17 00:00:00 2001 From: andream Date: Tue, 28 Nov 2023 14:51:26 +0100 Subject: [PATCH 527/702] update default parameters in modifier init --- satpy/modifiers/geometry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py index cc903ad5e9..1e8841c6bd 100644 --- a/satpy/modifiers/geometry.py +++ b/satpy/modifiers/geometry.py @@ -177,7 +177,7 @@ class SunZenithReducer(SunZenithCorrectorBase): """ - def __init__(self, correction_limit=55., max_sza=90, strength=1.5, **kwargs): # noqa: D417 + def __init__(self, correction_limit=80., max_sza=90, strength=1.3, **kwargs): # noqa: D417 """Collect custom configuration values. Args: From 00df29f826f2990e9071b14216a9afb7d647a63c Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 28 Nov 2023 13:51:43 +0000 Subject: [PATCH 528/702] Refactor date attribute getter for OSI SAF reader. 
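As context for the INSAT-3D change a couple of patches above, here is a minimal sketch, assuming pyresample's AreaDefinition API, of how a subsatellite longitude read from file attributes typically feeds a geostationary area definition. It is not part of any patch here; the grid size and extent are placeholders rather than the real INSAT-3D values.

from pyresample.geometry import AreaDefinition

ssp_lon = 82.0  # illustrative value, as read from the file metadata in the reader above
proj_dict = {"proj": "geos", "lon_0": ssp_lon, "h": 35785831.0,
             "a": 6378137.0, "b": 6356752.314245, "units": "m"}
# Placeholder grid shape and extent, chosen only so the example runs.
area = AreaDefinition("insat3d_fd", "INSAT-3D full disk (illustrative)", "geos_insat",
                      proj_dict, 2816, 2816,
                      (-5638742.0, -5638742.0, 5638742.0, 5638742.0))
print(area.crs.to_proj4())  # should contain "+lon_0=82", the property the new test asserts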
--- satpy/readers/osisaf_l3_nc.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 1affb3a883..2953cc6dfc 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -205,22 +205,22 @@ def _parse_datetime(datestr): @property def start_time(self): - start_t = self.get("/attr/start_date") - if start_t is None: - start_t = self.get("/attr/start_time") - if start_t is None: - start_t = self.get("/attr/time_coverage_start") + poss_names = ["/attr/start_date", "/attr/start_time", "/attr/time_coverage_start"] + for name in poss_names: + start_t = self.get(name) + if start_t is not None: + break if start_t is None: raise ValueError("Unknown start time attribute.") return self._parse_datetime(start_t) @property def end_time(self): - end_t = self.get("/attr/stop_date") - if end_t is None: - end_t = self.get("/attr/stop_time") - if end_t is None: - end_t = self.get("/attr/time_coverage_end") + poss_names = ["/attr/stop_date", "/attr/stop_time", "/attr/time_coverage_end"] + for name in poss_names: + end_t = self.get(name) + if end_t is not None: + break if end_t is None: raise ValueError("Unknown stop time attribute.") return self._parse_datetime(end_t) From 2ad06aa6a0b06fa41712c69266ca57808e482985 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 28 Nov 2023 15:22:20 +0100 Subject: [PATCH 529/702] Test Insat sublon is not hardcoded --- satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py index 1886560402..92aef2b906 100644 --- a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py +++ b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py @@ -74,6 +74,7 @@ start_time = datetime(2009, 6, 9, 9, 0) end_time = datetime(2009, 6, 9, 9, 30) +subsatellite_longitude = 82 time_pattern = "%d-%b-%YT%H:%M:%S" @@ -81,7 +82,7 @@ "Field_of_View(degrees)": 17.973925, "Acquisition_Start_Time": start_time.strftime(time_pattern), "Acquisition_End_Time": end_time.strftime(time_pattern), - "Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude": [0.0, 82.0], + "Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude": [0.0, subsatellite_longitude], "Nominal_Altitude(km)": 36000.0, } @@ -243,6 +244,7 @@ def test_insat3d_has_orbital_parameters(insat_filehandler): assert "orbital_parameters" in darr.attrs assert "satellite_nominal_longitude" in darr.attrs["orbital_parameters"] + assert darr.attrs["orbital_parameters"]["satellite_nominal_longitude"] == subsatellite_longitude assert "satellite_nominal_latitude" in darr.attrs["orbital_parameters"] assert "satellite_nominal_altitude" in darr.attrs["orbital_parameters"] assert "satellite_actual_altitude" in darr.attrs["orbital_parameters"] @@ -276,6 +278,7 @@ def test_filehandler_returns_area(insat_filehandler): ds_id = make_dataid(name="MIR", resolution=4000, calibration="brightness_temperature") area_def = fh.get_area_def(ds_id) lons, lats = area_def.get_lonlats(chunks=1000) + assert "+lon_0=" + str(subsatellite_longitude) in area_def.crs.to_proj4() def test_filehandler_has_start_and_end_time(insat_filehandler): From a607fbd489bd698882b69789391c05682db33989 Mon Sep 17 00:00:00 2001 From: andream Date: Tue, 28 Nov 2023 15:33:59 +0100 Subject: [PATCH 530/702] remove defaults from docstrings, and include parameters in log message --- satpy/modifiers/geometry.py | 
9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py index 1e8841c6bd..693529f9dd 100644 --- a/satpy/modifiers/geometry.py +++ b/satpy/modifiers/geometry.py @@ -181,10 +181,10 @@ def __init__(self, correction_limit=80., max_sza=90, strength=1.3, **kwargs): # """Collect custom configuration values. Args: - correction_limit (float): Solar zenith angle in degrees where to start the signal reduction. Default 60. + correction_limit (float): Solar zenith angle in degrees where to start the signal reduction. max_sza (float): Maximum solar zenith angle in degrees where to apply the signal reduction. Beyond - this solar zenith angle the signal will become zero. Default 90. - strength (float): The strength of the non-linear signal reduction. Default 1.5 + this solar zenith angle the signal will become zero. + strength (float): The strength of the non-linear signal reduction. """ self.correction_limit = correction_limit @@ -194,7 +194,8 @@ def __init__(self, correction_limit=80., max_sza=90, strength=1.3, **kwargs): # raise ValueError("`max_sza` must be defined when using the SunZenithReducer.") def _apply_correction(self, proj, coszen): - logger.debug("Apply sun-zenith signal reduction") + logger.debug(f"Applying sun-zenith signal reduction with correction_limit {self.correction_limit} deg," + f" strength {self.strength}, and max_sza {self.max_sza} deg.") res = proj.copy() sunz = np.rad2deg(np.arccos(coszen.data)) res.data = sunzen_reduction(proj.data, sunz, From 79585f152830089ae654d7fa640faa818c3608df Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 28 Nov 2023 15:40:15 +0100 Subject: [PATCH 531/702] Fix style --- satpy/modifiers/spectral.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index e37f6d3c9f..18d1df2379 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -164,7 +164,6 @@ def __call__(self, projectables, optional_datasets=None, **info): def _get_emissivity_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith): """Get the emissivity as a dataarray.""" - logger.info("Getting emissive part of %s", nir.attrs["name"]) emissivity = self._get_emissivity_as_dask(nir.data, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) From 98061ec7e6207554c10366533226b66ecefc4fb5 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 28 Nov 2023 15:46:05 +0100 Subject: [PATCH 532/702] Fix style --- satpy/modifiers/spectral.py | 1 - satpy/readers/osisaf_l3_nc.py | 3 ++- satpy/tests/reader_tests/test_osisaf_l3.py | 5 ++++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index e37f6d3c9f..18d1df2379 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -164,7 +164,6 @@ def __call__(self, projectables, optional_datasets=None, **info): def _get_emissivity_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith): """Get the emissivity as a dataarray.""" - logger.info("Getting emissive part of %s", nir.attrs["name"]) emissivity = self._get_emissivity_as_dask(nir.data, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 2953cc6dfc..56d4773a43 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -120,7 +120,6 @@ def get_area_def(self, area_id): def _get_ds_units(self, ds_info, var_path): """Find the units of the datasets.""" - file_units = 
ds_info.get("file_units") if file_units is None: file_units = self.get(var_path + "/attr/units") @@ -205,6 +204,7 @@ def _parse_datetime(datestr): @property def start_time(self): + """Get the start time.""" poss_names = ["/attr/start_date", "/attr/start_time", "/attr/time_coverage_start"] for name in poss_names: start_t = self.get(name) @@ -216,6 +216,7 @@ def start_time(self): @property def end_time(self): + """Get the end time.""" poss_names = ["/attr/stop_date", "/attr/stop_time", "/attr/time_coverage_end"] for name in poss_names: end_t = self.get(name) diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index a9a595202b..3fa9e5bb35 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -178,7 +178,6 @@ def test_get_dataset(self, tmp_path): def test_get_start_and_end_times(self, tmp_path): """Test retrieval of the sensor name from the netCDF file.""" - tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) @@ -202,6 +201,7 @@ class TestOSISAFL3ReaderICE(OSISAFL3ReaderTests): """Test OSI-SAF level 3 netCDF reader ice files.""" def setup_method(self): + """Set up the tests.""" super().setup_method(tester="ice") self.filename_info = {"grid": "ease"} self.filetype_info = {"file_type": "osi_sea_ice_conc"} @@ -258,6 +258,7 @@ class TestOSISAFL3ReaderFluxStere(OSISAFL3ReaderTests): """Test OSI-SAF level 3 netCDF reader flux files on stereographic grid.""" def setup_method(self): + """Set up the tests.""" super().setup_method(tester="flux_stere") self.filename_info = {"grid": "polstere"} self.filetype_info = {"file_type": "osi_radflux_stere"} @@ -294,6 +295,7 @@ class TestOSISAFL3ReaderFluxGeo(OSISAFL3ReaderTests): """Test OSI-SAF level 3 netCDF reader flux files on lat/lon grid (GEO sensors).""" def setup_method(self): + """Set up the tests.""" super().setup_method(tester="flux_geo") self.filename_info = {} self.filetype_info = {"file_type": "osi_radflux_grid"} @@ -329,6 +331,7 @@ class TestOSISAFL3ReaderSST(OSISAFL3ReaderTests): """Test OSI-SAF level 3 netCDF reader surface temperature files.""" def setup_method(self): + """Set up the tests.""" super().setup_method(tester="sst") self.filename_info = {} self.filetype_info = {"file_type": "osi_sst"} From 6d26c565171a41ff3a2fc755f42b29f3d5746768 Mon Sep 17 00:00:00 2001 From: andream Date: Tue, 28 Nov 2023 15:51:58 +0100 Subject: [PATCH 533/702] add true_color_fully_sunzencorrected --- satpy/etc/composites/fci.yaml | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 336ae415b3..366b8bbc20 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -33,6 +33,20 @@ composites: - name: vis_08 standard_name: toa_bidirectional_reflectance + ndvi_hybrid_green_fully_sunzencorrected: + description: Same as ndvi_hybrid_green, but without Sun-zenith reduction + compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen + limits: [ 0.15, 0.05 ] + strength: 3.0 + prerequisites: + - name: vis_05 + modifiers: [ sunz_corrected, rayleigh_corrected ] + - name: vis_06 + modifiers: [ sunz_corrected, rayleigh_corrected ] + - name: vis_08 + modifiers: [ sunz_corrected ] + standard_name: toa_bidirectional_reflectance + binary_cloud_mask: # This will set all clear pixels to '0', all pixles with cloudy features (meteorological/dust/ash clouds) to '1' and # missing/undefined pixels to 'nan'. 
This can be used for the the official EUMETSAT cloud mask product (CLM). @@ -56,6 +70,19 @@ composites: modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced] standard_name: true_color + true_color_fully_sunzencorrected: + compositor: !!python/name:satpy.composites.SelfSharpenedRGB + description: > + Same as true_color, but without Sun-zenith reduction. For users that want to maintain as much data as possible + close to the terminator, at cost of some artefacts (bright limb and reddish clouds) (see issue #2643). + prerequisites: + - name: vis_06 + modifiers: [sunz_corrected, rayleigh_corrected] + - name: ndvi_hybrid_green_fully_sunzencorrected + - name: vis_04 + modifiers: [sunz_corrected, rayleigh_corrected] + standard_name: true_color + true_color_raw_with_corrected_green: compositor: !!python/name:satpy.composites.SelfSharpenedRGB description: > From b4e8fa572d35b55f84151c61a73d071391f697fb Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 28 Nov 2023 15:54:31 +0100 Subject: [PATCH 534/702] Fix style --- satpy/cf/attrs.py | 2 +- satpy/cf/coords.py | 2 +- satpy/modifiers/spectral.py | 1 - satpy/tests/cf_tests/test_area.py | 2 ++ 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/satpy/cf/attrs.py b/satpy/cf/attrs.py index f9d49416c8..cdec8500d4 100644 --- a/satpy/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -97,7 +97,7 @@ def _encode_object(obj): def _try_decode_object(obj): - """Try to decode byte string""" + """Try to decode byte string.""" try: decoded = obj.decode() except AttributeError: diff --git a/satpy/cf/coords.py b/satpy/cf/coords.py index 48a0748509..9220632fcb 100644 --- a/satpy/cf/coords.py +++ b/satpy/cf/coords.py @@ -181,7 +181,7 @@ def ensure_unique_nondimensional_coords( this is not applied to latitude and longitude. Args: - datas: + data_arrays: Dictionary of (dataset name, dataset) pretty: Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. 
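A usage sketch for the fully Sun-zenith-corrected FCI true-colour recipe added in the fci.yaml patch above. It is not part of these patches; the input file glob, target area, and output filename are placeholders.

from glob import glob

from satpy import Scene

fci_files = glob("/path/to/fci/l1c/*.nc")  # placeholder path
scn = Scene(filenames=fci_files, reader="fci_l1c_nc")
scn.load(["true_color_fully_sunzencorrected"])
local = scn.resample("eurol")  # any target area works; "eurol" is only an example
local.save_dataset("true_color_fully_sunzencorrected", filename="fci_true_color.png")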
diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index e37f6d3c9f..18d1df2379 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -164,7 +164,6 @@ def __call__(self, projectables, optional_datasets=None, **info): def _get_emissivity_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith): """Get the emissivity as a dataarray.""" - logger.info("Getting emissive part of %s", nir.attrs["name"]) emissivity = self._get_emissivity_as_dask(nir.data, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) diff --git a/satpy/tests/cf_tests/test_area.py b/satpy/tests/cf_tests/test_area.py index 31b51b6cd9..ee24d0e10d 100644 --- a/satpy/tests/cf_tests/test_area.py +++ b/satpy/tests/cf_tests/test_area.py @@ -27,6 +27,7 @@ @pytest.fixture() def input_data_arr() -> xr.DataArray: + """Create a data array.""" return xr.DataArray( data=[[1, 2], [3, 4]], dims=("y", "x"), @@ -59,6 +60,7 @@ def test_area2cf_geos_area_nolonlats(self, input_data_arr, include_lonlats): assert "latitude" in res[1].coords def test_area2cf_swath(self, input_data_arr): + """Test area2cf for swath definitions.""" swath = SwathDefinition(lons=[[1, 1], [2, 2]], lats=[[1, 2], [1, 2]]) input_data_arr.attrs["area"] = swath From ea6d6e035aa9222e3fe2def19d81ee3a1faf7afa Mon Sep 17 00:00:00 2001 From: andream Date: Tue, 28 Nov 2023 16:11:57 +0100 Subject: [PATCH 535/702] update defaults test --- satpy/tests/test_modifiers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 81ce5f3ad8..0c8eb51b3f 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -179,7 +179,7 @@ def test_default_settings(self, sunz_ds1, sunz_sza): """Test default settings with sza data available.""" res = self.default((sunz_ds1, sunz_sza), test_attr="test") np.testing.assert_allclose(res.values, - np.array([[0.00242814, 0.00235669], [0.00245885, 0.00238707]]), + np.array([[0.02916261, 0.02839063], [0.02949383, 0.02871911]]), rtol=1e-5) def test_custom_settings(self, sunz_ds1, sunz_sza): From 4dd1920a1ed98c1438cd9a319a9fb632bbec0613 Mon Sep 17 00:00:00 2001 From: andream Date: Tue, 28 Nov 2023 16:55:58 +0100 Subject: [PATCH 536/702] make ruff happy --- satpy/modifiers/spectral.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index e37f6d3c9f..18d1df2379 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -164,7 +164,6 @@ def __call__(self, projectables, optional_datasets=None, **info): def _get_emissivity_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith): """Get the emissivity as a dataarray.""" - logger.info("Getting emissive part of %s", nir.attrs["name"]) emissivity = self._get_emissivity_as_dask(nir.data, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) From e338294dd2b4924c11c7b08b22b28bc150ecd6f5 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 29 Nov 2023 08:08:58 +0100 Subject: [PATCH 537/702] Update changelog for v0.45.0 --- CHANGELOG.md | 60 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 60 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4b9ab4e1b7..aa85b83f56 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,63 @@ +## Version 0.45.0 (2023/11/29) + +### Issues Closed + +* [Issue 2646](https://github.com/pytroll/satpy/issues/2646) - satpy/tests/scene_tests/test_resampling.py is using called_once in assertions rather than assert_called_once, causing test failures on Python 
3.12 ([PR 2648](https://github.com/pytroll/satpy/pull/2648) by [@ArrayBolt3](https://github.com/ArrayBolt3)) +* [Issue 2643](https://github.com/pytroll/satpy/issues/2643) - SunZenithReducer defaults make True Color FCI imagery too dark at high solar zenith angles ([PR 2653](https://github.com/pytroll/satpy/pull/2653) by [@ameraner](https://github.com/ameraner)) +* [Issue 2638](https://github.com/pytroll/satpy/issues/2638) - Update AVHRR EPS reader to read cloud flags information ([PR 2639](https://github.com/pytroll/satpy/pull/2639) by [@fwfichtner](https://github.com/fwfichtner)) +* [Issue 2619](https://github.com/pytroll/satpy/issues/2619) - NDVI hybrid green correction triggers early dask computations ([PR 2623](https://github.com/pytroll/satpy/pull/2623) by [@pnuu](https://github.com/pnuu)) +* [Issue 2614](https://github.com/pytroll/satpy/issues/2614) - DayNightCompositor triggers early dask computation ([PR 2617](https://github.com/pytroll/satpy/pull/2617) by [@pnuu](https://github.com/pnuu)) +* [Issue 2613](https://github.com/pytroll/satpy/issues/2613) - modifier NIREmissivePartFromReflectance triggers early dask computation +* [Issue 2604](https://github.com/pytroll/satpy/issues/2604) - grid_mapping attrs lead to failure of cf writer +* [Issue 2601](https://github.com/pytroll/satpy/issues/2601) - Is the 31(32)-band read by the modis_l1b reader converted to bright temperature by default? + +In this release 8 issues were closed. + +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2648](https://github.com/pytroll/satpy/pull/2648) - Fix assert_called_once usage in resample tests ([2646](https://github.com/pytroll/satpy/issues/2646)) +* [PR 2635](https://github.com/pytroll/satpy/pull/2635) - Fix nwcsaf_geo start time to be nominal time +* [PR 2627](https://github.com/pytroll/satpy/pull/2627) - Fix ABI readers using wrong dtype for resolution-based chunks +* [PR 2625](https://github.com/pytroll/satpy/pull/2625) - Cleanup various warnings encountered during tests +* [PR 2623](https://github.com/pytroll/satpy/pull/2623) - Fix unnecessary Dask `compute()`s in `NDVIHybridGreen` compositor ([2619](https://github.com/pytroll/satpy/issues/2619)) +* [PR 2617](https://github.com/pytroll/satpy/pull/2617) - Reduce Dask computations in `DayNightCompositor` ([2614](https://github.com/pytroll/satpy/issues/2614)) +* [PR 2608](https://github.com/pytroll/satpy/pull/2608) - Fix ABI L2 to only convert reflectances to percentages +* [PR 2607](https://github.com/pytroll/satpy/pull/2607) - Fix ABI L2 reader to produce reflectances as percentages +* [PR 2606](https://github.com/pytroll/satpy/pull/2606) - Change platform name for EPIC (DSCOVR) to upper case. 
+* [PR 2585](https://github.com/pytroll/satpy/pull/2585) - Make caching warn if some of the args are unhashable + +#### Features added + +* [PR 2653](https://github.com/pytroll/satpy/pull/2653) - Update Sun-zenith reducer defaults ([2643](https://github.com/pytroll/satpy/issues/2643)) +* [PR 2652](https://github.com/pytroll/satpy/pull/2652) - Add file pattern for CRRPh of NWC SAF GEO v2021 +* [PR 2642](https://github.com/pytroll/satpy/pull/2642) - Set dtype for get_lonlats() in NIR reflectance calculation +* [PR 2640](https://github.com/pytroll/satpy/pull/2640) - Keep original dtype in DayNightCompositor +* [PR 2639](https://github.com/pytroll/satpy/pull/2639) - Update AVHRR EPS reader to read cloud flags information ([2638](https://github.com/pytroll/satpy/issues/2638)) +* [PR 2637](https://github.com/pytroll/satpy/pull/2637) - Keep FCI data as 32-bit floats +* [PR 2632](https://github.com/pytroll/satpy/pull/2632) - Add reader for OSI SAF L3 products +* [PR 2631](https://github.com/pytroll/satpy/pull/2631) - Add a reader for MODIS Level 3 files in CMG format. +* [PR 2623](https://github.com/pytroll/satpy/pull/2623) - Fix unnecessary Dask `compute()`s in `NDVIHybridGreen` compositor ([2619](https://github.com/pytroll/satpy/issues/2619)) +* [PR 2621](https://github.com/pytroll/satpy/pull/2621) - Add resolution-based chunking to ABI L1b reader +* [PR 2610](https://github.com/pytroll/satpy/pull/2610) - Remove legacy resampler code + +#### Clean ups + +* [PR 2648](https://github.com/pytroll/satpy/pull/2648) - Fix assert_called_once usage in resample tests ([2646](https://github.com/pytroll/satpy/issues/2646)) +* [PR 2641](https://github.com/pytroll/satpy/pull/2641) - Add "A" and "D" checks to ruff config +* [PR 2634](https://github.com/pytroll/satpy/pull/2634) - Remove duplicate entries of required netcdf variables in FCI reader +* [PR 2625](https://github.com/pytroll/satpy/pull/2625) - Cleanup various warnings encountered during tests +* [PR 2624](https://github.com/pytroll/satpy/pull/2624) - Replace assertRaises with pytest.raises +* [PR 2621](https://github.com/pytroll/satpy/pull/2621) - Add resolution-based chunking to ABI L1b reader +* [PR 2612](https://github.com/pytroll/satpy/pull/2612) - Remove tests for removed and deprecated functionality +* [PR 2610](https://github.com/pytroll/satpy/pull/2610) - Remove legacy resampler code +* [PR 2586](https://github.com/pytroll/satpy/pull/2586) - Replace flake8 with ruff in pre-commit and ci linting +* [PR 2524](https://github.com/pytroll/satpy/pull/2524) - Refactor CFWriter utility into CF directory + +In this release 31 pull requests were closed. 
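The mock pitfall behind issue 2646 and PR 2648 listed above is easy to reproduce. The sketch below is an aside rather than part of the changelog, and the version note follows the issue report (failures appearing on Python 3.12).

from unittest import mock

m = mock.Mock()
# `assert m.called_once` passed silently on older Pythons because the unknown
# attribute auto-creates a truthy child Mock; Python 3.12 raises AttributeError
# for it instead, which is what made the old tests fail.
method = m.assert_called_once  # the bound method object is truthy but verifies nothing
try:
    m.assert_called_once()     # only calling it performs the real check
except AssertionError:
    print("assert_called_once() correctly reports that m was never called")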
+ + ## Version 0.44.0 (2023/10/17) ### Issues Closed From bd0d423d8306e29e3f91a41d3df32e82867897df Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 29 Nov 2023 09:48:31 +0100 Subject: [PATCH 538/702] Skip some sgli tests for python <=3.9 --- satpy/tests/reader_tests/test_sgli_l1b.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index 5ae305a068..0ae65bb8d3 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -1,4 +1,5 @@ """Tests for the SGLI L1B backend.""" +import sys from datetime import datetime, timedelta import dask @@ -248,6 +249,7 @@ def test_channel_is_chunked(sgli_vn_file): "standard_name": ""}) assert res.chunks[0][0] > 116 +@pytest.mark.skipif(sys.version_info <= (3, 9)) def test_loading_lon_lat(sgli_vn_file): """Test that loading lons and lats works.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) @@ -259,6 +261,7 @@ def test_loading_lon_lat(sgli_vn_file): assert res.dtype == np.float32 assert res.dims == ("y", "x") +@pytest.mark.skipif(sys.version_info <= (3, 9)) def test_loading_sensor_angles(sgli_vn_file): """Test loading the satellite angles.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) @@ -270,6 +273,7 @@ def test_loading_sensor_angles(sgli_vn_file): assert res.dtype == np.float32 assert res.min() >= 0 +@pytest.mark.skipif(sys.version_info <= (3, 9)) def test_loading_solar_angles(sgli_vn_file): """Test loading sun angles.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) @@ -307,6 +311,7 @@ def test_get_ti_dataset_bt(sgli_ir_file): _ = handler.get_dataset(did, {"file_key": "Image_data/Lt_TI01", "units": "K", "standard_name": "toa_brightness_temperature"}) +@pytest.mark.skipif(sys.version_info <= (3, 9)) def test_get_ti_lon_lats(sgli_ir_file): """Test getting the lons and lats for IR channels.""" handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) From a7d6674637b695b7f48bd611e7d0453d039c741f Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 29 Nov 2023 16:53:34 +0100 Subject: [PATCH 539/702] Fix skipif with reason --- satpy/tests/reader_tests/test_sgli_l1b.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index 0ae65bb8d3..85feb1ffaa 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -249,7 +249,7 @@ def test_channel_is_chunked(sgli_vn_file): "standard_name": ""}) assert res.chunks[0][0] > 116 -@pytest.mark.skipif(sys.version_info <= (3, 9)) +@pytest.mark.skipif(sys.version_info <= (3, 9), reason="Python 3.10 or higher needed for geotiepoints") def test_loading_lon_lat(sgli_vn_file): """Test that loading lons and lats works.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) @@ -261,7 +261,7 @@ def test_loading_lon_lat(sgli_vn_file): assert res.dtype == np.float32 assert res.dims == ("y", "x") -@pytest.mark.skipif(sys.version_info <= (3, 9)) +@pytest.mark.skipif(sys.version_info <= (3, 9), reason="Python 3.10 or higher needed for geotiepoints") def test_loading_sensor_angles(sgli_vn_file): """Test loading the satellite angles.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) @@ -273,7 +273,7 @@ def test_loading_sensor_angles(sgli_vn_file): assert res.dtype == np.float32 assert res.min() >= 0 -@pytest.mark.skipif(sys.version_info <= (3, 9)) +@pytest.mark.skipif(sys.version_info <= 
(3, 9), reason="Python 3.10 or higher needed for geotiepoints") def test_loading_solar_angles(sgli_vn_file): """Test loading sun angles.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) @@ -311,7 +311,7 @@ def test_get_ti_dataset_bt(sgli_ir_file): _ = handler.get_dataset(did, {"file_key": "Image_data/Lt_TI01", "units": "K", "standard_name": "toa_brightness_temperature"}) -@pytest.mark.skipif(sys.version_info <= (3, 9)) +@pytest.mark.skipif(sys.version_info <= (3, 9), reason="Python 3.10 or higher needed for geotiepoints") def test_get_ti_lon_lats(sgli_ir_file): """Test getting the lons and lats for IR channels.""" handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) From 8f5258afeee55c3834cc40921a9f1d92b279916c Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 29 Nov 2023 17:06:59 +0100 Subject: [PATCH 540/702] Fix skipif again --- satpy/tests/reader_tests/test_sgli_l1b.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index 85feb1ffaa..c2b8bd5167 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -249,7 +249,7 @@ def test_channel_is_chunked(sgli_vn_file): "standard_name": ""}) assert res.chunks[0][0] > 116 -@pytest.mark.skipif(sys.version_info <= (3, 9), reason="Python 3.10 or higher needed for geotiepoints") +@pytest.mark.skipif(sys.version_info < (3, 10), reason="Python 3.10 or higher needed for geotiepoints") def test_loading_lon_lat(sgli_vn_file): """Test that loading lons and lats works.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) @@ -261,7 +261,7 @@ def test_loading_lon_lat(sgli_vn_file): assert res.dtype == np.float32 assert res.dims == ("y", "x") -@pytest.mark.skipif(sys.version_info <= (3, 9), reason="Python 3.10 or higher needed for geotiepoints") +@pytest.mark.skipif(sys.version_info < (3, 10), reason="Python 3.10 or higher needed for geotiepoints") def test_loading_sensor_angles(sgli_vn_file): """Test loading the satellite angles.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) @@ -273,7 +273,7 @@ def test_loading_sensor_angles(sgli_vn_file): assert res.dtype == np.float32 assert res.min() >= 0 -@pytest.mark.skipif(sys.version_info <= (3, 9), reason="Python 3.10 or higher needed for geotiepoints") +@pytest.mark.skipif(sys.version_info < (3, 10), reason="Python 3.10 or higher needed for geotiepoints") def test_loading_solar_angles(sgli_vn_file): """Test loading sun angles.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) @@ -311,7 +311,7 @@ def test_get_ti_dataset_bt(sgli_ir_file): _ = handler.get_dataset(did, {"file_key": "Image_data/Lt_TI01", "units": "K", "standard_name": "toa_brightness_temperature"}) -@pytest.mark.skipif(sys.version_info <= (3, 9), reason="Python 3.10 or higher needed for geotiepoints") +@pytest.mark.skipif(sys.version_info < (3, 10), reason="Python 3.10 or higher needed for geotiepoints") def test_get_ti_lon_lats(sgli_ir_file): """Test getting the lons and lats for IR channels.""" handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) From 9c9ae0897836c89f122793fc222c1ef6a67fc39f Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 30 Nov 2023 14:42:11 +0100 Subject: [PATCH 541/702] Fix missing radiance units in eps l1b --- satpy/readers/eps_l1b.py | 7 ++++--- satpy/tests/reader_tests/test_eps_l1b.py | 11 +++++++++++ 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/satpy/readers/eps_l1b.py 
b/satpy/readers/eps_l1b.py index bd03f40820..fbeb3ecba6 100644 --- a/satpy/readers/eps_l1b.py +++ b/satpy/readers/eps_l1b.py @@ -81,10 +81,10 @@ def read_records(filename): grh = np.fromfile(fdes, grh_dtype, 1) if grh.size == 0: break - rec_class = record_class[int(grh["record_class"])] + rec_class = record_class[int(grh["record_class"].squeeze())] sub_class = grh["RECORD_SUBCLASS"][0] - expected_size = int(grh["RECORD_SIZE"]) + expected_size = int(grh["RECORD_SIZE"].squeeze()) bare_size = expected_size - grh_dtype.itemsize try: the_type = form.dtype((rec_class, sub_class)) @@ -144,7 +144,8 @@ class EPSAVHRRFile(BaseFileHandler): sensors = {"AVHR": "avhrr-3"} units = {"reflectance": "%", - "brightness_temperature": "K"} + "brightness_temperature": "K", + "radiance": "W m^-2 sr^-1"} def __init__(self, filename, filename_info, filetype_info): """Initialize FileHandler.""" diff --git a/satpy/tests/reader_tests/test_eps_l1b.py b/satpy/tests/reader_tests/test_eps_l1b.py index d9a484face..16c7cfe27b 100644 --- a/satpy/tests/reader_tests/test_eps_l1b.py +++ b/satpy/tests/reader_tests/test_eps_l1b.py @@ -134,6 +134,17 @@ def test_dataset(self): assert res.attrs["calibration"] == "brightness_temperature" assert res.attrs["units"] == "K" + def test_get_dataset_radiance(self): + """Test loading a data array with radiance calibration.""" + did = make_dataid(name="1", calibration="radiance") + res = self.fh.get_dataset(did, {}) + assert isinstance(res, xr.DataArray) + assert res.attrs["platform_name"] == "Metop-C" + assert res.attrs["sensor"] == "avhrr-3" + assert res.attrs["name"] == "1" + assert res.attrs["calibration"] == "radiance" + assert res.attrs["units"] == "W m^-2 sr^-1" + def test_navigation(self): """Test the navigation.""" did = make_dataid(name="longitude") From aac3016f0d1287c619c5682d45a3ccf7d6428ffc Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 1 Dec 2023 11:17:35 -0600 Subject: [PATCH 542/702] Update unstable version of h5py in CI --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 14e5e3ffcd..2eff205013 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -83,7 +83,7 @@ jobs: git+https://github.com/pytroll/pyresample \ git+https://github.com/pytroll/trollimage \ git+https://github.com/fhs/pyhdf \ - git+https://github.com/takluyver/h5py@cython-3 \ + git+https://github.com/djhoese/h5py@cython-3-davidh \ git+https://github.com/Unidata/netcdf4-python \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ From e8938cbef0e5b4473a63b95fde3ec37b1b50ce13 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 1 Dec 2023 21:33:26 -0600 Subject: [PATCH 543/702] Add h5netcdf to unstable CI --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2eff205013..9931130d53 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -84,6 +84,7 @@ jobs: git+https://github.com/pytroll/trollimage \ git+https://github.com/fhs/pyhdf \ git+https://github.com/djhoese/h5py@cython-3-davidh \ + git+https://github.com/h5netcdf/h5netcdf \ git+https://github.com/Unidata/netcdf4-python \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ From 3c7e6deeaee713aa9be2d93d7cae5fced8cd5f74 Mon Sep 17 00:00:00 2001 From: Kexin828 <149068356+Kexin828@users.noreply.github.com> Date: Sun, 3 Dec 2023 18:16:53 +0800 Subject: [PATCH 544/702] Update 
tropomi_l2.yaml MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The Tropomi l2 reader can read ’S5P_OFFL_L2__NO2____‘ files but cannot read 'S5P_RPRO_L2__NO2____' files. Because 'S5P_RPRO_L2__NO2____' files have reduced in their names, which means the pattern defined here doesn't match. Adding a new pattern to the pattern list with the reduced added to the end can work. --- satpy/etc/readers/tropomi_l2.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/satpy/etc/readers/tropomi_l2.yaml b/satpy/etc/readers/tropomi_l2.yaml index 3e961f7d56..181311d25d 100644 --- a/satpy/etc/readers/tropomi_l2.yaml +++ b/satpy/etc/readers/tropomi_l2.yaml @@ -14,6 +14,7 @@ file_types: file_reader: !!python/name:satpy.readers.tropomi_l2.TROPOMIL2FileHandler file_patterns: - '{platform_shortname:3s}_{data_type:4s}_{level:3s}_{product:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit:5d}_{collection:2d}_{processor_version:6d}_{creation_time:%Y%m%dT%H%M%S}.nc' + - '{platform_shortname:3s}_RPRO_{level:3s}_{product:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit:5d}_{collection:2d}_{processor_version:6d}_{creation_time:%Y%m%dT%H%M%S}_reduced.nc' datasets: latitude: From b7167249133f39708d11c72feb90545afbdd64a7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Dec 2023 10:45:42 +0000 Subject: [PATCH 545/702] Bump conda-incubator/setup-miniconda from 2 to 3 Bumps [conda-incubator/setup-miniconda](https://github.com/conda-incubator/setup-miniconda) from 2 to 3. - [Release notes](https://github.com/conda-incubator/setup-miniconda/releases) - [Changelog](https://github.com/conda-incubator/setup-miniconda/blob/main/CHANGELOG.md) - [Commits](https://github.com/conda-incubator/setup-miniconda/compare/v2...v3) --- updated-dependencies: - dependency-name: conda-incubator/setup-miniconda dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 14e5e3ffcd..30acd9ec08 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -36,7 +36,7 @@ jobs: uses: actions/checkout@v4 - name: Setup Conda Environment - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: miniforge-variant: Mambaforge miniforge-version: latest From 1f2041c069e7c9ff54ddf2691aa3389a189d0dd5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Dec 2023 10:45:51 +0000 Subject: [PATCH 546/702] Bump pypa/gh-action-pypi-publish from 1.8.10 to 1.8.11 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.10 to 1.8.11. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.10...v1.8.11) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 4ed63fefdd..9fd1d86b5a 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -23,7 +23,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.8.10 + uses: pypa/gh-action-pypi-publish@v1.8.11 with: user: __token__ password: ${{ secrets.pypi_password }} From 2aab8c4b2cbb4e3b77540dd794ba11439b517467 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 4 Dec 2023 11:56:37 +0100 Subject: [PATCH 547/702] Address a few review comments --- satpy/readers/sgli_l1b.py | 37 ----------------------- satpy/tests/reader_tests/test_sgli_l1b.py | 16 +--------- 2 files changed, 1 insertion(+), 52 deletions(-) diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 61a4b61f9d..67b8d46816 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -35,9 +35,6 @@ import numpy as np import xarray as xr from dask.array.core import normalize_chunks -from xarray import Dataset, Variable -from xarray.backends import BackendArray, BackendEntrypoint -from xarray.core import indexing # from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler @@ -230,37 +227,3 @@ def get_full_angles(self, azi, zen, attrs): new_azi, new_zen = self.interpolate_spherical(azi, zen, resampling_interval) return new_azi, new_zen + 90 return azi, zen - - -class H5Array(BackendArray): - """An Hdf5-based array.""" - - def __init__(self, array): - """Initialize the array.""" - self.shape = array.shape - self.dtype = array.dtype - self.array = array - - def __getitem__(self, key): - """Get a slice of the array.""" - return indexing.explicit_indexing_adapter( - key, self.shape, indexing.IndexingSupport.BASIC, self._getitem - ) - - def _getitem(self, key): - return self.array[key] - - -class SGLIBackend(BackendEntrypoint): - """The SGLI backend.""" - - def open_dataset(self, filename, *, drop_variables=None): - """Open the dataset.""" - ds = Dataset() - h5f = h5py.File(filename) - h5_arr = h5f["Image_data"]["Lt_VN01"] - chunks = dict(zip(("y", "x"), h5_arr.chunks)) - ds["Lt_VN01"] = Variable(["y", "x"], - indexing.LazilyIndexedArray(H5Array(h5_arr)), - encoding={"preferred_chunks": chunks}) - return ds diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index c2b8bd5167..1512e38762 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -3,12 +3,9 @@ from datetime import datetime, timedelta import dask -import dask.array as da import h5py import numpy as np import pytest -from dask.array.core import normalize_chunks -from xarray import DataArray, Dataset, open_dataset from satpy.readers.sgli_l1b import HDF5SGLI @@ -21,18 +18,7 @@ ZEN_ARRAY = np.random.randint(0, 180 * 100, size=(197, 126), dtype=np.int16) -def test_open_dataset(sgli_vn_file): - """Test open_dataset function.""" - from satpy.readers.sgli_l1b import SGLIBackend - res = open_dataset(sgli_vn_file, engine=SGLIBackend, chunks={}) - assert isinstance(res, Dataset) - data_array = res["Lt_VN01"] - assert isinstance(data_array, DataArray) - assert isinstance(data_array.data, da.Array) - assert data_array.chunks == normalize_chunks((116, 157), data_array.shape) - - -@pytest.fixture(scope="session") 
+@pytest.fixture(scope="module") def sgli_vn_file(tmp_path_factory): """Create a stub VN file.""" filename = tmp_path_factory.mktemp("data") / "test_vn_file.h5" From 728631eb32afad4eaef8f53bbed863b9ab05282c Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 4 Dec 2023 12:41:09 +0100 Subject: [PATCH 548/702] Refactor dataset preparation --- satpy/readers/sgli_l1b.py | 17 +++++++++++------ satpy/tests/reader_tests/test_sgli_l1b.py | 12 ++++++++++-- 2 files changed, 21 insertions(+), 8 deletions(-) diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 67b8d46816..1e2a64f783 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -85,6 +85,16 @@ def get_dataset(self, key, info): attrs = h5dataset.attrs dataset = xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) + dataset = self.prepare_dataset(key, dataset) + + dataset.attrs["platform_name"] = "GCOM-C1" + dataset.attrs["sensor"] = "sgli" + dataset.attrs["units"] = info["units"] + dataset.attrs["standard_name"] = info["standard_name"] + return dataset + + def prepare_dataset(self, key, dataset): + """Prepare the dataset according to key.""" with xr.set_options(keep_attrs=True): if key["name"].startswith(("VN", "SW", "P")): dataset = self.get_visible_dataset(key, dataset) @@ -95,12 +105,7 @@ def get_dataset(self, key, info): elif "angle" in key["name"]: dataset = self.get_angles(key) else: - raise NotImplementedError() - - dataset.attrs["platform_name"] = "GCOM-C1" - dataset.attrs["sensor"] = "sgli" - dataset.attrs["units"] = info["units"] - dataset.attrs["standard_name"] = info["standard_name"] + raise KeyError(f"Unrecognized dataset {key['name']}") return dataset def get_visible_dataset(self, key, dataset): diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index 1512e38762..7f5fffa70c 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -46,7 +46,7 @@ def sgli_vn_file(tmp_path_factory): return filename -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def sgli_ir_file(tmp_path_factory): """Create a stub IR file.""" filename = tmp_path_factory.mktemp("data") / "test_ir_file.h5" @@ -83,7 +83,7 @@ def sgli_ir_file(tmp_path_factory): return filename -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def sgli_pol_file(tmp_path_factory): """Create a POL stub file.""" filename = tmp_path_factory.mktemp("data") / "test_pol_file.h5" @@ -188,6 +188,14 @@ def test_get_dataset_counts(sgli_vn_file): assert res.attrs["platform_name"] == "GCOM-C1" assert res.attrs["sensor"] == "sgli" +def test_get_dataset_for_unknown_channel(sgli_vn_file): + """Test that counts can be extracted from a file.""" + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) + did = dict(name="VIN", resolution=1000, polarization=None, calibration="counts") + with pytest.raises(KeyError): + handler.get_dataset(did, {"file_key": "Image_data/Lt_VIN01", "units": "", + "standard_name": ""}) + def test_get_vn_dataset_reflectances(sgli_vn_file): """Test that the vn datasets can be calibrated to reflectances.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) From 2c83c70007899e17345f184239f5fbd94090e52c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 4 Dec 2023 10:21:30 -0600 Subject: [PATCH 549/702] Update satpy/etc/readers/tropomi_l2.yaml --- satpy/etc/readers/tropomi_l2.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/readers/tropomi_l2.yaml 
b/satpy/etc/readers/tropomi_l2.yaml index 181311d25d..afd78d750f 100644 --- a/satpy/etc/readers/tropomi_l2.yaml +++ b/satpy/etc/readers/tropomi_l2.yaml @@ -14,7 +14,7 @@ file_types: file_reader: !!python/name:satpy.readers.tropomi_l2.TROPOMIL2FileHandler file_patterns: - '{platform_shortname:3s}_{data_type:4s}_{level:3s}_{product:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit:5d}_{collection:2d}_{processor_version:6d}_{creation_time:%Y%m%dT%H%M%S}.nc' - - '{platform_shortname:3s}_RPRO_{level:3s}_{product:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit:5d}_{collection:2d}_{processor_version:6d}_{creation_time:%Y%m%dT%H%M%S}_reduced.nc' + - '{platform_shortname:3s}_{data_type:4s}_{level:3s}_{product:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit:5d}_{collection:2d}_{processor_version:6d}_{creation_time:%Y%m%dT%H%M%S}_reduced.nc' datasets: latitude: From c85f0a073560c14f914425683600aab98f4a736e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 5 Dec 2023 11:15:23 -0600 Subject: [PATCH 550/702] Update .github/workflows/ci.yaml --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9931130d53..0d73af13e1 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -83,7 +83,7 @@ jobs: git+https://github.com/pytroll/pyresample \ git+https://github.com/pytroll/trollimage \ git+https://github.com/fhs/pyhdf \ - git+https://github.com/djhoese/h5py@cython-3-davidh \ + git+https://github.com/h5py/h5py \ git+https://github.com/h5netcdf/h5netcdf \ git+https://github.com/Unidata/netcdf4-python \ git+https://github.com/dask/dask \ From 60a7c1b3a527b896838114a5ee0f2ce7b26fabec Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 8 Dec 2023 09:42:53 +0200 Subject: [PATCH 551/702] Fix RealisticColors compositor not to upcast data to float64 --- satpy/composites/__init__.py | 8 ++++---- satpy/tests/test_composites.py | 34 ++++++++++++++++++++++++++++++++++ 2 files changed, 38 insertions(+), 4 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 9295f94dc7..686b8c4c27 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -992,17 +992,17 @@ def __call__(self, projectables, *args, **kwargs): hrv = projectables[2] try: - ch3 = 3 * hrv - vis06 - vis08 + ch3 = 3.0 * hrv - vis06 - vis08 ch3.attrs = hrv.attrs except ValueError: raise IncompatibleAreas ndvi = (vis08 - vis06) / (vis08 + vis06) - ndvi = np.where(ndvi < 0, 0, ndvi) + ndvi = ndvi.where(ndvi >= 0.0, 0.0) - ch1 = ndvi * vis06 + (1 - ndvi) * vis08 + ch1 = ndvi * vis06 + (1.0 - ndvi) * vis08 ch1.attrs = vis06.attrs - ch2 = ndvi * vis08 + (1 - ndvi) * vis06 + ch2 = ndvi * vis08 + (1.0 - ndvi) * vis06 ch2.attrs = vis08.attrs res = super(RealisticColors, self).__call__((ch1, ch2, ch3), diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 7fbe177bfb..4a7b2a2ce9 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1867,3 +1867,37 @@ def _create_fake_composite_config(yaml_filename: str): }, comp_file, ) + + +class TestRealisticColors: + """Test the SEVIRI Realistic Colors compositor.""" + + def test_realistic_colors(self): + """Test the compositor.""" + from satpy.composites import RealisticColors + + vis06 = xr.DataArray(da.arange(0, 15, dtype=np.float32).reshape(3, 5), dims=("y", "x"), + attrs={"foo": "foo"}) + vis08 = xr.DataArray(da.arange(15, 0, -1, 
dtype=np.float32).reshape(3, 5), dims=("y", "x"), + attrs={"bar": "bar"}) + hrv = xr.DataArray(6 * da.ones((3, 5), dtype=np.float32), dims=("y", "x"), + attrs={"baz": "baz"}) + + expected_red = np.array([[0.0, 2.733333, 4.9333334, 6.6, 7.733333], + [8.333333, 8.400001, 7.9333334, 7.0, 6.0], + [5.0, 4.0, 3.0, 2.0, 1.0]], dtype=np.float32) + expected_green = np.array([ + [15.0, 12.266666, 10.066668, 8.400001, 7.2666664], + [6.6666665, 6.6000004, 7.0666666, 8.0, 9.0], + [10.0, 11.0, 12.0, 13.0, 14.0]], dtype=np.float32) + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = RealisticColors("Ni!") + res = comp((vis06, vis08, hrv)) + + arr = res.values + + assert res.dtype == np.float32 + np.testing.assert_allclose(arr[0, :, :], expected_red) + np.testing.assert_allclose(arr[1, :, :], expected_green) + np.testing.assert_allclose(arr[2, :, :], 3.0) From 91b59ca721cdda8d8b7cf4716c74050aa589d06b Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 8 Dec 2023 10:07:50 -0600 Subject: [PATCH 552/702] Workaround AWIPS bug not handling integers properly in "awips_tiled" writer --- satpy/tests/writer_tests/test_awips_tiled.py | 2 +- satpy/writers/awips_tiled.py | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/satpy/tests/writer_tests/test_awips_tiled.py b/satpy/tests/writer_tests/test_awips_tiled.py index 63113a9f94..dbc1bc82d7 100644 --- a/satpy/tests/writer_tests/test_awips_tiled.py +++ b/satpy/tests/writer_tests/test_awips_tiled.py @@ -198,7 +198,7 @@ def test_basic_numbered_1_tile(self, extra_attrs, expected_filename, use_save_da check_required_properties(unmasked_ds, output_ds) scale_factor = output_ds["data"].encoding["scale_factor"] np.testing.assert_allclose(input_data_arr.values, output_ds["data"].data, - atol=scale_factor / 2) + atol=scale_factor * 0.75) def test_units_length_warning(self, tmp_path): """Test long 'units' warnings are raised.""" diff --git a/satpy/writers/awips_tiled.py b/satpy/writers/awips_tiled.py index 15680e8091..03ce3e9d68 100644 --- a/satpy/writers/awips_tiled.py +++ b/satpy/writers/awips_tiled.py @@ -630,7 +630,13 @@ def _get_factor_offset_fill(input_data_arr, vmin, vmax, encoding): # max value fills = [2 ** (file_bit_depth - 1) - 1] - mx = (vmax - vmin) / (2 ** bit_depth - 1 - num_fills) + # NOTE: AWIPS is buggy and does not properly handle both + # halves an integers data space. The below code limits + # unsigned integers to the positive half and this seems + # to work better with current AWIPS. 
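# As an illustration, assuming an 8-bit file depth with a single fill value (bit_depth = 8, num_fills = 1), the divisor becomes 2 ** 7 - 1 - 1 = 126 instead of 2 ** 8 - 1 - 1 = 254, roughly doubling the scale factor and keeping the encoded counts in the positive half of the integer range.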
+ mx = (vmax - vmin) / (2 ** (bit_depth - 1) - 1 - num_fills) + # NOTE: This is what the line should look like if AWIPS wasn't buggy: + # mx = (vmax - vmin) / (2 ** bit_depth - 1 - num_fills) bx = vmin if not is_unsigned and not unsigned_in_signed: bx += 2 ** (bit_depth - 1) * mx From aa98fd6d69335a8c862570e1ec3479d92f0cd9eb Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 11 Dec 2023 15:14:50 +0200 Subject: [PATCH 553/702] Update xarray version in compression tests for compression kwarg --- satpy/tests/writer_tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 7fabb04f10..62c9995cde 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -570,5 +570,5 @@ def _should_use_compression_keyword(): versions = _get_backend_versions() return ( versions["libnetcdf"] >= Version("4.9.0") and - versions["xarray"] >= Version("2023.12") + versions["xarray"] >= Version("2024.1") ) From 77830d67ca86bfb398fa38ec10d21f097b2faffe Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Mon, 11 Dec 2023 13:39:02 +0000 Subject: [PATCH 554/702] Add AMV/AMVI file pattern and variable --- satpy/etc/readers/fci_l2_nc.yaml | 267 +++++++++++++++++++++++++++++++ 1 file changed, 267 insertions(+) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index 20d9935682..7c0724b6ac 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -64,6 +64,16 @@ file_types: file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-ASR-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' + nc_fci_amvi: + file_reader: !!python/name:readers.fci_amv_l2_nc.FciAmvL2NCFileHandler + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-AMVI-{channel}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' + + nc_fci_amv: + file_reader: !!python/name:readers.fci_amv_l2_nc.FciAmvL2NCFileHandler + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-AMV-{channel}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' + datasets: # CLM @@ -2734,3 +2744,260 @@ datasets: file_type: nc_fci_asr file_key: product_timeliness long_name: product_timeliness_index + +# AMV Intermediate Product + intm_latitude: + name: intm_latitude + file_type: nc_fci_amvi + file_key: intm_latitude + standard_name: latitude + + intm_longitude: + name: intm_longitude + file_type: nc_fci_amvi + file_key: intm_longitude + standard_name: longitude + + intm_speed: + name: intm_speed + file_type: nc_fci_amvi + file_key: 
intm_speed + standard_name: wind_speed + coordinates: + - intm_longitude + - intm_latitude + + intm_u_component: + name: intm_u_component + file_type: nc_fci_amvi + file_key: intm_u_component + standard_name: wind_speed_horizontal_component + coordinates: + - intm_longitude + - intm_latitude + + intm_v_component: + name: intm_v_component + file_type: nc_fci_amvi + file_key: intm_v_component + standard_name: wind_speed_vertical_component + coordinates: + - intm_longitude + - intm_latitude + + intm_direction: + name: intm_direction + file_type: nc_fci_amvi + file_key: intm_direction + standard_name: wind_to_direction + coordinates: + - intm_longitude + - intm_latitude + + intm_pressure: + name: intm_pressure + file_type: nc_fci_amvi + file_key: intm_pressure + standard_name: wind_pressure + coordinates: + - intm_longitude + - intm_latitude + + intm_temperature: + name: intm_temperature + file_type: nc_fci_amvi + file_key: intm_temperature + standard_name: wind_temperature + coordinates: + - intm_longitude + - intm_latitude + + intm_target_type: + name: intm_target_type + file_type: nc_fci_amvi + file_key: target_type + standard_name: wind_target_type + coordinates: + - intm_longitude + - intm_latitude + + intm_wind_method: + name: intm_wind_method + file_type: nc_fci_amvi + file_key: wind_method + standard_name: wind_wind_method + coordinates: + - intm_longitude + - intm_latitude + +# AMV Final Product + channel_id: + name: channel_id + file_type: nc_fci_amv + file_key: channel_id + standard_name: channel_id + + latitude: + name: latitude + file_type: nc_fci_amv + file_key: latitude + standard_name: latitude + + longitude: + name: longitude + file_type: nc_fci_amv + file_key: longitude + standard_name: longitude + + speed: + name: speed + file_type: nc_fci_amv + file_key: speed + standard_name: wind_speed + coordinates: + - longitude + - latitude + + speed_u_component: + name: speed_u_component + file_type: nc_fci_amv + file_key: speed_u_component + standard_name: wind_speed_horizontal_component + coordinates: + - longitude + - latitude + + speed_v_component: + name: speed_v_component + file_type: nc_fci_amv + file_key: speed_v_component + standard_name: wind_speed_vertical_component + coordinates: + - longitude + - latitude + + direction: + name: direction + file_type: nc_fci_amv + file_key: direction + standard_name: wind_to_direction + coordinates: + - longitude + - latitude + + pressure: + name: pressure + file_type: nc_fci_amv + file_key: pressure + standard_name: wind_pressure + coordinates: + - longitude + - latitude + + temperature: + name: temperature + file_type: nc_fci_amv + file_key: temperature + standard_name: wind_temperature + coordinates: + - longitude + - latitude + + target_type: + name: target_type + file_type: nc_fci_amv + file_key: target_type + standard_name: wind_target_type + coordinates: + - longitude + - latitude + + wind_method: + name: wind_method + file_type: nc_fci_amv + file_key: wind_method + standard_name: wind_wind_method + coordinates: + - longitude + - latitude + + fcst_u: + name: fcst_u + file_type: nc_fci_amv + file_key: forecast_u_component + standard_name: wind_forecast_u_component + coordinates: + - longitude + - latitude + + fcst_v: + name: fcst_v + file_type: nc_fci_amv + file_key: forecast_v_component + standard_name: wind_forecast_v_component + coordinates: + - longitude + - latitude + + best_fit_pres: + name: best_fit_pres + file_type: nc_fci_amv + file_key: best_fit_pressure + standard_name: wind_best_fit_pressure + coordinates: + - longitude + 
- latitude + + best_fit_u: + name: best_fit_u + file_type: nc_fci_amv + file_key: best_fit_u_component + standard_name: wind_best_fit_u_component + coordinates: + - longitude + - latitude + + best_fit_v: + name: best_fit_v + file_type: nc_fci_amv + file_key: best_fit_v_component + standard_name: wind_best_fit_v_component + coordinates: + - longitude + - latitude + + qi: + name: qi + file_type: nc_fci_amv + file_key: overall_reliability + standard_name: wind_overall_reliability + coordinates: + - longitude + - latitude + + qi_excl_fcst: + name: qi_excl_fcst + file_type: nc_fci_amv + file_key: overall_reliability_exc_forecast + standard_name: wind_overall_reliability_exc_forecast + coordinates: + - longitude + - latitude + + product_quality: + name: product_quality + file_type: nc_fci_amv + file_key: product_quality + long_name: product_quality_index + + product_completeness: + name: product_completeness + file_type: nc_fci_amv + file_key: product_completeness + long_name: product_completeness_index + + product_timeliness: + name: product_timeliness + file_type: nc_fci_amv + file_key: product_timeliness + long_name: product_timeliness_index From a43187efd63b48f68fe43a08c779c011fc0d7776 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 11 Dec 2023 16:19:14 +0200 Subject: [PATCH 555/702] Convert times in SEVIRI readers to nanosecond precision to silence warnings --- satpy/readers/seviri_base.py | 4 +++- satpy/tests/reader_tests/test_seviri_base.py | 19 ++++++++++++------- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 25e6ed1a8b..5b19e56833 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -475,8 +475,10 @@ def get_cds_time(days, msecs): days = np.array([days], dtype="int64") msecs = np.array([msecs], dtype="int64") + # use nanosecond precision to silence warning from XArray + nsecs = 1000000 * msecs.astype("timedelta64[ns]") time = np.datetime64("1958-01-01").astype("datetime64[ms]") + \ - days.astype("timedelta64[D]") + msecs.astype("timedelta64[ms]") + days.astype("timedelta64[D]") + nsecs time[time == np.datetime64("1958-01-01 00:00")] = np.datetime64("NaT") if len(time) == 1: diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index c2d190e084..42b79ea0c8 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -74,23 +74,28 @@ def test_chebyshev(self): exp = chebyshev4(coefs, time, domain) np.testing.assert_allclose(res, exp) - def test_get_cds_time(self): - """Test the get_cds_time function.""" - # Scalar + def test_get_cds_time_scalar(self): + """Test the get_cds_time function for scalar inputs.""" assert get_cds_time(days=21246, msecs=12 * 3600 * 1000) == np.datetime64("2016-03-03 12:00") - # Array + def test_get_cds_time_array(self): + """Test the get_cds_time function for array inputs.""" days = np.array([21246, 21247, 21248]) msecs = np.array([12*3600*1000, 13*3600*1000 + 1, 14*3600*1000 + 2]) expected = np.array([np.datetime64("2016-03-03 12:00:00.000"), np.datetime64("2016-03-04 13:00:00.001"), np.datetime64("2016-03-05 14:00:00.002")]) - np.testing.assert_equal(get_cds_time(days=days, msecs=msecs), expected) + res = get_cds_time(days=days, msecs=msecs) + np.testing.assert_equal(res, expected) + def test_get_cds_time_nanoseconds(self): + """Test the get_cds_time function for having nanosecond precision.""" days = 21246 - msecs = 12*3600*1000 + msecs = 12 * 
3600 * 1000 expected = np.datetime64("2016-03-03 12:00:00.000") - np.testing.assert_equal(get_cds_time(days=days, msecs=msecs), expected) + res = get_cds_time(days=days, msecs=msecs) + np.testing.assert_equal(res, expected) + assert ".000000000" in res.__repr__() def test_pad_data_horizontally_bad_shape(self): """Test the error handling for the horizontal hrv padding.""" From df26a586eff71ed269e449dd8c59ef12a4b129a6 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 11 Dec 2023 18:34:06 +0200 Subject: [PATCH 556/702] Check for dtype instead of string representation --- satpy/tests/reader_tests/test_seviri_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index 42b79ea0c8..86f684bb5e 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -95,7 +95,7 @@ def test_get_cds_time_nanoseconds(self): expected = np.datetime64("2016-03-03 12:00:00.000") res = get_cds_time(days=days, msecs=msecs) np.testing.assert_equal(res, expected) - assert ".000000000" in res.__repr__() + assert res.dtype == np.dtype("datetime64[ns]") def test_pad_data_horizontally_bad_shape(self): """Test the error handling for the horizontal hrv padding.""" From 101c44ddf8b9da6101eaa13b0a902ae5574de3b4 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 11 Dec 2023 10:46:40 -0600 Subject: [PATCH 557/702] Add remaining JPSS satellite platform aliases to "mirs" reader --- satpy/readers/mirs.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/satpy/readers/mirs.py b/satpy/readers/mirs.py index 1ee0912b0f..34edd02739 100644 --- a/satpy/readers/mirs.py +++ b/satpy/readers/mirs.py @@ -50,6 +50,10 @@ PLATFORMS = {"n18": "NOAA-18", "n19": "NOAA-19", "np": "NOAA-19", + "n20": "NOAA-20", + "n21": "NOAA-21", + "n22": "NOAA-22", + "n23": "NOAA-23", "m2": "MetOp-A", "m1": "MetOp-B", "m3": "MetOp-C", @@ -60,11 +64,14 @@ "f17": "DMSP-F17", "f18": "DMSP-F18", "gpm": "GPM", - "n20": "NOAA-20", } SENSOR = {"n18": amsu, "n19": amsu, "n20": "atms", + "n21": "atms", + "n22": "atms", + "n23": "atms", + "n24": "atms", "np": amsu, "m1": amsu, "m2": amsu, From 943085f54b14dfadee4ecd591d9321a3aeaf61aa Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 11 Dec 2023 20:24:37 +0000 Subject: [PATCH 558/702] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.6 → v0.1.7](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.6...v0.1.7) - [github.com/PyCQA/bandit: 1.7.5 → 1.7.6](https://github.com/PyCQA/bandit/compare/1.7.5...1.7.6) - [github.com/pycqa/isort: 5.12.0 → 5.13.1](https://github.com/pycqa/isort/compare/5.12.0...5.13.1) --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 99e77cb56a..8036f793f5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ fail_fast: false repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.1.6' + rev: 'v0.1.7' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks @@ -14,7 +14,7 @@ repos: - id: check-yaml args: [--unsafe] - repo: https://github.com/PyCQA/bandit - rev: '1.7.5' # Update me! + rev: '1.7.6' # Update me! 
hooks: - id: bandit args: [--ini, .bandit] @@ -29,7 +29,7 @@ repos: - types-requests args: ["--python-version", "3.9", "--ignore-missing-imports"] - repo: https://github.com/pycqa/isort - rev: 5.12.0 + rev: 5.13.1 hooks: - id: isort language_version: python3 From 8428da1e67a8befe4b79df0572426ddafb2d5585 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 11 Dec 2023 14:52:53 -0600 Subject: [PATCH 559/702] Change pre-commit update schedule to monthly --- .pre-commit-config.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8036f793f5..a398bd445f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -36,4 +36,5 @@ repos: ci: # To trigger manually, comment on a pull request with "pre-commit.ci autofix" autofix_prs: false + autoupdate_schedule: "monthly" skip: [bandit] From b8fea39fee9bfafc68039e1481784d2f9a440b3f Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 11 Dec 2023 14:53:24 -0600 Subject: [PATCH 560/702] Change dependabot to monthly updates --- .github/dependabot.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 90e05c40d0..95179b06c9 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -8,4 +8,4 @@ updates: - package-ecosystem: "github-actions" # See documentation for possible values directory: "/" # Location of package manifests schedule: - interval: "weekly" + interval: "monthly" From 8c7999539d095e2817ff4c734264b201c3e3e16c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 11 Dec 2023 15:29:56 -0600 Subject: [PATCH 561/702] Update MiRS reader coefficient files to newer version --- satpy/etc/readers/mirs.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/etc/readers/mirs.yaml b/satpy/etc/readers/mirs.yaml index 4e70fbed2c..5ca15f66b0 100644 --- a/satpy/etc/readers/mirs.yaml +++ b/satpy/etc/readers/mirs.yaml @@ -8,13 +8,13 @@ reader: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [amsu, amsu-mhs, atms, ssmis, gmi] data_files: - - url: "https://zenodo.org/record/4472664/files/limbcoef_atmsland_noaa20.txt" - known_hash: "08a3b7c1594a963610dd864b7ecd12f0ab486412d35185c2371d924dd92c5779" - - url: "https://zenodo.org/record/4472664/files/limbcoef_atmsland_snpp.txt" + - url: "https://zenodo.org/record/10357932/files/limbcoef_atmsland_noaa20.txt" + known_hash: "08deca15afe8638effac9e6ccb442c2c386f5444926129d30a250d5840264c1d" + - url: "https://zenodo.org/record/10357932/files/limbcoef_atmsland_snpp.txt" known_hash: "4b01543699792306711ef1699244e96186487e8a869e4ae42bf1f0e4d00fd063" - - url: "https://zenodo.org/record/4472664/files/limbcoef_atmssea_noaa20.txt" - known_hash: "6853d0536b11c31dc130ab12c61fa322a76d3823a4b8ff9a18a0ecedbf269a88" - - url: "https://zenodo.org/record/4472664/files/limbcoef_atmssea_snpp.txt" + - url: "https://zenodo.org/record/10357932/files/limbcoef_atmssea_noaa20.txt" + known_hash: "07cd7874ff3f069cc3d473bdd0d1d19880ef01ac8d75cb0212a3687c059557f4" + - url: "https://zenodo.org/record/10357932/files/limbcoef_atmssea_snpp.txt" known_hash: "d0f806051b80320e046bdae6a9b68616152bbf8c2dbf3667b9834459259c0d72" file_types: From 944b049aefe93d8f2fb2a0339ce34cb555cac853 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 12 Dec 2023 09:11:42 +0000 Subject: [PATCH 562/702] Add File handler for AMV --- satpy/readers/fci_l2_nc.py | 81 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 81 insertions(+) diff --git a/satpy/readers/fci_l2_nc.py 
b/satpy/readers/fci_l2_nc.py index c387326f89..3743a64480 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -401,3 +401,84 @@ def _modify_area_extent(stand_area_extent): area_extent = tuple([ll_x, ll_y, ur_x, ur_y]) return area_extent + +class FciL2NCAMVFileHandler(FciL2CommonFunctions, BaseFileHandler): + """Reader class for FCI L2 AMV products in NetCDF4 format.""" + def __init__(self, filename, filename_info, filetype_info): + """Open the NetCDF file with xarray and prepare for dataset reading.""" + super().__init__(filename, filename_info, filetype_info) + + # Use xarray's default netcdf4 engine to open the file + self.nc = xr.open_dataset( + self.filename, + decode_cf=True, + mask_and_scale=True, + chunks={ + "number_of_images": CHUNK_SIZE, + # 'number_of_height_estimates': CHUNK_SIZE, + "number_of_winds": CHUNK_SIZE + } + ) + @property + def spacecraft_name(self): + """Get spacecraft name.""" + try: + return self.nc.attrs["platform"] + except KeyError: + # TODO if the platform attribute is not valid, return a default value + logger.warning("Spacecraft name cannot be obtained from file content, use default value instead") + return "MTI1" + + @property + def sensor_name(self): + """Get instrument name.""" + try: + return self.nc.attrs["data_source"] + except KeyError: + # TODO if the data_source attribute is not valid, return a default value + logger.warning("Sensor cannot be obtained from file content, use default value instead") + return "FCI" + + def _get_global_attributes(self): + """Create a dictionary of global attributes to be added to all datasets. + + Returns: + dict: A dictionary of global attributes. + filename: name of the product file + spacecraft_name: name of the spacecraft + sensor: name of sensor + platform_name: name of the platform + + """ + attributes = { + "filename": self.filename, + "spacecraft_name": self._spacecraft_name, + "sensor": self._sensor_name, + "platform_name": self._spacecraft_name, + "channel":self.filename_info["channel"] + } + return attributes + + def get_dataset(self, dataset_id, dataset_info): + """Get dataset using the file_key in dataset_info.""" + var_key = dataset_info["file_key"] + logger.debug("Reading in file to get dataset with key %s.", var_key) + + try: + variable = self.nc[var_key] + except KeyError: + logger.warning("Could not find key %s in NetCDF file, no valid Dataset created", var_key) + return None + + # Manage the attributes of the dataset + variable.attrs.update(dataset_info) + variable.attrs.update(self._get_global_attributes()) + + return variable + + def __del__(self): + """Close the NetCDF file that may still be open.""" + try: + self.nc.close() + except AttributeError: + pass From 0fe0ae197bc6c5b6aeb4badb4fc4e935c895f2be Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 12 Dec 2023 09:34:18 +0000 Subject: [PATCH 563/702] Fix wrong naming for method --- satpy/readers/fci_l2_nc.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 3743a64480..5d15d24528 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -452,9 +452,9 @@ def _get_global_attributes(self): """ attributes = { "filename": self.filename, - "spacecraft_name": self._spacecraft_name, - "sensor": self._sensor_name, - "platform_name": self._spacecraft_name, + "spacecraft_name": self.spacecraft_name, + "sensor": self.sensor_name, + "platform_name": self.spacecraft_name, "channel":self.filename_info["channel"] } return attributes From 
9ca6d246d9e243a79354596004a8e3a44ad47add Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 12 Dec 2023 09:34:48 +0000 Subject: [PATCH 564/702] Fix reference to AMV file handler --- satpy/etc/readers/fci_l2_nc.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index 7c0724b6ac..f9c12849eb 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -65,12 +65,12 @@ file_types: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-ASR-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' nc_fci_amvi: - file_reader: !!python/name:readers.fci_amv_l2_nc.FciAmvL2NCFileHandler + file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCAMVFileHandler file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-AMVI-{channel}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' nc_fci_amv: - file_reader: !!python/name:readers.fci_amv_l2_nc.FciAmvL2NCFileHandler + file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCAMVFileHandler file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-AMV-{channel}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' From f2b3238574bbf07906866d5c0be9c096731e0ccc Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 12 Dec 2023 09:43:42 +0000 Subject: [PATCH 565/702] Remove duplicate method del --- satpy/readers/fci_l2_nc.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 5d15d24528..0948cd0e0a 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -475,10 +475,3 @@ def get_dataset(self, dataset_id, dataset_info): variable.attrs.update(self._get_global_attributes()) return variable - - def __del__(self): - """Close the NetCDF file that may still be open.""" - try: - self.nc.close() - except AttributeError: - pass From 0d2312a617fee7a9c6264c8f586700f35e569329 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 12:49:38 +0200 Subject: [PATCH 566/702] Use create_gradient_search_resampler() --- satpy/resample.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/resample.py b/satpy/resample.py index ddab90be82..f74b6c5ecd 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -154,7 +154,7 @@ from packaging import version from pyresample.ewa import DaskEWAResampler, LegacyDaskEWAResampler from pyresample.geometry import SwathDefinition -from pyresample.gradient import GradientSearchResampler +from pyresample.gradient import create_gradient_search_resampler from 
pyresample.resampler import BaseResampler as PRBaseResampler from satpy._config import config_search_paths, get_config_path @@ -1009,7 +1009,7 @@ def compute(self, data, fill_value=np.nan, categories=None, **kwargs): "nearest": KDTreeResampler, "bilinear": BilinearResampler, "native": NativeResampler, - "gradient_search": GradientSearchResampler, + "gradient_search": create_gradient_search_resampler, "bucket_avg": BucketAvg, "bucket_sum": BucketSum, "bucket_count": BucketCount, From 7c54a14e0d1d9f40a887de88f0204e2a80305441 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 12:50:21 +0200 Subject: [PATCH 567/702] Do not use proj dicts --- satpy/tests/test_resample.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index 7135661578..9a0584e301 100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -48,7 +48,6 @@ def get_test_data(input_shape=(100, 50), output_shape=(200, 100), output_proj=No """ import dask.array as da from pyresample.geometry import AreaDefinition, SwathDefinition - from pyresample.utils import proj4_str_to_dict from xarray import DataArray ds1 = DataArray(da.zeros(input_shape, chunks=85), dims=input_dims, @@ -62,16 +61,16 @@ def get_test_data(input_shape=(100, 50), output_shape=(200, 100), output_proj=No input_proj_str = ("+proj=geos +lon_0=-95.0 +h=35786023.0 +a=6378137.0 " "+b=6356752.31414 +sweep=x +units=m +no_defs") + crs = CRS(input_proj_str) source = AreaDefinition( "test_target", "test_target", "test_target", - proj4_str_to_dict(input_proj_str), + crs, input_shape[1], # width input_shape[0], # height (-1000., -1500., 1000., 1500.)) ds1.attrs["area"] = source - crs = CRS.from_string(input_proj_str) ds1 = ds1.assign_coords(crs=crs) ds2 = ds1.copy() @@ -95,7 +94,7 @@ def get_test_data(input_shape=(100, 50), output_shape=(200, 100), output_proj=No "test_target", "test_target", "test_target", - proj4_str_to_dict(output_proj_str), + CRS(output_proj_str), output_shape[1], # width output_shape[0], # height (-1000., -1500., 1000., 1500.), From c72cee28dacd0a1ab2d4e49843f8180885f87a50 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 12 Dec 2023 10:54:29 +0000 Subject: [PATCH 568/702] Add test for AMV reader --- satpy/tests/reader_tests/test_fci_l2_nc.py | 81 +++++++++++++++++++++- 1 file changed, 80 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index 22611a8469..fb5c725ffc 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -29,7 +29,7 @@ from netCDF4 import Dataset from pyresample import geometry -from satpy.readers.fci_l2_nc import FciL2NCFileHandler, FciL2NCSegmentFileHandler +from satpy.readers.fci_l2_nc import FciL2NCAMVFileHandler, FciL2NCFileHandler, FciL2NCSegmentFileHandler from satpy.tests.utils import make_dataid AREA_DEF = geometry.AreaDefinition( @@ -507,3 +507,82 @@ def test_byte_extraction(self): }) assert dataset.values == 0 + +class TestFciL2NCAMVFileHandler(unittest.TestCase): + """Test the FciL2NCFileHandler reader.""" + + def setUp(self): + """Set up the test by creating a test file and opening it with the reader.""" + # Easiest way to test the reader is to create a test netCDF file on the fly + # Create unique filenames to prevent race conditions when tests are run in parallel + self.test_file = str(uuid.uuid4()) + ".nc" + with Dataset(self.test_file, "w") as nc: + # Create dimensions + 
nc.createDimension("number_of_winds", 50000) + + # add global attributes + nc.data_source = "test_data_source" + nc.platform = "test_platform" + + # Add datasets + latitude = nc.createVariable("latitude", np.float32, dimensions=("number_of_winds",)) + latitude[:] = np.arange(50000) + + longitude = nc.createVariable("y", np.float32, dimensions=("number_of_winds",)) + longitude[:] = np.arange(50000) + + qi = nc.createVariable("product_quality", np.int8) + qi[:] = 99. + + test_dataset = nc.createVariable("test_one_layer", np.float32, + dimensions="number_of_winds") + test_dataset[:] = np.ones((50000)) + test_dataset.test_attr = "attr" + test_dataset.units = "test_units" + + mtg_geos_projection = nc.createVariable("mtg_geos_projection", int, dimensions=()) + mtg_geos_projection.longitude_of_projection_origin = 0.0 + mtg_geos_projection.semi_major_axis = 6378137. + mtg_geos_projection.inverse_flattening = 298.257223563 + mtg_geos_projection.perspective_point_height = 35786400. + + self.fh = FciL2NCAMVFileHandler(filename=self.test_file, + filename_info={"channel":"test_channel"}, + filetype_info={}) + + def tearDown(self): + """Remove the previously created test file.""" + # First delete the file handler, forcing the file to be closed if still open + del self.fh + # Then we can safely remove the file from the system + with suppress(OSError): + os.remove(self.test_file) + + def test_all_basic(self): + """Test all basic functionalities.""" + assert self.fh.spacecraft_name == "test_platform" + assert self.fh.sensor_name == "test_data_source" + assert self.fh.ssp_lon == 0.0 + + global_attributes = self.fh._get_global_attributes() + expected_global_attributes = { + "filename": self.test_file, + "spacecraft_name": "test_platform", + "sensor": "test_data_source", + "platform_name": "test_platform", + "channel": "test_channel" + } + assert global_attributes == expected_global_attributes + + def test_dataset(self): + """Test the correct execution of the get_dataset function with a valid file_key.""" + dataset = self.fh.get_dataset(make_dataid(name="test_dataset", resolution=2000), + {"name": "test_dataset", + "file_key": "test_dataset", + "fill_value": -999, + "file_type": "test_file_type"}) + + np.testing.assert_allclose(dataset.values, np.ones((50000))) + assert dataset.attrs["test_attr"] == "attr" + assert dataset.attrs["units"] == "test_units" + assert dataset.attrs["fill_value"] == -999 From a7f93eb9eecf5919bb42071251c3092864a7589b Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 13:12:34 +0200 Subject: [PATCH 569/702] Remove unnecessary (since Pyresample 1.18) mask_all_nan/skipna handling --- satpy/resample.py | 47 +++---------------------- satpy/tests/test_resample.py | 68 ------------------------------------ 2 files changed, 4 insertions(+), 111 deletions(-) diff --git a/satpy/resample.py b/satpy/resample.py index f74b6c5ecd..336e3fec11 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -148,10 +148,8 @@ import dask.array as da import numpy as np -import pyresample import xarray as xr import zarr -from packaging import version from pyresample.ewa import DaskEWAResampler, LegacyDaskEWAResampler from pyresample.geometry import SwathDefinition from pyresample.gradient import create_gradient_search_resampler @@ -177,8 +175,6 @@ resamplers_cache: "WeakValueDictionary[tuple, object]" = WeakValueDictionary() -PR_USE_SKIPNA = version.parse(pyresample.__version__) > version.parse("1.17.0") - def hash_dict(the_dict, the_hash=None): """Calculate a hash for a dictionary.""" @@ 
-773,33 +769,6 @@ def _get_replicated_chunk_sizes(d_arr, repeats): return tuple(repeated_chunks) -def _get_arg_to_pass_for_skipna_handling(**kwargs): - """Determine if skipna can be passed to the compute functions for the average and sum bucket resampler.""" - # FIXME this can be removed once Pyresample 1.18.0 is a Satpy requirement - - if PR_USE_SKIPNA: - if "mask_all_nan" in kwargs: - warnings.warn( - "Argument mask_all_nan is deprecated. Please use skipna for missing values handling. " - "Continuing with default skipna=True, if not provided differently.", - DeprecationWarning, - stacklevel=3 - ) - kwargs.pop("mask_all_nan") - else: - if "mask_all_nan" in kwargs: - warnings.warn( - "Argument mask_all_nan is deprecated." - "Please update Pyresample and use skipna for missing values handling.", - DeprecationWarning, - stacklevel=3 - ) - kwargs.setdefault("mask_all_nan", False) - kwargs.pop("skipna") - - return kwargs - - class BucketResamplerBase(PRBaseResampler): """Base class for bucket resampling which implements averaging.""" @@ -832,11 +801,6 @@ def resample(self, data, **kwargs): # noqa: D417 Returns (xarray.DataArray): Data resampled to the target area """ - if not PR_USE_SKIPNA and "skipna" in kwargs: - raise ValueError("You are trying to set the skipna argument but you are using an old version of" - " Pyresample that does not support it." - "Please update Pyresample to 1.18.0 or higher to be able to use this argument.") - self.precompute(**kwargs) attrs = data.attrs.copy() data_arr = data.data @@ -910,17 +874,16 @@ def compute(self, data, fill_value=np.nan, skipna=True, **kwargs): # noqa: D417 Returns: dask.Array """ - kwargs = _get_arg_to_pass_for_skipna_handling(skipna=skipna, **kwargs) - results = [] if data.ndim == 3: for i in range(data.shape[0]): res = self.resampler.get_average(data[i, :, :], fill_value=fill_value, + skipna=skipna, **kwargs) results.append(res) else: - res = self.resampler.get_average(data, fill_value=fill_value, + res = self.resampler.get_average(data, fill_value=fill_value, skipna=skipna, **kwargs) results.append(res) @@ -948,16 +911,14 @@ class BucketSum(BucketResamplerBase): def compute(self, data, skipna=True, **kwargs): """Call the resampling.""" - kwargs = _get_arg_to_pass_for_skipna_handling(skipna=skipna, **kwargs) - results = [] if data.ndim == 3: for i in range(data.shape[0]): - res = self.resampler.get_sum(data[i, :, :], + res = self.resampler.get_sum(data[i, :, :], skipna=skipna, **kwargs) results.append(res) else: - res = self.resampler.get_sum(data, **kwargs) + res = self.resampler.get_sum(data, skipna=skipna, **kwargs) results.append(res) return da.stack(results) diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index 9a0584e301..d0bbbe2a46 100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -581,17 +581,10 @@ def test_compute(self): res = self._compute_mocked_bucket_avg(data, return_data=data[0, :, :], fill_value=2) assert res.shape == (3, 5, 5) - @mock.patch("satpy.resample.PR_USE_SKIPNA", True) def test_compute_and_use_skipna_handling(self): """Test bucket resampler computation and use skipna handling.""" data = da.ones((5,)) - self._compute_mocked_bucket_avg(data, fill_value=2, mask_all_nan=True) - self.bucket.resampler.get_average.assert_called_once_with( - data, - fill_value=2, - skipna=True) - self._compute_mocked_bucket_avg(data, fill_value=2, skipna=False) self.bucket.resampler.get_average.assert_called_once_with( data, @@ -604,35 +597,6 @@ def 
test_compute_and_use_skipna_handling(self): fill_value=2, skipna=True) - @mock.patch("satpy.resample.PR_USE_SKIPNA", False) - def test_compute_and_not_use_skipna_handling(self): - """Test bucket resampler computation and not use skipna handling.""" - data = da.ones((5,)) - - self._compute_mocked_bucket_avg(data, fill_value=2, mask_all_nan=True) - self.bucket.resampler.get_average.assert_called_once_with( - data, - fill_value=2, - mask_all_nan=True) - - self._compute_mocked_bucket_avg(data, fill_value=2, mask_all_nan=False) - self.bucket.resampler.get_average.assert_called_once_with( - data, - fill_value=2, - mask_all_nan=False) - - self._compute_mocked_bucket_avg(data, fill_value=2) - self.bucket.resampler.get_average.assert_called_once_with( - data, - fill_value=2, - mask_all_nan=False) - - self._compute_mocked_bucket_avg(data, fill_value=2, skipna=True) - self.bucket.resampler.get_average.assert_called_once_with( - data, - fill_value=2, - mask_all_nan=False) - @mock.patch("pyresample.bucket.BucketResampler") def test_resample(self, pyresample_bucket): """Test bucket resamplers resample method.""" @@ -712,16 +676,10 @@ def test_compute(self): res = self._compute_mocked_bucket_sum(data, return_data=data[0, :, :]) assert res.shape == (3, 5, 5) - @mock.patch("satpy.resample.PR_USE_SKIPNA", True) def test_compute_and_use_skipna_handling(self): """Test bucket resampler computation and use skipna handling.""" data = da.ones((5,)) - self._compute_mocked_bucket_sum(data, mask_all_nan=True) - self.bucket.resampler.get_sum.assert_called_once_with( - data, - skipna=True) - self._compute_mocked_bucket_sum(data, skipna=False) self.bucket.resampler.get_sum.assert_called_once_with( data, @@ -732,32 +690,6 @@ def test_compute_and_use_skipna_handling(self): data, skipna=True) - @mock.patch("satpy.resample.PR_USE_SKIPNA", False) - def test_compute_and_not_use_skipna_handling(self): - """Test bucket resampler computation and not use skipna handling.""" - data = da.ones((5,)) - - self._compute_mocked_bucket_sum(data, mask_all_nan=True) - self.bucket.resampler.get_sum.assert_called_once_with( - data, - mask_all_nan=True) - - self._compute_mocked_bucket_sum(data, mask_all_nan=False) - self.bucket.resampler.get_sum.assert_called_once_with( - data, - mask_all_nan=False) - - self._compute_mocked_bucket_sum(data) - self.bucket.resampler.get_sum.assert_called_once_with( - data, - mask_all_nan=False) - - self._compute_mocked_bucket_sum(data, fill_value=2, skipna=True) - self.bucket.resampler.get_sum.assert_called_once_with( - data, - fill_value=2, - mask_all_nan=False) - class TestBucketCount(unittest.TestCase): """Test the count bucket resampler.""" From 5f00f09efe3412b06b32d481a6dca26c56e42e5d Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 13:17:32 +0200 Subject: [PATCH 570/702] Catch re-chunking warning --- satpy/tests/test_resample.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index d0bbbe2a46..11a9644eb4 100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -247,8 +247,12 @@ def test_expand_reduce_agg_rechunk(self): into that chunk size. """ + from satpy.utils import PerformanceWarning + d_arr = da.zeros((6, 20), chunks=3) - new_data = NativeResampler._expand_reduce(d_arr, {0: 0.5, 1: 0.5}) + text = "Array chunk size is not divisible by aggregation factor. Re-chunking to continue native resampling." 
+ with pytest.warns(PerformanceWarning, match=text): + new_data = NativeResampler._expand_reduce(d_arr, {0: 0.5, 1: 0.5}) assert new_data.shape == (3, 10) def test_expand_reduce_numpy(self): From 084d5031d279c93216d740dad1509182767f44b7 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 12 Dec 2023 11:39:07 +0000 Subject: [PATCH 571/702] Update AHI HSD reader to correctly handle singleton arrays. --- satpy/readers/ahi_hsd.py | 30 ++++++++++++------------ satpy/tests/reader_tests/test_ahi_hsd.py | 12 +++++----- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 8e14d049b9..889b858ff5 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -419,12 +419,12 @@ def end_time(self): @property def observation_start_time(self): """Get the observation start time.""" - return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_start_time"])) + return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_start_time"][0])) @property def observation_end_time(self): """Get the observation end time.""" - return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_end_time"])) + return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_end_time"][0])) @property def nominal_start_time(self): @@ -498,8 +498,8 @@ def _get_area_def(self): pdict["h"] = float(self.proj_info["distance_from_earth_center"] * 1000 - pdict["a"]) pdict["b"] = float(self.proj_info["earth_polar_radius"] * 1000) pdict["ssp_lon"] = float(self.proj_info["sub_lon"]) - pdict["nlines"] = int(self.data_info["number_of_lines"]) - pdict["ncols"] = int(self.data_info["number_of_columns"]) + pdict["nlines"] = int(self.data_info["number_of_lines"][0]) + pdict["ncols"] = int(self.data_info["number_of_columns"][0]) pdict["scandir"] = "N2S" pdict["loff"] = pdict["loff"] + (self.segment_number * pdict["nlines"]) @@ -528,19 +528,19 @@ def _read_header(self, fp_): fpos = 0 header["block1"] = np.fromfile( fp_, dtype=_BASIC_INFO_TYPE, count=1) - fpos = fpos + int(header["block1"]["blocklength"]) + fpos = fpos + int(header["block1"]["blocklength"][0]) self._check_fpos(fp_, fpos, 0, "block1") fp_.seek(fpos, 0) header["block2"] = np.fromfile(fp_, dtype=_DATA_INFO_TYPE, count=1) - fpos = fpos + int(header["block2"]["blocklength"]) + fpos = fpos + int(header["block2"]["blocklength"][0]) self._check_fpos(fp_, fpos, 0, "block2") fp_.seek(fpos, 0) header["block3"] = np.fromfile(fp_, dtype=_PROJ_INFO_TYPE, count=1) - fpos = fpos + int(header["block3"]["blocklength"]) + fpos = fpos + int(header["block3"]["blocklength"][0]) self._check_fpos(fp_, fpos, 0, "block3") fp_.seek(fpos, 0) header["block4"] = np.fromfile(fp_, dtype=_NAV_INFO_TYPE, count=1) - fpos = fpos + int(header["block4"]["blocklength"]) + fpos = fpos + int(header["block4"]["blocklength"][0]) self._check_fpos(fp_, fpos, 0, "block4") fp_.seek(fpos, 0) header["block5"] = np.fromfile(fp_, dtype=_CAL_INFO_TYPE, count=1) @@ -553,7 +553,7 @@ def _read_header(self, fp_): cal = np.fromfile(fp_, dtype=_VISCAL_INFO_TYPE, count=1) else: cal = np.fromfile(fp_, dtype=_IRCAL_INFO_TYPE, count=1) - fpos = fpos + int(header["block5"]["blocklength"]) + fpos = fpos + int(header["block5"]["blocklength"][0]) self._check_fpos(fp_, fpos, 0, "block5") fp_.seek(fpos, 0) @@ -561,12 +561,12 @@ def _read_header(self, fp_): header["block6"] = np.fromfile( fp_, dtype=_INTER_CALIBRATION_INFO_TYPE, count=1) - fpos = fpos + int(header["block6"]["blocklength"]) 
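# Each header block is read with np.fromfile(..., count=1) and therefore comes back as a length-1 record array; selecting the first element hands int() a true scalar instead of relying on implicit array-to-scalar conversion, which recent NumPy versions deprecate.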
+ fpos = fpos + int(header["block6"]["blocklength"][0]) self._check_fpos(fp_, fpos, 0, "block6") fp_.seek(fpos, 0) header["block7"] = np.fromfile( fp_, dtype=_SEGMENT_INFO_TYPE, count=1) - fpos = fpos + int(header["block7"]["blocklength"]) + fpos = fpos + int(header["block7"]["blocklength"][0]) self._check_fpos(fp_, fpos, 0, "block7") fp_.seek(fpos, 0) header["block8"] = np.fromfile( @@ -576,7 +576,7 @@ def _read_header(self, fp_): corrections = [] for _i in range(ncorrs): corrections.append(np.fromfile(fp_, dtype=_NAVIGATION_CORRECTION_SUBINFO_TYPE, count=1)) - fpos = fpos + int(header["block8"]["blocklength"]) + fpos = fpos + int(header["block8"]["blocklength"][0]) self._check_fpos(fp_, fpos, 40, "block8") fp_.seek(fpos, 0) header["navigation_corrections"] = corrections @@ -591,7 +591,7 @@ def _read_header(self, fp_): dtype=_OBSERVATION_LINE_TIME_INFO_TYPE, count=1)) header["observation_time_information"] = lines_and_times - fpos = fpos + int(header["block9"]["blocklength"]) + fpos = fpos + int(header["block9"]["blocklength"][0]) self._check_fpos(fp_, fpos, 40, "block9") fp_.seek(fpos, 0) @@ -604,12 +604,12 @@ def _read_header(self, fp_): for _i in range(num_err_info_data): err_info_data.append(np.fromfile(fp_, dtype=_ERROR_LINE_INFO_TYPE, count=1)) header["error_information_data"] = err_info_data - fpos = fpos + int(header["block10"]["blocklength"]) + fpos = fpos + int(header["block10"]["blocklength"][0]) self._check_fpos(fp_, fpos, 40, "block10") fp_.seek(fpos, 0) header["block11"] = np.fromfile(fp_, dtype=_SPARE_TYPE, count=1) - fpos = fpos + int(header["block11"]["blocklength"]) + fpos = fpos + int(header["block11"]["blocklength"][0]) self._check_fpos(fp_, fpos, 0, "block11") fp_.seek(fpos, 0) diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 9338440246..2075b88947 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -48,8 +48,8 @@ "compression_flag_for_data": 0, "hblock_number": 2, "number_of_bits_per_pixel": 16, - "number_of_columns": 11000, - "number_of_lines": 1100, + "number_of_columns": [11000], + "number_of_lines": [1100], "spare": "", } FAKE_PROJ_INFO: InfoDict = { @@ -135,8 +135,8 @@ def test_region(self, fromfile, np2str): "compression_flag_for_data": 0, "hblock_number": 2, "number_of_bits_per_pixel": 16, - "number_of_columns": 1000, - "number_of_lines": 1000, + "number_of_columns": [1000], + "number_of_lines": [1000], "spare": ""} area_def = fh.get_area_def(None) @@ -183,8 +183,8 @@ def test_segment(self, fromfile, np2str): "compression_flag_for_data": 0, "hblock_number": 2, "number_of_bits_per_pixel": 16, - "number_of_columns": 11000, - "number_of_lines": 1100, + "number_of_columns": [11000], + "number_of_lines": [1100], "spare": ""} area_def = fh.get_area_def(None) From b00a6d93bb27283f440fb593154f612ec2fe4a9f Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 12 Dec 2023 12:06:46 +0000 Subject: [PATCH 572/702] Fix test for get_dataset --- satpy/tests/reader_tests/test_fci_l2_nc.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index fb5c725ffc..8eff51e344 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -534,9 +534,9 @@ def setUp(self): qi = nc.createVariable("product_quality", np.int8) qi[:] = 99. 
- test_dataset = nc.createVariable("test_one_layer", np.float32, + test_dataset = nc.createVariable("test_dataset", np.float32, dimensions="number_of_winds") - test_dataset[:] = np.ones((50000)) + test_dataset[:] = np.ones(50000) test_dataset.test_attr = "attr" test_dataset.units = "test_units" @@ -581,8 +581,7 @@ def test_dataset(self): "file_key": "test_dataset", "fill_value": -999, "file_type": "test_file_type"}) - - np.testing.assert_allclose(dataset.values, np.ones((50000))) + np.testing.assert_allclose(dataset.values, np.ones(50000)) assert dataset.attrs["test_attr"] == "attr" assert dataset.attrs["units"] == "test_units" assert dataset.attrs["fill_value"] == -999 From 610d365c247393fe50b7dfce7a675eeab634bd22 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 14:07:50 +0200 Subject: [PATCH 573/702] Suppress division-by-zero warning in RatioSharpenedRGB --- satpy/composites/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 9295f94dc7..71b9bd0605 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1180,7 +1180,8 @@ def _combined_sharpened_info(self, info, new_attrs): def _get_sharpening_ratio(high_res, low_res): - ratio = high_res / low_res + with np.errstate(divide="ignore"): + ratio = high_res / low_res # make ratio a no-op (multiply by 1) where the ratio is NaN, infinity, # or it is negative. ratio[~np.isfinite(ratio) | (ratio < 0)] = 1.0 From 51ab6b7f00ca5e9e6073f55cc8529051db881922 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 14:33:28 +0200 Subject: [PATCH 574/702] Use ds.drop_vars(), adjust XArray version requirement to match --- satpy/composites/__init__.py | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 71b9bd0605..17d4c00075 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -204,7 +204,7 @@ def drop_coordinates(self, data_arrays): if coord not in ds.dims and any([neglible in coord for neglible in NEGLIGIBLE_COORDS])] if drop: - new_arrays.append(ds.drop(drop)) + new_arrays.append(ds.drop_vars(drop)) else: new_arrays.append(ds) diff --git a/setup.py b/setup.py index cd1c43422e..a9bf050786 100644 --- a/setup.py +++ b/setup.py @@ -23,7 +23,7 @@ from setuptools import find_packages, setup requires = ["numpy >=1.21", "pillow", "pyresample >=1.24.0", "trollsift", - "trollimage >=1.20", "pykdtree", "pyyaml >=5.1", "xarray >=0.10.1, !=0.13.0", + "trollimage >=1.20", "pykdtree", "pyyaml >=5.1", "xarray >=0.14.1", "dask[array] >=0.17.1", "pyproj>=2.2", "zarr", "donfig", "appdirs", "packaging", "pooch", "pyorbital"] From 5c11a5be684a517d4dada08e993708b4aadc89bf Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 14:46:26 +0200 Subject: [PATCH 575/702] Remove deprecated GreenCorrector --- satpy/composites/spectral.py | 21 ------------------- satpy/tests/compositor_tests/test_spectral.py | 14 +------------ 2 files changed, 1 insertion(+), 34 deletions(-) diff --git a/satpy/composites/spectral.py b/satpy/composites/spectral.py index 448d7cb26a..d656bab7ec 100644 --- a/satpy/composites/spectral.py +++ b/satpy/composites/spectral.py @@ -16,7 +16,6 @@ """Composite classes for spectral adjustments.""" import logging -import warnings from satpy.composites import GenericCompositor from satpy.dataset import combine_metadata @@ -199,23 +198,3 @@ def _compute_blend_fraction(self, ndvi): + 
self.limits[0] return fraction - - -class GreenCorrector(SpectralBlender): - """Previous class used to blend channels for green band corrections. - - This method has been refactored to make it more generic. The replacement class is 'SpectralBlender' which computes - a weighted average based on N number of channels and N number of corresponding weights/fractions. A new class - called 'HybridGreen' has been created, which performs a correction of green bands centered at 0.51 microns - following Miller et al. (2016, :doi:`10.1175/BAMS-D-15-00154.2`) in order to improve true color imagery. - """ - - def __init__(self, *args, fractions=(0.85, 0.15), **kwargs): - """Set default keyword argument values.""" - warnings.warn( - "'GreenCorrector' is deprecated, use 'SpectralBlender' instead, or 'HybridGreen' for hybrid green" - " correction following Miller et al. (2016).", - UserWarning, - stacklevel=2 - ) - super().__init__(fractions=fractions, *args, **kwargs) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index e46cff4d0c..c7f07c0454 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -21,7 +21,7 @@ import pytest import xarray as xr -from satpy.composites.spectral import GreenCorrector, HybridGreen, NDVIHybridGreen, SpectralBlender +from satpy.composites.spectral import HybridGreen, NDVIHybridGreen, SpectralBlender from satpy.tests.utils import CustomScheduler @@ -67,18 +67,6 @@ def test_hybrid_green(self): data = res.compute() np.testing.assert_allclose(data, 0.23) - def test_green_corrector(self): - """Test the deprecated class for green corrections.""" - comp = GreenCorrector("blended_channel", fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), - standard_name="toa_bidirectional_reflectance") - res = comp((self.c01, self.c03)) - assert isinstance(res, xr.DataArray) - assert isinstance(res.data, da.Array) - assert res.attrs["name"] == "blended_channel" - assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" - data = res.compute() - np.testing.assert_allclose(data, 0.23) - class TestNdviHybridGreenCompositor: """Test NDVI-weighted hybrid green correction of green band.""" From 934bdb30945189d5d9e65757767ef860c3b91bfb Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 12 Dec 2023 13:04:55 +0000 Subject: [PATCH 576/702] Remove duplicate methods --- satpy/readers/fci_l2_nc.py | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 0948cd0e0a..03f8e94f55 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -419,25 +419,6 @@ def __init__(self, filename, filename_info, filetype_info): "number_of_winds": CHUNK_SIZE } ) - @property - def spacecraft_name(self): - """Get spacecraft name.""" - try: - return self.nc.attrs["platform"] - except KeyError: - # TODO if the platform attribute is not valid, return a default value - logger.warning("Spacecraft name cannot be obtained from file content, use default value instead") - return "MTI1" - - @property - def sensor_name(self): - """Get instrument name.""" - try: - return self.nc.attrs["data_source"] - except KeyError: - # TODO if the data_source attribute is not valid, return a default value - logger.warning("Sensor cannot be obtained from file content, use default value instead") - return "FCI" def _get_global_attributes(self): """Create a dictionary of global attributes to be added to all datasets. 
From 3d6d561986b433264f918aed3646387ce9fa676a Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 12 Dec 2023 13:07:40 +0000 Subject: [PATCH 577/702] Add test for invalide dataset --- satpy/tests/reader_tests/test_fci_l2_nc.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index 8eff51e344..7853cba900 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -585,3 +585,12 @@ def test_dataset(self): assert dataset.attrs["test_attr"] == "attr" assert dataset.attrs["units"] == "test_units" assert dataset.attrs["fill_value"] == -999 + + def test_dataset_with_invalid_filekey(self): + """Test the correct execution of the get_dataset function with an invalid file_key.""" + invalid_dataset = self.fh.get_dataset(make_dataid(name="test_invalid", resolution=2000), + {"name": "test_invalid", + "file_key": "test_invalid", + "fill_value": -999, + "file_type": "test_file_type"}) + assert invalid_dataset is None From bb56a486200b6436d00b9922f670ec5d0f40c3d0 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 15:11:20 +0200 Subject: [PATCH 578/702] Remove GreenCorrector import --- satpy/composites/ahi.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/satpy/composites/ahi.py b/satpy/composites/ahi.py index bb96a94581..4826f84820 100644 --- a/satpy/composites/ahi.py +++ b/satpy/composites/ahi.py @@ -14,7 +14,3 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Composite classes for AHI.""" - -# The green corrector used to be defined here, but was moved to spectral.py -# in Satpy 0.38 because it also applies to FCI. -from .spectral import GreenCorrector # noqa: F401 From f60b188938d16c0d365eaec8d4c961f8ec0a1a62 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 12 Dec 2023 13:16:53 +0000 Subject: [PATCH 579/702] Use `item` to select singleton array elements in AHI HSD. --- satpy/readers/ahi_hsd.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 889b858ff5..cf3fe018f5 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -419,12 +419,12 @@ def end_time(self): @property def observation_start_time(self): """Get the observation start time.""" - return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_start_time"][0])) + return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_start_time"].item())) @property def observation_end_time(self): """Get the observation end time.""" - return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_end_time"][0])) + return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_end_time"].item())) @property def nominal_start_time(self): From 336fc9c97a08d9f367495ae356a7f0d6dcb4d0dd Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 15:19:30 +0200 Subject: [PATCH 580/702] Remove deprecated AHI composites --- satpy/etc/composites/ahi.yaml | 40 ----------------------------------- 1 file changed, 40 deletions(-) diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index cda79a5fac..9c585d53de 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -15,46 +15,6 @@ modifiers: - solar_zenith_angle composites: - green: - deprecation_warning: "'green' is a deprecated composite. 
Use the equivalent 'hybrid_green' instead." - compositor: !!python/name:satpy.composites.spectral.HybridGreen - # FUTURE: Set a wavelength...see what happens. Dependency finding - # probably wouldn't work. - prerequisites: - # should we be using the most corrected or least corrected inputs? - # what happens if something requests more modifiers on top of this? - - wavelength: 0.51 - modifiers: [sunz_corrected, rayleigh_corrected] - - wavelength: 0.85 - modifiers: [sunz_corrected] - standard_name: toa_bidirectional_reflectance - - green_true_color_reproduction: - # JMA True Color Reproduction green band - # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html - deprecation_warning: "'green_true_color_reproduction' is a deprecated composite. Use the equivalent 'reproduced_green' instead." - compositor: !!python/name:satpy.composites.spectral.SpectralBlender - fractions: [0.6321, 0.2928, 0.0751] - prerequisites: - - name: B02 - modifiers: [sunz_corrected, rayleigh_corrected] - - name: B03 - modifiers: [sunz_corrected, rayleigh_corrected] - - name: B04 - modifiers: [sunz_corrected] - standard_name: none - - green_nocorr: - deprecation_warning: "'green_nocorr' is a deprecated composite. Use the equivalent 'hybrid_green_nocorr' instead." - compositor: !!python/name:satpy.composites.spectral.HybridGreen - # FUTURE: Set a wavelength...see what happens. Dependency finding - # probably wouldn't work. - prerequisites: - # should we be using the most corrected or least corrected inputs? - # what happens if something requests more modifiers on top of this? - - wavelength: 0.51 - - wavelength: 0.85 - standard_name: toa_reflectance hybrid_green: compositor: !!python/name:satpy.composites.spectral.HybridGreen From 8f3704bcfd8a5ec4212d9350eafb70d87ad09e50 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 16:13:22 +0200 Subject: [PATCH 581/702] Use importlib.resources to read packaged data --- satpy/readers/mirs.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/readers/mirs.py b/satpy/readers/mirs.py index 34edd02739..5f68af2d6c 100644 --- a/satpy/readers/mirs.py +++ b/satpy/readers/mirs.py @@ -18,6 +18,7 @@ """Interface to MiRS product.""" import datetime +import importlib import logging import os from collections import Counter @@ -34,13 +35,12 @@ LOG = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) -try: - # try getting setuptools/distribute's version of resource retrieval first - from pkg_resources import resource_string as get_resource_string -except ImportError: - from pkgutil import get_data as get_resource_string # type: ignore -# +def get_resource_string(mod_part, file_part): + """Read resource string.""" + ref = importlib.resources.files(mod_part).joinpath(file_part) + return ref.read_bytes() + # 'Polo' variable in MiRS files use these values for H/V polarization POLO_V = 2 From 2469168fba6d8f5557f01bafc5ee0c67a3a73bb5 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 12 Dec 2023 14:26:37 +0000 Subject: [PATCH 582/702] Use `item` to select singleton array elements in AHI HSD. 
--- satpy/readers/ahi_hsd.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index cf3fe018f5..313e5ccab5 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -498,8 +498,8 @@ def _get_area_def(self): pdict["h"] = float(self.proj_info["distance_from_earth_center"] * 1000 - pdict["a"]) pdict["b"] = float(self.proj_info["earth_polar_radius"] * 1000) pdict["ssp_lon"] = float(self.proj_info["sub_lon"]) - pdict["nlines"] = int(self.data_info["number_of_lines"][0]) - pdict["ncols"] = int(self.data_info["number_of_columns"][0]) + pdict["nlines"] = int(self.data_info["number_of_lines"].item()) + pdict["ncols"] = int(self.data_info["number_of_columns"].item()) pdict["scandir"] = "N2S" pdict["loff"] = pdict["loff"] + (self.segment_number * pdict["nlines"]) @@ -528,19 +528,19 @@ def _read_header(self, fp_): fpos = 0 header["block1"] = np.fromfile( fp_, dtype=_BASIC_INFO_TYPE, count=1) - fpos = fpos + int(header["block1"]["blocklength"][0]) + fpos = fpos + int(header["block1"]["blocklength"].item()) self._check_fpos(fp_, fpos, 0, "block1") fp_.seek(fpos, 0) header["block2"] = np.fromfile(fp_, dtype=_DATA_INFO_TYPE, count=1) - fpos = fpos + int(header["block2"]["blocklength"][0]) + fpos = fpos + int(header["block2"]["blocklength"].item()) self._check_fpos(fp_, fpos, 0, "block2") fp_.seek(fpos, 0) header["block3"] = np.fromfile(fp_, dtype=_PROJ_INFO_TYPE, count=1) - fpos = fpos + int(header["block3"]["blocklength"][0]) + fpos = fpos + int(header["block3"]["blocklength"].item()) self._check_fpos(fp_, fpos, 0, "block3") fp_.seek(fpos, 0) header["block4"] = np.fromfile(fp_, dtype=_NAV_INFO_TYPE, count=1) - fpos = fpos + int(header["block4"]["blocklength"][0]) + fpos = fpos + int(header["block4"]["blocklength"].item()) self._check_fpos(fp_, fpos, 0, "block4") fp_.seek(fpos, 0) header["block5"] = np.fromfile(fp_, dtype=_CAL_INFO_TYPE, count=1) @@ -553,7 +553,7 @@ def _read_header(self, fp_): cal = np.fromfile(fp_, dtype=_VISCAL_INFO_TYPE, count=1) else: cal = np.fromfile(fp_, dtype=_IRCAL_INFO_TYPE, count=1) - fpos = fpos + int(header["block5"]["blocklength"][0]) + fpos = fpos + int(header["block5"]["blocklength"].item()) self._check_fpos(fp_, fpos, 0, "block5") fp_.seek(fpos, 0) @@ -561,12 +561,12 @@ def _read_header(self, fp_): header["block6"] = np.fromfile( fp_, dtype=_INTER_CALIBRATION_INFO_TYPE, count=1) - fpos = fpos + int(header["block6"]["blocklength"][0]) + fpos = fpos + int(header["block6"]["blocklength"].item()) self._check_fpos(fp_, fpos, 0, "block6") fp_.seek(fpos, 0) header["block7"] = np.fromfile( fp_, dtype=_SEGMENT_INFO_TYPE, count=1) - fpos = fpos + int(header["block7"]["blocklength"][0]) + fpos = fpos + int(header["block7"]["blocklength"].item()) self._check_fpos(fp_, fpos, 0, "block7") fp_.seek(fpos, 0) header["block8"] = np.fromfile( @@ -576,7 +576,7 @@ def _read_header(self, fp_): corrections = [] for _i in range(ncorrs): corrections.append(np.fromfile(fp_, dtype=_NAVIGATION_CORRECTION_SUBINFO_TYPE, count=1)) - fpos = fpos + int(header["block8"]["blocklength"][0]) + fpos = fpos + int(header["block8"]["blocklength"].item()) self._check_fpos(fp_, fpos, 40, "block8") fp_.seek(fpos, 0) header["navigation_corrections"] = corrections @@ -591,7 +591,7 @@ def _read_header(self, fp_): dtype=_OBSERVATION_LINE_TIME_INFO_TYPE, count=1)) header["observation_time_information"] = lines_and_times - fpos = fpos + int(header["block9"]["blocklength"][0]) + fpos = fpos + 
int(header["block9"]["blocklength"].item()) self._check_fpos(fp_, fpos, 40, "block9") fp_.seek(fpos, 0) @@ -604,12 +604,12 @@ def _read_header(self, fp_): for _i in range(num_err_info_data): err_info_data.append(np.fromfile(fp_, dtype=_ERROR_LINE_INFO_TYPE, count=1)) header["error_information_data"] = err_info_data - fpos = fpos + int(header["block10"]["blocklength"][0]) + fpos = fpos + int(header["block10"]["blocklength"].item()) self._check_fpos(fp_, fpos, 40, "block10") fp_.seek(fpos, 0) header["block11"] = np.fromfile(fp_, dtype=_SPARE_TYPE, count=1) - fpos = fpos + int(header["block11"]["blocklength"][0]) + fpos = fpos + int(header["block11"]["blocklength"].item()) self._check_fpos(fp_, fpos, 0, "block11") fp_.seek(fpos, 0) @@ -617,8 +617,8 @@ def _read_header(self, fp_): def _read_data(self, fp_, header, resolution): """Read data block.""" - nlines = int(header["block2"]["number_of_lines"][0]) - ncols = int(header["block2"]["number_of_columns"][0]) + nlines = int(header["block2"]["number_of_lines"].item()) + ncols = int(header["block2"]["number_of_columns"].item()) chunks = normalize_low_res_chunks( ("auto", "auto"), (nlines, ncols), From 66bc5b5290901d17aa303e101b5af29ff55a851d Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 16:29:48 +0200 Subject: [PATCH 583/702] Use nanosecond precision times in MVIRI FIDUCEO tests --- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index b03336c230..301f9751a3 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -61,10 +61,10 @@ {"sun_earth_distance_correction_applied": True, "sun_earth_distance_correction_factor": 1.} ) -acq_time_vis_exp = [np.datetime64("1970-01-01 00:30"), - np.datetime64("1970-01-01 00:30"), - np.datetime64("1970-01-01 02:30"), - np.datetime64("1970-01-01 02:30")] +acq_time_vis_exp = [np.datetime64("1970-01-01 00:30").astype("datetime64[ns]"), + np.datetime64("1970-01-01 00:30").astype("datetime64[ns]"), + np.datetime64("1970-01-01 02:30").astype("datetime64[ns]"), + np.datetime64("1970-01-01 02:30").astype("datetime64[ns]")] vis_counts_exp = xr.DataArray( np.array( [[0., 17., 34., 51.], @@ -124,8 +124,8 @@ }, attrs=attrs_exp ) -acq_time_ir_wv_exp = [np.datetime64("1970-01-01 00:30"), - np.datetime64("1970-01-01 02:30")] +acq_time_ir_wv_exp = [np.datetime64("1970-01-01 00:30").astype("datetime64[ns]"), + np.datetime64("1970-01-01 02:30").astype("datetime64[ns]")] wv_counts_exp = xr.DataArray( np.array( [[0, 85], @@ -277,7 +277,8 @@ def fixture_fake_dataset(): dtype=np.uint8 ) ) - time = np.arange(4).astype("datetime64[h]").reshape(2, 2) + time = np.arange(4) * 60 * 60 * 1e9 + time = time.astype("datetime64[ns]").reshape(2, 2) ds = xr.Dataset( data_vars={ "count_vis": (("y", "x"), count_vis), From 3f552560d99d8209516cff4dfd033c15a11d9fab Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 12 Dec 2023 14:39:46 +0000 Subject: [PATCH 584/702] Repair AHI HSD tests. 
--- satpy/tests/reader_tests/test_ahi_hsd.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 2075b88947..7bf1562e1c 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -48,8 +48,8 @@ "compression_flag_for_data": 0, "hblock_number": 2, "number_of_bits_per_pixel": 16, - "number_of_columns": [11000], - "number_of_lines": [1100], + "number_of_columns": np.array([11000]), + "number_of_lines": np.array([1100]), "spare": "", } FAKE_PROJ_INFO: InfoDict = { @@ -135,8 +135,8 @@ def test_region(self, fromfile, np2str): "compression_flag_for_data": 0, "hblock_number": 2, "number_of_bits_per_pixel": 16, - "number_of_columns": [1000], - "number_of_lines": [1000], + "number_of_columns": np.array([1000]), + "number_of_lines": np.array([1000]), "spare": ""} area_def = fh.get_area_def(None) @@ -183,8 +183,8 @@ def test_segment(self, fromfile, np2str): "compression_flag_for_data": 0, "hblock_number": 2, "number_of_bits_per_pixel": 16, - "number_of_columns": [11000], - "number_of_lines": [1100], + "number_of_columns": np.array([11000]), + "number_of_lines": np.array([1100]), "spare": ""} area_def = fh.get_area_def(None) From 372e1c935dac8e1b171b298979ba62b506281f90 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 17:00:55 +0200 Subject: [PATCH 585/702] Filter out pyproj warnings --- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 32 ++++++++++--------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 301f9751a3..56bbd5212f 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -26,6 +26,7 @@ import numpy as np import pytest import xarray as xr +from pyproj import CRS from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_radius_parameters @@ -232,17 +233,12 @@ dims=("y", "x"), attrs=attrs_exp ) +projection = CRS(f"+proj=geos +lon_0=57.0 +h={ALTITUDE} +a={EQUATOR_RADIUS} +b={POLE_RADIUS}") area_vis_exp = AreaDefinition( area_id="geos_mviri_4x4", proj_id="geos_mviri_4x4", description="MVIRI Geostationary Projection", - projection={ - "proj": "geos", - "lon_0": 57.0, - "h": ALTITUDE, - "a": EQUATOR_RADIUS, - "b": POLE_RADIUS - }, + projection=projection, width=4, height=4, area_extent=[5621229.74392, 5621229.74392, -5621229.74392, -5621229.74392] @@ -501,16 +497,22 @@ def test_angle_cache(self, interp_tiepoints, file_handler): def test_get_area_definition(self, file_handler, name, resolution, area_exp): """Test getting area definitions.""" + import warnings + dataset_id = make_dataid(name=name, resolution=resolution) area = file_handler.get_area_def(dataset_id) - a, b = proj4_radius_parameters(area.proj_dict) - a_exp, b_exp = proj4_radius_parameters(area_exp.proj_dict) - assert a == a_exp - assert b == b_exp - assert area.width == area_exp.width - assert area.height == area_exp.height - for key in ["h", "lon_0", "proj", "units"]: - assert area.proj_dict[key] == area_exp.proj_dict[key] + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + a, b = proj4_radius_parameters(area.proj_dict) + a_exp, b_exp = proj4_radius_parameters(area_exp.proj_dict) + assert a == a_exp + assert b == b_exp + 
assert area.width == area_exp.width + assert area.height == area_exp.height + for key in ["h", "lon_0", "proj", "units"]: + assert area.proj_dict[key] == area_exp.proj_dict[key] np.testing.assert_allclose(area.area_extent, area_exp.area_extent) def test_calib_exceptions(self, file_handler): From 5b7bbe73b226bc9d83ebdc63b83e7f65e9d0debe Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 09:45:12 +0200 Subject: [PATCH 586/702] Suppress PROJ4 UserWarning on lost accuracy --- satpy/tests/reader_tests/test_nwcsaf_nc.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/satpy/tests/reader_tests/test_nwcsaf_nc.py b/satpy/tests/reader_tests/test_nwcsaf_nc.py index 07d6cee174..a3235e99e7 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_nc.py +++ b/satpy/tests/reader_tests/test_nwcsaf_nc.py @@ -268,13 +268,12 @@ def test_sensor_name_sat_id(self, nwcsaf_geo_ct_filehandler, platform, instrumen def test_get_area_def(self, nwcsaf_geo_ct_filehandler): """Test that get_area_def() returns proper area.""" dsid = {"name": "ct"} - - _check_area_def(nwcsaf_geo_ct_filehandler.get_area_def(dsid)) + _check_filehandler_area_def(nwcsaf_geo_ct_filehandler, dsid) def test_get_area_def_km(self, nwcsaf_old_geo_ct_filehandler): """Test that get_area_def() returns proper area when the projection is in km.""" dsid = {"name": "ct"} - _check_area_def(nwcsaf_old_geo_ct_filehandler.get_area_def(dsid)) + _check_filehandler_area_def(nwcsaf_old_geo_ct_filehandler, dsid) def test_scale_dataset_attr_removal(self, nwcsaf_geo_ct_filehandler): """Test the scaling of the dataset and removal of obsolete attributes.""" @@ -506,12 +505,19 @@ def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(sel np.testing.assert_allclose(res.attrs["palette_meanings"], palette_meanings * COT_SCALE + COT_OFFSET) -def _check_area_def(area_definition): +def _check_filehandler_area_def(file_handler, dsid): + import warnings + correct_h = float(PROJ["gdal_projection"].split("+h=")[-1]) correct_a = float(PROJ["gdal_projection"].split("+a=")[-1].split()[0]) - assert area_definition.proj_dict["h"] == correct_h - assert area_definition.proj_dict["a"] == correct_a - assert area_definition.proj_dict["units"] == "m" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + area_definition = file_handler.get_area_def(dsid) + assert area_definition.proj_dict["h"] == correct_h + assert area_definition.proj_dict["a"] == correct_a + assert area_definition.proj_dict["units"] == "m" correct_extent = (PROJ["gdal_xgeo_up_left"], PROJ["gdal_ygeo_low_right"], PROJ["gdal_xgeo_low_right"], From e3160bf0ca5874cb2b99ac9b6148ae2094e35866 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 10:18:34 +0200 Subject: [PATCH 587/702] Use modern chunk size setting in nwcsaf_nc reader --- satpy/readers/nwcsaf_nc.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/nwcsaf_nc.py b/satpy/readers/nwcsaf_nc.py index 5d8320f954..e3b3dc2d3d 100644 --- a/satpy/readers/nwcsaf_nc.py +++ b/satpy/readers/nwcsaf_nc.py @@ -36,11 +36,11 @@ from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import unzip_file -from satpy.utils import get_legacy_chunk_size +from satpy.utils import get_chunk_size_limit logger = logging.getLogger(__name__) -CHUNK_SIZE = get_legacy_chunk_size() +CHUNK_SIZE = get_chunk_size_limit() SENSOR = {"NOAA-19": "avhrr-3", 
"NOAA-18": "avhrr-3", From fbb437331fd983a1cb3553f9b9bd298d6b4fccdb Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 10:30:39 +0200 Subject: [PATCH 588/702] Use DataArray.drop_vars() instead of deprecated .drop() --- satpy/readers/ahi_l2_nc.py | 4 ++-- satpy/readers/goes_imager_nc.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 17823fed1e..7785c3994d 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -100,8 +100,8 @@ def get_dataset(self, key, info): # Data has 'Latitude' and 'Longitude' coords, these must be replaced. variable = variable.rename({"Rows": "y", "Columns": "x"}) - variable = variable.drop("Latitude") - variable = variable.drop("Longitude") + variable = variable.drop_vars("Latitude") + variable = variable.drop_vars("Longitude") variable.attrs.update(key.to_dict()) return variable diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index 214852fffd..969151cb34 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -1087,7 +1087,7 @@ def get_dataset(self, key, info): # Set proper dimension names data = data.rename({"xc": "x", "yc": "y"}) - data = data.drop("time") + data = data.drop_vars("time") # Update metadata self._update_metadata(data, ds_info=info) From 12192b1ac153a124892fa3a45dae112bbf027bbd Mon Sep 17 00:00:00 2001 From: Youva <120452807+YouvaEUMex@users.noreply.github.com> Date: Wed, 13 Dec 2023 09:33:01 +0100 Subject: [PATCH 589/702] Update satpy/tests/reader_tests/test_fci_l2_nc.py Co-authored-by: Martin Raspaud --- satpy/tests/reader_tests/test_fci_l2_nc.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index 7853cba900..e7a312d4b8 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -508,10 +508,10 @@ def test_byte_extraction(self): assert dataset.values == 0 -class TestFciL2NCAMVFileHandler(unittest.TestCase): +class TestFciL2NCAMVFileHandler: """Test the FciL2NCFileHandler reader.""" - def setUp(self): + def setup_method(self): """Set up the test by creating a test file and opening it with the reader.""" # Easiest way to test the reader is to create a test netCDF file on the fly # Create unique filenames to prevent race conditions when tests are run in parallel From 43b2c209909b4024f83b5c64018c62f37b6f6698 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 12:36:43 +0200 Subject: [PATCH 590/702] Use datetime64[ns] for JMA HRIT readers --- satpy/readers/hrit_jma.py | 7 ++++--- satpy/tests/reader_tests/test_ahi_hrit.py | 10 +++++----- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/satpy/readers/hrit_jma.py b/satpy/readers/hrit_jma.py index c273b9b578..0c88faf46b 100644 --- a/satpy/readers/hrit_jma.py +++ b/satpy/readers/hrit_jma.py @@ -198,9 +198,10 @@ def mjd2datetime64(mjd): """Convert Modified Julian Day (MJD) to datetime64.""" epoch = np.datetime64("1858-11-17 00:00") - day2usec = 24 * 3600 * 1E6 - mjd_usec = (mjd * day2usec).astype(np.int64).astype("timedelta64[us]") - return epoch + mjd_usec + day2nsec = 24 * 3600 * 1E9 + mjd_nsec = (mjd * day2nsec).astype(np.int64).astype("timedelta64[ns]") + + return epoch + mjd_nsec class HRITJMAFileHandler(HRITFileHandler): diff --git a/satpy/tests/reader_tests/test_ahi_hrit.py b/satpy/tests/reader_tests/test_ahi_hrit.py index 
1dbf36c66b..3db8e2d094 100644 --- a/satpy/tests/reader_tests/test_ahi_hrit.py +++ b/satpy/tests/reader_tests/test_ahi_hrit.py @@ -119,6 +119,7 @@ def test_init(self): # Check if scanline timestamps are there (dedicated test below) assert isinstance(reader.acq_time, np.ndarray) + assert reader.acq_time.dtype == np.dtype("datetime64[ns]") # Check platform assert reader.platform == HIMAWARI8 @@ -305,14 +306,13 @@ def test_get_dataset(self, base_get_dataset): def test_mjd2datetime64(self): """Test conversion from modified julian day to datetime64.""" from satpy.readers.hrit_jma import mjd2datetime64 - assert mjd2datetime64(np.array([0])) == np.datetime64("1858-11-17", "us") - assert mjd2datetime64(np.array([40587.5])) == np.datetime64("1970-01-01 12:00", "us") + assert mjd2datetime64(np.array([0])) == np.datetime64("1858-11-17", "ns") + assert mjd2datetime64(np.array([40587.5])) == np.datetime64("1970-01-01 12:00", "ns") def test_get_acq_time(self): """Test computation of scanline acquisition times.""" dt_line = np.arange(1, 11000+1).astype("timedelta64[s]") - acq_time_exp = np.datetime64("1970-01-01", "us") + dt_line - + acq_time_exp = np.datetime64("1970-01-01", "ns") + dt_line for platform in ["Himawari-8", "MTSAT-2"]: # Results are not exactly identical because timestamps are stored in # the header with only 6 decimals precision (max diff here: 45 msec). @@ -320,7 +320,7 @@ def test_get_acq_time(self): reader = self._get_reader(mda=mda) np.testing.assert_allclose(reader.acq_time.astype(np.int64), acq_time_exp.astype(np.int64), - atol=45000) + atol=45000000) def test_start_time_from_filename(self): """Test that by default the datetime in the filename is returned.""" From 566ab73dbc0359e081bf0a130821aa10d948de6c Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 12:42:37 +0200 Subject: [PATCH 591/702] Use datetime64[ns] in GOES imager netCDF reader tests --- satpy/tests/reader_tests/test_goes_imager_nc_eum.py | 2 +- satpy/tests/reader_tests/test_goes_imager_nc_noaa.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_goes_imager_nc_eum.py b/satpy/tests/reader_tests/test_goes_imager_nc_eum.py index 68472aeb1a..e192bbe63f 100644 --- a/satpy/tests/reader_tests/test_goes_imager_nc_eum.py +++ b/satpy/tests/reader_tests/test_goes_imager_nc_eum.py @@ -52,7 +52,7 @@ def setUp(self, xr_): xr_.open_dataset.return_value = xr.Dataset( {"data": xr.DataArray(data=self.radiance, dims=("time", "yc", "xc")), - "time": xr.DataArray(data=np.array([0], dtype="datetime64[ms]"), + "time": xr.DataArray(data=np.array([0], dtype="datetime64[ns]"), dims=("time",)), "bands": xr.DataArray(data=np.array([1]))}, attrs={"Satellite Sensor": "G-15"}) diff --git a/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py b/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py index 1fd5e65cac..994f1336fd 100644 --- a/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py +++ b/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py @@ -58,7 +58,7 @@ def setUp(self, xr_): "lon": xr.DataArray(data=self.dummy2d, dims=("yc", "xc")), "lat": xr.DataArray(data=self.dummy2d, dims=("yc", "xc")), "time": xr.DataArray(data=np.array([self.time], - dtype="datetime64[ms]"), + dtype="datetime64[ns]"), dims=("time",)), "bands": xr.DataArray(data=np.array([self.band]))}, attrs={"Satellite Sensor": "G-15"}) @@ -238,7 +238,7 @@ def dataset(self, lons_lats, channel_id): dims=("time", "yc", "xc") ) time = xr.DataArray( - [np.datetime64("2018-01-01 12:00:00")], + 
[np.datetime64("2018-01-01 12:00:00").astype("datetime64[ns]")], dims="time" ) bands = xr.DataArray([channel_id], dims="bands") @@ -369,7 +369,7 @@ def setUp(self, xr_): {"data": xr.DataArray(data=self.counts, dims=("time", "yc", "xc")), "lon": xr.DataArray(data=self.lon, dims=("yc", "xc")), "lat": xr.DataArray(data=self.lat, dims=("yc", "xc")), - "time": xr.DataArray(data=np.array([0], dtype="datetime64[ms]"), + "time": xr.DataArray(data=np.array([0], dtype="datetime64[ns]"), dims=("time",)), "bands": xr.DataArray(data=np.array([1]))}, attrs={"Satellite Sensor": "G-15"}) From 1b14e764212cf0af7076f3458593eeb9c7525aa1 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Wed, 13 Dec 2023 13:46:08 +0000 Subject: [PATCH 592/702] Replace the reader for a lazy reader using cached_property decorator --- satpy/readers/fci_l2_nc.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 03f8e94f55..78020cdcf9 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -22,6 +22,7 @@ import xarray as xr from pyresample import geometry +from satpy._compat import cached_property from satpy.readers._geos_area import get_geos_area_naming, make_ext from satpy.readers.eum_base import get_service_mode from satpy.readers.file_handlers import BaseFileHandler @@ -153,6 +154,7 @@ def __init__(self, filename, filename_info, filetype_info, with_area_definition= self._projection = self.nc["mtg_geos_projection"] self.multi_dims = {"maximum_number_of_layers": "layer", "number_of_vis_channels": "vis_channel_id"} + def get_area_def(self, key): """Return the area definition.""" try: @@ -408,14 +410,15 @@ def __init__(self, filename, filename_info, filetype_info): """Open the NetCDF file with xarray and prepare for dataset reading.""" super().__init__(filename, filename_info, filetype_info) - # Use xarray's default netcdf4 engine to open the file - self.nc = xr.open_dataset( + @cached_property + def nc(self): + """Read the file.""" + return xr.open_dataset( self.filename, decode_cf=True, mask_and_scale=True, chunks={ "number_of_images": CHUNK_SIZE, - # 'number_of_height_estimates': CHUNK_SIZE, "number_of_winds": CHUNK_SIZE } ) From a574c51015c2b6a35381dcd70a897d64b957eb58 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 16:01:31 +0200 Subject: [PATCH 593/702] Use datetime[ns] in SEVIRI HRIT tests --- satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py index b9ff1f95ea..c332f0d3f9 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py @@ -201,6 +201,7 @@ def get_acq_time_cds(start_time, nlines): tline["days"][1:-1] = days_since_1958 * np.ones(nlines - 2) offset_second = (start_time - start_time.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds()*1000 tline["milliseconds"][1:-1] = np.arange(nlines - 2)+offset_second + return tline @@ -211,7 +212,8 @@ def get_acq_time_exp(start_time, nlines): tline_exp[-1] = np.datetime64("NaT") tline_exp[1:-1] = np.datetime64(start_time) tline_exp[1:-1] += np.arange(nlines - 2).astype("timedelta64[ms]") - return tline_exp + + return tline_exp.astype("datetime64[ns]") def get_attrs_exp(projection_longitude=0.0): From 8b7c8416089b562ae5d277e3a663629031a4c8f8 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: 
Wed, 13 Dec 2023 16:05:51 +0200 Subject: [PATCH 594/702] Use datetime[ns] in SEVIRI native tests --- satpy/tests/reader_tests/test_seviri_l1b_native.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index ba7cf63447..6382517b55 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -1048,10 +1048,10 @@ def _exp_data_array(): "standard_name": "counts", } ) - expected["acq_time"] = ("y", [np.datetime64("1958-01-02 00:00:01"), - np.datetime64("1958-01-02 00:00:02"), - np.datetime64("1958-01-02 00:00:03"), - np.datetime64("1958-01-02 00:00:04")]) + expected["acq_time"] = ("y", [np.datetime64("1958-01-02 00:00:01").astype("datetime64[ns]"), + np.datetime64("1958-01-02 00:00:02").astype("datetime64[ns]"), + np.datetime64("1958-01-02 00:00:03").astype("datetime64[ns]"), + np.datetime64("1958-01-02 00:00:04").astype("datetime64[ns]")]) return expected def test_get_dataset_with_raw_metadata(self, file_handler): From 822f04bddf5f8f92e9bcc8bb950ee6a2781bd293 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 16:08:05 +0200 Subject: [PATCH 595/702] Use datetime[ns] in SEVIRI netCDF4 tests --- satpy/tests/reader_tests/test_seviri_l1b_nc.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_nc.py b/satpy/tests/reader_tests/test_seviri_l1b_nc.py index f6a54aa60e..42e038a766 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_nc.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_nc.py @@ -337,8 +337,8 @@ def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_ "wavelength": "wavelength", "standard_name": "standard_name" } - expected["acq_time"] = ("y", [np.datetime64("1958-01-02 00:00:01"), - np.datetime64("1958-01-02 00:00:02")]) + expected["acq_time"] = ("y", [np.datetime64("1958-01-02 00:00:01").astype("datetime64[ns]"), + np.datetime64("1958-01-02 00:00:02").astype("datetime64[ns]")]) expected = expected[::-1] # reader flips data upside down if mask_bad_quality_scan_lines: expected = file_handler._mask_bad_quality(expected, dataset_info) From a50c44aca2413eb18d8d2f274e221b5cae2cb873 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 16:25:40 +0200 Subject: [PATCH 596/702] Fix mj2datetime64 usage after nanosecond update --- satpy/readers/gms/gms5_vissr_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/gms/gms5_vissr_l1b.py b/satpy/readers/gms/gms5_vissr_l1b.py index c8a88dfe25..51f33d657a 100644 --- a/satpy/readers/gms/gms5_vissr_l1b.py +++ b/satpy/readers/gms/gms5_vissr_l1b.py @@ -307,7 +307,7 @@ def _get_orbital_parameters(self): } def _get_time_parameters(self): - start_time = mjd2datetime64(self._mode_block["observation_time_mjd"]) + start_time = mjd2datetime64(self._mode_block["observation_time_mjd"]).astype("datetime64[us]") start_time = start_time.astype(dt.datetime).replace(second=0, microsecond=0) end_time = start_time + dt.timedelta( minutes=25 From 6e3bc601515943e20d312fc5b01e0f5dc8d1bd80 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Wed, 13 Dec 2023 14:29:06 +0000 Subject: [PATCH 597/702] Revert all attempt to introduce tmp_path pytest fixture --- satpy/tests/reader_tests/test_fci_l2_nc.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py 
b/satpy/tests/reader_tests/test_fci_l2_nc.py index e7a312d4b8..44906c5040 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -509,7 +509,7 @@ def test_byte_extraction(self): assert dataset.values == 0 class TestFciL2NCAMVFileHandler: - """Test the FciL2NCFileHandler reader.""" + """Test the FciL2NCAMVFileHandler reader.""" def setup_method(self): """Set up the test by creating a test file and opening it with the reader.""" @@ -548,7 +548,8 @@ def setup_method(self): self.fh = FciL2NCAMVFileHandler(filename=self.test_file, filename_info={"channel":"test_channel"}, - filetype_info={}) + filetype_info={} + ) def tearDown(self): """Remove the previously created test file.""" From 399086ea7565ba7fd2e6c717f5a8801f8b471d5d Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 16:32:17 +0200 Subject: [PATCH 598/702] Use datetime64[ns] in GOES imager EUM test --- satpy/tests/reader_tests/test_goes_imager_nc_eum.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_goes_imager_nc_eum.py b/satpy/tests/reader_tests/test_goes_imager_nc_eum.py index e192bbe63f..189b76d5cd 100644 --- a/satpy/tests/reader_tests/test_goes_imager_nc_eum.py +++ b/satpy/tests/reader_tests/test_goes_imager_nc_eum.py @@ -139,7 +139,7 @@ def setUp(self, xr_): xr_.open_dataset.return_value = xr.Dataset( {"data": xr.DataArray(data=self.reflectance, dims=("time", "yc", "xc")), - "time": xr.DataArray(data=np.array([0], dtype="datetime64[ms]"), + "time": xr.DataArray(data=np.array([0], dtype="datetime64[ns]"), dims=("time",)), "bands": xr.DataArray(data=np.array([1]))}, attrs={"Satellite Sensor": "G-15"}) From 05a9a0b62bac25da4cdaf3f1aa437817c790160d Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 16:53:31 +0200 Subject: [PATCH 599/702] Use .item() to get singleton array elements in GOES imager readers --- satpy/readers/goes_imager_nc.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index 969151cb34..8ec3219eec 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -620,7 +620,7 @@ def __init__(self, filename, filename_info, filetype_info, geo_data=None): self.platform_name = self._get_platform_name( self.nc.attrs["Satellite Sensor"]) self.platform_shortname = self.platform_name.replace("-", "").lower() - self.gvar_channel = int(self.nc["bands"].values) + self.gvar_channel = int(self.nc["bands"].item()) self.sector = self._get_sector(channel=self.gvar_channel, nlines=self.nlines, ncols=self.ncols) @@ -731,9 +731,9 @@ def _get_area_def_uniform_sampling(self, lon0, channel): def start_time(self): """Start timestamp of the dataset.""" dt = self.nc["time"].dt - return datetime(year=int(dt.year), month=int(dt.month), day=int(dt.day), - hour=int(dt.hour), minute=int(dt.minute), - second=int(dt.second), microsecond=int(dt.microsecond)) + return datetime(year=int(dt.year.item()), month=int(dt.month.item()), day=int(dt.day.item()), + hour=int(dt.hour.item()), minute=int(dt.minute.item()), + second=int(dt.second.item()), microsecond=int(dt.microsecond.item())) @property def end_time(self): From 71d842f65e1f6ef06e538696cc4260d1d0bebf03 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 17:27:57 +0200 Subject: [PATCH 600/702] Use .item() to get singleton array elements in EUM reader base --- satpy/readers/eum_base.py | 8 ++++---- satpy/tests/reader_tests/test_eum_base.py | 
7 ++++--- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/satpy/readers/eum_base.py b/satpy/readers/eum_base.py index 916ba9d444..3cbbb46433 100644 --- a/satpy/readers/eum_base.py +++ b/satpy/readers/eum_base.py @@ -33,14 +33,14 @@ def timecds2datetime(tcds): Works both with a dictionary and a numpy record_array. """ - days = int(tcds["Days"]) - milliseconds = int(tcds["Milliseconds"]) + days = int(tcds["Days"].item()) + milliseconds = int(tcds["Milliseconds"].item()) try: - microseconds = int(tcds["Microseconds"]) + microseconds = int(tcds["Microseconds"].item()) except (KeyError, ValueError): microseconds = 0 try: - microseconds += int(tcds["Nanoseconds"]) / 1000. + microseconds += int(tcds["Nanoseconds"].item()) / 1000. except (KeyError, ValueError): pass diff --git a/satpy/tests/reader_tests/test_eum_base.py b/satpy/tests/reader_tests/test_eum_base.py index 54de68201d..55ac977b59 100644 --- a/satpy/tests/reader_tests/test_eum_base.py +++ b/satpy/tests/reader_tests/test_eum_base.py @@ -39,17 +39,18 @@ class TestMakeTimeCdsDictionary(unittest.TestCase): def test_fun(self): """Test function for TestMakeTimeCdsDictionary.""" # time_cds_short - tcds = {"Days": 1, "Milliseconds": 2} + tcds = {"Days": np.array(1), "Milliseconds": np.array(2)} expected = datetime(1958, 1, 2, 0, 0, 0, 2000) assert timecds2datetime(tcds) == expected # time_cds - tcds = {"Days": 1, "Milliseconds": 2, "Microseconds": 3} + tcds = {"Days": np.array(1), "Milliseconds": np.array(2), "Microseconds": np.array(3)} expected = datetime(1958, 1, 2, 0, 0, 0, 2003) assert timecds2datetime(tcds) == expected # time_cds_expanded - tcds = {"Days": 1, "Milliseconds": 2, "Microseconds": 3, "Nanoseconds": 4} + tcds = {"Days": np.array(1), "Milliseconds": np.array(2), "Microseconds": np.array(3), + "Nanoseconds": np.array(4)} expected = datetime(1958, 1, 2, 0, 0, 0, 2003) assert timecds2datetime(tcds) == expected From fe9c1db295d29585739fa1ba3f25a2b6933cb18e Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 07:34:16 +0000 Subject: [PATCH 601/702] Add tests for default functionality of HighCloudCompositor and LowCloudCompositor. 
From 6dd9c4201bbbe564da9d4180294b0c734fb4b220 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 09:39:15 +0200 Subject: [PATCH 602/702] Fix/supress PROJ4 warnings --- satpy/readers/fci_l2_nc.py | 2 +- satpy/readers/geocat.py | 3 +- satpy/readers/satpy_cf_nc.py | 9 ++- satpy/tests/reader_tests/test_ahi_hsd.py | 12 +++- satpy/tests/reader_tests/test_ahi_l2_nc.py | 8 ++- satpy/tests/reader_tests/test_geos_area.py | 19 ++++-- .../reader_tests/test_goes_imager_hrit.py | 16 +++-- satpy/tests/reader_tests/test_gpm_imerg.py | 8 ++- satpy/tests/reader_tests/test_hrit_base.py | 9 ++- .../reader_tests/test_insat3d_img_l1b_h5.py | 10 ++- satpy/tests/reader_tests/test_nwcsaf_msg.py | 8 ++- .../reader_tests/test_oceancolorcci_l3_nc.py | 8 ++- satpy/tests/reader_tests/test_osisaf_l3.py | 63 ++++++++++++------- satpy/tests/reader_tests/test_satpy_cf_nc.py | 7 ++- .../reader_tests/test_seviri_l1b_hrit.py | 13 +++- 15 files changed, 146 insertions(+), 49 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index c387326f89..b03cbdfdaa 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -381,7 +381,7 @@ def _construct_area_def(self, dataset_id): stand_area_def.area_id, stand_area_def.description, "", - stand_area_def.proj_dict, + stand_area_def.crs, stand_area_def.x_size, stand_area_def.y_size, mod_area_extent) diff --git a/satpy/readers/geocat.py b/satpy/readers/geocat.py index 185e7d3c13..852119b02e 100644 --- a/satpy/readers/geocat.py +++ b/satpy/readers/geocat.py @@ -36,7 +36,6 @@ import numpy as np from pyproj import Proj from pyresample import geometry -from pyresample.utils import proj4_str_to_dict from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4 @@ -274,7 +273,7 @@ def get_area_def(self, dsid): area_name, area_name, area_name, - proj4_str_to_dict(proj), + proj, lon.shape[1], lon.shape[0], area_extent=extents, diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 7a26ead72b..073c8d4cf5 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -318,8 +318,15 @@ def get_dataset(self, ds_id, ds_info): def get_area_def(self, dataset_id): """Get area definition from CF complient netcdf.""" + import warnings + try: - area = AreaDefinition.from_cf(self.filename) + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + # FIXME: This should be silenced in Pyresample + area = AreaDefinition.from_cf(self.filename) return area except ValueError: # No CF compliant projection information was found in the netcdf file or diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 7bf1562e1c..6ed3267723 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -140,7 +140,11 @@ def test_region(self, fromfile, np2str): "spare": ""} area_def = fh.get_area_def(None) - proj_dict = area_def.proj_dict + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + proj_dict = area_def.proj_dict a, b = proj4_radius_parameters(proj_dict) assert a == 6378137.0 assert b == 6356752.3 @@ -188,7 +192,11 @@ def test_segment(self, fromfile, np2str): "spare": ""} area_def = fh.get_area_def(None) - proj_dict = area_def.proj_dict + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + 
message=r"You will likely lose important projection information", + category=UserWarning) + proj_dict = area_def.proj_dict a, b = proj4_radius_parameters(proj_dict) assert a == 6378137.0 assert b == 6356752.3 diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index 7d4050ecf0..a90f24ea5d 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -75,6 +75,8 @@ def test_startend(himl2_filename): def test_ahi_l2_area_def(himl2_filename, caplog): """Test reader handles area definition correctly.""" + import warnings + ps = "+proj=geos +lon_0=140.7 +h=35785863 +x_0=0 +y_0=0 +a=6378137 +rf=298.257024882273 +units=m +no_defs +type=crs" # Check case where input data is correct size. @@ -84,7 +86,11 @@ def test_ahi_l2_area_def(himl2_filename, caplog): assert area_def.width == dimensions["Columns"] assert area_def.height == dimensions["Rows"] assert np.allclose(area_def.area_extent, exp_ext) - assert area_def.proj4_string == ps + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert area_def.proj4_string == ps # Check case where input data is incorrect size. fh = ahil2_filehandler(himl2_filename) diff --git a/satpy/tests/reader_tests/test_geos_area.py b/satpy/tests/reader_tests/test_geos_area.py index fb0bb6f19b..077c0b0c7e 100644 --- a/satpy/tests/reader_tests/test_geos_area.py +++ b/satpy/tests/reader_tests/test_geos_area.py @@ -138,19 +138,26 @@ def test_get_xy_from_linecol(self): def test_get_area_definition(self): """Test the retrieval of the area definition.""" + import warnings + from pyresample.utils import proj4_radius_parameters + pdict, extent = self.make_pdict_ext(1, "N2S") good_res = (-3000.4032785810186, -3000.4032785810186) a_def = get_area_definition(pdict, extent) assert a_def.area_id == pdict["a_name"] assert a_def.resolution == good_res - assert a_def.proj_dict["proj"] == "geos" - assert a_def.proj_dict["units"] == "m" - a, b = proj4_radius_parameters(a_def.proj_dict) - assert a == 6378169 - assert b == 6356583.8 - assert a_def.proj_dict["h"] == 35785831 + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert a_def.proj_dict["proj"] == "geos" + assert a_def.proj_dict["units"] == "m" + a, b = proj4_radius_parameters(a_def.proj_dict) + assert a == 6378169 + assert b == 6356583.8 + assert a_def.proj_dict["h"] == 35785831 def test_sampling_to_lfac_cfac(self): """Test conversion from angular sampling to line/column offset.""" diff --git a/satpy/tests/reader_tests/test_goes_imager_hrit.py b/satpy/tests/reader_tests/test_goes_imager_hrit.py index cafe7c1e2c..48078d0f0e 100644 --- a/satpy/tests/reader_tests/test_goes_imager_hrit.py +++ b/satpy/tests/reader_tests/test_goes_imager_hrit.py @@ -172,6 +172,8 @@ def test_get_dataset(self, base_get_dataset): def test_get_area_def(self): """Test getting the area definition.""" + import warnings + self.reader.mda.update({ "cfac": 10216334, "lfac": 10216334, @@ -184,13 +186,17 @@ def test_get_area_def(self): resolution=3000) area = self.reader.get_area_def(dsid) - a, b = proj4_radius_parameters(area.proj_dict) + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + a, b = proj4_radius_parameters(area.proj_dict) + 
assert area.proj_dict["h"] == ALTITUDE + assert area.proj_dict["lon_0"] == 100.1640625 + assert area.proj_dict["proj"] == "geos" + assert area.proj_dict["units"] == "m" assert a == EQUATOR_RADIUS assert b == POLE_RADIUS - assert area.proj_dict["h"] == ALTITUDE - assert area.proj_dict["lon_0"] == 100.1640625 - assert area.proj_dict["proj"] == "geos" - assert area.proj_dict["units"] == "m" assert area.width == 2816 assert area.height == 464 assert area.area_id == "goes-15_goes_imager_fd_3km" diff --git a/satpy/tests/reader_tests/test_gpm_imerg.py b/satpy/tests/reader_tests/test_gpm_imerg.py index 508be247d5..96dc65bbd4 100644 --- a/satpy/tests/reader_tests/test_gpm_imerg.py +++ b/satpy/tests/reader_tests/test_gpm_imerg.py @@ -105,6 +105,8 @@ def tearDown(self): def test_load_data(self): """Test loading data.""" + import warnings + from satpy.readers import load_reader # Filename to test, needed for start and end times @@ -130,6 +132,10 @@ def test_load_data(self): assert res["IRprecipitation"].resolution == 0.1 assert res["IRprecipitation"].area.width == 3600 assert res["IRprecipitation"].area.height == 1800 - assert res["IRprecipitation"].area.proj_dict == pdict + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert res["IRprecipitation"].area.proj_dict == pdict np.testing.assert_almost_equal(res["IRprecipitation"].area.area_extent, (-179.95, -89.95, 179.95, 89.95), 5) diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index cb2dc6c3f4..133b45280e 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -221,9 +221,16 @@ def test_get_area_extent(self): def test_get_area_def(self): """Test getting an area definition.""" + import warnings + from pyresample.utils import proj4_radius_parameters + area = self.reader.get_area_def("VIS06") - proj_dict = area.proj_dict + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) assert a == 6378169.0 assert b == 6356583.8 diff --git a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py index 92aef2b906..486177d2d5 100644 --- a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py +++ b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py @@ -273,12 +273,18 @@ def insat_filehandler(insat_filename): def test_filehandler_returns_area(insat_filehandler): """Test that filehandle returns an area.""" + import warnings + fh = insat_filehandler ds_id = make_dataid(name="MIR", resolution=4000, calibration="brightness_temperature") area_def = fh.get_area_def(ds_id) - lons, lats = area_def.get_lonlats(chunks=1000) - assert "+lon_0=" + str(subsatellite_longitude) in area_def.crs.to_proj4() + _ = area_def.get_lonlats(chunks=1000) + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert "+lon_0=" + str(subsatellite_longitude) in area_def.crs.to_proj4() def test_filehandler_has_start_and_end_time(insat_filehandler): diff --git a/satpy/tests/reader_tests/test_nwcsaf_msg.py b/satpy/tests/reader_tests/test_nwcsaf_msg.py index 6d4dbfe53f..761f84d380 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_msg.py +++ 
b/satpy/tests/reader_tests/test_nwcsaf_msg.py @@ -472,6 +472,8 @@ def cut_h5_object_ref(root, attr): def test_get_area_def(self): """Get the area definition.""" + import warnings + from satpy.readers.nwcsaf_msg2013_hdf5 import Hdf5NWCSAF from satpy.tests.utils import make_dataid @@ -487,7 +489,11 @@ def test_get_area_def(self): assert area_def.area_extent[i] == pytest.approx(aext_res[i], abs=1e-4) proj_dict = AREA_DEF_DICT["proj_dict"] - assert proj_dict["proj"] == area_def.proj_dict["proj"] + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert proj_dict["proj"] == area_def.proj_dict["proj"] # Not all elements passed on Appveyor, so skip testing every single element of the proj-dict: # for key in proj_dict: # self.assertEqual(proj_dict[key], area_def.proj_dict[key]) diff --git a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py index bdb0edfb03..da99fd2d27 100644 --- a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py +++ b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py @@ -166,6 +166,8 @@ def area_exp(self): def test_get_area_def(self, area_exp, fake_file_dict): """Test area definition.""" + import warnings + reader = self._create_reader_for_resolutions([fake_file_dict["ocprod_1m"]]) res = reader.load([ds_list_all[0]]) area = res[ds_list_all[0]].attrs["area"] @@ -174,7 +176,11 @@ def test_get_area_def(self, area_exp, fake_file_dict): assert area.area_extent == area_exp.area_extent assert area.width == area_exp.width assert area.height == area_exp.height - assert area.proj_dict == area_exp.proj_dict + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert area.proj_dict == area_exp.proj_dict def test_bad_fname(self, fake_dataset, fake_file_dict): """Test case where an incorrect composite period is given.""" diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index 3fa9e5bb35..f42a1d4648 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -16,6 +16,7 @@ """Module for testing the satpy.readers.osisaf_l3 module.""" import os +import warnings from datetime import datetime import numpy as np @@ -223,11 +224,15 @@ def test_get_area_def_stere(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" - assert area_def.proj_dict["a"] == 6378273.0 - assert area_def.proj_dict["lat_0"] == -90 - assert area_def.proj_dict["lat_ts"] == -70 - assert area_def.proj_dict["lon_0"] == 0 - assert area_def.proj_dict["proj"] == "stere" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert area_def.proj_dict["a"] == 6378273.0 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lat_ts"] == -70 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "stere" assert area_def.width == 5 assert area_def.height == 2 @@ -243,10 +248,14 @@ def test_get_area_def_ease(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_lambert_azimuthal_equal_area" - assert area_def.proj_dict["R"] == 6371228 - assert area_def.proj_dict["lat_0"] == -90 - assert area_def.proj_dict["lon_0"] == 0 - 
assert area_def.proj_dict["proj"] == "laea" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert area_def.proj_dict["R"] == 6371228 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "laea" assert area_def.width == 5 assert area_def.height == 2 @@ -279,11 +288,15 @@ def test_get_area_def_stere(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" - assert area_def.proj_dict["a"] == 6378273.0 - assert area_def.proj_dict["lat_0"] == -90 - assert area_def.proj_dict["lat_ts"] == -70 - assert area_def.proj_dict["lon_0"] == 0 - assert area_def.proj_dict["proj"] == "stere" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert area_def.proj_dict["a"] == 6378273.0 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lat_ts"] == -70 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "stere" assert area_def.width == 5 assert area_def.height == 2 @@ -318,8 +331,12 @@ def test_get_area_def_grid(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_geographic_area" - assert area_def.proj_dict["datum"] == "WGS84" - assert area_def.proj_dict["proj"] == "longlat" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert area_def.proj_dict["datum"] == "WGS84" + assert area_def.proj_dict["proj"] == "longlat" assert area_def.width == 5 assert area_def.height == 2 @@ -353,11 +370,15 @@ def test_get_area_def_stere(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" - assert area_def.proj_dict["a"] == 6378273.0 - assert area_def.proj_dict["lat_0"] == -90 - assert area_def.proj_dict["lat_ts"] == -70 - assert area_def.proj_dict["lon_0"] == 0 - assert area_def.proj_dict["proj"] == "stere" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert area_def.proj_dict["a"] == 6378273.0 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lat_ts"] == -70 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "stere" assert area_def.width == 5 assert area_def.height == 2 diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index 0c22f5b3f1..f279196ab4 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -249,11 +249,14 @@ def test_write_and_read_with_area_definition(self, cf_scene, nc_filename): expected_area = cf_scene["image0"].attrs["area"] actual_area = scn_["image0"].attrs["area"] assert pytest.approx(expected_area.area_extent, 0.000001) == actual_area.area_extent - assert expected_area.proj_dict == actual_area.proj_dict + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert expected_area.proj_dict == actual_area.proj_dict assert expected_area.shape == actual_area.shape assert expected_area.area_id == actual_area.area_id 
assert expected_area.description == actual_area.description - assert expected_area.proj_dict == actual_area.proj_dict def test_write_and_read_with_swath_definition(self, cf_scene, nc_filename): """Save a dataset with a swath definition to file with cf_writer and read the data again.""" diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py index 0ce40d8dfc..e928468228 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py @@ -18,6 +18,7 @@ """The HRIT msg reader tests package.""" import unittest +import warnings from datetime import datetime from unittest import mock @@ -119,7 +120,11 @@ def test_get_area_def(self): from pyresample.utils import proj4_radius_parameters area = self.reader.get_area_def(make_dataid(name="HRV", resolution=1000)) assert area.area_extent == (-45561979844414.07, -3720765401003.719, 45602912357076.38, 77771774058.38356) - proj_dict = area.proj_dict + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) assert a == 6378169.0 assert b == pytest.approx(6356583.8) @@ -168,7 +173,11 @@ def test_get_area_def(self): """Test getting the area def.""" from pyresample.utils import proj4_radius_parameters area = self.reader.get_area_def(make_dataid(name="VIS006", resolution=3000)) - proj_dict = area.proj_dict + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) assert a == 6378169.0 assert b == pytest.approx(6356583.8) From 30e89987c430d81a6236077e3a20acbbd9ad8704 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 07:44:35 +0000 Subject: [PATCH 603/702] Add tests for default functionality of HighCloudCompositor and LowCloudCompositor. 
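Both new compositors derive from CloudCompositor, so the composite they return is the input band stacked with a computed alpha band, which is exactly the structure the tests below assert against. A condensed sketch of the call pattern under test follows; the area, the data values and the "high_clouds" name are illustrative only, not taken from the patch:

    import dask.array as da
    import numpy as np
    import xarray as xr
    from pyresample.geometry import create_area_def
    from satpy.composites import HighCloudCompositor

    area = create_area_def(area_id="demo", projection={"proj": "latlong"},
                           center=(0, 45), width=3, height=3, resolution=35)
    bt = xr.DataArray(da.full((3, 3), 250.0, chunks=-1, dtype=np.float32),
                      dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]},
                      attrs={"area": area})

    comp = HighCloudCompositor(name="high_clouds")
    res = comp([bt])          # input band plus a derived alpha band
    print(res.values.shape)   # expected to be (2, 3, 3): data stacked with alpha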
--- satpy/tests/test_composites.py | 51 ++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 70bc2abf25..2fa87c1e1b 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -937,6 +937,57 @@ def test_call(self): np.testing.assert_allclose(res, exp) +class TestHighCloudCompositor: + """Test HighCloudCompositor.""" + + def setup_method(self): + """Create test data.""" + from pyresample.geometry import create_area_def + area = create_area_def(area_id="test", projection={"proj": "latlong"}, + center=(0, 45), width=3, height=3, resolution=35) + + self.data = xr.DataArray(da.from_array([[200, 250, 300], + [200, 250, 300], + [200, 250, 300]]), + dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}, + attrs={"area": area}) + + def test_default_behaviour(self): + """Test general default functionality of compositor.""" + from satpy.composites import HighCloudCompositor + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = HighCloudCompositor(name="test") + res = comp([self.data]) + expexted_alpha = np.array([[1.0, 0.7142857, 0.0], + [1.0, 0.625, 0.0], + [1.0, 0.5555555, 0.0]]) + expected = np.stack([self.data, expexted_alpha]) + np.testing.assert_almost_equal(res.values, expected) + + +class TestLowCloudCompositor: + """Test LowCloudCompositor.""" + + def setup_method(self): + """Create test data.""" + self.btd = xr.DataArray(da.from_array([[0.0, 1.0, 10.0], [0.0, 1.0, 10.0], [0.0, 1.0, 10.0]]), + dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}) + self.bt_win = xr.DataArray(da.from_array([[250, 250, 250], [250, 250, 250], [150, 150, 150]]), + dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}) + self.lsm = xr.DataArray(da.from_array([[0., 0., 0.], [1., 1., 1.], [0., 1., 0.]]), + dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}) + + def test_default_behaviour(self): + """Test general default functionality of compositor.""" + from satpy.composites import LowCloudCompositor + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = LowCloudCompositor(name="test") + res = comp([self.btd, self.bt_win, self.lsm]) + expexted_alpha = np.array([[0.0, 0.25, 1.0], [0.0, 0.25, 1.0], [0.0, 0.0, 0.0]]) + expected = np.stack([self.btd, expexted_alpha]) + np.testing.assert_equal(res.values, expected) + + class TestSingleBandCompositor(unittest.TestCase): """Test the single-band compositor.""" From 169bfbd7f8a19528d33172b2e05a034e88c1505e Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 07:50:44 +0000 Subject: [PATCH 604/702] Set alpha chanel to transparent instead of brightness temperature difference to zero when hiding potential IR3.8 channel noise for cold cloud tops. --- satpy/composites/__init__.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index bc5a199aa0..550040e5f5 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1202,11 +1202,6 @@ def __call__(self, projectables, **kwargs): lsm = lsm.squeeze(drop=True) lsm = lsm.round() # Make sure to have whole numbers in case of smearing from resampling - # Avoid spurious false alarms caused by noise in the 3.9um channel that can occur for very cold cloud tops - # TODO Consolidate this. Should it really be set to zero and thus within the threshold range? 
What if the - # lower threshold would be changed to -1 - btd = btd.where(bt_win >= 230, 0.0) - # Call CloudCompositor for land surface pixels self.transition_min, self.transition_max = self.range_land res = super().__call__([btd.where(lsm.isin(self.values_land))], **kwargs) @@ -1218,6 +1213,10 @@ def __call__(self, projectables, **kwargs): # Compine resutls for land and sea/water surface pixels res = res.where(lsm.isin(self.values_land), res_sea) + # Make pixels with cold window channel brightness temperatures transparent to avoid spurious false + # alarms caused by noise in the 3.9um channel that can occur for very cold cloud tops + res.loc["A"] = res.sel(bands="A").where(bt_win >= 230, 0.0) + return res From 130e768f6f5d5216aa2a8945d2bd8098bf61c39d Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 07:52:35 +0000 Subject: [PATCH 605/702] Implement keyword for alpha channel inversion in CloudCompositor and use for LowCloudCompositor. Remove corresponding alpha channel inversion in enhancement recipe. --- satpy/composites/__init__.py | 9 +++++++-- satpy/etc/enhancements/generic.yaml | 4 ---- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 550040e5f5..9281dfae4f 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1015,7 +1015,7 @@ class CloudCompositor(GenericCompositor): """Detect clouds based on thresholding and use it as a mask for compositing.""" def __init__(self, name, transition_min=258.15, transition_max=298.15, - transition_gamma=3.0, **kwargs): + invert_alpha=False, transition_gamma=3.0, **kwargs): """Collect custom configuration values. Args: @@ -1028,6 +1028,7 @@ def __init__(self, name, transition_min=258.15, transition_max=298.15, """ self.transition_min = transition_min self.transition_max = transition_max + self.invert_alpha = invert_alpha self.transition_gamma = transition_gamma super(CloudCompositor, self).__init__(name, **kwargs) @@ -1050,6 +1051,9 @@ def __call__(self, projectables, **kwargs): alpha = alpha.where(data <= tr_max, 0.) alpha = alpha.where((data <= tr_min) | (data > tr_max), slope * data + offset) + if self.invert_alpha: + alpha = 1.0 - alpha + # gamma adjustment alpha **= gamma res = super(CloudCompositor, self).__call__((data, alpha), **kwargs) @@ -1155,6 +1159,7 @@ class LowCloudCompositor(CloudCompositor): def __init__(self, name, values_land=(1,), values_sea=(0,), range_land=(0.0, 4.0), range_sea=(0.0, 4.0), + invert_alpha=True, transition_gamma=1.0, **kwargs): """Init info. @@ -1182,7 +1187,7 @@ def __init__(self, name, values_land=(1,), values_sea=(0,), self.transition_gamma = transition_gamma self.transition_min = None # Placeholder for later use in CloudCompositor self.transition_max = None # Placeholder for later use in CloudCompositor - super().__init__(name, transition_gamma=transition_gamma, **kwargs) + super().__init__(name, invert_alpha=invert_alpha, transition_gamma=transition_gamma, **kwargs) def __call__(self, projectables, **kwargs): """Generate the composite. 
diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index efb9b2c6fa..dfd5b5f5c6 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -969,10 +969,6 @@ enhancements: geo_color_low_clouds: standard_name: geo_color_low_clouds operations: - - name: inverse - method: !!python/name:satpy.enhancements.invert - args: - - [False, True] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: From 2da489b9c5d48ebf1e78b9faa0db3fa353848948 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Thu, 14 Dec 2023 08:55:56 +0100 Subject: [PATCH 606/702] hvplot tests Test for areadefinition data (single band and rgb). Test for swath data (only single band) --- satpy/tests/scene_tests/test_conversions.py | 47 +++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py index a886c3fa60..9b0dd9098e 100644 --- a/satpy/tests/scene_tests/test_conversions.py +++ b/satpy/tests/scene_tests/test_conversions.py @@ -81,6 +81,53 @@ def test_geoviews_basic_with_swath(self): # we assume that if we got something back, geoviews can use it assert gv_obj is not None + def test_hvplot_basic_with_area(self): + """Test converting a Scene to hvplot with a AreaDefinition.""" + from pyresample.geometry import AreaDefinition + scn = Scene() + area = AreaDefinition("test", "test", "test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, + 2, 2, [-200, -200, 200, 200]) + scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), + "area": area, "units": "m"}) + hv_obj = scn.to_hvplot() + # we assume that if we got something back, hvplot can use it + assert hv_obj is not None + + def test_hvplot_rgb_with_area(self): + """Test converting a Scene to hvplot with a AreaDefinition.""" + from pyresample.geometry import AreaDefinition + scn = Scene() + area = AreaDefinition("test", "test", "test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, + 2, 2, [-200, -200, 200, 200]) + scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), + "area": area, "units": "m"}) + scn["ds2"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), + "area": area, "units": "m"}) + scn["ds3"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), + "area": area, "units": "m"}) + hv_obj = scn.to_hvplot() + # we assume that if we got something back, hvplot can use it + assert hv_obj is not None + + def test_hvplot_basic_with_swath(self): + """Test converting a Scene to hvplot with a SwathDefinition.""" + from pyresample.geometry import SwathDefinition + scn = Scene() + longitude = xr.DataArray(da.zeros((2, 2))) + latitude = xr.DataArray(da.zeros((2, 2))) + area = SwathDefinition(longitude, latitude) + scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), + "area": area, "units": "m"}) + hv_obj = scn.to_hvplot() + # we assume that if we got something back, hvplot can use it + assert hv_obj is not None class TestToXarrayConversion: """Test Scene.to_xarray() conversion.""" From 56138aa7c9c5365b6ada8a367414d106d97f00ac Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 08:44:13 +0000 Subject: [PATCH 607/702] Fix alpha inversion to keep dataset attributes. 
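The one-line fix below relies on xarray behaviour: binary arithmetic such as `1.0 - alpha` returns a new DataArray and, with xarray's default keep_attrs setting, drops `.attrs`, while assigning to `.data` mutates the wrapped array and keeps the metadata. A minimal, Satpy-independent illustration; the attribute content is made up:

    import numpy as np
    import xarray as xr

    alpha = xr.DataArray(np.array([0.2, 0.8]), attrs={"sensor": "seviri"})

    print((1.0 - alpha).attrs)   # {}  -> attributes lost by the arithmetic

    alpha.data = 1.0 - alpha.data
    print(alpha.attrs)           # {'sensor': 'seviri'}  -> attributes kept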
--- satpy/composites/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 9281dfae4f..8089464be6 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1052,7 +1052,7 @@ def __call__(self, projectables, **kwargs): alpha = alpha.where((data <= tr_min) | (data > tr_max), slope * data + offset) if self.invert_alpha: - alpha = 1.0 - alpha + alpha.data = 1.0 - alpha.data # gamma adjustment alpha **= gamma From 43e6ce596e235dd5d2eea5e2e102e3c5c2813cf9 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 08:46:23 +0000 Subject: [PATCH 608/702] Move initialization of parent class variables to parent class init. --- satpy/composites/__init__.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 8089464be6..cb5a10663c 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1184,10 +1184,8 @@ def __init__(self, name, values_land=(1,), values_sea=(0,), self.values_sea = values_sea if type(values_sea) in [list, tuple] else [values_sea] self.range_land = range_land self.range_sea = range_sea - self.transition_gamma = transition_gamma - self.transition_min = None # Placeholder for later use in CloudCompositor - self.transition_max = None # Placeholder for later use in CloudCompositor - super().__init__(name, invert_alpha=invert_alpha, transition_gamma=transition_gamma, **kwargs) + super().__init__(name, transition_min=None, transition_max=None, + invert_alpha=invert_alpha, transition_gamma=transition_gamma, **kwargs) def __call__(self, projectables, **kwargs): """Generate the composite. From ffa15e1bb241ea7d2b4c599f22e82137ad0d7c41 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 08:48:22 +0000 Subject: [PATCH 609/702] Fix indentation. 
--- satpy/tests/test_composites.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 2fa87c1e1b..6179fc8053 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -962,7 +962,7 @@ def test_default_behaviour(self): [1.0, 0.625, 0.0], [1.0, 0.5555555, 0.0]]) expected = np.stack([self.data, expexted_alpha]) - np.testing.assert_almost_equal(res.values, expected) + np.testing.assert_almost_equal(res.values, expected) class TestLowCloudCompositor: @@ -985,7 +985,7 @@ def test_default_behaviour(self): res = comp([self.btd, self.bt_win, self.lsm]) expexted_alpha = np.array([[0.0, 0.25, 1.0], [0.0, 0.25, 1.0], [0.0, 0.0, 0.0]]) expected = np.stack([self.btd, expexted_alpha]) - np.testing.assert_equal(res.values, expected) + np.testing.assert_equal(res.values, expected) class TestSingleBandCompositor(unittest.TestCase): From d82bc6c7d8e102c673b83b34d834a16d27cf1140 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 11:03:04 +0200 Subject: [PATCH 610/702] Handle UserWarnings from unmatching header blocks and observation times --- satpy/tests/reader_tests/test_ahi_hsd.py | 41 ++++++++++++++++-------- 1 file changed, 27 insertions(+), 14 deletions(-) diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 6ed3267723..2a2c608a46 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -331,7 +331,10 @@ def test_read_band(self, calibrate, *mocks): with _fake_hsd_handler() as fh: fh.data_info["number_of_columns"] = ncols fh.data_info["number_of_lines"] = nrows - im = fh.read_band(mock.MagicMock(), mock.MagicMock()) + with warnings.catch_warnings(): + # The header isn't valid + warnings.filterwarnings("ignore", category=UserWarning, message=r"Actual .* header size") + im = fh.read_band(mock.MagicMock(), mock.MagicMock()) # Note: Within the earth's shape get_geostationary_mask() is True but the numpy.ma mask # is False mask = im.to_masked_array().mask @@ -366,7 +369,10 @@ def test_read_band(self, calibrate, *mocks): # Test if masking space pixels disables with appropriate flag fh.mask_space = False with mock.patch("satpy.readers.ahi_hsd.AHIHSDFileHandler._mask_space") as mask_space: - fh.read_band(mock.MagicMock(), mock.MagicMock()) + with warnings.catch_warnings(): + # The header isn't valid + warnings.filterwarnings("ignore", category=UserWarning, message=r"Actual .* header size") + fh.read_band(mock.MagicMock(), mock.MagicMock()) mask_space.assert_not_called() def test_read_band_from_actual_file(self, hsd_file_jp01): @@ -377,14 +383,17 @@ def test_read_band_from_actual_file(self, hsd_file_jp01): key = {"name": "B01", "calibration": "counts", "resolution": 1000} import dask with dask.config.set({"array.chunk-size": "32MiB"}): - data = fh.read_band( - key, - { - "units": "%", - "standard_name": "toa_bidirectional_reflectance", - "wavelength": 2, - "resolution": 1000, - }) + with warnings.catch_warnings(): + # The header isn't valid + warnings.filterwarnings("ignore", category=UserWarning, message=r"Actual .* header size") + data = fh.read_band( + key, + { + "units": "%", + "standard_name": "toa_bidirectional_reflectance", + "wavelength": 2, + "resolution": 1000, + }) assert data.chunks == ((1100,) * 10, (1100,) * 10) assert data.dtype == data.compute().dtype assert data.dtype == np.float32 @@ -406,7 +415,10 @@ def test_scene_loading(self, calibrate, *mocks): 
fh.data_info["number_of_columns"] = ncols fh.data_info["number_of_lines"] = nrows scn = Scene(reader="ahi_hsd", filenames=["HS_H08_20210225_0700_B07_FLDK_R20_S0110.DAT"]) - scn.load(["B07"]) + with warnings.catch_warnings(): + # The header isn't valid + warnings.filterwarnings("ignore", category=UserWarning, message=r"Actual .* header size") + scn.load(["B07"]) im = scn["B07"] # Make sure space masking worked @@ -461,9 +473,8 @@ def test_blocklen_error(self, *mocks): # Expected and actual blocklength do not match fp_.tell.return_value = 100 - with warnings.catch_warnings(record=True) as w: + with pytest.raises(UserWarning, match=r"Actual .* header size does not match expected"): fh._check_fpos(fp_, fpos, 0, "header 1") - assert len(w) > 0 def test_is_valid_time(self): """Test that valid times are correctly identified.""" @@ -480,7 +491,9 @@ def test_time_rounding(self): mocker.return_value = True assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 3, 0, 0) mocker.return_value = False - assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 12, 0, 0) + with pytest.raises(UserWarning, + match=r"Observation timeline is fill value, not rounding observation time"): + assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 12, 0, 0) class TestAHICalibration(unittest.TestCase): From 5a84efde0e048ff8f13076bbc4e1bb38b835f657 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 11:06:09 +0200 Subject: [PATCH 611/702] Fix deprecated proj string property --- satpy/tests/reader_tests/test_ahi_l2_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index a90f24ea5d..2a697d0a4a 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -90,7 +90,7 @@ def test_ahi_l2_area_def(himl2_filename, caplog): warnings.filterwarnings("ignore", message=r"You will likely lose important projection information", category=UserWarning) - assert area_def.proj4_string == ps + assert area_def.proj_str == ps # Check case where input data is incorrect size. 
fh = ahil2_filehandler(himl2_filename) From 1225b077912251b0d760a6393e8a06a8fd203614 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 11:17:02 +0200 Subject: [PATCH 612/702] Filter orbit polynomial warning in tests --- .../test_seviri_l1b_hrit_setup.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py index c332f0d3f9..a885a5becc 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py @@ -49,6 +49,8 @@ def new_read_prologue(self): def get_fake_file_handler(observation_start_time, nlines, ncols, projection_longitude=0, orbit_polynomials=ORBIT_POLYNOMIALS): """Create a mocked SEVIRI HRIT file handler.""" + import warnings + prologue = get_fake_prologue(projection_longitude, orbit_polynomials) mda = get_fake_mda(nlines=nlines, ncols=ncols, start_time=observation_start_time) filename_info = get_fake_filename_info(observation_start_time) @@ -80,13 +82,16 @@ def get_fake_file_handler(observation_start_time, nlines, ncols, projection_long ) epilogue = mock.MagicMock(epilogue=epilogue) - reader = HRITMSGFileHandler( - "filename", - filename_info, - {"filetype": "info"}, - prologue, - epilogue - ) + with warnings.catch_warnings(): + # Orbit polynomial has no exact match, so filter the unnecessary warning + warnings.filterwarnings("ignore", category=UserWarning, message=r"No orbit polynomial valid for") + reader = HRITMSGFileHandler( + "filename", + filename_info, + {"filetype": "info"}, + prologue, + epilogue + ) reader.mda.update(mda) return reader From aae7c9ec28668074b89deab02e7b209a00964fcb Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 09:18:01 +0000 Subject: [PATCH 613/702] Move computation of expected data outside of the dask scheduler. 
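Background for this and the preceding indentation fix: accessing `res.values` triggers a dask computation, and these tests run the compositor inside `CustomScheduler` (the test helper used throughout this module, assumed here to come from satpy.tests.utils), which errors out once more than `max_computes` computations happen within the context. The resulting pattern, roughly:

    import dask
    import dask.array as da
    from satpy.tests.utils import CustomScheduler

    with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
        lazy = da.zeros((3, 3), chunks=-1) + 1.0   # graph building only
        # calling lazy.compute() here would exceed the compute budget and fail

    values = lazy.compute()   # the one real computation, outside the guard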
--- satpy/tests/test_composites.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 6179fc8053..bcda95aba8 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -946,9 +946,7 @@ def setup_method(self): area = create_area_def(area_id="test", projection={"proj": "latlong"}, center=(0, 45), width=3, height=3, resolution=35) - self.data = xr.DataArray(da.from_array([[200, 250, 300], - [200, 250, 300], - [200, 250, 300]]), + self.data = xr.DataArray(da.from_array([[200, 250, 300], [200, 250, 300], [200, 250, 300]]), dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}, attrs={"area": area}) @@ -958,11 +956,11 @@ def test_default_behaviour(self): with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = HighCloudCompositor(name="test") res = comp([self.data]) - expexted_alpha = np.array([[1.0, 0.7142857, 0.0], - [1.0, 0.625, 0.0], - [1.0, 0.5555555, 0.0]]) - expected = np.stack([self.data, expexted_alpha]) - np.testing.assert_almost_equal(res.values, expected) + data = res.values + + expexted_alpha = np.array([[1.0, 0.7142857, 0.0], [1.0, 0.625, 0.0], [1.0, 0.5555555, 0.0]]) + expected = np.stack([self.data, expexted_alpha]) + np.testing.assert_almost_equal(data, expected) class TestLowCloudCompositor: @@ -983,9 +981,11 @@ def test_default_behaviour(self): with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = LowCloudCompositor(name="test") res = comp([self.btd, self.bt_win, self.lsm]) - expexted_alpha = np.array([[0.0, 0.25, 1.0], [0.0, 0.25, 1.0], [0.0, 0.0, 0.0]]) - expected = np.stack([self.btd, expexted_alpha]) - np.testing.assert_equal(res.values, expected) + data = res.values + + expexted_alpha = np.array([[0.0, 0.25, 1.0], [0.0, 0.25, 1.0], [0.0, 0.0, 0.0]]) + expected = np.stack([self.btd, expexted_alpha]) + np.testing.assert_equal(data, expected) class TestSingleBandCompositor(unittest.TestCase): From 27358020edf6bd47579756b345c64676353e72c8 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 09:30:15 +0000 Subject: [PATCH 614/702] Add data type preservation tests for HighCloudCompositor and LowCloudCompositor. 
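Rationale for the dtype tests: a single float64 operand entering the computation (typically geolocation arrays) silently upcasts a float32 composite, so the tests pin the output dtype. A small NumPy-only demonstration of the effect being guarded against:

    import numpy as np

    bt = np.ones((2, 2), dtype=np.float32)      # float32 input band
    lats = np.zeros((2, 2), dtype=np.float64)   # e.g. lon/lat arrays, float64 by default

    print((bt - lats).dtype)                    # float64 -> unwanted upcast
    print((bt - lats.astype(bt.dtype)).dtype)   # float32 -> dtype preserved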
--- satpy/tests/test_composites.py | 48 ++++++++++++++++++++++++---------- 1 file changed, 34 insertions(+), 14 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index bcda95aba8..db29a8572e 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -946,47 +946,67 @@ def setup_method(self): area = create_area_def(area_id="test", projection={"proj": "latlong"}, center=(0, 45), width=3, height=3, resolution=35) - self.data = xr.DataArray(da.from_array([[200, 250, 300], [200, 250, 300], [200, 250, 300]]), - dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}, - attrs={"area": area}) + self.data = xr.DataArray( + da.from_array(np.array([[200, 250, 300], [200, 250, 300], [200, 250, 300]], dtype=np.float32)), + dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}, + attrs={"area": area} + ) - def test_default_behaviour(self): + def test_high_cloud_compositor(self): """Test general default functionality of compositor.""" from satpy.composites import HighCloudCompositor with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = HighCloudCompositor(name="test") res = comp([self.data]) data = res.values - expexted_alpha = np.array([[1.0, 0.7142857, 0.0], [1.0, 0.625, 0.0], [1.0, 0.5555555, 0.0]]) expected = np.stack([self.data, expexted_alpha]) np.testing.assert_almost_equal(data, expected) + def test_high_cloud_compositor_dtype(self): + """Test that the datatype is not altered by the compositor.""" + from satpy.composites import HighCloudCompositor + comp = HighCloudCompositor(name="test") + res = comp([self.data]) + assert res.data.dtype == np.float32 + class TestLowCloudCompositor: """Test LowCloudCompositor.""" def setup_method(self): """Create test data.""" - self.btd = xr.DataArray(da.from_array([[0.0, 1.0, 10.0], [0.0, 1.0, 10.0], [0.0, 1.0, 10.0]]), - dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}) - self.bt_win = xr.DataArray(da.from_array([[250, 250, 250], [250, 250, 250], [150, 150, 150]]), - dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}) - self.lsm = xr.DataArray(da.from_array([[0., 0., 0.], [1., 1., 1.], [0., 1., 0.]]), - dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}) - - def test_default_behaviour(self): + self.btd = xr.DataArray( + da.from_array(np.array([[0.0, 1.0, 10.0], [0.0, 1.0, 10.0], [0.0, 1.0, 10.0]], dtype=np.float32)), + dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]} + ) + self.bt_win = xr.DataArray( + da.from_array(np.array([[250, 250, 250], [250, 250, 250], [150, 150, 150]], dtype=np.float32)), + dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]} + ) + self.lsm = xr.DataArray( + da.from_array(np.array([[0., 0., 0.], [1., 1., 1.], [0., 1., 0.]], dtype=np.float32)), + dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]} + ) + + def test_low_cloud_compositor(self): """Test general default functionality of compositor.""" from satpy.composites import LowCloudCompositor with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = LowCloudCompositor(name="test") res = comp([self.btd, self.bt_win, self.lsm]) data = res.values - expexted_alpha = np.array([[0.0, 0.25, 1.0], [0.0, 0.25, 1.0], [0.0, 0.0, 0.0]]) expected = np.stack([self.btd, expexted_alpha]) np.testing.assert_equal(data, expected) + def test_low_cloud_compositor_dtype(self): + """Test that the datatype is not altered by the compositor.""" + from satpy.composites import LowCloudCompositor + comp = LowCloudCompositor(name="test") + res = comp([self.btd, self.bt_win, 
self.lsm]) + assert res.data.dtype == np.float32 + class TestSingleBandCompositor(unittest.TestCase): """Test the single-band compositor.""" From 5e9763cabe573f968d5d6aafbe2c701b326be030 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 09:32:23 +0000 Subject: [PATCH 615/702] Move computation of data outside dask schedule and set max number of jobs to 0. --- satpy/tests/test_composites.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index db29a8572e..bf47c9ff9a 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -955,13 +955,12 @@ def setup_method(self): def test_high_cloud_compositor(self): """Test general default functionality of compositor.""" from satpy.composites import HighCloudCompositor - with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = HighCloudCompositor(name="test") res = comp([self.data]) - data = res.values expexted_alpha = np.array([[1.0, 0.7142857, 0.0], [1.0, 0.625, 0.0], [1.0, 0.5555555, 0.0]]) expected = np.stack([self.data, expexted_alpha]) - np.testing.assert_almost_equal(data, expected) + np.testing.assert_almost_equal(res.values, expected) def test_high_cloud_compositor_dtype(self): """Test that the datatype is not altered by the compositor.""" @@ -992,13 +991,12 @@ def setup_method(self): def test_low_cloud_compositor(self): """Test general default functionality of compositor.""" from satpy.composites import LowCloudCompositor - with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = LowCloudCompositor(name="test") res = comp([self.btd, self.bt_win, self.lsm]) - data = res.values expexted_alpha = np.array([[0.0, 0.25, 1.0], [0.0, 0.25, 1.0], [0.0, 0.0, 0.0]]) expected = np.stack([self.btd, expexted_alpha]) - np.testing.assert_equal(data, expected) + np.testing.assert_equal(res.values, expected) def test_low_cloud_compositor_dtype(self): """Test that the datatype is not altered by the compositor.""" From 88fe02cddfcceb95abb20cb93c2234134546854e Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 11:38:35 +0200 Subject: [PATCH 616/702] Fix DeprecationWarning of empty Numpy array falsy with list support --- satpy/readers/satpy_cf_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 073c8d4cf5..2f9743e0f6 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -237,7 +237,7 @@ def _existing_datasets(self, configured_datasets=None): def fix_modifier_attr(self, ds_info): """Fix modifiers attribute.""" # Empty modifiers are read as [], which causes problems later - if "modifiers" in ds_info and not ds_info["modifiers"]: + if "modifiers" in ds_info and len(ds_info["modifiers"]) == 0: ds_info["modifiers"] = () try: try: From 6b3b40f0d11575266c1293aafcb8c1dde88c0e1e Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 09:53:07 +0000 Subject: [PATCH 617/702] Daskify computation of latitude array for HighCloudCompositor and preserve dtype. 
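The change below replaces the eager `get_lonlats()` call with a lazy, dtype-matched one: when `chunks` is passed, pyresample returns dask arrays aligned with the band's chunking, and passing `dtype` avoids the float64 default that would upcast the alpha band. In the compositor the values forwarded are `data.chunks` and `data.dtype`; the area and explicit chunk tuple below are illustrative only:

    import numpy as np
    from pyresample.geometry import create_area_def

    area = create_area_def(area_id="demo", projection={"proj": "latlong"},
                           center=(0, 45), width=3, height=3, resolution=35)

    lons, lats = area.get_lonlats()                      # eager numpy, float64
    lons, lats = area.get_lonlats(chunks=((3,), (3,)),   # lazy dask arrays
                                  dtype=np.float32)      # in the band's dtype
    print(type(lats), lats.dtype)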
--- satpy/composites/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index cb5a10663c..9bb7043343 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1120,13 +1120,13 @@ def __call__(self, projectables, **kwargs): raise ValueError(f"Expected 1 dataset, got {len(projectables)}") data = projectables[0] - _, lats = data.attrs["area"].get_lonlats() + _, lats = data.attrs["area"].get_lonlats(chunks=data.chunks, dtype=data.dtype) lats = np.abs(lats) slope = (self.transition_min[1] - self.transition_min[0]) / (self.latitude_min[1] - self.latitude_min[0]) offset = self.transition_min[0] - slope * self.latitude_min[0] - tr_min_lat = xr.DataArray(name="tr_min_lat", coords=data.coords, dims=data.dims) + tr_min_lat = xr.DataArray(name="tr_min_lat", coords=data.coords, dims=data.dims).astype(data.dtype) tr_min_lat = tr_min_lat.where(lats >= self.latitude_min[0], self.transition_min[0]) tr_min_lat = tr_min_lat.where(lats <= self.latitude_min[1], self.transition_min[1]) tr_min_lat = tr_min_lat.where((lats < self.latitude_min[0]) | (lats > self.latitude_min[1]), From 48c1c63ef830421b631ed1a6e4aae5620ac0beba Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 09:56:53 +0000 Subject: [PATCH 618/702] use a common variable for testing dtype. --- satpy/tests/test_composites.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index bf47c9ff9a..f0515c0f93 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -945,9 +945,9 @@ def setup_method(self): from pyresample.geometry import create_area_def area = create_area_def(area_id="test", projection={"proj": "latlong"}, center=(0, 45), width=3, height=3, resolution=35) - + self.dtype = np.float32 self.data = xr.DataArray( - da.from_array(np.array([[200, 250, 300], [200, 250, 300], [200, 250, 300]], dtype=np.float32)), + da.from_array(np.array([[200, 250, 300], [200, 250, 300], [200, 250, 300]], dtype=self.dtype)), dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}, attrs={"area": area} ) @@ -967,7 +967,7 @@ def test_high_cloud_compositor_dtype(self): from satpy.composites import HighCloudCompositor comp = HighCloudCompositor(name="test") res = comp([self.data]) - assert res.data.dtype == np.float32 + assert res.data.dtype == self.dtype class TestLowCloudCompositor: @@ -975,16 +975,17 @@ class TestLowCloudCompositor: def setup_method(self): """Create test data.""" + self.dtype = np.float32 self.btd = xr.DataArray( - da.from_array(np.array([[0.0, 1.0, 10.0], [0.0, 1.0, 10.0], [0.0, 1.0, 10.0]], dtype=np.float32)), + da.from_array(np.array([[0.0, 1.0, 10.0], [0.0, 1.0, 10.0], [0.0, 1.0, 10.0]], dtype=self.dtype)), dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]} ) self.bt_win = xr.DataArray( - da.from_array(np.array([[250, 250, 250], [250, 250, 250], [150, 150, 150]], dtype=np.float32)), + da.from_array(np.array([[250, 250, 250], [250, 250, 250], [150, 150, 150]], dtype=self.dtype)), dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]} ) self.lsm = xr.DataArray( - da.from_array(np.array([[0., 0., 0.], [1., 1., 1.], [0., 1., 0.]], dtype=np.float32)), + da.from_array(np.array([[0., 0., 0.], [1., 1., 1.], [0., 1., 0.]], dtype=self.dtype)), dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]} ) @@ -1003,7 +1004,7 @@ def test_low_cloud_compositor_dtype(self): from satpy.composites import 
LowCloudCompositor comp = LowCloudCompositor(name="test") res = comp([self.btd, self.bt_win, self.lsm]) - assert res.data.dtype == np.float32 + assert res.data.dtype == self.dtype class TestSingleBandCompositor(unittest.TestCase): From e9530eff917ac8d6e70c65c89f0810624d70cff4 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 09:57:45 +0000 Subject: [PATCH 619/702] Remove obsolete TODOs. --- satpy/composites/__init__.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 9bb7043343..034e8a7821 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1114,8 +1114,6 @@ def __call__(self, projectables, **kwargs): `projectables` is expected to be a list or tuple with a single element: - index 0: Brightness temperature of a thermal infrared window channel (e.g. 10.5 microns). """ - # TODO Optimize and make sure that there are no early unnecessary dask computations. Is there a way to avoid - # computation of the latitude array? if len(projectables) != 1: raise ValueError(f"Expected 1 dataset, got {len(projectables)}") @@ -1196,7 +1194,6 @@ def __call__(self, projectables, **kwargs): - index 1. Brightness temperature of the window channel (used to filter out noise-induced false alarms). - index 2: Land-Sea-Mask. """ - # TODO Optimize and make sure that there are no early unnecessary dask computations if len(projectables) != 3: raise ValueError(f"Expected 3 datasets, got {len(projectables)}") From 0c2d5dd5901141789b9411a7e9cd1c2b0ff2ec77 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 12:29:22 +0200 Subject: [PATCH 620/702] Catch missing radiance adjustment warnings --- satpy/tests/reader_tests/test_slstr_l1b.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_slstr_l1b.py b/satpy/tests/reader_tests/test_slstr_l1b.py index 63a43c9c79..2bc384c5e2 100644 --- a/satpy/tests/reader_tests/test_slstr_l1b.py +++ b/satpy/tests/reader_tests/test_slstr_l1b.py @@ -18,10 +18,10 @@ """Module for testing the satpy.readers.nc_slstr module.""" import unittest import unittest.mock as mock -import warnings from datetime import datetime import numpy as np +import pytest import xarray as xr from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange @@ -151,7 +151,8 @@ def test_instantiate(self, bvs_, xr_): test = NCSLSTR1B("somedir/S1_radiance_an.nc", filename_info, "c") assert test.view == "nadir" assert test.stripe == "a" - test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) + with pytest.raises(UserWarning, match=r"No radiance adjustment supplied for channel"): + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) assert test.start_time == good_start assert test.end_time == good_end xr_.open_dataset.assert_called() @@ -214,9 +215,8 @@ def test_radiance_calibration(self, xr_): test = NCSLSTR1B("somedir/S1_radiance_co.nc", filename_info, "c") # Check warning is raised if we don't have calibration - with warnings.catch_warnings(record=True) as w: + with pytest.raises(UserWarning, match=r"No radiance adjustment supplied for channel"): test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) - assert issubclass(w[-1].category, UserWarning) # Check user calibration is used correctly test = NCSLSTR1B("somedir/S1_radiance_co.nc", filename_info, "c", From 1057aab5b92886dde6df6004a512c5a268d1f090 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 12:58:13 +0200 Subject: 
[PATCH 621/702] Fix expected proj string --- satpy/tests/reader_tests/test_ahi_l2_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index 2a697d0a4a..910e0515a1 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -77,7 +77,7 @@ def test_ahi_l2_area_def(himl2_filename, caplog): """Test reader handles area definition correctly.""" import warnings - ps = "+proj=geos +lon_0=140.7 +h=35785863 +x_0=0 +y_0=0 +a=6378137 +rf=298.257024882273 +units=m +no_defs +type=crs" + ps = "+a=6378137 +h=35785863 +lon_0=140.7 +no_defs +proj=geos +rf=298.257024882273 +type=crs +units=m +x_0=0 +y_0=0" # Check case where input data is correct size. fh = ahil2_filehandler(himl2_filename) From e9e85189d2a9ace207dac849cef90c1ccae2cd02 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 12:58:45 +0200 Subject: [PATCH 622/702] Fix pytest.raises() to pytest.warns() --- satpy/tests/reader_tests/test_ahi_hsd.py | 4 ++-- satpy/tests/reader_tests/test_slstr_l1b.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 2a2c608a46..faa348b9af 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -473,7 +473,7 @@ def test_blocklen_error(self, *mocks): # Expected and actual blocklength do not match fp_.tell.return_value = 100 - with pytest.raises(UserWarning, match=r"Actual .* header size does not match expected"): + with pytest.warns(UserWarning, match=r"Actual .* header size does not match expected"): fh._check_fpos(fp_, fpos, 0, "header 1") def test_is_valid_time(self): @@ -491,7 +491,7 @@ def test_time_rounding(self): mocker.return_value = True assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 3, 0, 0) mocker.return_value = False - with pytest.raises(UserWarning, + with pytest.warns(UserWarning, match=r"Observation timeline is fill value, not rounding observation time"): assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 12, 0, 0) diff --git a/satpy/tests/reader_tests/test_slstr_l1b.py b/satpy/tests/reader_tests/test_slstr_l1b.py index 2bc384c5e2..b6784d4e2b 100644 --- a/satpy/tests/reader_tests/test_slstr_l1b.py +++ b/satpy/tests/reader_tests/test_slstr_l1b.py @@ -151,7 +151,7 @@ def test_instantiate(self, bvs_, xr_): test = NCSLSTR1B("somedir/S1_radiance_an.nc", filename_info, "c") assert test.view == "nadir" assert test.stripe == "a" - with pytest.raises(UserWarning, match=r"No radiance adjustment supplied for channel"): + with pytest.warns(UserWarning, match=r"No radiance adjustment supplied for channel"): test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) assert test.start_time == good_start assert test.end_time == good_end @@ -215,7 +215,7 @@ def test_radiance_calibration(self, xr_): test = NCSLSTR1B("somedir/S1_radiance_co.nc", filename_info, "c") # Check warning is raised if we don't have calibration - with pytest.raises(UserWarning, match=r"No radiance adjustment supplied for channel"): + with pytest.warns(UserWarning, match=r"No radiance adjustment supplied for channel"): test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) # Check user calibration is used correctly From 1b57de0c2e98914498e49a8be7e4a2e12367899a Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 13:12:46 +0200 
Subject: [PATCH 623/702] Handle more orbit polynomial warnings --- satpy/tests/reader_tests/test_seviri_base.py | 9 +++++++-- satpy/tests/reader_tests/test_seviri_l1b_nc.py | 3 ++- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index 86f684bb5e..a07bb799bc 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -338,8 +338,12 @@ class TestOrbitPolynomialFinder: def test_get_orbit_polynomial(self, orbit_polynomials, time, orbit_polynomial_exp): """Test getting the satellite locator.""" + import warnings finder = OrbitPolynomialFinder(orbit_polynomials) - orbit_polynomial = finder.get_orbit_polynomial(time=time) + with warnings.catch_warnings(): + # There's no exact polynomial time match, filter the warning + warnings.filterwarnings("ignore", category=UserWarning, message=r"No orbit polynomial valid") + orbit_polynomial = finder.get_orbit_polynomial(time=time) assert orbit_polynomial == orbit_polynomial_exp @pytest.mark.parametrize( @@ -356,7 +360,8 @@ def test_get_orbit_polynomial_exceptions(self, orbit_polynomials, time): """Test exceptions thrown while getting the satellite locator.""" finder = OrbitPolynomialFinder(orbit_polynomials) with pytest.raises(NoValidOrbitParams): - finder.get_orbit_polynomial(time=time) + with pytest.warns(UserWarning, match=r"No orbit polynomial valid"): + finder.get_orbit_polynomial(time=time) class TestMeirinkSlope: diff --git a/satpy/tests/reader_tests/test_seviri_l1b_nc.py b/satpy/tests/reader_tests/test_seviri_l1b_nc.py index 42e038a766..cd5e2c713f 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_nc.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_nc.py @@ -381,7 +381,8 @@ def test_satpos_no_valid_orbit_polynomial(self, file_handler): } file_handler.nc["orbit_polynomial_start_time_day"] = 0 file_handler.nc["orbit_polynomial_end_time_day"] = 0 - res = file_handler.get_dataset(dataset_id, dataset_info) + with pytest.warns(UserWarning, match=r"No orbit polynomial valid for"): + res = file_handler.get_dataset(dataset_id, dataset_info) assert "satellite_actual_longitude" not in res.attrs[ "orbital_parameters"] From bdbf3f95c41c7e7e6ef8bb8bdbec894aa36c07cf Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 13:16:15 +0200 Subject: [PATCH 624/702] Fix proj authority usage --- satpy/readers/smos_l2_wind.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/smos_l2_wind.py b/satpy/readers/smos_l2_wind.py index c982397c3c..4a909ee2e4 100644 --- a/satpy/readers/smos_l2_wind.py +++ b/satpy/readers/smos_l2_wind.py @@ -170,6 +170,6 @@ def get_area_def(self, dsid): description = "SMOS L2 Wind Equirectangular Projection" area_id = "smos_eqc" proj_id = "equirectangular" - proj_dict = {"init": self["/attr/geospatial_bounds_vertical_crs"]} - area_def = AreaDefinition(area_id, description, proj_id, proj_dict, width, height, area_extent, ) + proj_str = self["/attr/geospatial_bounds_vertical_crs"] + area_def = AreaDefinition(area_id, description, proj_id, proj_str, width, height, area_extent, ) return area_def From d0dd78e8889ece1b9241be5e1b5876352eefb919 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 13:24:45 +0200 Subject: [PATCH 625/702] Suppress warning about missing DataArray coordinate in test data saving --- satpy/tests/reader_tests/test_satpy_cf_nc.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git 
a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index f279196ab4..ec7ac34be4 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -260,12 +260,15 @@ def test_write_and_read_with_area_definition(self, cf_scene, nc_filename): def test_write_and_read_with_swath_definition(self, cf_scene, nc_filename): """Save a dataset with a swath definition to file with cf_writer and read the data again.""" - cf_scene.save_datasets(writer="cf", - filename=nc_filename, - engine="h5netcdf", - flatten_attrs=True, - pretty=True, - datasets=["swath_data"]) + with warnings.catch_warnings(): + # Filter out warning about missing lon/lat DataArray coordinates + warnings.filterwarnings("ignore", category=UserWarning, message=r"Coordinate .* referenced") + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="h5netcdf", + flatten_attrs=True, + pretty=True, + datasets=["swath_data"]) scn_ = Scene(reader="satpy_cf_nc", filenames=[nc_filename]) scn_.load(["swath_data"]) From 8811914eb328173d846606a60ffeb6a58c0e9fb5 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 13:30:48 +0200 Subject: [PATCH 626/702] Silence warning about PNG not having geolocation information --- satpy/tests/reader_tests/test_generic_image.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py index 0ea143269f..cd347ce07e 100644 --- a/satpy/tests/reader_tests/test_generic_image.py +++ b/satpy/tests/reader_tests/test_generic_image.py @@ -21,6 +21,7 @@ import dask.array as da import numpy as np +import pytest import xarray as xr from satpy.tests.utils import make_dataid @@ -128,10 +129,13 @@ def tearDown(self): def test_png_scene(self): """Test reading PNG images via satpy.Scene().""" + from rasterio.errors import NotGeoreferencedWarning + from satpy import Scene fname = os.path.join(self.base_dir, "test_l.png") - scn = Scene(reader="generic_image", filenames=[fname]) + with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): + scn = Scene(reader="generic_image", filenames=[fname]) scn.load(["image"]) assert scn["image"].shape == (1, self.y_size, self.x_size) assert scn.sensor_names == {"images"} @@ -140,7 +144,8 @@ def test_png_scene(self): assert "area" not in scn["image"].attrs fname = os.path.join(self.base_dir, "20180101_0000_test_la.png") - scn = Scene(reader="generic_image", filenames=[fname]) + with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): + scn = Scene(reader="generic_image", filenames=[fname]) scn.load(["image"]) data = da.compute(scn["image"].data) assert scn["image"].shape == (1, self.y_size, self.x_size) From b2e9841af1e1b4408fadfe86e23fab59ed488746 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 13:32:46 +0200 Subject: [PATCH 627/702] Use width and height attributes instead of x/y_size --- satpy/readers/fci_l2_nc.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index b03cbdfdaa..cd9bf5788f 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -372,7 +372,7 @@ def _construct_area_def(self, dataset_id): # Construct area definition from standardized area definition. 
stand_area_def = get_area_def(area_naming["area_id"]) - if (stand_area_def.x_size != self.ncols) | (stand_area_def.y_size != self.nlines): + if (stand_area_def.width != self.ncols) | (stand_area_def.height != self.nlines): raise NotImplementedError("Unrecognised AreaDefinition.") mod_area_extent = self._modify_area_extent(stand_area_def.area_extent) @@ -382,8 +382,8 @@ def _construct_area_def(self, dataset_id): stand_area_def.description, "", stand_area_def.crs, - stand_area_def.x_size, - stand_area_def.y_size, + stand_area_def.width, + stand_area_def.height, mod_area_extent) return area_def From b4974152c4723bebd0b88533c61d367682c37ef7 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 13:48:01 +0200 Subject: [PATCH 628/702] Get number of lines/columns from .sizes attribute --- satpy/readers/ahi_l2_nc.py | 4 ++-- satpy/readers/goes_imager_nc.py | 8 ++++---- satpy/readers/seviri_l1b_nc.py | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 7785c3994d..d6e6caa887 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -73,8 +73,8 @@ def __init__(self, filename, filename_info, filetype_info): raise ValueError("File is not a full disk scene") self.sensor = self.nc.attrs["instrument_name"].lower() - self.nlines = self.nc.dims["Columns"] - self.ncols = self.nc.dims["Rows"] + self.nlines = self.nc.sizes["Columns"] + self.ncols = self.nc.sizes["Rows"] self.platform_name = self.nc.attrs["satellite_name"] self.platform_shortname = filename_info["platform"] self._meta = None diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index 8ec3219eec..1b88919886 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -615,8 +615,8 @@ def __init__(self, filename, filename_info, filetype_info, geo_data=None): mask_and_scale=False, chunks={"xc": CHUNK_SIZE, "yc": CHUNK_SIZE}) self.sensor = "goes_imager" - self.nlines = self.nc.dims["yc"] - self.ncols = self.nc.dims["xc"] + self.nlines = self.nc.sizes["yc"] + self.ncols = self.nc.sizes["xc"] self.platform_name = self._get_platform_name( self.nc.attrs["Satellite Sensor"]) self.platform_shortname = self.platform_name.replace("-", "").lower() @@ -1124,8 +1124,8 @@ def __init__(self, filename, filename_info, filetype_info): mask_and_scale=False, chunks={"xc": CHUNK_SIZE, "yc": CHUNK_SIZE}) self.sensor = "goes_imager" - self.nlines = self.nc.dims["yc"] - self.ncols = self.nc.dims["xc"] + self.nlines = self.nc.sizes["yc"] + self.ncols = self.nc.sizes["xc"] self.platform_name = GOESNCBaseFileHandler._get_platform_name( self.nc.attrs["Satellite Sensor"]) self.platform_shortname = self.platform_name.replace("-", "").lower() diff --git a/satpy/readers/seviri_l1b_nc.py b/satpy/readers/seviri_l1b_nc.py index 82e3b15297..22b55eceda 100644 --- a/satpy/readers/seviri_l1b_nc.py +++ b/satpy/readers/seviri_l1b_nc.py @@ -139,8 +139,8 @@ def get_metadata(self): "h": 35785831.00, "ssp_longitude": ssp_lon} - self.mda["number_of_lines"] = int(self.nc.dims["y"]) - self.mda["number_of_columns"] = int(self.nc.dims["x"]) + self.mda["number_of_lines"] = int(self.nc.sizes["y"]) + self.mda["number_of_columns"] = int(self.nc.sizes["x"]) # only needed for HRV channel which is not implemented yet # self.mda['hrv_number_of_lines'] = int(self.nc.dims['num_rows_hrv']) From 7b56afac24354e91d3347086035fe85e8c721c3a Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 13:59:22 +0200 Subject: [PATCH 629/702] Make 
HRPT navigation timezone ignorant --- satpy/readers/hrpt.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/hrpt.py b/satpy/readers/hrpt.py index 2a54eed664..c4862e8169 100644 --- a/satpy/readers/hrpt.py +++ b/satpy/readers/hrpt.py @@ -78,7 +78,7 @@ def time_seconds(tc_array, year): word = tc_array[:, 3] msecs += word & 1023 return (np.datetime64( - str(year) + "-01-01T00:00:00Z", "s") + + str(year) + "-01-01T00:00:00", "s") + msecs[:].astype("timedelta64[ms]") + (day - 1)[:].astype("timedelta64[D]")) @@ -224,7 +224,7 @@ def calibrate_solar_channel(self, data, key): """Calibrate a solar channel.""" from pygac.calibration import calibrate_solar julian_days = ((np.datetime64(self.start_time) - - np.datetime64(str(self.year) + "-01-01T00:00:00Z")) + - np.datetime64(str(self.year) + "-01-01T00:00:00")) / np.timedelta64(1, "D")) data = calibrate_solar(data, _get_channel_index(key), self.year, julian_days, self.calibrator) From e6dc6165a4115191d8637b21b09467ab5db04681 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 12:16:19 +0000 Subject: [PATCH 630/702] Modify tests for NDVIHybridGreen compositor to assert 0 dask computations in compositor code. --- satpy/tests/compositor_tests/test_spectral.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index e46cff4d0c..7472016c00 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -97,7 +97,7 @@ def setup_method(self): def test_ndvi_hybrid_green(self): """Test General functionality with linear scaling from ndvi to blend fraction.""" - with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") @@ -107,12 +107,12 @@ def test_ndvi_hybrid_green(self): assert isinstance(res.data, da.Array) assert res.attrs["name"] == "ndvi_hybrid_green" assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" - data = res.values + data = res.values np.testing.assert_array_almost_equal(data, np.array([[0.2633, 0.3071], [0.2115, 0.3420]]), decimal=4) def test_ndvi_hybrid_green_dtype(self): """Test that the datatype is not altered by the compositor.""" - with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c02, self.c03)).compute() @@ -120,15 +120,15 @@ def test_ndvi_hybrid_green_dtype(self): def test_nonlinear_scaling(self): """Test non-linear scaling using `strength` term.""" - with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), strength=2.0, prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c02, self.c03)) - res_np = res.data.compute() - assert res.dtype == res_np.dtype - assert res.dtype == np.float32 + res_np = res.data.compute() + assert res.dtype == res_np.dtype + assert res.dtype == np.float32 np.testing.assert_array_almost_equal(res.data, 
np.array([[0.2646, 0.3075], [0.2120, 0.3471]]), decimal=4) def test_invalid_strength(self): From 3259552f53e90b19e132acc1e8ce8a5e1144ef99 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 12:24:16 +0000 Subject: [PATCH 631/702] Fix typos. --- satpy/composites/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index a7cc3d6e1c..09985d6ba1 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1079,7 +1079,7 @@ class HighCloudCompositor(CloudCompositor): - transition_min = transition_min[0] where abs(latitude) < latitude_min(0) - transition_min = transition_min[1] where abs(latitude) > latitude_min(0) - - transition_min = linear interpolation between transition_min[0] and transition_min[1] as a funtion + - transition_min = linear interpolation between transition_min[0] and transition_min[1] as a function of where abs(latitude). """ @@ -1139,7 +1139,7 @@ def __call__(self, projectables, **kwargs): class LowCloudCompositor(CloudCompositor): """Detect low-level clouds based on thresholding and use it as a mask for compositing during night-time. - This compsitor computes the brightness temperature difference between a window channel (e.g. 10.5 micron) + This compositor computes the brightness temperature difference between a window channel (e.g. 10.5 micron) and the near-infrared channel e.g. (3.8 micron) and uses this brightness temperature difference, `BTD`, to create a partially transparent mask for compositing. From d0993fdaceb7657d5b405123d3136f3087153c07 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 14:36:15 +0200 Subject: [PATCH 632/702] Convert floats and ints to match the nc.Variable datatype --- satpy/tests/reader_tests/test_seadas_l2.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/satpy/tests/reader_tests/test_seadas_l2.py b/satpy/tests/reader_tests/test_seadas_l2.py index 01de26e96b..d3037e6b55 100644 --- a/satpy/tests/reader_tests/test_seadas_l2.py +++ b/satpy/tests/reader_tests/test_seadas_l2.py @@ -198,9 +198,12 @@ def _add_variable_to_netcdf_file(nc, var_name, var_info): fill_value=var_info.get("fill_value")) v[:] = var_info["data"] for attr_key, attr_val in var_info["attrs"].items(): + if isinstance(attr_val, (int, float)): + attr_val = v.dtype.type(attr_val) setattr(v, attr_key, attr_val) + class TestSEADAS: """Test the SEADAS L2 file reader.""" From 2da18407a77d2b4a3833e4c39e0384095fa0a9f9 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 14:57:35 +0200 Subject: [PATCH 633/702] Move AreaDefinition.from_cf() PROJ warning suppression to Pyresample --- satpy/readers/satpy_cf_nc.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 2f9743e0f6..1dfd68a206 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -318,15 +318,8 @@ def get_dataset(self, ds_id, ds_info): def get_area_def(self, dataset_id): """Get area definition from CF complient netcdf.""" - import warnings - try: - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - # FIXME: This should be silenced in Pyresample - area = AreaDefinition.from_cf(self.filename) + area = AreaDefinition.from_cf(self.filename) return area except ValueError: # No CF compliant projection information was found in the netcdf file or From 
1bb565587ca5cd8ff845ff4d4de5045bb052ae8d Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 15:03:55 +0200 Subject: [PATCH 634/702] Replace GradientSearchResampler with the helper method in dosctring --- satpy/resample.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/resample.py b/satpy/resample.py index 336e3fec11..8b8f67dabf 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -42,7 +42,7 @@ "bucket_sum", "Sum Bucket Resampling", :class:`~satpy.resample.BucketSum` "bucket_count", "Count Bucket Resampling", :class:`~satpy.resample.BucketCount` "bucket_fraction", "Fraction Bucket Resampling", :class:`~satpy.resample.BucketFraction` - "gradient_search", "Gradient Search Resampling", :class:`~pyresample.gradient.GradientSearchResampler` + "gradient_search", "Gradient Search Resampling", :meth:`~pyresample.gradient.create_gradient_search_resampler` The resampling algorithm used can be specified with the ``resampler`` keyword argument and defaults to ``nearest``: From 5c49858463b1a4d9fa4dd93312923278e3ba7aa9 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 14 Dec 2023 14:46:08 +0100 Subject: [PATCH 635/702] Use fixtuers for test AMV file --- satpy/tests/reader_tests/test_fci_l2_nc.py | 113 ++++++++++----------- 1 file changed, 56 insertions(+), 57 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index 44906c5040..84681b0f02 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -508,66 +508,65 @@ def test_byte_extraction(self): assert dataset.values == 0 -class TestFciL2NCAMVFileHandler: - """Test the FciL2NCAMVFileHandler reader.""" - - def setup_method(self): - """Set up the test by creating a test file and opening it with the reader.""" - # Easiest way to test the reader is to create a test netCDF file on the fly - # Create unique filenames to prevent race conditions when tests are run in parallel - self.test_file = str(uuid.uuid4()) + ".nc" - with Dataset(self.test_file, "w") as nc: - # Create dimensions - nc.createDimension("number_of_winds", 50000) - - # add global attributes - nc.data_source = "test_data_source" - nc.platform = "test_platform" - - # Add datasets - latitude = nc.createVariable("latitude", np.float32, dimensions=("number_of_winds",)) - latitude[:] = np.arange(50000) - - longitude = nc.createVariable("y", np.float32, dimensions=("number_of_winds",)) - longitude[:] = np.arange(50000) - - qi = nc.createVariable("product_quality", np.int8) - qi[:] = 99. - test_dataset = nc.createVariable("test_dataset", np.float32, - dimensions="number_of_winds") - test_dataset[:] = np.ones(50000) - test_dataset.test_attr = "attr" - test_dataset.units = "test_units" - - mtg_geos_projection = nc.createVariable("mtg_geos_projection", int, dimensions=()) - mtg_geos_projection.longitude_of_projection_origin = 0.0 - mtg_geos_projection.semi_major_axis = 6378137. - mtg_geos_projection.inverse_flattening = 298.257223563 - mtg_geos_projection.perspective_point_height = 35786400. 
+@pytest.fixture(scope="module") +def amv_file(tmp_path_factory): + """Create an AMV file.""" + test_file = tmp_path_factory.mktemp("data") / "fci_l2_amv.nc" + + with Dataset(test_file, "w") as nc: + # Create dimensions + nc.createDimension("number_of_winds", 50000) + + # add global attributes + nc.data_source = "test_data_source" + nc.platform = "test_platform" + + # Add datasets + latitude = nc.createVariable("latitude", np.float32, dimensions=("number_of_winds",)) + latitude[:] = np.arange(50000) + + longitude = nc.createVariable("y", np.float32, dimensions=("number_of_winds",)) + longitude[:] = np.arange(50000) + + qi = nc.createVariable("product_quality", np.int8) + qi[:] = 99. + + test_dataset = nc.createVariable("test_dataset", np.float32, + dimensions="number_of_winds") + test_dataset[:] = np.ones(50000) + test_dataset.test_attr = "attr" + test_dataset.units = "test_units" + + mtg_geos_projection = nc.createVariable("mtg_geos_projection", int, dimensions=()) + mtg_geos_projection.longitude_of_projection_origin = 0.0 + mtg_geos_projection.semi_major_axis = 6378137. + mtg_geos_projection.inverse_flattening = 298.257223563 + mtg_geos_projection.perspective_point_height = 35786400. + return test_file + + +@pytest.fixture(scope="module") +def amv_filehandler(amv_file): + """Create an AMV filehandler.""" + return FciL2NCAMVFileHandler(filename=amv_file, + filename_info={"channel":"test_channel"}, + filetype_info={} + ) - self.fh = FciL2NCAMVFileHandler(filename=self.test_file, - filename_info={"channel":"test_channel"}, - filetype_info={} - ) - def tearDown(self): - """Remove the previously created test file.""" - # First delete the file handler, forcing the file to be closed if still open - del self.fh - # Then we can safely remove the file from the system - with suppress(OSError): - os.remove(self.test_file) +class TestFciL2NCAMVFileHandler: + """Test the FciL2NCAMVFileHandler reader.""" - def test_all_basic(self): + def test_all_basic(self, amv_filehandler, amv_file): """Test all basic functionalities.""" - assert self.fh.spacecraft_name == "test_platform" - assert self.fh.sensor_name == "test_data_source" - assert self.fh.ssp_lon == 0.0 + assert amv_filehandler.spacecraft_name == "test_platform" + assert amv_filehandler.sensor_name == "test_data_source" + assert amv_filehandler.ssp_lon == 0.0 - global_attributes = self.fh._get_global_attributes() + global_attributes = amv_filehandler._get_global_attributes() expected_global_attributes = { - "filename": self.test_file, + "filename": amv_file, "spacecraft_name": "test_platform", "sensor": "test_data_source", "platform_name": "test_platform", @@ -575,9 +574,9 @@ def test_all_basic(self): } assert global_attributes == expected_global_attributes - def test_dataset(self): + def test_dataset(self, amv_filehandler): """Test the correct execution of the get_dataset function with a valid file_key.""" - dataset = self.fh.get_dataset(make_dataid(name="test_dataset", resolution=2000), + dataset = amv_filehandler.get_dataset(make_dataid(name="test_dataset", resolution=2000), {"name": "test_dataset", "file_key": "test_dataset", "fill_value": -999, @@ -587,9 +586,9 @@ def test_dataset(self): assert dataset.attrs["units"] == "test_units" assert dataset.attrs["fill_value"] == -999 - def test_dataset_with_invalid_filekey(self): + def test_dataset_with_invalid_filekey(self, amv_filehandler): """Test the correct execution of the get_dataset function with an invalid file_key.""" - invalid_dataset = self.fh.get_dataset(make_dataid(name="test_invalid", 
resolution=2000), + invalid_dataset = amv_filehandler.get_dataset(make_dataid(name="test_invalid", resolution=2000), {"name": "test_invalid", "file_key": "test_invalid", "fill_value": -999, From 24422ad5ea9e59e41b1daac50d6842e7d8f0b9ab Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 16:17:11 +0200 Subject: [PATCH 636/702] Use CRS objects for testing area equality --- satpy/tests/reader_tests/test_ahi_hsd.py | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index faa348b9af..000abb2b58 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -105,7 +105,8 @@ class TestAHIHSDNavigation(unittest.TestCase): @mock.patch("satpy.readers.ahi_hsd.np.fromfile") def test_region(self, fromfile, np2str): """Test region navigation.""" - from pyresample.utils import proj4_radius_parameters + from pyproj import CRS + np2str.side_effect = lambda x: x m = mock.mock_open() with mock.patch("satpy.readers.ahi_hsd.open", m, create=True): @@ -140,18 +141,9 @@ def test_region(self, fromfile, np2str): "spare": ""} area_def = fh.get_area_def(None) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - proj_dict = area_def.proj_dict - a, b = proj4_radius_parameters(proj_dict) - assert a == 6378137.0 - assert b == 6356752.3 - assert proj_dict["h"] == 35785863.0 - assert proj_dict["lon_0"] == 140.7 - assert proj_dict["proj"] == "geos" - assert proj_dict["units"] == "m" + expected_crs = CRS.from_dict(dict(a=6378137.0, b=6356752.3, h= 35785863.0, + lon_0=140.7, proj="geos", units="m")) + assert area_def.crs == expected_crs np.testing.assert_allclose(area_def.area_extent, (592000.0038256242, 4132000.0267018233, 1592000.0102878273, 5132000.033164027)) From 55d3622c343a3a5923b622318c71a1073aa796b5 Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 14 Dec 2023 16:07:56 +0100 Subject: [PATCH 637/702] add failing test for unsorted segments --- satpy/tests/test_yaml_reader.py | 35 +++++++++++++++++++++++++++++---- 1 file changed, 31 insertions(+), 4 deletions(-) diff --git a/satpy/tests/test_yaml_reader.py b/satpy/tests/test_yaml_reader.py index 41439a1ac6..3f1db6a977 100644 --- a/satpy/tests/test_yaml_reader.py +++ b/satpy/tests/test_yaml_reader.py @@ -971,10 +971,11 @@ def _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_p get_segment_position_info = MagicMock() get_segment_position_info.return_value = chk_pos_info - fh = MagicMock() filetype_info = {"expected_segments": expected_segments, "file_type": "filetype1"} filename_info = {"segment": segment} + + fh = _create_mocked_basic_fh() fh.filetype_info = filetype_info fh.filename_info = filename_info fh.get_area_def = get_area_def @@ -983,6 +984,12 @@ def _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_p return fh, seg_area +def _create_mocked_basic_fh(): + fake_fh = MagicMock() + fake_fh.filename_info = {} + fake_fh.filetype_info = {} + return fake_fh + class TestGEOSegmentYAMLReader(unittest.TestCase): """Test GEOSegmentYAMLReader.""" @@ -993,9 +1000,7 @@ def test_get_expected_segments(self, cfh): from satpy.readers.yaml_reader import GEOSegmentYAMLReader reader = GEOSegmentYAMLReader() - fake_fh = MagicMock() - fake_fh.filename_info = {} - fake_fh.filetype_info = {} + fake_fh = _create_mocked_basic_fh() cfh.return_value = {"ft1": 
[fake_fh]} # default (1) @@ -1030,6 +1035,28 @@ def test_get_expected_segments(self, cfh): es = created_fhs["ft1"][0].filename_info["segment"] assert es == 5 + @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) + @patch.object(yr.FileYAMLReader, "create_filehandlers") + def test_segments_sorting(self, cfh): + """Test that segment filehandlers are sorted by segment number.""" + from satpy.readers.yaml_reader import GEOSegmentYAMLReader + reader = GEOSegmentYAMLReader() + + # create filehandlers with different segment numbers + fake_fh_1 = _create_mocked_basic_fh() + fake_fh_1.filename_info["segment"] = 1 + fake_fh_2 = _create_mocked_basic_fh() + fake_fh_2.filename_info["segment"] = 2 + fake_fh_3 = _create_mocked_basic_fh() + fake_fh_3.filename_info["segment"] = 3 + + # put the filehandlers in an unsorted order + cfh.return_value = {"ft1": [fake_fh_1, fake_fh_3, fake_fh_2]} + + # check that the created filehandlers are sorted by segment number + created_fhs = reader.create_filehandlers(["fake.nc"]) + assert [fh.filename_info["segment"] for fh in created_fhs["ft1"]] == [1, 2, 3] + @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch("satpy.readers.yaml_reader.FileYAMLReader._load_dataset") @patch("satpy.readers.yaml_reader.xr") From be666eb1aa6c0f5dd763e42787e4b264bd7ac2e3 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Thu, 14 Dec 2023 15:12:24 +0000 Subject: [PATCH 638/702] Fix name for amv lat/lon to avoid dupliacte in the yaml --- satpy/etc/readers/fci_l2_nc.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index f9c12849eb..1ad5d576a0 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -2837,13 +2837,13 @@ datasets: file_key: channel_id standard_name: channel_id - latitude: + amv_latitude: name: latitude file_type: nc_fci_amv file_key: latitude standard_name: latitude - longitude: + amv_longitude: name: longitude file_type: nc_fci_amv file_key: longitude From 27041a451f3e6ca4933f75a7167ab272ae23dc07 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Thu, 14 Dec 2023 16:32:53 +0100 Subject: [PATCH 639/702] add control for swath data --- satpy/scene.py | 30 ++++++++++++++++++++++-------- 1 file changed, 22 insertions(+), 8 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index e2d292a992..aea5b44cfe 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -39,10 +39,10 @@ from satpy.utils import convert_remote_files_to_fsspec, get_storage_options_from_reader_kwargs from satpy.writers import load_writer -try: - import hvplot.xarray as hvplot_xarray # noqa -except ImportError: - hvplot_xarray = None +#try: +# import hvplot.xarray as hvplot_xarray # noqa +#except ImportError: +# hvplot_xarray = None LOG = logging.getLogger(__name__) @@ -1092,6 +1092,7 @@ def to_hvplot(self, datasets=None, *args, **kwargs): plot.ash+plot.IR_108 """ + def _get_crs(xarray_ds): return xarray_ds.area.to_cartopy_crs() @@ -1112,17 +1113,27 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): clabel=f"[{_get_units(xarray_ds,variable)}]", title=title, **defaults) - if hvplot_xarray is None: - raise ImportError("'hvplot' must be installed to use this feature") + #def _check_hvplot_library(): + # if hvplot_xarray is None: + # raise ImportError("'hvplot' must be installed to use this feature") +# +# _check_hvplot_library() plot = Overlay() xarray_ds = self.to_xarray_dataset(datasets) - ccrs = _get_crs(xarray_ds) + + if hasattr(xarray_ds, "area") and 
hasattr(xarray_ds.area, "to_cartopy_crs"): + ccrs = _get_crs(xarray_ds) + defaults={"x":"x","y":"y"} + else: + ccrs = None + defaults={"x":"longitude","y":"latitude"} + if datasets is None: datasets = list(xarray_ds.keys()) - defaults = dict(x="x", y="y", data_aspect=1, project=True, geo=True, + defaults.update(data_aspect=1, project=True, geo=True, crs=ccrs, projection=ccrs, rasterize=True, coastline="110m", cmap="Plasma", responsive=True, dynamic=False, framewise=True, colorbar=False, global_extent=False, xlabel="Longitude", @@ -1130,6 +1141,9 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): defaults.update(kwargs) + #if "latitude" in xarray_ds.coords: + # defaults.update({"x":"longitude","y":"latitude"}) + for element in datasets: title = f"{element} @ {_get_timestamp(xarray_ds)}" if xarray_ds[element].shape[0] == 3: From 9dd1b28e7f9b9086a9c9872d75c17e03abee544e Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 14 Dec 2023 16:43:58 +0100 Subject: [PATCH 640/702] move sorting from _load_dataset to create_filehandlers so that it acts also when pad_data is False --- satpy/readers/yaml_reader.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index ff3599052a..73b5f7b6ee 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -1157,7 +1157,13 @@ class GEOSegmentYAMLReader(GEOFlippableFileYAMLReader): """ def create_filehandlers(self, filenames, fh_kwargs=None): - """Create file handler objects and determine expected segments for each.""" + """Create file handler objects and determine expected segments for each. + + Additionally, sort the filehandlers by segment number to avoid + issues with filenames where start_time or alphabetic sorting does not + produce the correct order. + + """ created_fhs = super(GEOSegmentYAMLReader, self).create_filehandlers( filenames, fh_kwargs=fh_kwargs) @@ -1171,6 +1177,11 @@ def create_filehandlers(self, filenames, fh_kwargs=None): # add segment key-values for FCI filehandlers if "segment" not in fh.filename_info: fh.filename_info["segment"] = fh.filename_info.get("count_in_repeat_cycle", 1) + + # sort by segment number + for file_type in created_fhs.keys(): + created_fhs[file_type] = sorted(created_fhs[file_type], key=lambda x: x.filename_info.get("segment", 1)) + return created_fhs def _load_dataset(self, dsid, ds_info, file_handlers, dim="y", pad_data=True): @@ -1322,11 +1333,9 @@ def _find_missing_segments(file_handlers, ds_info, dsid): failure = True counter = 1 expected_segments = 1 - # get list of file handlers in segment order - # (ex. 
first segment, second segment, etc) - handlers = sorted(file_handlers, key=lambda x: x.filename_info.get("segment", 1)) + projectable = None - for fh in handlers: + for fh in file_handlers: if fh.filetype_info["file_type"] in ds_info["file_type"]: expected_segments = fh.filetype_info["expected_segments"] From d8af76815c7e1a5db334e2bbf7807faaccdbed6e Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 17:47:12 +0200 Subject: [PATCH 641/702] Use CRS objects as much as possible instead of suppressing the warnings --- satpy/readers/nwcsaf_nc.py | 7 ++- satpy/tests/reader_tests/test_ahi_hsd.py | 18 ++---- satpy/tests/reader_tests/test_ahi_l2_nc.py | 10 ++-- satpy/tests/reader_tests/test_geos_area.py | 17 ++---- .../reader_tests/test_goes_imager_hrit.py | 20 ++----- satpy/tests/reader_tests/test_gpm_imerg.py | 8 +-- satpy/tests/reader_tests/test_hrit_base.py | 20 ++----- .../reader_tests/test_insat3d_img_l1b_h5.py | 8 +-- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 17 +----- satpy/tests/reader_tests/test_nwcsaf_msg.py | 13 +---- satpy/tests/reader_tests/test_nwcsaf_nc.py | 19 +++--- .../reader_tests/test_oceancolorcci_l3_nc.py | 1 + satpy/tests/reader_tests/test_osisaf_l3.py | 58 +++++-------------- .../reader_tests/test_seviri_l1b_hrit.py | 37 ++++-------- 14 files changed, 71 insertions(+), 182 deletions(-) diff --git a/satpy/readers/nwcsaf_nc.py b/satpy/readers/nwcsaf_nc.py index e3b3dc2d3d..e9809bdce5 100644 --- a/satpy/readers/nwcsaf_nc.py +++ b/satpy/readers/nwcsaf_nc.py @@ -347,8 +347,13 @@ def get_area_def(self, dsid): @staticmethod def _ensure_crs_extents_in_meters(crs, area_extent): """Fix units in Earth shape, satellite altitude and 'units' attribute.""" + import warnings if "kilo" in crs.axis_info[0].unit_name: - proj_dict = crs.to_dict() + with warnings.catch_warnings(): + # The proj dict route is the only feasible way to modify the area, suppress the warning it causes + warnings.filterwarnings("ignore", category=UserWarning, + message="You will likely lose important projection information") + proj_dict = crs.to_dict() proj_dict["units"] = "m" if "a" in proj_dict: proj_dict["a"] *= 1000. 
diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 000abb2b58..7c88c9e5ac 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -151,7 +151,8 @@ def test_region(self, fromfile, np2str): @mock.patch("satpy.readers.ahi_hsd.np.fromfile") def test_segment(self, fromfile, np2str): """Test segment navigation.""" - from pyresample.utils import proj4_radius_parameters + from pyproj import CRS + np2str.side_effect = lambda x: x m = mock.mock_open() with mock.patch("satpy.readers.ahi_hsd.open", m, create=True): @@ -184,18 +185,9 @@ def test_segment(self, fromfile, np2str): "spare": ""} area_def = fh.get_area_def(None) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - proj_dict = area_def.proj_dict - a, b = proj4_radius_parameters(proj_dict) - assert a == 6378137.0 - assert b == 6356752.3 - assert proj_dict["h"] == 35785863.0 - assert proj_dict["lon_0"] == 140.7 - assert proj_dict["proj"] == "geos" - assert proj_dict["units"] == "m" + expected_crs = CRS.from_dict(dict(a=6378137.0, b=6356752.3, h= 35785863.0, + lon_0=140.7, proj="geos", units="m")) + assert area_def.crs == expected_crs np.testing.assert_allclose(area_def.area_extent, (-5500000.035542117, -3300000.021325271, 5500000.035542117, -2200000.0142168473)) diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index 910e0515a1..817738bb82 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -75,7 +75,7 @@ def test_startend(himl2_filename): def test_ahi_l2_area_def(himl2_filename, caplog): """Test reader handles area definition correctly.""" - import warnings + from pyproj import CRS ps = "+a=6378137 +h=35785863 +lon_0=140.7 +no_defs +proj=geos +rf=298.257024882273 +type=crs +units=m +x_0=0 +y_0=0" @@ -86,11 +86,9 @@ def test_ahi_l2_area_def(himl2_filename, caplog): assert area_def.width == dimensions["Columns"] assert area_def.height == dimensions["Rows"] assert np.allclose(area_def.area_extent, exp_ext) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert area_def.proj_str == ps + + expected_crs = CRS(ps) + assert area_def.crs == expected_crs # Check case where input data is incorrect size. 
fh = ahil2_filehandler(himl2_filename) diff --git a/satpy/tests/reader_tests/test_geos_area.py b/satpy/tests/reader_tests/test_geos_area.py index 077c0b0c7e..0b104c2672 100644 --- a/satpy/tests/reader_tests/test_geos_area.py +++ b/satpy/tests/reader_tests/test_geos_area.py @@ -138,9 +138,7 @@ def test_get_xy_from_linecol(self): def test_get_area_definition(self): """Test the retrieval of the area definition.""" - import warnings - - from pyresample.utils import proj4_radius_parameters + from pyproj import CRS pdict, extent = self.make_pdict_ext(1, "N2S") good_res = (-3000.4032785810186, -3000.4032785810186) @@ -148,16 +146,9 @@ def test_get_area_definition(self): a_def = get_area_definition(pdict, extent) assert a_def.area_id == pdict["a_name"] assert a_def.resolution == good_res - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert a_def.proj_dict["proj"] == "geos" - assert a_def.proj_dict["units"] == "m" - a, b = proj4_radius_parameters(a_def.proj_dict) - assert a == 6378169 - assert b == 6356583.8 - assert a_def.proj_dict["h"] == 35785831 + + expected_crs = CRS(dict(proj="geos", units="m", a=6378169, b=6356583.8, h=35785831)) + assert a_def.crs == expected_crs def test_sampling_to_lfac_cfac(self): """Test conversion from angular sampling to line/column offset.""" diff --git a/satpy/tests/reader_tests/test_goes_imager_hrit.py b/satpy/tests/reader_tests/test_goes_imager_hrit.py index 48078d0f0e..0cb0ff0959 100644 --- a/satpy/tests/reader_tests/test_goes_imager_hrit.py +++ b/satpy/tests/reader_tests/test_goes_imager_hrit.py @@ -22,13 +22,10 @@ from unittest import mock import numpy as np -from pyresample.utils import proj4_radius_parameters from xarray import DataArray from satpy.readers.goes_imager_hrit import ( ALTITUDE, - EQUATOR_RADIUS, - POLE_RADIUS, HRITGOESFileHandler, HRITGOESPrologueFileHandler, make_gvar_float, @@ -172,7 +169,7 @@ def test_get_dataset(self, base_get_dataset): def test_get_area_def(self): """Test getting the area definition.""" - import warnings + from pyproj import CRS self.reader.mda.update({ "cfac": 10216334, @@ -186,17 +183,10 @@ def test_get_area_def(self): resolution=3000) area = self.reader.get_area_def(dsid) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - a, b = proj4_radius_parameters(area.proj_dict) - assert area.proj_dict["h"] == ALTITUDE - assert area.proj_dict["lon_0"] == 100.1640625 - assert area.proj_dict["proj"] == "geos" - assert area.proj_dict["units"] == "m" - assert a == EQUATOR_RADIUS - assert b == POLE_RADIUS + expected_crs = CRS(dict(h=ALTITUDE, lon_0=100.1640625, proj="geos", units="m", + rf=295.488065897001, a=6378169)) + assert area.crs == expected_crs + assert area.width == 2816 assert area.height == 464 assert area.area_id == "goes-15_goes_imager_fd_3km" diff --git a/satpy/tests/reader_tests/test_gpm_imerg.py b/satpy/tests/reader_tests/test_gpm_imerg.py index 96dc65bbd4..a75e59863f 100644 --- a/satpy/tests/reader_tests/test_gpm_imerg.py +++ b/satpy/tests/reader_tests/test_gpm_imerg.py @@ -105,7 +105,7 @@ def tearDown(self): def test_load_data(self): """Test loading data.""" - import warnings + from pyproj import CRS from satpy.readers import load_reader @@ -132,10 +132,6 @@ def test_load_data(self): assert res["IRprecipitation"].resolution == 0.1 assert res["IRprecipitation"].area.width == 3600 assert 
res["IRprecipitation"].area.height == 1800 - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert res["IRprecipitation"].area.proj_dict == pdict + assert res["IRprecipitation"].area.crs == CRS(pdict) np.testing.assert_almost_equal(res["IRprecipitation"].area.area_extent, (-179.95, -89.95, 179.95, 89.95), 5) diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index 133b45280e..12317f11f1 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -221,23 +221,13 @@ def test_get_area_extent(self): def test_get_area_def(self): """Test getting an area definition.""" - import warnings - - from pyresample.utils import proj4_radius_parameters + from pyproj import CRS area = self.reader.get_area_def("VIS06") - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - proj_dict = area.proj_dict - a, b = proj4_radius_parameters(proj_dict) - assert a == 6378169.0 - assert b == 6356583.8 - assert proj_dict["h"] == 35785831.0 - assert proj_dict["lon_0"] == 44.0 - assert proj_dict["proj"] == "geos" - assert proj_dict["units"] == "m" + + expected_crs = CRS(dict(proj="geos", a=6378169.0, b=6356583.8, h=35785831.0, lon_0=44.0, units="m")) + assert area.crs == expected_crs + assert area.area_extent == (-77771774058.38356, -77771774058.38356, 30310525626438.438, 3720765401003.719) diff --git a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py index 486177d2d5..9fa7af224d 100644 --- a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py +++ b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py @@ -273,18 +273,12 @@ def insat_filehandler(insat_filename): def test_filehandler_returns_area(insat_filehandler): """Test that filehandle returns an area.""" - import warnings - fh = insat_filehandler ds_id = make_dataid(name="MIR", resolution=4000, calibration="brightness_temperature") area_def = fh.get_area_def(ds_id) _ = area_def.get_lonlats(chunks=1000) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert "+lon_0=" + str(subsatellite_longitude) in area_def.crs.to_proj4() + assert subsatellite_longitude == area_def.crs.to_cf()["longitude_of_projection_origin"] def test_filehandler_has_start_and_end_time(insat_filehandler): diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 56bbd5212f..04694c145a 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -28,7 +28,6 @@ import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition -from pyresample.utils import proj4_radius_parameters from satpy.readers.mviri_l1b_fiduceo_nc import ( ALTITUDE, @@ -497,22 +496,10 @@ def test_angle_cache(self, interp_tiepoints, file_handler): def test_get_area_definition(self, file_handler, name, resolution, area_exp): """Test getting area definitions.""" - import warnings - dataset_id = make_dataid(name=name, resolution=resolution) area = file_handler.get_area_def(dataset_id) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose 
important projection information", - category=UserWarning) - a, b = proj4_radius_parameters(area.proj_dict) - a_exp, b_exp = proj4_radius_parameters(area_exp.proj_dict) - assert a == a_exp - assert b == b_exp - assert area.width == area_exp.width - assert area.height == area_exp.height - for key in ["h", "lon_0", "proj", "units"]: - assert area.proj_dict[key] == area_exp.proj_dict[key] + + assert area.crs == area_exp.crs np.testing.assert_allclose(area.area_extent, area_exp.area_extent) def test_calib_exceptions(self, file_handler): diff --git a/satpy/tests/reader_tests/test_nwcsaf_msg.py b/satpy/tests/reader_tests/test_nwcsaf_msg.py index 761f84d380..1c8e0fb793 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_msg.py +++ b/satpy/tests/reader_tests/test_nwcsaf_msg.py @@ -472,7 +472,7 @@ def cut_h5_object_ref(root, attr): def test_get_area_def(self): """Get the area definition.""" - import warnings + from pyproj import CRS from satpy.readers.nwcsaf_msg2013_hdf5 import Hdf5NWCSAF from satpy.tests.utils import make_dataid @@ -488,15 +488,8 @@ def test_get_area_def(self): for i in range(4): assert area_def.area_extent[i] == pytest.approx(aext_res[i], abs=1e-4) - proj_dict = AREA_DEF_DICT["proj_dict"] - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert proj_dict["proj"] == area_def.proj_dict["proj"] - # Not all elements passed on Appveyor, so skip testing every single element of the proj-dict: - # for key in proj_dict: - # self.assertEqual(proj_dict[key], area_def.proj_dict[key]) + expected_crs = CRS(AREA_DEF_DICT["proj_dict"]) + assert expected_crs == area_def.crs assert AREA_DEF_DICT["x_size"] == area_def.width assert AREA_DEF_DICT["y_size"] == area_def.height diff --git a/satpy/tests/reader_tests/test_nwcsaf_nc.py b/satpy/tests/reader_tests/test_nwcsaf_nc.py index a3235e99e7..4f6755f390 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_nc.py +++ b/satpy/tests/reader_tests/test_nwcsaf_nc.py @@ -506,18 +506,13 @@ def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(sel def _check_filehandler_area_def(file_handler, dsid): - import warnings - - correct_h = float(PROJ["gdal_projection"].split("+h=")[-1]) - correct_a = float(PROJ["gdal_projection"].split("+a=")[-1].split()[0]) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - area_definition = file_handler.get_area_def(dsid) - assert area_definition.proj_dict["h"] == correct_h - assert area_definition.proj_dict["a"] == correct_a - assert area_definition.proj_dict["units"] == "m" + from pyproj import CRS + + area_definition = file_handler.get_area_def(dsid) + + expected_crs = CRS(PROJ["gdal_projection"]) + assert area_definition.crs == expected_crs + correct_extent = (PROJ["gdal_xgeo_up_left"], PROJ["gdal_ygeo_low_right"], PROJ["gdal_xgeo_low_right"], diff --git a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py index da99fd2d27..90b9d4432f 100644 --- a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py +++ b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py @@ -180,6 +180,7 @@ def test_get_area_def(self, area_exp, fake_file_dict): warnings.filterwarnings("ignore", message=r"You will likely lose important projection information", category=UserWarning) + # The corresponding CRS objects do not match even if the proj dicts match, so use 
the dicts assert area.proj_dict == area_exp.proj_dict def test_bad_fname(self, fake_dataset, fake_file_dict): diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index f42a1d4648..798aabf61e 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -16,12 +16,12 @@ """Module for testing the satpy.readers.osisaf_l3 module.""" import os -import warnings from datetime import datetime import numpy as np import pytest import xarray as xr +from pyproj import CRS from satpy import DataQuery from satpy.readers.osisaf_l3_nc import OSISAFL3NCFileHandler @@ -224,15 +224,9 @@ def test_get_area_def_stere(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert area_def.proj_dict["a"] == 6378273.0 - assert area_def.proj_dict["lat_0"] == -90 - assert area_def.proj_dict["lat_ts"] == -70 - assert area_def.proj_dict["lon_0"] == 0 - assert area_def.proj_dict["proj"] == "stere" + + expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere")) + assert area_def.crs == expected_crs assert area_def.width == 5 assert area_def.height == 2 @@ -248,14 +242,9 @@ def test_get_area_def_ease(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_lambert_azimuthal_equal_area" - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert area_def.proj_dict["R"] == 6371228 - assert area_def.proj_dict["lat_0"] == -90 - assert area_def.proj_dict["lon_0"] == 0 - assert area_def.proj_dict["proj"] == "laea" + + expected_crs = CRS(dict(R=6371228, lat_0=-90, lon_0=0, proj="laea")) + assert area_def.crs == expected_crs assert area_def.width == 5 assert area_def.height == 2 @@ -288,15 +277,9 @@ def test_get_area_def_stere(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert area_def.proj_dict["a"] == 6378273.0 - assert area_def.proj_dict["lat_0"] == -90 - assert area_def.proj_dict["lat_ts"] == -70 - assert area_def.proj_dict["lon_0"] == 0 - assert area_def.proj_dict["proj"] == "stere" + + expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere")) + assert area_def.crs == expected_crs assert area_def.width == 5 assert area_def.height == 2 @@ -331,12 +314,9 @@ def test_get_area_def_grid(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_geographic_area" - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert area_def.proj_dict["datum"] == "WGS84" - assert area_def.proj_dict["proj"] == "longlat" + + expected_crs = CRS(dict(datum="WGS84", proj="longlat")) + assert area_def.crs == expected_crs assert area_def.width == 5 assert area_def.height == 2 @@ -370,15 +350,9 @@ def test_get_area_def_stere(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" - with warnings.catch_warnings(): - 
warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert area_def.proj_dict["a"] == 6378273.0 - assert area_def.proj_dict["lat_0"] == -90 - assert area_def.proj_dict["lat_ts"] == -70 - assert area_def.proj_dict["lon_0"] == 0 - assert area_def.proj_dict["proj"] == "stere" + + expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere")) + assert area_def.crs == expected_crs assert area_def.width == 5 assert area_def.height == 2 diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py index e928468228..3fe00edc80 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py @@ -18,7 +18,6 @@ """The HRIT msg reader tests package.""" import unittest -import warnings from datetime import datetime from unittest import mock @@ -26,6 +25,7 @@ import pytest import xarray as xr from numpy import testing as npt +from pyproj import CRS import satpy.tests.reader_tests.test_seviri_l1b_hrit_setup as setup from satpy.readers.seviri_l1b_hrit import HRITMSGEpilogueFileHandler, HRITMSGFileHandler, HRITMSGPrologueFileHandler @@ -117,21 +117,12 @@ def test_get_dataset_non_fill(self, calibrate, parent_get_dataset): def test_get_area_def(self): """Test getting the area def.""" - from pyresample.utils import proj4_radius_parameters area = self.reader.get_area_def(make_dataid(name="HRV", resolution=1000)) assert area.area_extent == (-45561979844414.07, -3720765401003.719, 45602912357076.38, 77771774058.38356) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - proj_dict = area.proj_dict - a, b = proj4_radius_parameters(proj_dict) - assert a == 6378169.0 - assert b == pytest.approx(6356583.8) - assert proj_dict["h"] == 35785831.0 - assert proj_dict["lon_0"] == 0.0 - assert proj_dict["proj"] == "geos" - assert proj_dict["units"] == "m" + + expected_crs = CRS(dict(a=6378169.0, b=6356583.8, h=35785831.0, lon_0=0.0, proj="geos", units="m")) + assert expected_crs == area.crs + self.reader.fill_hrv = False area = self.reader.get_area_def(make_dataid(name="HRV", resolution=1000)) npt.assert_allclose(area.defs[0].area_extent, @@ -171,20 +162,12 @@ def _get_fake_data(self): def test_get_area_def(self): """Test getting the area def.""" - from pyresample.utils import proj4_radius_parameters area = self.reader.get_area_def(make_dataid(name="VIS006", resolution=3000)) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - proj_dict = area.proj_dict - a, b = proj4_radius_parameters(proj_dict) - assert a == 6378169.0 - assert b == pytest.approx(6356583.8) - assert proj_dict["h"] == 35785831.0 - assert proj_dict["lon_0"] == self.projection_longitude - assert proj_dict["proj"] == "geos" - assert proj_dict["units"] == "m" + + expected_crs = CRS(dict(a=6378169.0, b=6356583.8, h=35785831.0, lon_0=self.projection_longitude, + proj="geos", units="m")) + assert area.crs == expected_crs + assert area.area_extent == (-77771774058.38356, -3720765401003.719, 30310525626438.438, 77771774058.38356) # Data shifted by 1.5km to N-W From 8b9c46e6e895cd98c0347b5ef9ae45334853bb65 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 16:08:22 +0000 Subject: [PATCH 642/702] Fix bug related to initial variables 
being overwritten and later used again with wrong dimensions. Add unit test to catch this. --- satpy/composites/__init__.py | 44 ++++++++++++++++++---------------- satpy/tests/test_composites.py | 8 +++++++ 2 files changed, 31 insertions(+), 21 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 09985d6ba1..4153a85963 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1083,30 +1083,31 @@ class HighCloudCompositor(CloudCompositor): of where abs(latitude). """ - def __init__(self, name, transition_min=(210., 230.), transition_max=300, latitude_min=(30., 60.), # noqa: D417 - transition_gamma=1.0, **kwargs): + def __init__(self, name, transition_min_limits=(210., 230.), latitude_min_limits=(30., 60.), # noqa: D417 + transition_max=300, transition_gamma=1.0, **kwargs): """Collect custom configuration values. Args: - transition_min (tuple): Brightness temperature values used to identify opaque white - clouds at different latitudes + transition_min_limits (tuple): Brightness temperature values used to identify opaque white + clouds at different latitudes transition_max (float): Brightness temperatures above this value are not considered to be high clouds -> transparent - latitude_min (tuple): Latitude values defining the intervals for computing latitude-dependent - transition_min values. + latitude_min_limits (tuple): Latitude values defining the intervals for computing latitude-dependent + `transition_min` values from `transition_min_limits`. transition_gamma (float): Gamma correction to apply to the alpha channel within the brightness temperature range (`transition_min` to `transition_max`). """ - if len(transition_min) != 2: - raise ValueError(f"Expected 2 `transition_min` values, got {len(transition_min)}") - if len(latitude_min) != 2: - raise ValueError(f"Expected 2 `latitude_min` values, got {len(latitude_min)}") + if len(transition_min_limits) != 2: + raise ValueError(f"Expected 2 `transition_min_limits` values, got {len(transition_min_limits)}") + if len(transition_min_limits) != 2: + raise ValueError(f"Expected 2 `latitude_min_limits` values, got {len(transition_min_limits)}") if type(transition_max) in [list, tuple]: raise ValueError(f"Expected `transition_max` to be of type float, is of type {type(transition_max)}") - self.latitude_min = latitude_min - super().__init__(name, transition_min=transition_min, transition_max=transition_max, + self.transition_min_limits = transition_min_limits + self.latitude_min_limits = latitude_min_limits + super().__init__(name, transition_min=None, transition_max=transition_max, transition_gamma=transition_gamma, **kwargs) def __call__(self, projectables, **kwargs): @@ -1122,16 +1123,17 @@ def __call__(self, projectables, **kwargs): _, lats = data.attrs["area"].get_lonlats(chunks=data.chunks, dtype=data.dtype) lats = np.abs(lats) - slope = (self.transition_min[1] - self.transition_min[0]) / (self.latitude_min[1] - self.latitude_min[0]) - offset = self.transition_min[0] - slope * self.latitude_min[0] - - tr_min_lat = xr.DataArray(name="tr_min_lat", coords=data.coords, dims=data.dims).astype(data.dtype) - tr_min_lat = tr_min_lat.where(lats >= self.latitude_min[0], self.transition_min[0]) - tr_min_lat = tr_min_lat.where(lats <= self.latitude_min[1], self.transition_min[1]) - tr_min_lat = tr_min_lat.where((lats < self.latitude_min[0]) | (lats > self.latitude_min[1]), - slope * lats + offset) + slope = (self.transition_min_limits[1] - self.transition_min_limits[0]) / \ + 
(self.latitude_min_limits[1] - self.latitude_min_limits[0]) + offset = self.transition_min_limits[0] - slope * self.latitude_min_limits[0] - self.transition_min = tr_min_lat + # Compute pixel-level latitude dependent transition_min values and pass to parent CloudCompositor class + transition_min = xr.DataArray(name="transition_min", coords=data.coords, dims=data.dims).astype(data.dtype) + transition_min = transition_min.where(lats >= self.latitude_min_limits[0], self.transition_min_limits[0]) + transition_min = transition_min.where(lats <= self.latitude_min_limits[1], self.transition_min_limits[1]) + transition_min = transition_min.where((lats < self.latitude_min_limits[0]) | + (lats > self.latitude_min_limits[1]), slope * lats + offset) + self.transition_min = transition_min return super().__call__(projectables, **kwargs) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 6b79a06a99..aa4f56f9de 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -973,6 +973,14 @@ def test_high_cloud_compositor(self): expected = np.stack([self.data, expexted_alpha]) np.testing.assert_almost_equal(res.values, expected) + def test_high_cloud_compositor_multiple_calls(self): + """Test that the modified init variables are reset properly when calling the compositor multiple times.""" + from satpy.composites import HighCloudCompositor + comp = HighCloudCompositor(name="test") + res = comp([self.data]) + res2 = comp([self.data]) + np.testing.assert_equal(res.values, res2.values) + def test_high_cloud_compositor_dtype(self): """Test that the datatype is not altered by the compositor.""" from satpy.composites import HighCloudCompositor From ef3ebfc7fc5a65e39a60a8d5b8e3242188941335 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 16:56:57 +0000 Subject: [PATCH 643/702] Use crude stretch instead in order to increase image contrast of especially cold cloud tops. --- satpy/etc/enhancements/generic.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index dfd5b5f5c6..25680d6db9 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -964,7 +964,7 @@ enhancements: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: - stretch: linear + stretch: crude geo_color_low_clouds: standard_name: geo_color_low_clouds @@ -972,7 +972,7 @@ enhancements: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: - stretch: linear + stretch: crude - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: From eaa24be4804d292482f855ae64f0070b6ec138ad Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 16:58:05 +0000 Subject: [PATCH 644/702] Refine blending range for DayNightCompositor. 
--- satpy/etc/composites/abi.yaml | 4 ++-- satpy/etc/composites/ahi.yaml | 4 ++-- satpy/etc/composites/fci.yaml | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml index e950ba027f..b40f353e6a 100644 --- a/satpy/etc/composites/abi.yaml +++ b/satpy/etc/composites/abi.yaml @@ -756,8 +756,8 @@ composites: # GeoColor geo_color: compositor: !!python/name:satpy.composites.DayNightCompositor - lim_low: 73 - lim_high: 82 + lim_low: 78 + lim_high: 88 standard_name: geo_color_day_night_blend prerequisites: - true_color diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index e088bcf1a6..066b9cf0f2 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -506,8 +506,8 @@ composites: geo_color: compositor: !!python/name:satpy.composites.DayNightCompositor - lim_low: 73 - lim_high: 82 + lim_low: 78 + lim_high: 88 standard_name: geo_color_day_night_blend prerequisites: - true_color_ndvi_green diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index d564cc1f36..f27011d301 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -128,8 +128,8 @@ composites: # GeoColor geo_color: compositor: !!python/name:satpy.composites.DayNightCompositor - lim_low: 73 - lim_high: 82 + lim_low: 78 + lim_high: 88 standard_name: geo_color_day_night_blend prerequisites: - true_color From 739ce460f16550be0b2428109eb8e49d049707f0 Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 14 Dec 2023 18:01:14 +0100 Subject: [PATCH 645/702] sort the reader attribute file_handlers instead of the returned created filehandlers and change test accordingly --- satpy/readers/yaml_reader.py | 10 ++++++---- satpy/tests/test_yaml_reader.py | 6 +++--- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index 73b5f7b6ee..3171449b03 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -1178,12 +1178,14 @@ def create_filehandlers(self, filenames, fh_kwargs=None): if "segment" not in fh.filename_info: fh.filename_info["segment"] = fh.filename_info.get("count_in_repeat_cycle", 1) - # sort by segment number - for file_type in created_fhs.keys(): - created_fhs[file_type] = sorted(created_fhs[file_type], key=lambda x: x.filename_info.get("segment", 1)) - + self._sort_segment_filehandler_by_segment_number() return created_fhs + def _sort_segment_filehandler_by_segment_number(self): + for file_type in self.file_handlers.keys(): + self.file_handlers[file_type] = sorted(self.file_handlers[file_type], + key=lambda x: x.filename_info.get("segment", 0)) + def _load_dataset(self, dsid, ds_info, file_handlers, dim="y", pad_data=True): """Load only a piece of the dataset.""" if not pad_data: diff --git a/satpy/tests/test_yaml_reader.py b/satpy/tests/test_yaml_reader.py index 3f1db6a977..0b0293e453 100644 --- a/satpy/tests/test_yaml_reader.py +++ b/satpy/tests/test_yaml_reader.py @@ -1051,11 +1051,11 @@ def test_segments_sorting(self, cfh): fake_fh_3.filename_info["segment"] = 3 # put the filehandlers in an unsorted order - cfh.return_value = {"ft1": [fake_fh_1, fake_fh_3, fake_fh_2]} + reader.file_handlers = {"ft1": [fake_fh_1, fake_fh_3, fake_fh_2]} # check that the created filehandlers are sorted by segment number - created_fhs = reader.create_filehandlers(["fake.nc"]) - assert [fh.filename_info["segment"] for fh in created_fhs["ft1"]] == [1, 2, 3] + reader.create_filehandlers(["fake.nc"]) + 
assert [fh.filename_info["segment"] for fh in reader.file_handlers["ft1"]] == [1, 2, 3] @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch("satpy.readers.yaml_reader.FileYAMLReader._load_dataset") From 850b46e575d2a52e3ba7f63ff91ecb313a0a22bd Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 19:24:11 +0200 Subject: [PATCH 646/702] Add missing projection parameter --- satpy/tests/reader_tests/test_osisaf_l3.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index 798aabf61e..80fb581db7 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -225,7 +225,7 @@ def test_get_area_def_stere(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" - expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere")) + expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere", rf=298.27940986765)) assert area_def.crs == expected_crs assert area_def.width == 5 @@ -278,7 +278,7 @@ def test_get_area_def_stere(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" - expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere")) + expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere", rf=298.27940986765)) assert area_def.crs == expected_crs assert area_def.width == 5 @@ -351,7 +351,8 @@ def test_get_area_def_stere(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" - expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere")) + expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere", rf=298.27940986765)) + assert area_def.crs == expected_crs assert area_def.width == 5 From 203acca9e8261028af826a1050a89241f9113956 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 14 Dec 2023 14:07:46 -0600 Subject: [PATCH 647/702] Fix composites failing on non-aligned geolocation coordinates --- satpy/composites/__init__.py | 18 ++++++++++++++++++ satpy/tests/test_composites.py | 34 +++++++++++++++++++++++++++++++++- 2 files changed, 51 insertions(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 5fa9ca575b..a0ce73caea 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -185,6 +185,7 @@ def match_data_arrays(self, data_arrays): """ self.check_geolocation(data_arrays) new_arrays = self.drop_coordinates(data_arrays) + new_arrays = self.align_geo_coordinates(new_arrays) new_arrays = list(unify_chunks(*new_arrays)) return new_arrays @@ -210,6 +211,23 @@ def drop_coordinates(self, data_arrays): return new_arrays + def align_geo_coordinates(self, data_arrays: Sequence[xr.DataArray]) -> list[xr.DataArray]: + """Align DataArrays along geolocation coordinates. + + See :func:`~xarray.align` for more information. This function uses + the "override" join method to essentially ignore differences between + coordinates. The :meth:`check_geolocation` should be called before + this to ensure that geolocation coordinates and "area" are compatible. + The :meth:`drop_coordinates` method should be called before this to + ensure that coordinates that are considered "negligible" when computing + composites do not affect alignment. 
+ + """ + non_geo_coords = tuple( + coord_name for data_arr in data_arrays + for coord_name in data_arr.coords if coord_name not in ("x", "y")) + return xr.align(*data_arrays, join="override", exclude=non_geo_coords) + def check_geolocation(self, data_arrays): """Check that the geolocations of the *data_arrays* are compatible. diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 4a7b2a2ce9..830a427c4a 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -37,7 +37,7 @@ # - tmp_path -class TestMatchDataArrays(unittest.TestCase): +class TestMatchDataArrays: """Test the utility method 'match_data_arrays'.""" def _get_test_ds(self, shape=(50, 100), dims=("y", "x")): @@ -132,6 +132,38 @@ def test_nondimensional_coords(self): ret_datasets = comp.match_data_arrays([ds, ds]) assert "acq_time" not in ret_datasets[0].coords + def test_almost_equal_geo_coordinates(self): + """Test that coordinates that are almost-equal still match. + + See https://github.com/pytroll/satpy/issues/2668 for discussion. + + Various operations like cropping and resampling can cause + geo-coordinates (y, x) to be very slightly unequal due to floating + point precision. This test makes sure that even in those cases we + can still generate composites from DataArrays with these coordinates. + + """ + from satpy.composites import CompositeBase + from satpy.resample import add_crs_xy_coords + + comp = CompositeBase("test_comp") + data_arr1 = self._get_test_ds(shape=(2, 2)) + data_arr1 = add_crs_xy_coords(data_arr1, data_arr1.attrs["area"]) + data_arr2 = self._get_test_ds(shape=(2, 2)) + data_arr2 = data_arr2.assign_coords( + x=data_arr1.coords["x"] + 0.000001, + y=data_arr1.coords["y"], + crs=data_arr1.coords["crs"], + ) + # data_arr2 = add_crs_xy_coords(data_arr2, data_arr2.attrs["area"]) + # data_arr2.assign_coords(x=data_arr2.coords["x"].copy() + 1.1) + # default xarray alignment would fail and collapse one of our dims + assert 0 in (data_arr2 - data_arr1).shape + new_data_arr1, new_data_arr2 = comp.match_data_arrays([data_arr1, data_arr2]) + assert 0 not in new_data_arr1.shape + assert 0 not in new_data_arr2.shape + assert 0 not in (new_data_arr2 - new_data_arr1).shape + class TestRatioSharpenedCompositors: """Test RatioSharpenedRGB and SelfSharpendRGB compositors.""" From cfbcaf76d8c726e0ffa1a7b7df9d2c81c3f4c4f2 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 14 Dec 2023 14:14:17 -0600 Subject: [PATCH 648/702] Cleanup match_data_arrays and add type annotations --- satpy/composites/__init__.py | 86 ++++++++++++++++++------------------ 1 file changed, 44 insertions(+), 42 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index a0ce73caea..ef4a559322 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -157,7 +157,7 @@ def apply_modifier_info(self, origin, destination): elif o.get(k) is not None: d[k] = o[k] - def match_data_arrays(self, data_arrays): + def match_data_arrays(self, data_arrays: Sequence[xr.DataArray]) -> list[xr.DataArray]: """Match data arrays so that they can be used together in a composite. For the purpose of this method, "can be used together" means: @@ -189,46 +189,7 @@ def match_data_arrays(self, data_arrays): new_arrays = list(unify_chunks(*new_arrays)) return new_arrays - def drop_coordinates(self, data_arrays): - """Drop negligible non-dimensional coordinates. - - Drops negligible coordinates if they do not correspond to any - dimension. 
Negligible coordinates are defined in the - :attr:`NEGLIGIBLE_COORDS` module attribute. - - Args: - data_arrays (List[arrays]): Arrays to be checked - """ - new_arrays = [] - for ds in data_arrays: - drop = [coord for coord in ds.coords - if coord not in ds.dims and - any([neglible in coord for neglible in NEGLIGIBLE_COORDS])] - if drop: - new_arrays.append(ds.drop_vars(drop)) - else: - new_arrays.append(ds) - - return new_arrays - - def align_geo_coordinates(self, data_arrays: Sequence[xr.DataArray]) -> list[xr.DataArray]: - """Align DataArrays along geolocation coordinates. - - See :func:`~xarray.align` for more information. This function uses - the "override" join method to essentially ignore differences between - coordinates. The :meth:`check_geolocation` should be called before - this to ensure that geolocation coordinates and "area" are compatible. - The :meth:`drop_coordinates` method should be called before this to - ensure that coordinates that are considered "negligible" when computing - composites do not affect alignment. - - """ - non_geo_coords = tuple( - coord_name for data_arr in data_arrays - for coord_name in data_arr.coords if coord_name not in ("x", "y")) - return xr.align(*data_arrays, join="override", exclude=non_geo_coords) - - def check_geolocation(self, data_arrays): + def check_geolocation(self, data_arrays: Sequence[xr.DataArray]) -> None: """Check that the geolocations of the *data_arrays* are compatible. For the purpose of this method, "compatible" means: @@ -238,7 +199,7 @@ def check_geolocation(self, data_arrays): - If all have an area, the areas should be all the same. Args: - data_arrays (List[arrays]): Arrays to be checked + data_arrays: Arrays to be checked Raises: :class:`IncompatibleAreas`: @@ -269,6 +230,47 @@ def check_geolocation(self, data_arrays): "'{}'".format(self.attrs["name"])) raise IncompatibleAreas("Areas are different") + @staticmethod + def drop_coordinates(data_arrays: Sequence[xr.DataArray]) -> list[xr.DataArray]: + """Drop negligible non-dimensional coordinates. + + Drops negligible coordinates if they do not correspond to any + dimension. Negligible coordinates are defined in the + :attr:`NEGLIGIBLE_COORDS` module attribute. + + Args: + data_arrays (List[arrays]): Arrays to be checked + """ + new_arrays = [] + for ds in data_arrays: + drop = [coord for coord in ds.coords + if coord not in ds.dims and + any([neglible in coord for neglible in NEGLIGIBLE_COORDS])] + if drop: + new_arrays.append(ds.drop_vars(drop)) + else: + new_arrays.append(ds) + + return new_arrays + + @staticmethod + def align_geo_coordinates(data_arrays: Sequence[xr.DataArray]) -> list[xr.DataArray]: + """Align DataArrays along geolocation coordinates. + + See :func:`~xarray.align` for more information. This function uses + the "override" join method to essentially ignore differences between + coordinates. The :meth:`check_geolocation` should be called before + this to ensure that geolocation coordinates and "area" are compatible. + The :meth:`drop_coordinates` method should be called before this to + ensure that coordinates that are considered "negligible" when computing + composites do not affect alignment. 
+ + """ + non_geo_coords = tuple( + coord_name for data_arr in data_arrays + for coord_name in data_arr.coords if coord_name not in ("x", "y")) + return list(xr.align(*data_arrays, join="override", exclude=non_geo_coords)) + class DifferenceCompositor(CompositeBase): """Make the difference of two data arrays.""" From fb8ff3ba2f14f5edf9a03b1edd1288943d3620f5 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Thu, 14 Dec 2023 21:39:21 +0100 Subject: [PATCH 649/702] import hvplot directly inside method As Martin has suggested I'm importing directly inside method the hvplot library to remove an if condition and resolve "too complex" pre-commit control --- satpy/scene.py | 26 ++++++-------------------- 1 file changed, 6 insertions(+), 20 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index aea5b44cfe..fe6bbce1f9 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -39,12 +39,6 @@ from satpy.utils import convert_remote_files_to_fsspec, get_storage_options_from_reader_kwargs from satpy.writers import load_writer -#try: -# import hvplot.xarray as hvplot_xarray # noqa -#except ImportError: -# hvplot_xarray = None - - LOG = logging.getLogger(__name__) @@ -1074,12 +1068,12 @@ def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynami return gview def to_hvplot(self, datasets=None, *args, **kwargs): - """Convert satpy Scene to Hvplot. + """Convert satpy Scene to Hvplot. The method could not be used with composites of swath data. Args: datasets (list): Limit included products to these datasets. - kwargs: hvplot options dictionary. args: Arguments coming from hvplot + kwargs: hvplot options dictionary. Returns: hvplot object that contains within it the plots of datasets list. As default it contains all Scene datasets plots and a plot title is shown. 
@@ -1087,10 +1081,11 @@ def to_hvplot(self, datasets=None, *args, **kwargs): Example usage:: scene_list = ['ash','IR_108'] + scn = Scene() + scn.load(scene_list) + scn = scn.resample('eurol') plot = scn.to_hvplot(datasets=scene_list) - plot.ash+plot.IR_108 - """ def _get_crs(xarray_ds): @@ -1113,12 +1108,7 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): clabel=f"[{_get_units(xarray_ds,variable)}]", title=title, **defaults) - #def _check_hvplot_library(): - # if hvplot_xarray is None: - # raise ImportError("'hvplot' must be installed to use this feature") -# -# _check_hvplot_library() - + import hvplot.xarray as hvplot_xarray # noqa plot = Overlay() xarray_ds = self.to_xarray_dataset(datasets) @@ -1129,7 +1119,6 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): ccrs = None defaults={"x":"longitude","y":"latitude"} - if datasets is None: datasets = list(xarray_ds.keys()) @@ -1141,9 +1130,6 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): defaults.update(kwargs) - #if "latitude" in xarray_ds.coords: - # defaults.update({"x":"longitude","y":"latitude"}) - for element in datasets: title = f"{element} @ {_get_timestamp(xarray_ds)}" if xarray_ds[element].shape[0] == 3: From 807357a4d4ea1a4f4cad740d5978d534b1e61b20 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Thu, 14 Dec 2023 22:45:58 +0100 Subject: [PATCH 650/702] Add holoviews required library --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 33d417bf2e..49b00ae4e0 100644 --- a/setup.py +++ b/setup.py @@ -76,7 +76,7 @@ "doc": ["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"], # Other "geoviews": ["geoviews"], - "hvplot": ["hvplot", "geoviews", "cartopy"], + "hvplot": ["hvplot", "geoviews", "cartopy", "holoviews"], "overlays": ["pycoast", "pydecorate"], "satpos_from_tle": ["skyfield", "astropy"], "tests": test_requires, From 8b7bba7bede6bc1b0f3c8ebc4d70a5a2b8cea501 Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Thu, 14 Dec 2023 23:41:10 +0100 Subject: [PATCH 651/702] Clean up tests for NDVIHybridGreen compositor. 
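
For reference, a minimal sketch of the pattern the cleaned-up dtype test now follows:
the composite is built under a zero-compute dask scheduler and the dtype is asserted on
the lazy dask result, without calling .compute(). This assumes the CustomScheduler
helper from satpy.tests.utils; the band values mirror the float32 fixtures in
setup_method.

    import dask
    import dask.array as da
    import numpy as np
    import xarray as xr

    from satpy.composites.spectral import NDVIHybridGreen
    from satpy.tests.utils import CustomScheduler

    def _band(values, name):
        # Small float32, dask-backed band similar to the setup_method fixtures.
        arr = da.from_array(np.array(values, dtype=np.float32), chunks=25)
        return xr.DataArray(arr, dims=("y", "x"), attrs={"name": name})

    c01 = _band([[0.25, 0.30], [0.20, 0.30]], "C02")
    c02 = _band([[0.25, 0.30], [0.25, 0.35]], "C03")
    c03 = _band([[0.35, 0.35], [0.28, 0.65]], "C04")

    # No dask computation may be triggered while building the composite, so the
    # dtype is inspected on the lazy result instead of a computed numpy array.
    with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
        comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05),
                               prerequisites=(0.51, 0.65, 0.85),
                               standard_name="toa_bidirectional_reflectance")
        res = comp((c01, c02, c03))
    assert res.data.dtype == np.float32
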
--- satpy/tests/compositor_tests/test_spectral.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 7472016c00..c9a7ab62b6 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -103,10 +103,10 @@ def test_ndvi_hybrid_green(self): # Test General functionality with linear strength (=1.0) res = comp((self.c01, self.c02, self.c03)) - assert isinstance(res, xr.DataArray) - assert isinstance(res.data, da.Array) - assert res.attrs["name"] == "ndvi_hybrid_green" - assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs["name"] == "ndvi_hybrid_green" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.values np.testing.assert_array_almost_equal(data, np.array([[0.2633, 0.3071], [0.2115, 0.3420]]), decimal=4) @@ -115,7 +115,7 @@ def test_ndvi_hybrid_green_dtype(self): with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") - res = comp((self.c01, self.c02, self.c03)).compute() + res = comp((self.c01, self.c02, self.c03)) assert res.data.dtype == np.float32 def test_nonlinear_scaling(self): @@ -124,7 +124,6 @@ def test_nonlinear_scaling(self): comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), strength=2.0, prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") - res = comp((self.c01, self.c02, self.c03)) res_np = res.data.compute() assert res.dtype == res_np.dtype From ebceffbd015fcf6c31056a328316f9d28669674c Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Thu, 14 Dec 2023 23:44:09 +0100 Subject: [PATCH 652/702] Add instance checks. --- satpy/tests/test_composites.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index aa4f56f9de..d17188846c 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -969,6 +969,8 @@ def test_high_cloud_compositor(self): with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = HighCloudCompositor(name="test") res = comp([self.data]) + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) expexted_alpha = np.array([[1.0, 0.7142857, 0.0], [1.0, 0.625, 0.0], [1.0, 0.5555555, 0.0]]) expected = np.stack([self.data, expexted_alpha]) np.testing.assert_almost_equal(res.values, expected) @@ -1014,6 +1016,8 @@ def test_low_cloud_compositor(self): with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = LowCloudCompositor(name="test") res = comp([self.btd, self.bt_win, self.lsm]) + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) expexted_alpha = np.array([[0.0, 0.25, 1.0], [0.0, 0.25, 1.0], [0.0, 0.0, 0.0]]) expected = np.stack([self.btd, expexted_alpha]) np.testing.assert_equal(res.values, expected) From 9c277391fc20a7883533a3441fea17de72f23a1d Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Thu, 14 Dec 2023 23:49:30 +0100 Subject: [PATCH 653/702] Remove trailing whitespace. 
--- satpy/tests/test_composites.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index d17188846c..4f82467ab9 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -970,7 +970,7 @@ def test_high_cloud_compositor(self): comp = HighCloudCompositor(name="test") res = comp([self.data]) assert isinstance(res, xr.DataArray) - assert isinstance(res.data, da.Array) + assert isinstance(res.data, da.Array) expexted_alpha = np.array([[1.0, 0.7142857, 0.0], [1.0, 0.625, 0.0], [1.0, 0.5555555, 0.0]]) expected = np.stack([self.data, expexted_alpha]) np.testing.assert_almost_equal(res.values, expected) From aad6ea810f237ae481ed72c63357a0bc7f532bf8 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Thu, 14 Dec 2023 22:45:58 +0100 Subject: [PATCH 654/702] Add holoviews required library --- setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 33d417bf2e..d31c21364a 100644 --- a/setup.py +++ b/setup.py @@ -76,7 +76,8 @@ "doc": ["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"], # Other "geoviews": ["geoviews"], - "hvplot": ["hvplot", "geoviews", "cartopy"], + "hvplot": ["hvplot", "geoviews", "cartopy", "holoviews"], + "holoviews": ["holoviews"], "overlays": ["pycoast", "pydecorate"], "satpos_from_tle": ["skyfield", "astropy"], "tests": test_requires, From fca35cd5772335093b1e2defdbf7a52fb7f804da Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Thu, 14 Dec 2023 22:45:58 +0100 Subject: [PATCH 655/702] Add holoviews required library --- setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.py b/setup.py index d31c21364a..49b00ae4e0 100644 --- a/setup.py +++ b/setup.py @@ -77,7 +77,6 @@ # Other "geoviews": ["geoviews"], "hvplot": ["hvplot", "geoviews", "cartopy", "holoviews"], - "holoviews": ["holoviews"], "overlays": ["pycoast", "pydecorate"], "satpos_from_tle": ["skyfield", "astropy"], "tests": test_requires, From 7918f375de45272055b26d8d7ba1f3caf4aba759 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 15 Dec 2023 00:44:30 +0100 Subject: [PATCH 656/702] Revert "Add holoviews required library" This reverts commit 807357a4d4ea1a4f4cad740d5978d534b1e61b20. --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 49b00ae4e0..33d417bf2e 100644 --- a/setup.py +++ b/setup.py @@ -76,7 +76,7 @@ "doc": ["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"], # Other "geoviews": ["geoviews"], - "hvplot": ["hvplot", "geoviews", "cartopy", "holoviews"], + "hvplot": ["hvplot", "geoviews", "cartopy"], "overlays": ["pycoast", "pydecorate"], "satpos_from_tle": ["skyfield", "astropy"], "tests": test_requires, From 1e9dbf29d90ba6c72bfa3110e1006b76baf66b8c Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Fri, 15 Dec 2023 06:25:20 +0000 Subject: [PATCH 657/702] Add data validity tests. 
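
Besides the new tests, the second length check in HighCloudCompositor.__init__ now
validates latitude_min_limits instead of re-checking transition_min_limits. A minimal
sketch of how such misconfigurations are rejected (the argument values below are
illustrative only):

    from satpy.composites import HighCloudCompositor, LowCloudCompositor

    # Three latitude limits where a (low-latitude, high-latitude) pair is expected.
    try:
        HighCloudCompositor("test", latitude_min_limits=(20., 40., 60.))
    except ValueError as err:
        print(err)  # Expected 2 `latitude_min_limits` values, got 3

    # A single threshold where a (min, max) pair is expected.
    try:
        LowCloudCompositor("test", range_water=(2.0,))
    except ValueError as err:
        print(err)  # Expected 2 `range_water` values, got 1
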
--- satpy/composites/__init__.py | 4 ++-- satpy/tests/test_composites.py | 32 ++++++++++++++++++++++++++++++++ 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 4153a85963..a286f078c4 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1100,8 +1100,8 @@ def __init__(self, name, transition_min_limits=(210., 230.), latitude_min_limits """ if len(transition_min_limits) != 2: raise ValueError(f"Expected 2 `transition_min_limits` values, got {len(transition_min_limits)}") - if len(transition_min_limits) != 2: - raise ValueError(f"Expected 2 `latitude_min_limits` values, got {len(transition_min_limits)}") + if len(latitude_min_limits) != 2: + raise ValueError(f"Expected 2 `latitude_min_limits` values, got {len(latitude_min_limits)}") if type(transition_max) in [list, tuple]: raise ValueError(f"Expected `transition_max` to be of type float, is of type {type(transition_max)}") diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 4f82467ab9..f6726bc7e9 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -990,6 +990,24 @@ def test_high_cloud_compositor_dtype(self): res = comp([self.data]) assert res.data.dtype == self.dtype + def test_high_cloud_compositor_validity_checks(self): + """Test that errors are raised for invalid input data and settings.""" + from satpy.composites import HighCloudCompositor + + with pytest.raises(ValueError, match="Expected 2 `transition_min_limits` values, got 1"): + _ = HighCloudCompositor("test", transition_min_limits=(210., )) + + with pytest.raises(ValueError, match="Expected 2 `latitude_min_limits` values, got 3"): + _ = HighCloudCompositor("test", latitude_min_limits=(20., 40., 60.)) + + with pytest.raises(ValueError, match="Expected `transition_max` to be of type float, " + "is of type "): + _ = HighCloudCompositor("test", transition_max=(250., 300.)) + + comp = HighCloudCompositor("test") + with pytest.raises(ValueError, match="Expected 1 dataset, got 2"): + _ = comp([self.data, self.data]) + class TestLowCloudCompositor: """Test LowCloudCompositor.""" @@ -1029,6 +1047,20 @@ def test_low_cloud_compositor_dtype(self): res = comp([self.btd, self.bt_win, self.lsm]) assert res.data.dtype == self.dtype + def test_low_cloud_compositor_validity_checks(self): + """Test that errors are raised for invalid input data and settings.""" + from satpy.composites import LowCloudCompositor + + with pytest.raises(ValueError, match="Expected 2 `range_land` values, got 1"): + _ = LowCloudCompositor("test", range_land=(2.0, )) + + with pytest.raises(ValueError, match="Expected 2 `range_water` values, got 1"): + _ = LowCloudCompositor("test", range_water=(2.0,)) + + comp = LowCloudCompositor("test") + with pytest.raises(ValueError, match="Expected 3 datasets, got 2"): + _ = comp([self.btd, self.lsm]) + class TestSingleBandCompositor(unittest.TestCase): """Test the single-band compositor.""" From 30eaf9ed65d6ba89948a1bcbad67099e676032b5 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Fri, 15 Dec 2023 06:26:33 +0000 Subject: [PATCH 658/702] Rename 'sea' to 'water'. 
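
The values_sea and range_sea keyword arguments of LowCloudCompositor become
values_water and range_water, and the static mask layer in the abi/ahi/fci recipes now
uses the standard_name land_water_mask. Custom recipes or direct constructor calls need
the new names. A minimal sketch of the updated call, with threshold values borrowed from
the FCI recipe for illustration:

    from satpy.composites import LowCloudCompositor

    # Previously: values_land=(100,), values_sea=(0,),
    #             range_land=(4.35, 6.75), range_sea=(1.35, 5.0)
    comp = LowCloudCompositor("low_clouds", values_land=(100,), values_water=(0,),
                              range_land=(4.35, 6.75), range_water=(1.35, 5.0))
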
--- satpy/composites/__init__.py | 34 +++++++++++++++++----------------- satpy/etc/composites/abi.yaml | 4 ++-- satpy/etc/composites/ahi.yaml | 4 ++-- satpy/etc/composites/fci.yaml | 6 +++--- 4 files changed, 24 insertions(+), 24 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index a286f078c4..88375bc933 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1148,8 +1148,8 @@ class LowCloudCompositor(CloudCompositor): Pixels with `BTD` values below a given threshold will be transparent, whereas pixels with `BTD` values above another threshold will be opaque. The transparency of all other `BTD` values will be a linear function of the `BTD` value itself. Two sets of thresholds are used, one set for land surface types - (`range_land`) and another one for sea/water surface types (`range_sea`), respectively. Hence, - this compositor requires a land-sea-mask as a prerequisite input. This follows the GeoColor + (`range_land`) and another one for water surface types (`range_water`), respectively. Hence, + this compositor requires a land-water-mask as a prerequisite input. This follows the GeoColor implementation of night-time low-level clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`), but with some adjustments to the thresholds based on recent developments and feedback from CIRA. @@ -1157,9 +1157,9 @@ class LowCloudCompositor(CloudCompositor): only applicable during night-time. """ - def __init__(self, name, values_land=(1,), values_sea=(0,), # noqa: D417 + def __init__(self, name, values_land=(1,), values_water=(0,), # noqa: D417 range_land=(0.0, 4.0), - range_sea=(0.0, 4.0), + range_water=(0.0, 4.0), invert_alpha=True, transition_gamma=1.0, **kwargs): """Init info. @@ -1167,12 +1167,12 @@ def __init__(self, name, values_land=(1,), values_sea=(0,), # noqa: D417 Collect custom configuration values. Args: - values_land (list): List of values used to identify land surface pixels in the land-sea-mask. - values_sea (list): List of values used to identify sea/water surface pixels in the land-sea-mask. + values_land (list): List of values used to identify land surface pixels in the land-water-mask. + values_water (list): List of values used to identify water surface pixels in the land-water-mask. range_land (tuple): Threshold values used for masking low-level clouds from the brightness temperature difference over land surface types. - range_sea (tuple): Threshold values used for masking low-level clouds from the brightness temperature - difference over sea/water. + range_water (tuple): Threshold values used for masking low-level clouds from the brightness temperature + difference over water. invert_alpha (bool): Invert the alpha channel to make low data values transparent and high data values opaque. 
transition_gamma (float): Gamma correction to apply to the alpha channel within the brightness @@ -1180,13 +1180,13 @@ def __init__(self, name, values_land=(1,), values_sea=(0,), # noqa: D417 """ if len(range_land) != 2: raise ValueError(f"Expected 2 `range_land` values, got {len(range_land)}") - if len(range_sea) != 2: - raise ValueError(f"Expected 2 `range_sea` values, got {len(range_sea)}") + if len(range_water) != 2: + raise ValueError(f"Expected 2 `range_water` values, got {len(range_water)}") self.values_land = values_land if type(values_land) in [list, tuple] else [values_land] - self.values_sea = values_sea if type(values_sea) in [list, tuple] else [values_sea] + self.values_water = values_water if type(values_water) in [list, tuple] else [values_water] self.range_land = range_land - self.range_sea = range_sea + self.range_water = range_water super().__init__(name, transition_min=None, transition_max=None, invert_alpha=invert_alpha, transition_gamma=transition_gamma, **kwargs) @@ -1211,12 +1211,12 @@ def __call__(self, projectables, **kwargs): self.transition_min, self.transition_max = self.range_land res = super().__call__([btd.where(lsm.isin(self.values_land))], **kwargs) - # Call CloudCompositor for sea/water surface pixels - self.transition_min, self.transition_max = self.range_sea - res_sea = super().__call__([btd.where(lsm.isin(self.values_sea))], **kwargs) + # Call CloudCompositor for /water surface pixels + self.transition_min, self.transition_max = self.range_water + res_water = super().__call__([btd.where(lsm.isin(self.values_water))], **kwargs) - # Compine resutls for land and sea/water surface pixels - res = res.where(lsm.isin(self.values_land), res_sea) + # Compine resutls for land and water surface pixels + res = res.where(lsm.isin(self.values_land), res_water) # Make pixels with cold window channel brightness temperatures transparent to avoid spurious false # alarms caused by noise in the 3.9um channel that can occur for very cold cloud tops diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml index b40f353e6a..cae7a7035f 100644 --- a/satpy/etc/composites/abi.yaml +++ b/satpy/etc/composites/abi.yaml @@ -773,7 +773,7 @@ composites: geo_color_low_clouds: standard_name: geo_color_low_clouds compositor: !!python/name:satpy.composites.LowCloudCompositor - values_sea: 0 + values_water: 0 values_land: 100 prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor @@ -782,7 +782,7 @@ composites: - name: C07 - name: C13 - compositor: !!python/name:satpy.composites.StaticImageCompositor - standard_name: land_sea_mask + standard_name: land_water_mask url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif" known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569" diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 066b9cf0f2..3db0d20f3c 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -523,7 +523,7 @@ composites: geo_color_low_clouds: standard_name: geo_color_low_clouds compositor: !!python/name:satpy.composites.LowCloudCompositor - values_sea: 0 + values_water: 0 values_land: 100 prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor @@ -532,7 +532,7 @@ composites: - name: B07 - name: B13 - compositor: !!python/name:satpy.composites.StaticImageCompositor - standard_name: land_sea_mask + standard_name: land_water_mask url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif" 
known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569" diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index f27011d301..775331c08b 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -145,10 +145,10 @@ composites: geo_color_low_clouds: standard_name: geo_color_low_clouds compositor: !!python/name:satpy.composites.LowCloudCompositor - values_sea: 0 + values_water: 0 values_land: 100 + range_water: [1.35, 5.0] range_land: [4.35, 6.75] - range_sea: [1.35, 5.0] prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: @@ -156,7 +156,7 @@ composites: - name: ir_38 - name: ir_105 - compositor: !!python/name:satpy.composites.StaticImageCompositor - standard_name: land_sea_mask + standard_name: land_water_mask url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif" known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569" From 60fa8a1c9d914f223a4917de63baff191ffa9f8a Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Fri, 15 Dec 2023 06:37:21 +0000 Subject: [PATCH 659/702] Add description and reference in geo_color composite recipes. --- satpy/etc/composites/abi.yaml | 7 +++++++ satpy/etc/composites/ahi.yaml | 8 +++++++- satpy/etc/composites/fci.yaml | 7 +++++++ 3 files changed, 21 insertions(+), 1 deletion(-) diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml index cae7a7035f..4700aa470b 100644 --- a/satpy/etc/composites/abi.yaml +++ b/satpy/etc/composites/abi.yaml @@ -756,6 +756,13 @@ composites: # GeoColor geo_color: compositor: !!python/name:satpy.composites.DayNightCompositor + description: > + GeoColor is a multi-layer blended RGB composite where the day-time part of the image is represented by true + color imagery and the nighttime part of the image by a three layer vertically blended stack composed of a + high-level cloud layer (single IR window channel), a low-level cloud layer (IR split window) and a static + surface terrain layer with city lights (NASA Black Marble). + references: + Research Article: https://journals.ametsoc.org/view/journals/atot/37/3/JTECH-D-19-0134.1.xml lim_low: 78 lim_high: 88 standard_name: geo_color_day_night_blend diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 3db0d20f3c..fe64a2bcae 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -503,9 +503,15 @@ composites: - _night_background_hires # GeoColor - geo_color: compositor: !!python/name:satpy.composites.DayNightCompositor + description: > + GeoColor is a multi-layer blended RGB composite where the day-time part of the image is represented by true + color imagery and the nighttime part of the image by a three layer vertically blended stack composed of a + high-level cloud layer (single IR window channel), a low-level cloud layer (IR split window) and a static + surface terrain layer with city lights (NASA Black Marble). 
+ references: + Research Article: https://journals.ametsoc.org/view/journals/atot/37/3/JTECH-D-19-0134.1.xml lim_low: 78 lim_high: 88 standard_name: geo_color_day_night_blend diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 775331c08b..0f0e98f4e0 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -128,6 +128,13 @@ composites: # GeoColor geo_color: compositor: !!python/name:satpy.composites.DayNightCompositor + description: > + GeoColor is a multi-layer blended RGB composite where the day-time part of the image is represented by true + color imagery and the nighttime part of the image by a three layer vertically blended stack composed of a + high-level cloud layer (single IR window channel), a low-level cloud layer (IR split window) and a static + surface terrain layer with city lights (NASA Black Marble). + references: + Research Article: https://journals.ametsoc.org/view/journals/atot/37/3/JTECH-D-19-0134.1.xml lim_low: 78 lim_high: 88 standard_name: geo_color_day_night_blend From 4df285d66c1882075e30ea0f064808098be17af9 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 15 Dec 2023 09:05:01 +0200 Subject: [PATCH 660/702] Fix proj authority usage --- satpy/tests/writer_tests/test_mitiff.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/satpy/tests/writer_tests/test_mitiff.py b/satpy/tests/writer_tests/test_mitiff.py index b4ff371dab..f1519cf7a1 100644 --- a/satpy/tests/writer_tests/test_mitiff.py +++ b/satpy/tests/writer_tests/test_mitiff.py @@ -844,23 +844,23 @@ def test_convert_proj4_string(self): from pyresample.geometry import AreaDefinition from satpy.writers.mitiff import MITIFFWriter - checks = [{"epsg": "+init=EPSG:32631", + checks = [{"epsg": "EPSG:32631", "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 " "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " "+y_0=1515.000000\n")}, - {"epsg": "+init=EPSG:32632", + {"epsg": "EPSG:32632", "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 " "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " "+y_0=1515.000000\n")}, - {"epsg": "+init=EPSG:32633", + {"epsg": "EPSG:32633", "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=15 +k=0.9996 " "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " "+y_0=1515.000000\n")}, - {"epsg": "+init=EPSG:32634", + {"epsg": "EPSG:32634", "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 " "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " "+y_0=1515.000000\n")}, - {"epsg": "+init=EPSG:32635", + {"epsg": "EPSG:32635", "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 " "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " "+y_0=1515.000000\n")}] From 013b49fe2e014d39995bd05f9372e60f1812acf0 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 15 Dec 2023 09:19:31 +0200 Subject: [PATCH 661/702] Fix/suppress PROJ warnings about losing projection information --- satpy/tests/writer_tests/test_mitiff.py | 54 +++++++++++-------------- satpy/writers/mitiff.py | 8 +++- 2 files changed, 31 insertions(+), 31 deletions(-) diff --git a/satpy/tests/writer_tests/test_mitiff.py b/satpy/tests/writer_tests/test_mitiff.py index f1519cf7a1..4e8878687a 100644 --- a/satpy/tests/writer_tests/test_mitiff.py +++ b/satpy/tests/writer_tests/test_mitiff.py @@ -52,14 +52,13 @@ def _get_test_datasets(self): import dask.array as da import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from 
pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( "test", "test", "test", - proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), + CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -119,14 +118,13 @@ def _get_test_datasets_sensor_set(self): import dask.array as da import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( "test", "test", "test", - proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), + CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -186,14 +184,14 @@ def _get_test_dataset(self, bands=3): import dask.array as da import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict + area_def = AreaDefinition( "test", "test", "test", - proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), + CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -217,14 +215,14 @@ def _get_test_one_dataset(self): import dask.array as da import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict + area_def = AreaDefinition( "test", "test", "test", - proj4_str_to_dict("+proj=geos +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. h=36000. +units=km"), + CRS("+proj=geos +datum=WGS84 +ellps=WGS84 +lon_0=0. h=36000. +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -248,14 +246,14 @@ def _get_test_one_dataset_sensor_set(self): import dask.array as da import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict + area_def = AreaDefinition( "test", "test", "test", - proj4_str_to_dict("+proj=geos +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. h=36000. +units=km"), + CRS("+proj=geos +datum=WGS84 +ellps=WGS84 +lon_0=0. h=36000. +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -278,14 +276,14 @@ def _get_test_dataset_with_bad_values(self, bands=3): from datetime import datetime import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict + area_def = AreaDefinition( "test", "test", "test", - proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), + CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -313,8 +311,8 @@ def _get_test_dataset_calibration(self, bands=6): import dask.array as da import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict from satpy.scene import Scene from satpy.tests.utils import make_dsq @@ -322,8 +320,7 @@ def _get_test_dataset_calibration(self, bands=6): "test", "test", "test", - proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), + CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -418,8 +415,8 @@ def _get_test_dataset_calibration_one_dataset(self, bands=1): import dask.array as da import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict from satpy.scene import Scene from satpy.tests.utils import make_dsq @@ -427,8 +424,7 @@ def _get_test_dataset_calibration_one_dataset(self, bands=1): "test", "test", "test", - proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), + CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -473,16 +469,15 @@ def _get_test_dataset_three_bands_two_prereq(self, bands=3): import dask.array as da import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict from satpy.tests.utils import make_dsq area_def = AreaDefinition( "test", "test", "test", - proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), + CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -508,16 +503,15 @@ def _get_test_dataset_three_bands_prereq(self, bands=3): import dask.array as da import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict from satpy.tests.utils import make_dsq area_def = AreaDefinition( "test", "test", "test", - proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), + CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), diff --git a/satpy/writers/mitiff.py b/satpy/writers/mitiff.py index 950fce8b21..3658ac16b7 100644 --- a/satpy/writers/mitiff.py +++ b/satpy/writers/mitiff.py @@ -221,6 +221,8 @@ def _add_sizes(self, datasets, first_dataset): return _image_description def _add_proj4_string(self, datasets, first_dataset): + import warnings + proj4_string = " Proj string: " if isinstance(datasets, list): @@ -232,7 +234,11 @@ def _add_proj4_string(self, datasets, first_dataset): if hasattr(area, "crs") and area.crs.to_epsg() is not None: proj4_string += "+init=EPSG:{}".format(area.crs.to_epsg()) else: - proj4_string += area.proj_str + # Filter out the PROJ warning of losing projection information + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=UserWarning, + message=r"You will likely lose important projection information") + proj4_string += area.proj_str x_0 = 0 y_0 = 0 From 755ec6b886ac2516e5c0f8c8021f9637d75a39b7 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 15 Dec 2023 09:38:22 +0200 Subject: [PATCH 662/702] Use datetime64[ns] in CF writer --- satpy/cf/coords.py | 4 ++-- satpy/tests/writer_tests/test_cf.py | 16 ++++++++-------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/satpy/cf/coords.py b/satpy/cf/coords.py index 9220632fcb..2449ab79ee 100644 --- a/satpy/cf/coords.py +++ b/satpy/cf/coords.py @@ -291,8 +291,8 @@ def add_time_bounds_dimension(ds: xr.Dataset, time: str = "time") -> xr.Dataset: if start_time is not None) end_time = min(end_time for end_time in end_times if end_time is not None) - ds["time_bnds"] = xr.DataArray([[np.datetime64(start_time), - np.datetime64(end_time)]], + ds["time_bnds"] = xr.DataArray([[np.datetime64(start_time, "ns"), + np.datetime64(end_time, "ns")]], dims=["time", "bnds_1d"]) ds[time].attrs["bounds"] = "time_bnds" ds[time].attrs["standard_name"] = "time" diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 62c9995cde..d37b612bb2 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -240,7 +240,7 @@ def test_single_time_value(self): test_array = np.array([[1, 2], [3, 4]]) scn["test-array"] = xr.DataArray(test_array, dims=["x", "y"], - coords={"time": np.datetime64("2018-05-30T10:05:00")}, + coords={"time": np.datetime64("2018-05-30T10:05:00", "ns")}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: @@ -255,7 +255,7 @@ def test_time_coordinate_on_a_swath(self): scn = Scene() test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) times = np.array(["2018-05-30T10:05:00", "2018-05-30T10:05:01", - "2018-05-30T10:05:02", "2018-05-30T10:05:03"], dtype=np.datetime64) + "2018-05-30T10:05:02", "2018-05-30T10:05:03"], dtype="datetime64[ns]") scn["test-array"] = xr.DataArray(test_array, dims=["y", "x"], coords={"time": ("y", times)}, @@ -273,7 +273,7 @@ def test_bounds(self): test_array = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) scn["test-array"] = xr.DataArray(test_array, dims=["x", "y", "time"], - coords={"time": [np.datetime64("2018-05-30T10:05:00")]}, + coords={"time": [np.datetime64("2018-05-30T10:05:00", "ns")]}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: @@ -307,12 +307,12 @@ def test_bounds_minimum(self): test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) scn["test-arrayA"] = xr.DataArray(test_arrayA, dims=["x", "y", "time"], - coords={"time": 
[np.datetime64("2018-05-30T10:05:00")]}, + coords={"time": [np.datetime64("2018-05-30T10:05:00", "ns")]}, attrs=dict(start_time=start_timeA, end_time=end_timeA)) scn["test-arrayB"] = xr.DataArray(test_arrayB, dims=["x", "y", "time"], - coords={"time": [np.datetime64("2018-05-30T10:05:00")]}, + coords={"time": [np.datetime64("2018-05-30T10:05:00", "ns")]}, attrs=dict(start_time=start_timeB, end_time=end_timeB)) with TempFile() as filename: @@ -330,12 +330,12 @@ def test_bounds_missing_time_info(self): test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) scn["test-arrayA"] = xr.DataArray(test_arrayA, dims=["x", "y", "time"], - coords={"time": [np.datetime64("2018-05-30T10:05:00")]}, + coords={"time": [np.datetime64("2018-05-30T10:05:00", "ns")]}, attrs=dict(start_time=start_timeA, end_time=end_timeA)) scn["test-arrayB"] = xr.DataArray(test_arrayB, dims=["x", "y", "time"], - coords={"time": [np.datetime64("2018-05-30T10:05:00")]}) + coords={"time": [np.datetime64("2018-05-30T10:05:00", "ns")]}) with TempFile() as filename: scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename, decode_cf=True) as f: @@ -350,7 +350,7 @@ def test_unlimited_dims_kwarg(self): test_array = np.array([[1, 2], [3, 4]]) scn["test-array"] = xr.DataArray(test_array, dims=["x", "y"], - coords={"time": np.datetime64("2018-05-30T10:05:00")}, + coords={"time": np.datetime64("2018-05-30T10:05:00", "ns")}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: From cd4cd7362bc3f62b0129f7cb466e031f04bc270e Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 15 Dec 2023 09:57:42 +0200 Subject: [PATCH 663/702] Catch warning about pretty time formatting --- satpy/tests/writer_tests/test_cf.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index d37b612bb2..bb87ff8c30 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -208,8 +208,10 @@ def test_groups(self): attrs={"name": "HRV", "start_time": tstart, "end_time": tend}) with TempFile() as filename: - scn.save_datasets(filename=filename, writer="cf", groups={"visir": ["IR_108", "VIS006"], "hrv": ["HRV"]}, - pretty=True) + with pytest.warns(UserWarning, match=r"Cannot pretty-format"): + scn.save_datasets(filename=filename, writer="cf", + groups={"visir": ["IR_108", "VIS006"], "hrv": ["HRV"]}, + pretty=True) nc_root = xr.open_dataset(filename) assert "history" in nc_root.attrs From 36b09d1f0036a6f47927d0a819ed9d38bb0c113e Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 15 Dec 2023 10:00:27 +0200 Subject: [PATCH 664/702] Catch warning of invalid NetCDF dataset name --- satpy/tests/writer_tests/test_cf.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index bb87ff8c30..020cb10ec3 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -152,7 +152,8 @@ def test_save_dataset_a_digit_no_prefix_include_attr(self): scn = Scene() scn["1"] = xr.DataArray([1, 2, 3]) with TempFile() as filename: - scn.save_datasets(filename=filename, writer="cf", include_orig_name=True, numeric_name_prefix="") + with pytest.warns(UserWarning, match=r"Invalid NetCDF dataset name"): + scn.save_datasets(filename=filename, writer="cf", include_orig_name=True, numeric_name_prefix="") with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f["1"][:], [1, 2, 3]) assert 
"original_name" not in f["1"].attrs From 8ea9e300bfdd48eff704cdbe68072e2442e3e45f Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 15 Dec 2023 10:14:53 +0200 Subject: [PATCH 665/702] Add area definitions to remove unnecessary warnings --- satpy/tests/test_writers.py | 16 +++++++++++++++- satpy/tests/writer_tests/test_geotiff.py | 18 ++++++++++++++++-- 2 files changed, 31 insertions(+), 3 deletions(-) diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py index e2bfd898ab..bc68d767c1 100644 --- a/satpy/tests/test_writers.py +++ b/satpy/tests/test_writers.py @@ -548,13 +548,20 @@ def setUp(self): import tempfile from datetime import datetime + from pyresample.geometry import AreaDefinition + from satpy.scene import Scene + adef = AreaDefinition( + "test", "test", "test", "EPSG:4326", + 100, 200, (-180., -90., 180., 90.), + ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime(2018, 1, 1, 0, 0, 0)} + "start_time": datetime(2018, 1, 1, 0, 0, 0), + "area": adef} ) self.scn = Scene() self.scn["test"] = ds1 @@ -650,8 +657,14 @@ def setup_method(self): import tempfile from datetime import datetime + from pyresample.geometry import AreaDefinition + from satpy.scene import Scene + adef = AreaDefinition( + "test", "test", "test", "EPSG:4326", + 100, 200, (-180., -90., 180., 90.), + ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), @@ -659,6 +672,7 @@ def setup_method(self): "name": "test", "start_time": datetime(2018, 1, 1, 0, 0, 0), "sensor": "fake_sensor", + "area": adef, } ) ds2 = ds1.copy() diff --git a/satpy/tests/writer_tests/test_geotiff.py b/satpy/tests/writer_tests/test_geotiff.py index 74fcd43609..8925857637 100644 --- a/satpy/tests/writer_tests/test_geotiff.py +++ b/satpy/tests/writer_tests/test_geotiff.py @@ -32,12 +32,19 @@ def _get_test_datasets_2d(): """Create a single 2D test dataset.""" + from pyresample.geometry import AreaDefinition + + adef = AreaDefinition( + "test", "test", "test", "EPSG:4326", + 100, 200, (-180., -90., 180., 90.), + ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", "start_time": datetime.utcnow(), - "units": "K"} + "units": "K", + "area": adef} ) return [ds1] @@ -54,12 +61,19 @@ def _get_test_datasets_2d_nonlinear_enhancement(): def _get_test_datasets_3d(): """Create a single 3D test dataset.""" + from pyresample.geometry import AreaDefinition + + adef = AreaDefinition( + "test", "test", "test", "EPSG:4326", + 100, 200, (-180., -90., 180., 90.), + ) ds1 = xr.DataArray( da.zeros((3, 100, 200), chunks=50), dims=("bands", "y", "x"), coords={"bands": ["R", "G", "B"]}, attrs={"name": "test", - "start_time": datetime.utcnow()} + "start_time": datetime.utcnow(), + "area": adef} ) return [ds1] From d16728b5d2a9c24aa14d263bc58214fa63ad47b2 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 15 Dec 2023 09:14:55 +0100 Subject: [PATCH 666/702] Add holoviews in documentation --- doc/source/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 3aa810420e..37c197c6eb 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -70,7 +70,7 @@ def __getattr__(cls, name): for mod_name in MOCK_MODULES: sys.modules[mod_name] = Mock() # type: ignore -autodoc_mock_imports = ["cf", "glymur", "h5netcdf", "imageio", "mipp", "netCDF4", +autodoc_mock_imports = ["cf", "glymur", "h5netcdf", "holoviews", "imageio", "mipp", "netCDF4", "pygac", "pygrib", "pyhdf", 
"pyninjotiff", "pyorbital", "pyspectral", "rasterio", "trollimage", "zarr"] From 612e927726e445d6459267458dec1e2e6532dc7a Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 15 Dec 2023 10:39:45 +0200 Subject: [PATCH 667/702] Handle warnings from encoding time in CF --- satpy/tests/writer_tests/test_cf.py | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 020cb10ec3..6d1d15527b 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -247,7 +247,7 @@ def test_single_time_value(self): attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer="cf") + scn.save_datasets(filename=filename, writer="cf", encoding={"time": {"units": "seconds since 2018-01-01"}}) with xr.open_dataset(filename, decode_cf=True) as f: np.testing.assert_array_equal(f["time"], scn["test-array"]["time"]) bounds_exp = np.array([[start_time, end_time]], dtype="datetime64[m]") @@ -264,7 +264,8 @@ def test_time_coordinate_on_a_swath(self): coords={"time": ("y", times)}, attrs=dict(start_time=times[0], end_time=times[-1])) with TempFile() as filename: - scn.save_datasets(filename=filename, writer="cf", pretty=True) + scn.save_datasets(filename=filename, writer="cf", pretty=True, + encoding={"time": {"units": "seconds since 2018-01-01"}}) with xr.open_dataset(filename, decode_cf=True) as f: np.testing.assert_array_equal(f["time"], scn["test-array"]["time"]) @@ -280,7 +281,11 @@ def test_bounds(self): attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer="cf") + with warnings.catch_warnings(): + # The purpose is to use the default time encoding, silence the warning + warnings.filterwarnings("ignore", category=UserWarning, + message=r"Times can't be serialized faithfully to int64 with requested units") + scn.save_datasets(filename=filename, writer="cf") # Check decoded time coordinates & bounds with xr.open_dataset(filename, decode_cf=True) as f: bounds_exp = np.array([[start_time, end_time]], dtype="datetime64[m]") @@ -319,7 +324,8 @@ def test_bounds_minimum(self): attrs=dict(start_time=start_timeB, end_time=end_timeB)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer="cf") + scn.save_datasets(filename=filename, writer="cf", + encoding={"time": {"units": "seconds since 2018-01-01"}}) with xr.open_dataset(filename, decode_cf=True) as f: bounds_exp = np.array([[start_timeA, end_timeB]], dtype="datetime64[m]") np.testing.assert_array_equal(f["time_bnds"], bounds_exp) @@ -340,7 +346,8 @@ def test_bounds_missing_time_info(self): dims=["x", "y", "time"], coords={"time": [np.datetime64("2018-05-30T10:05:00", "ns")]}) with TempFile() as filename: - scn.save_datasets(filename=filename, writer="cf") + scn.save_datasets(filename=filename, writer="cf", + encoding={"time": {"units": "seconds since 2018-01-01"}}) with xr.open_dataset(filename, decode_cf=True) as f: bounds_exp = np.array([[start_timeA, end_timeA]], dtype="datetime64[m]") np.testing.assert_array_equal(f["time_bnds"], bounds_exp) @@ -357,7 +364,8 @@ def test_unlimited_dims_kwarg(self): attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer="cf", unlimited_dims=["time"]) + scn.save_datasets(filename=filename, writer="cf", unlimited_dims=["time"], + encoding={"time": {"units": 
"seconds since 2018-01-01"}}) with xr.open_dataset(filename) as f: assert set(f.encoding["unlimited_dims"]) == {"time"} From 8f44b3112853366e9914eeafb267e243bf413fb6 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Fri, 15 Dec 2023 08:47:15 +0000 Subject: [PATCH 668/702] Put new invert_alpha keyword as last optional keyword to ensure backwards compatibility. --- satpy/composites/__init__.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 88375bc933..fe21623010 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1014,7 +1014,7 @@ class CloudCompositor(GenericCompositor): """Detect clouds based on thresholding and use it as a mask for compositing.""" def __init__(self, name, transition_min=258.15, transition_max=298.15, # noqa: D417 - invert_alpha=False, transition_gamma=3.0, **kwargs): + transition_gamma=3.0, invert_alpha=False, **kwargs): """Collect custom configuration values. Args: @@ -1022,15 +1022,15 @@ def __init__(self, name, transition_min=258.15, transition_max=298.15, # noqa: clouds -> opaque white transition_max (float): Values above this are cloud free -> transparent + transition_gamma (float): Gamma correction to apply at the end invert_alpha (bool): Invert the alpha channel to make low data values transparent and high data values opaque. - transition_gamma (float): Gamma correction to apply at the end """ self.transition_min = transition_min self.transition_max = transition_max - self.invert_alpha = invert_alpha self.transition_gamma = transition_gamma + self.invert_alpha = invert_alpha super(CloudCompositor, self).__init__(name, **kwargs) def __call__(self, projectables, **kwargs): @@ -1160,8 +1160,8 @@ class LowCloudCompositor(CloudCompositor): def __init__(self, name, values_land=(1,), values_water=(0,), # noqa: D417 range_land=(0.0, 4.0), range_water=(0.0, 4.0), - invert_alpha=True, - transition_gamma=1.0, **kwargs): + transition_gamma=1.0, + invert_alpha=True, **kwargs): """Init info. Collect custom configuration values. @@ -1173,10 +1173,10 @@ def __init__(self, name, values_land=(1,), values_water=(0,), # noqa: D417 difference over land surface types. range_water (tuple): Threshold values used for masking low-level clouds from the brightness temperature difference over water. - invert_alpha (bool): Invert the alpha channel to make low data values transparent - and high data values opaque. transition_gamma (float): Gamma correction to apply to the alpha channel within the brightness temperature difference range. + invert_alpha (bool): Invert the alpha channel to make low data values transparent + and high data values opaque. """ if len(range_land) != 2: raise ValueError(f"Expected 2 `range_land` values, got {len(range_land)}") @@ -1188,7 +1188,7 @@ def __init__(self, name, values_land=(1,), values_water=(0,), # noqa: D417 self.range_land = range_land self.range_water = range_water super().__init__(name, transition_min=None, transition_max=None, - invert_alpha=invert_alpha, transition_gamma=transition_gamma, **kwargs) + transition_gamma=transition_gamma, invert_alpha=invert_alpha, **kwargs) def __call__(self, projectables, **kwargs): """Generate the composite. 
From 9b0bcae1169bf81297383e77806403625389d5b9 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 15 Dec 2023 10:44:15 +0100 Subject: [PATCH 669/702] Holoviews inside to_hvplot method --- satpy/scene.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/scene.py b/satpy/scene.py index fe6bbce1f9..d1ba795ac8 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -25,7 +25,6 @@ import numpy as np import xarray as xr -from holoviews import Overlay from pyresample.geometry import AreaDefinition, BaseDefinition, SwathDefinition from xarray import DataArray @@ -1109,6 +1108,8 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): **defaults) import hvplot.xarray as hvplot_xarray # noqa + from holoviews import Overlay + plot = Overlay() xarray_ds = self.to_xarray_dataset(datasets) From 36aa47145eabb1f1bea4adb5483606edbd4946c8 Mon Sep 17 00:00:00 2001 From: andream Date: Fri, 15 Dec 2023 12:38:11 +0100 Subject: [PATCH 670/702] add a check for the presence of file_handlers attribute --- satpy/readers/yaml_reader.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index 3171449b03..5444d7e16f 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -1182,9 +1182,10 @@ def create_filehandlers(self, filenames, fh_kwargs=None): return created_fhs def _sort_segment_filehandler_by_segment_number(self): - for file_type in self.file_handlers.keys(): - self.file_handlers[file_type] = sorted(self.file_handlers[file_type], - key=lambda x: x.filename_info.get("segment", 0)) + if hasattr(self, "file_handlers"): + for file_type in self.file_handlers.keys(): + self.file_handlers[file_type] = sorted(self.file_handlers[file_type], + key=lambda x: x.filename_info.get("segment", 0)) def _load_dataset(self, dsid, ds_info, file_handlers, dim="y", pad_data=True): """Load only a piece of the dataset.""" From d326eef012c27b212a8f8734857f971156f97152 Mon Sep 17 00:00:00 2001 From: andream Date: Fri, 15 Dec 2023 15:56:43 +0100 Subject: [PATCH 671/702] match all projectables instead of only subset in NDVIHybridGreen __call__ --- satpy/composites/spectral.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/composites/spectral.py b/satpy/composites/spectral.py index d656bab7ec..138f17bd85 100644 --- a/satpy/composites/spectral.py +++ b/satpy/composites/spectral.py @@ -159,9 +159,9 @@ def __call__(self, projectables, optional_datasets=None, **attrs): LOG.info(f"Applying NDVI-weighted hybrid-green correction with limits [{self.limits[0]}, " f"{self.limits[1]}] and strength {self.strength}.") - ndvi_input = self.match_data_arrays([projectables[1], projectables[2]]) + ndvi_input = self.match_data_arrays(projectables) - ndvi = (ndvi_input[1] - ndvi_input[0]) / (ndvi_input[1] + ndvi_input[0]) + ndvi = (ndvi_input[2] - ndvi_input[1]) / (ndvi_input[2] + ndvi_input[1]) ndvi = ndvi.clip(self.ndvi_min, self.ndvi_max) From 5301dcab4141098cd78781137f784f068e96d1f9 Mon Sep 17 00:00:00 2001 From: andream Date: Fri, 15 Dec 2023 17:57:37 +0100 Subject: [PATCH 672/702] use projectables in match_data_arrays return and add test for coordinates alignment --- satpy/composites/spectral.py | 4 ++-- satpy/tests/compositor_tests/test_spectral.py | 21 ++++++++++++++++--- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/satpy/composites/spectral.py b/satpy/composites/spectral.py index 138f17bd85..f7219ec94d 100644 --- a/satpy/composites/spectral.py +++ b/satpy/composites/spectral.py 
@@ -159,9 +159,9 @@ def __call__(self, projectables, optional_datasets=None, **attrs): LOG.info(f"Applying NDVI-weighted hybrid-green correction with limits [{self.limits[0]}, " f"{self.limits[1]}] and strength {self.strength}.") - ndvi_input = self.match_data_arrays(projectables) + projectables = self.match_data_arrays(projectables) - ndvi = (ndvi_input[2] - ndvi_input[1]) / (ndvi_input[2] + ndvi_input[1]) + ndvi = (projectables[2] - projectables[1]) / (projectables[2] + projectables[1]) ndvi = ndvi.clip(self.ndvi_min, self.ndvi_max) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index eb3db8de5c..a68f9f2f0a 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -73,15 +73,16 @@ class TestNdviHybridGreenCompositor: def setup_method(self): """Initialize channels.""" + coord_val = [1.0, 2.0] self.c01 = xr.DataArray( da.from_array(np.array([[0.25, 0.30], [0.20, 0.30]], dtype=np.float32), chunks=25), - dims=("y", "x"), attrs={"name": "C02"}) + dims=("y", "x"), coords=[coord_val, coord_val], attrs={"name": "C02"}) self.c02 = xr.DataArray( da.from_array(np.array([[0.25, 0.30], [0.25, 0.35]], dtype=np.float32), chunks=25), - dims=("y", "x"), attrs={"name": "C03"}) + dims=("y", "x"), coords=[coord_val, coord_val], attrs={"name": "C03"}) self.c03 = xr.DataArray( da.from_array(np.array([[0.35, 0.35], [0.28, 0.65]], dtype=np.float32), chunks=25), - dims=("y", "x"), attrs={"name": "C04"}) + dims=("y", "x"), coords=[coord_val, coord_val], attrs={"name": "C04"}) def test_ndvi_hybrid_green(self): """Test General functionality with linear scaling from ndvi to blend fraction.""" @@ -123,3 +124,17 @@ def test_invalid_strength(self): with pytest.raises(ValueError, match="Expected strength greater than 0.0, got 0.0."): _ = NDVIHybridGreen("ndvi_hybrid_green", strength=0.0, prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") + + def test_with_slightly_mismatching_coord_input(self): + """Test the case where an input (typically the red band) has a slightly different coordinate. + + If match_data_arrays is called correctly, the coords will be aligned and the array will have the expected shape. + + """ + comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), + standard_name="toa_bidirectional_reflectance") + + c02_bad_shape = self.c02.copy() + c02_bad_shape.coords["y"] = [1.1, 2.] 
+ res = comp((self.c01, c02_bad_shape, self.c03)) + assert res.shape == (2, 2) From fb2ec9e17d600e3ec170d11d154ed485563d527f Mon Sep 17 00:00:00 2001 From: andream Date: Fri, 15 Dec 2023 18:00:46 +0100 Subject: [PATCH 673/702] make codefactor happy --- satpy/tests/compositor_tests/test_spectral.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index a68f9f2f0a..2f7d9fd7cb 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -79,10 +79,10 @@ def setup_method(self): dims=("y", "x"), coords=[coord_val, coord_val], attrs={"name": "C02"}) self.c02 = xr.DataArray( da.from_array(np.array([[0.25, 0.30], [0.25, 0.35]], dtype=np.float32), chunks=25), - dims=("y", "x"), coords=[coord_val, coord_val], attrs={"name": "C03"}) + dims=("y", "x"), coords=[coord_val, coord_val], attrs={"name": "C03"}) self.c03 = xr.DataArray( da.from_array(np.array([[0.35, 0.35], [0.28, 0.65]], dtype=np.float32), chunks=25), - dims=("y", "x"), coords=[coord_val, coord_val], attrs={"name": "C04"}) + dims=("y", "x"), coords=[coord_val, coord_val], attrs={"name": "C04"}) def test_ndvi_hybrid_green(self): """Test General functionality with linear scaling from ndvi to blend fraction.""" From e20ea4182a0cf14791e40e9886c9724167f49920 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 18 Dec 2023 09:06:34 +0100 Subject: [PATCH 674/702] Update changelog for v0.46.0 --- CHANGELOG.md | 56 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index aa85b83f56..8730209f99 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,59 @@ +## Version 0.46.0 (2023/12/18) + +### Issues Closed + +* [Issue 2668](https://github.com/pytroll/satpy/issues/2668) - FCI HRFI true_color unavailable even after native resampling if upper_right_corner is used ([PR 2690](https://github.com/pytroll/satpy/pull/2690) by [@djhoese](https://github.com/djhoese)) +* [Issue 2664](https://github.com/pytroll/satpy/issues/2664) - Cannot generate day-night composites +* [Issue 2654](https://github.com/pytroll/satpy/issues/2654) - Unable to read radiance with AVHRR EPS ([PR 2655](https://github.com/pytroll/satpy/pull/2655) by [@mraspaud](https://github.com/mraspaud)) +* [Issue 2647](https://github.com/pytroll/satpy/issues/2647) - Preservation of input data dtype in processing FCI data +* [Issue 2618](https://github.com/pytroll/satpy/issues/2618) - GCOM-C Support (Continued) ([PR 1094](https://github.com/pytroll/satpy/pull/1094) by [@mraspaud](https://github.com/mraspaud)) +* [Issue 2588](https://github.com/pytroll/satpy/issues/2588) - FCI chunks/segments out of order if pad_data=False ([PR 2692](https://github.com/pytroll/satpy/pull/2692) by [@ameraner](https://github.com/ameraner)) +* [Issue 2263](https://github.com/pytroll/satpy/issues/2263) - VIIRS day composite 'snow_age' does not work with Satpy 0.37.1 +* [Issue 1496](https://github.com/pytroll/satpy/issues/1496) - Improve error reporting of satpy.utils.get_satpos +* [Issue 1086](https://github.com/pytroll/satpy/issues/1086) - Add a reader for GCOM-C Level 1 data ([PR 1094](https://github.com/pytroll/satpy/pull/1094) by [@mraspaud](https://github.com/mraspaud)) + +In this release 9 issues were closed. 
+ +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2694](https://github.com/pytroll/satpy/pull/2694) - Match all projectables in `NDVIHybridGreen.__call__` to avoid coordinate mismatch errors ([2668](https://github.com/pytroll/satpy/issues/2668), [2668](https://github.com/pytroll/satpy/issues/2668)) +* [PR 2692](https://github.com/pytroll/satpy/pull/2692) - Anticipate filehandler sorting in `GEOSegmentYAMLReader` to have sorted handlers also with `pad_data=False` ([2588](https://github.com/pytroll/satpy/issues/2588)) +* [PR 2690](https://github.com/pytroll/satpy/pull/2690) - Fix composites failing on non-aligned geolocation coordinates ([2668](https://github.com/pytroll/satpy/issues/2668)) +* [PR 2682](https://github.com/pytroll/satpy/pull/2682) - Update AHI HSD reader to correctly handle singleton arrays. +* [PR 2674](https://github.com/pytroll/satpy/pull/2674) - Update xarray version in CF writer tests for compression kwarg +* [PR 2671](https://github.com/pytroll/satpy/pull/2671) - Workaround AWIPS bug not handling integers properly in "awips_tiled" writer +* [PR 2669](https://github.com/pytroll/satpy/pull/2669) - Fix RealisticColors compositor upcasting data to float64 +* [PR 2655](https://github.com/pytroll/satpy/pull/2655) - Fix missing radiance units in eps l1b ([2654](https://github.com/pytroll/satpy/issues/2654)) + +#### Features added + +* [PR 2683](https://github.com/pytroll/satpy/pull/2683) - Fci/l2/amv/reader +* [PR 2679](https://github.com/pytroll/satpy/pull/2679) - Update MiRS reader coefficient files to newer version +* [PR 2677](https://github.com/pytroll/satpy/pull/2677) - Add remaining JPSS satellite platform aliases to "mirs" reader ([665](https://github.com/ssec/polar2grid/issues/665)) +* [PR 2669](https://github.com/pytroll/satpy/pull/2669) - Fix RealisticColors compositor upcasting data to float64 +* [PR 2660](https://github.com/pytroll/satpy/pull/2660) - Update tropomi_l2 reader with "_reduced" file patterns +* [PR 2557](https://github.com/pytroll/satpy/pull/2557) - Add baseline for GeoColor composite including FCI, AHI and ABI recipes +* [PR 2106](https://github.com/pytroll/satpy/pull/2106) - Add Scene function to use Hvplot backend visualization +* [PR 1094](https://github.com/pytroll/satpy/pull/1094) - Add Gcom-C sgli reader ([2618](https://github.com/pytroll/satpy/issues/2618), [1086](https://github.com/pytroll/satpy/issues/1086)) + +#### Backward incompatible changes + +* [PR 2684](https://github.com/pytroll/satpy/pull/2684) - Get rid of warnings in compositor tests + +#### Clean ups + +* [PR 2691](https://github.com/pytroll/satpy/pull/2691) - Reduce the number of warnings in writer tests +* [PR 2690](https://github.com/pytroll/satpy/pull/2690) - Fix composites failing on non-aligned geolocation coordinates ([2668](https://github.com/pytroll/satpy/issues/2668)) +* [PR 2684](https://github.com/pytroll/satpy/pull/2684) - Get rid of warnings in compositor tests +* [PR 2681](https://github.com/pytroll/satpy/pull/2681) - Get rid of warnings in resampler tests +* [PR 2676](https://github.com/pytroll/satpy/pull/2676) - Convert times in SEVIRI readers to nanosecond precision to silence warnings +* [PR 2658](https://github.com/pytroll/satpy/pull/2658) - Update unstable version of h5py in CI + +In this release 23 pull requests were closed. 
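
Among the features listed above, PR 1094 adds the GCOM-C SGLI level-1b reader. A minimal sketch of pointing the Scene API at such data; the file path is a placeholder and the reader name is an assumption based on the new module rather than something spelled out in these release notes:

    from satpy import Scene

    # Hypothetical local granule path; substitute a real GCOM-C SGLI L1B file.
    scn = Scene(filenames=["/data/gcom-c/example_granule.h5"], reader="sgli_l1b")
    print(scn.available_dataset_names())
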
+ + ## Version 0.45.0 (2023/11/29) ### Issues Closed From 74a3d14f24e797d79c02274e9b8cc7eb104ca3a3 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Mon, 18 Dec 2023 09:15:41 +0100 Subject: [PATCH 675/702] to_geoviews and to_hvplot in _scene_converters.py As requested by David in #2106 --- satpy/_scene_converters.py | 137 +++++++++++++++++++++++++++++++++++++ satpy/scene.py | 130 +---------------------------------- 2 files changed, 138 insertions(+), 129 deletions(-) diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py index fbc0a7a627..ce0ee27c1e 100644 --- a/satpy/_scene_converters.py +++ b/satpy/_scene_converters.py @@ -17,6 +17,7 @@ import xarray as xr +from satpy.composites import enhance2dataset from satpy.dataset import DataID @@ -36,6 +37,142 @@ def _get_dataarrays_from_identifiers(scn, identifiers): return dataarrays +def to_geoviews(scn, gvtype=None, datasets=None, + kdims=None, vdims=None, dynamic=False): + """Convert satpy Scene to geoviews. + + Args: + scn (satpy.Scene): Satpy Scene. + gvtype (gv plot type): + One of gv.Image, gv.LineContours, gv.FilledContours, gv.Points + Default to :class:`geoviews.Image`. + See Geoviews documentation for details. + datasets (list): Limit included products to these datasets + kdims (list of str): + Key dimensions. See geoviews documentation for more information. + vdims (list of str, optional): + Value dimensions. See geoviews documentation for more information. + If not given defaults to first data variable + dynamic (bool, optional): Load and compute data on-the-fly during + visualization. Default is ``False``. See + https://holoviews.org/user_guide/Gridded_Datasets.html#working-with-xarray-data-types + for more information. Has no effect when data to be visualized + only has 2 dimensions (y/x or longitude/latitude) and doesn't + require grouping via the Holoviews ``groupby`` function. + + Returns: geoviews object + + Todo: + * better handling of projection information in datasets which are + to be passed to geoviews + + """ + import geoviews as gv + from cartopy import crs # noqa + if gvtype is None: + gvtype = gv.Image + + ds = scn.to_xarray_dataset(datasets) + + if vdims is None: + # by default select first data variable as display variable + vdims = ds.data_vars[list(ds.data_vars.keys())[0]].name + + if hasattr(ds, "area") and hasattr(ds.area, "to_cartopy_crs"): + dscrs = ds.area.to_cartopy_crs() + gvds = gv.Dataset(ds, crs=dscrs) + else: + gvds = gv.Dataset(ds) + + # holoviews produces a log warning if you pass groupby arguments when groupby isn't used + groupby_kwargs = {"dynamic": dynamic} if gvds.ndims != 2 else {} + if "latitude" in ds.coords: + gview = gvds.to(gv.QuadMesh, kdims=["longitude", "latitude"], + vdims=vdims, **groupby_kwargs) + else: + gview = gvds.to(gvtype, kdims=["x", "y"], vdims=vdims, + **groupby_kwargs) + + return gview + +def to_hvplot(scn, datasets=None, *args, **kwargs): + """Convert satpy Scene to Hvplot. The method could not be used with composites of swath data. + + Args: + scn (satpy.Scene): Satpy Scene. + datasets (list): Limit included products to these datasets. + args: Arguments coming from hvplot + kwargs: hvplot options dictionary. + + Returns: + hvplot object that contains within it the plots of datasets list. + As default it contains all Scene datasets plots and a plot title + is shown. 
+ + Example usage:: + + scene_list = ['ash','IR_108'] + scn = Scene() + scn.load(scene_list) + scn = scn.resample('eurol') + plot = scn.to_hvplot(datasets=scene_list) + plot.ash+plot.IR_108 + """ + + def _get_crs(xarray_ds): + return xarray_ds.area.to_cartopy_crs() + + def _get_timestamp(xarray_ds): + time = xarray_ds.attrs["start_time"] + return time.strftime("%Y %m %d -- %H:%M UTC") + + def _get_units(xarray_ds, variable): + return xarray_ds[variable].attrs["units"] + + def _plot_rgb(xarray_ds, variable, **defaults): + img = enhance2dataset(xarray_ds[variable]) + return img.hvplot.rgb(bands="bands", title=title, + clabel="", **defaults) + + def _plot_quadmesh(xarray_ds, variable, **defaults): + return xarray_ds[variable].hvplot.quadmesh( + clabel=f"[{_get_units(xarray_ds,variable)}]", title=title, + **defaults) + + import hvplot.xarray as hvplot_xarray # noqa + from holoviews import Overlay + + plot = Overlay() + xarray_ds = scn.to_xarray_dataset(datasets) + + if hasattr(xarray_ds, "area") and hasattr(xarray_ds.area, "to_cartopy_crs"): + ccrs = _get_crs(xarray_ds) + defaults={"x":"x","y":"y"} + else: + ccrs = None + defaults={"x":"longitude","y":"latitude"} + + if datasets is None: + datasets = list(xarray_ds.keys()) + + defaults.update(data_aspect=1, project=True, geo=True, + crs=ccrs, projection=ccrs, rasterize=True, + coastline="110m", cmap="Plasma", responsive=True, + dynamic=False, framewise=True,colorbar=False, + global_extent=False, xlabel="Longitude", + ylabel="Latitude") + + defaults.update(kwargs) + + for element in datasets: + title = f"{element} @ {_get_timestamp(xarray_ds)}" + if xarray_ds[element].shape[0] == 3: + plot[element] = _plot_rgb(xarray_ds, element, **defaults) + else: + plot[element] = _plot_quadmesh(xarray_ds, element, **defaults) + + return plot + def to_xarray(scn, datasets=None, # DataID header_attrs=None, diff --git a/satpy/scene.py b/satpy/scene.py index d1ba795ac8..5ba8832729 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -28,7 +28,7 @@ from pyresample.geometry import AreaDefinition, BaseDefinition, SwathDefinition from xarray import DataArray -from satpy.composites import IncompatibleAreas, enhance2dataset +from satpy.composites import IncompatibleAreas from satpy.composites.config_loader import load_compositor_configs_for_sensors from satpy.dataset import DataID, DataQuery, DatasetDict, combine_metadata, dataset_walker, replace_anc from satpy.dependency_tree import DependencyTree @@ -1012,134 +1012,6 @@ def show(self, dataset_id, overlay=None): img.show() return img - def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynamic=False): - """Convert satpy Scene to geoviews. - - Args: - gvtype (gv plot type): - One of gv.Image, gv.LineContours, gv.FilledContours, gv.Points - Default to :class:`geoviews.Image`. - See Geoviews documentation for details. - datasets (list): Limit included products to these datasets - kdims (list of str): - Key dimensions. See geoviews documentation for more information. - vdims (list of str, optional): - Value dimensions. See geoviews documentation for more information. - If not given defaults to first data variable - dynamic (bool, optional): Load and compute data on-the-fly during - visualization. Default is ``False``. See - https://holoviews.org/user_guide/Gridded_Datasets.html#working-with-xarray-data-types - for more information. Has no effect when data to be visualized - only has 2 dimensions (y/x or longitude/latitude) and doesn't - require grouping via the Holoviews ``groupby`` function. 
- - Returns: geoviews object - - Todo: - * better handling of projection information in datasets which are - to be passed to geoviews - - """ - import geoviews as gv - from cartopy import crs # noqa - if gvtype is None: - gvtype = gv.Image - - ds = self.to_xarray_dataset(datasets) - - if vdims is None: - # by default select first data variable as display variable - vdims = ds.data_vars[list(ds.data_vars.keys())[0]].name - - if hasattr(ds, "area") and hasattr(ds.area, "to_cartopy_crs"): - dscrs = ds.area.to_cartopy_crs() - gvds = gv.Dataset(ds, crs=dscrs) - else: - gvds = gv.Dataset(ds) - - # holoviews produces a log warning if you pass groupby arguments when groupby isn't used - groupby_kwargs = {"dynamic": dynamic} if gvds.ndims != 2 else {} - if "latitude" in ds.coords: - gview = gvds.to(gv.QuadMesh, kdims=["longitude", "latitude"], vdims=vdims, **groupby_kwargs) - else: - gview = gvds.to(gvtype, kdims=["x", "y"], vdims=vdims, **groupby_kwargs) - - return gview - - def to_hvplot(self, datasets=None, *args, **kwargs): - """Convert satpy Scene to Hvplot. The method could not be used with composites of swath data. - - Args: - datasets (list): Limit included products to these datasets. - args: Arguments coming from hvplot - kwargs: hvplot options dictionary. - - Returns: hvplot object that contains within it the plots of datasets list. - As default it contains all Scene datasets plots and a plot title is shown. - - Example usage:: - - scene_list = ['ash','IR_108'] - scn = Scene() - scn.load(scene_list) - scn = scn.resample('eurol') - plot = scn.to_hvplot(datasets=scene_list) - plot.ash+plot.IR_108 - """ - - def _get_crs(xarray_ds): - return xarray_ds.area.to_cartopy_crs() - - def _get_timestamp(xarray_ds): - time = xarray_ds.attrs["start_time"] - return time.strftime("%Y %m %d -- %H:%M UTC") - - def _get_units(xarray_ds, variable): - return xarray_ds[variable].attrs["units"] - - def _plot_rgb(xarray_ds, variable, **defaults): - img = enhance2dataset(xarray_ds[variable]) - return img.hvplot.rgb(bands="bands", title=title, - clabel="", **defaults) - - def _plot_quadmesh(xarray_ds, variable, **defaults): - return xarray_ds[variable].hvplot.quadmesh( - clabel=f"[{_get_units(xarray_ds,variable)}]", title=title, - **defaults) - - import hvplot.xarray as hvplot_xarray # noqa - from holoviews import Overlay - - plot = Overlay() - xarray_ds = self.to_xarray_dataset(datasets) - - if hasattr(xarray_ds, "area") and hasattr(xarray_ds.area, "to_cartopy_crs"): - ccrs = _get_crs(xarray_ds) - defaults={"x":"x","y":"y"} - else: - ccrs = None - defaults={"x":"longitude","y":"latitude"} - - if datasets is None: - datasets = list(xarray_ds.keys()) - - defaults.update(data_aspect=1, project=True, geo=True, - crs=ccrs, projection=ccrs, rasterize=True, coastline="110m", - cmap="Plasma", responsive=True, dynamic=False, framewise=True, - colorbar=False, global_extent=False, xlabel="Longitude", - ylabel="Latitude") - - defaults.update(kwargs) - - for element in datasets: - title = f"{element} @ {_get_timestamp(xarray_ds)}" - if xarray_ds[element].shape[0] == 3: - plot[element] = _plot_rgb(xarray_ds, element, **defaults) - else: - plot[element] = _plot_quadmesh(xarray_ds, element, **defaults) - - return plot - def to_xarray_dataset(self, datasets=None): """Merge all xr.DataArrays of a scene to a xr.DataSet. 
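
Patch 675 above moves the two plotting converters into satpy/_scene_converters.py as module-level functions that take the Scene as their first argument. A minimal sketch of calling the relocated hvplot converter directly, assuming SEVIRI data is at hand (the file glob and reader name are placeholders):

    from glob import glob

    from satpy import Scene
    from satpy._scene_converters import to_hvplot

    scn = Scene(filenames=glob("/data/seviri/*.nat"), reader="seviri_l1b_native")
    scn.load(["IR_108"])
    scn = scn.resample("eurol")
    plot = to_hvplot(scn, datasets=["IR_108"])
    plot.IR_108

Patch 676, which follows, re-adds thin Scene methods that delegate to these module-level functions.
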
From f11db543b2d42e634ddc6a8c935d17094ea1bc4c Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Mon, 18 Dec 2023 10:28:49 +0100 Subject: [PATCH 676/702] to_hvplot and to_geoviews called in scene.py --- satpy/scene.py | 64 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/satpy/scene.py b/satpy/scene.py index 5ba8832729..24f3264dcd 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1012,6 +1012,70 @@ def show(self, dataset_id, overlay=None): img.show() return img + def to_geoviews(self, gvtype=None, datasets=None, + kdims=None, vdims=None, dynamic=False): + """Convert satpy Scene to geoviews. + + Args: + scn (satpy.Scene): Satpy Scene. + gvtype (gv plot type): + One of gv.Image, gv.LineContours, gv.FilledContours, gv.Points + Default to :class:`geoviews.Image`. + See Geoviews documentation for details. + datasets (list): Limit included products to these datasets + kdims (list of str): + Key dimensions. See geoviews documentation for more information. + vdims (list of str, optional): + Value dimensions. See geoviews documentation for more information. + If not given defaults to first data variable + dynamic (bool, optional): Load and compute data on-the-fly during + visualization. Default is ``False``. See + https://holoviews.org/user_guide/Gridded_Datasets.html#working-with-xarray-data-types + for more information. Has no effect when data to be visualized + only has 2 dimensions (y/x or longitude/latitude) and doesn't + require grouping via the Holoviews ``groupby`` function. + + Returns: geoviews object + + Todo: + * better handling of projection information in datasets which are + to be passed to geoviews + + """ + from satpy._scene_converters import to_geoviews + return to_geoviews(self, gvtype=None, datasets=None, + kdims=None, vdims=None, dynamic=False) + + + def to_hvplot(self, datasets=None, *args, **kwargs): + """Convert satpy Scene to Hvplot. The method could not be used with composites of swath data. + + Args: + scn (satpy.Scene): Satpy Scene. + datasets (list): Limit included products to these datasets. + args: Arguments coming from hvplot + kwargs: hvplot options dictionary. + + Returns: + hvplot object that contains within it the plots of datasets list. + As default it contains all Scene datasets plots and a plot title + is shown. + + Example usage:: + + scene_list = ['ash','IR_108'] + scn = Scene() + scn.load(scene_list) + scn = scn.resample('eurol') + plot = scn.to_hvplot(datasets=scene_list) + plot.ash+plot.IR_108 + """ + from satpy._scene_converters import to_hvplot + + return to_hvplot(self, datasets=None, *args, **kwargs) + + + def to_xarray_dataset(self, datasets=None): """Merge all xr.DataArrays of a scene to a xr.DataSet. From e7c85447f8a4b0302e91a238d17bbc11ea39e52e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 18 Dec 2023 09:16:11 -0600 Subject: [PATCH 677/702] Update intersphinx reference URLs --- doc/source/conf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 37c197c6eb..49e47b2cc2 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -278,7 +278,7 @@ def __getattr__(cls, name): # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { "dask": ("https://docs.dask.org/en/latest", None), - "geoviews": ("http://geoviews.org", None), + "geoviews": ("https://geoviews.org", None), "jobqueue": ("https://jobqueue.dask.org/en/latest", None), "numpy": ("https://numpy.org/doc/stable", None), "pydecorate": ("https://pydecorate.readthedocs.io/en/stable", None), @@ -287,7 +287,7 @@ def __getattr__(cls, name): "pyresample": ("https://pyresample.readthedocs.io/en/stable", None), "pytest": ("https://docs.pytest.org/en/stable/", None), "python": ("https://docs.python.org/3", None), - "scipy": ("http://scipy.github.io/devdocs", None), + "scipy": ("https://scipy.github.io/devdocs", None), "trollimage": ("https://trollimage.readthedocs.io/en/stable", None), "trollsift": ("https://trollsift.readthedocs.io/en/stable", None), "xarray": ("https://xarray.pydata.org/en/stable", None), From 3e473cd5a099d8204498642b82983c4682c1ea0b Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 18 Dec 2023 09:17:38 -0600 Subject: [PATCH 678/702] Fix eps_l1b reader Delayed usage causing docs failures Delayed objects should always be staticmethods or global functions so they can be easily serialized. --- satpy/readers/eps_l1b.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/satpy/readers/eps_l1b.py b/satpy/readers/eps_l1b.py index fbeb3ecba6..a74aac1559 100644 --- a/satpy/readers/eps_l1b.py +++ b/satpy/readers/eps_l1b.py @@ -224,12 +224,6 @@ def _interpolate(self, lons_like, lats_like): " and earth views = " + str(self.pixels)) - @delayed(nout=2, pure=True) - def _interpolate_20km_to_1km(self, lons, lats): - # Note: delayed will cast input dask-arrays to numpy arrays (needed by metop20kmto1km). - from geotiepoints import metop20kmto1km - return metop20kmto1km(lons, lats) - def _get_full_angles(self, solar_zenith, sat_zenith, solar_azimuth, sat_azimuth): nav_sample_rate = self["NAV_SAMPLE_RATE"] @@ -403,3 +397,10 @@ def end_time(self): """Get end time.""" # return datetime.strptime(self["SENSING_END"], "%Y%m%d%H%M%SZ") return self._end_time + + +@delayed(nout=2, pure=True) +def _interpolate_20km_to_1km(lons, lats): + # Note: delayed will cast input dask-arrays to numpy arrays (needed by metop20kmto1km). 
+ from geotiepoints import metop20kmto1km + return metop20kmto1km(lons, lats) From 9707e7033d70195f782c8d7fb8df3d8bc29f013e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 18 Dec 2023 09:40:43 -0600 Subject: [PATCH 679/702] Fix reference to delayed function in eps_l1b reader --- satpy/readers/eps_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/eps_l1b.py b/satpy/readers/eps_l1b.py index a74aac1559..9ba5dece43 100644 --- a/satpy/readers/eps_l1b.py +++ b/satpy/readers/eps_l1b.py @@ -212,7 +212,7 @@ def _get_full_lonlats_uncached(self): def _interpolate(self, lons_like, lats_like): nav_sample_rate = self["NAV_SAMPLE_RATE"] if nav_sample_rate == 20 and self.pixels == 2048: - lons_like_1km, lats_like_1km = self._interpolate_20km_to_1km(lons_like, lats_like) + lons_like_1km, lats_like_1km = _interpolate_20km_to_1km(lons_like, lats_like) lons_like_1km = da.from_delayed(lons_like_1km, dtype=lons_like.dtype, shape=(self.scanlines, self.pixels)) lats_like_1km = da.from_delayed(lats_like_1km, dtype=lats_like.dtype, From ad0895091110f31fc6e92dfd6c9b15ba023bdbeb Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 21 Dec 2023 09:32:06 -0600 Subject: [PATCH 680/702] Fix 'viirs_edr' renaming two sets of dimensions to the same names --- satpy/readers/viirs_edr.py | 24 ++++++++++++++-------- satpy/tests/reader_tests/test_viirs_edr.py | 1 + 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index b0eaf7b7ba..eaf2f53d42 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -62,6 +62,7 @@ import logging from typing import Iterable +import dask.array as da import xarray as xr from satpy import DataID @@ -93,11 +94,6 @@ def __init__(self, filename, filename_info, filetype_info): "Along_Scan_750m": -1, "Along_Track_750m": row_chunks_m, }) - if "Columns" in self.nc.dims: - self.nc = self.nc.rename({"Columns": "x", "Rows": "y"}) - elif "Along_Track_375m" in self.nc.dims: - self.nc = self.nc.rename({"Along_Scan_375m": "x", "Along_Track_375m": "y"}) - self.nc = self.nc.rename({"Along_Scan_750m": "x", "Along_Track_750m": "y"}) # For some reason, no 'standard_name' is defined in some netCDF files, so # here we manually make the definitions. 
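
Patches 678 and 679 a little further above move the eps_l1b delayed interpolation helper to module level so dask can serialize it without dragging the file handler along. A generic sketch of that pattern, in which the doubling "interpolation" stands in for the real geotiepoints call:

    import dask.array as da
    import numpy as np
    from dask import delayed

    @delayed(nout=2, pure=True)
    def _fake_interpolate(lons, lats):
        # Stand-in for a NumPy-only routine such as geotiepoints.metop20kmto1km;
        # dask materializes the dask-array inputs as plain NumPy arrays first.
        return np.repeat(lons, 2, axis=1), np.repeat(lats, 2, axis=1)

    lons = da.zeros((10, 20), chunks=(10, 20))
    lats = da.ones((10, 20), chunks=(10, 20))
    lons_hi, lats_hi = _fake_interpolate(lons, lats)
    lons_hi = da.from_delayed(lons_hi, shape=(10, 40), dtype=lons.dtype)
    lats_hi = da.from_delayed(lats_hi, shape=(10, 40), dtype=lats.dtype)
    print(lons_hi.compute().shape)  # (10, 40)
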
@@ -134,7 +130,8 @@ def get_dataset(self, dataset_id: DataID, info: dict) -> xr.DataArray: # delete the coordinates here so the base reader doesn't try to # make a SwathDefinition data_arr = data_arr.reset_coords(drop=True) - return data_arr + + return self._rename_dims(data_arr) def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: # xarray auto mask and scale handled any fills from the file @@ -152,6 +149,16 @@ def _decode_flag_meanings(data_arr: xr.DataArray): # only handle CF-standard flag meanings data_arr.attrs["flag_meanings"] = [flag for flag in data_arr.attrs["flag_meanings"].split(" ")] + @staticmethod + def _rename_dims(data_arr: xr.DataArray) -> xr.DataArray: + if "Columns" in data_arr.dims: + data_arr = data_arr.rename({"Columns": "x", "Rows": "y"}) + if "Along_Track_375m" in data_arr.dims: + data_arr = data_arr.rename({"Along_Scan_375m": "x", "Along_Track_375m": "y"}) + if "Along_Track_750m" in data_arr.dims: + data_arr = data_arr.rename({"Along_Scan_750m": "x", "Along_Track_750m": "y"}) + return data_arr + @property def start_time(self): """Get first date/time when observations were recorded.""" @@ -277,7 +284,7 @@ def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: new_data_arr = new_data_arr.where(good_mask) return new_data_arr - def _get_veg_index_good_mask(self) -> xr.DataArray: + def _get_veg_index_good_mask(self) -> da.Array: # each mask array should be TRUE when pixels are UNACCEPTABLE qf1 = self.nc["QF1 Surface Reflectance"] has_sun_glint = (qf1 & 0b11000000) > 0 @@ -306,8 +313,7 @@ def _get_veg_index_good_mask(self) -> xr.DataArray: ) # upscale from M-band resolution to I-band resolution bad_mask_iband_dask = bad_mask.data.repeat(2, axis=1).repeat(2, axis=0) - good_mask_iband = xr.DataArray(~bad_mask_iband_dask, dims=qf1.dims) - return good_mask_iband + return ~bad_mask_iband_dask class VIIRSLSTHandler(VIIRSJRRFileHandler): diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index a6932520c0..d042576d05 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -152,6 +152,7 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]: "750m Surface Reflectance Band M1": xr.DataArray(m_data, dims=m_dims, attrs=sr_attrs), } for data_arr in data_arrs.values(): + data_arr.encoding["chunksizes"] = data_arr.shape if "scale_factor" not in data_arr.attrs: continue data_arr.encoding["dtype"] = np.int16 From e602e180353a1a67573d50ddb3832a5e89df9e53 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 1 Jan 2024 19:58:23 +0000 Subject: [PATCH 681/702] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.7 → v0.1.9](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.7...v0.1.9) - [github.com/pre-commit/mirrors-mypy: v1.7.1 → v1.8.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.7.1...v1.8.0) - [github.com/pycqa/isort: 5.13.1 → 5.13.2](https://github.com/pycqa/isort/compare/5.13.1...5.13.2) --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a398bd445f..37c458982a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ fail_fast: false repos: - repo: 
https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.1.7' + rev: 'v0.1.9' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks @@ -19,7 +19,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.7.1' # Use the sha / tag you want to point at + rev: 'v1.8.0' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: @@ -29,7 +29,7 @@ repos: - types-requests args: ["--python-version", "3.9", "--ignore-missing-imports"] - repo: https://github.com/pycqa/isort - rev: 5.13.1 + rev: 5.13.2 hooks: - id: isort language_version: python3 From c569e442e2c2d589ed159136c5c6f84af001c993 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 3 Jan 2024 09:00:59 -0600 Subject: [PATCH 682/702] Add support for NOAA-21 in MiRS limb correction --- satpy/readers/mirs.py | 7 ++++--- satpy/tests/reader_tests/test_mirs.py | 13 ++++++++++--- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/satpy/readers/mirs.py b/satpy/readers/mirs.py index 5f68af2d6c..362ed1371c 100644 --- a/satpy/readers/mirs.py +++ b/satpy/readers/mirs.py @@ -304,9 +304,10 @@ def force_time(self, key): def _get_coeff_filenames(self): """Retrieve necessary files for coefficients if needed.""" coeff_fn = {"sea": None, "land": None} - if self.platform_name == "noaa-20": - coeff_fn["land"] = retrieve("readers/limbcoef_atmsland_noaa20.txt") - coeff_fn["sea"] = retrieve("readers/limbcoef_atmssea_noaa20.txt") + if self.platform_name.startswith("noaa"): + suffix = self.platform_name[-2:] + coeff_fn["land"] = retrieve(f"readers/limbcoef_atmsland_noaa{suffix}.txt") + coeff_fn["sea"] = retrieve(f"readers/limbcoef_atmssea_noaa{suffix}.txt") if self.platform_name == "npp": coeff_fn["land"] = retrieve("readers/limbcoef_atmsland_snpp.txt") coeff_fn["sea"] = retrieve("readers/limbcoef_atmssea_snpp.txt") diff --git a/satpy/tests/reader_tests/test_mirs.py b/satpy/tests/reader_tests/test_mirs.py index 69f5543411..d68f8143db 100644 --- a/satpy/tests/reader_tests/test_mirs.py +++ b/satpy/tests/reader_tests/test_mirs.py @@ -29,6 +29,7 @@ METOP_FILE = "IMG_SX.M2.D17037.S1601.E1607.B0000001.WE.HR.ORB.nc" NPP_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r6_npp_s201702061601000_e201702061607000_c202012201658410.nc" N20_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r4_n20_s201702061601000_e201702061607000_c202012201658410.nc" +N21_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r4_n21_s201702061601000_e201702061607000_c202012201658410.nc" OTHER_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r4_gpm_s201702061601000_e201702061607000_c202010080001310.nc" EXAMPLE_FILES = [METOP_FILE, NPP_MIRS_L2_SWATH, OTHER_MIRS_L2_SWATH] @@ -102,10 +103,8 @@ def fake_coeff_from_fn(fn): next_line = " {}\n".format(" ".join([str(x) for x in locations[idx - 1]])) coeff_str.append(next_line) for fov in range(1, N_FOV+1): - random_coeff = np.random.rand(all_nchx[nx]) random_coeff = np.ones(all_nchx[nx]) str_coeff = " ".join([str(x) for x in random_coeff]) - random_means = np.random.uniform(261, 267, all_nchx[nx]) random_means = np.zeros(all_nchx[nx]) str_means = " ".join([str(x) for x in random_means]) error_val = np.random.uniform(0, 4) @@ -309,6 +308,7 @@ def _check_attrs(data_arr, platform_name): ([METOP_FILE], TEST_VARS, "metop-a"), ([NPP_MIRS_L2_SWATH], TEST_VARS, "npp"), ([N20_MIRS_L2_SWATH], TEST_VARS, "noaa-20"), + ([N21_MIRS_L2_SWATH], TEST_VARS, "noaa-21"), ([OTHER_MIRS_L2_SWATH], TEST_VARS, "gpm"), ] ) @@ -323,9 +323,16 @@ def test_basic_load(self, filenames, loadable_ids, loadables = 
r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables, fh_kwargs=reader_kw) with mock.patch("satpy.readers.mirs.read_atms_coeff_to_string") as \ - fd, mock.patch("satpy.readers.mirs.retrieve"): + fd, mock.patch("satpy.readers.mirs.retrieve") as rtv: fd.side_effect = fake_coeff_from_fn loaded_data_arrs = r.load(loadable_ids) + if reader_kw.get("limb_correction", True) and platform_name in ("npp", "noaa-20", "noaa-21"): + suffix = f"noaa{platform_name[-2:]}" if platform_name.startswith("noaa") else "snpp" + assert rtv.call_count == 2 * len([var_name for var_name in loadable_ids if "btemp" in var_name]) + for calls_args in rtv.call_args_list: + assert calls_args[0][0].endswith(f"_{suffix}.txt") + else: + rtv.assert_not_called() assert len(loaded_data_arrs) == len(loadable_ids) test_data = fake_open_dataset(filenames[0]) From e5c50f593e4457b683a9e2c3247fd0a5a2178d64 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 3 Jan 2024 09:05:21 -0600 Subject: [PATCH 683/702] Refactor mirs tests --- satpy/tests/reader_tests/test_mirs.py | 78 +++++++++++++-------------- 1 file changed, 39 insertions(+), 39 deletions(-) diff --git a/satpy/tests/reader_tests/test_mirs.py b/satpy/tests/reader_tests/test_mirs.py index d68f8143db..d12a553235 100644 --- a/satpy/tests/reader_tests/test_mirs.py +++ b/satpy/tests/reader_tests/test_mirs.py @@ -268,40 +268,6 @@ def test_available_datasets(self, filenames, expected_datasets): for var_name in expected_datasets: assert var_name in avails - @staticmethod - def _check_area(data_arr): - from pyresample.geometry import SwathDefinition - area = data_arr.attrs["area"] - assert isinstance(area, SwathDefinition) - - @staticmethod - def _check_fill(data_arr): - assert "_FillValue" not in data_arr.attrs - if np.issubdtype(data_arr.dtype, np.floating): - # we started with float32, it should stay that way - assert data_arr.dtype.type == np.float64 - - @staticmethod - def _check_valid_range(data_arr, test_valid_range): - # valid_range is popped out of data_arr.attrs when it is applied - assert "valid_range" not in data_arr.attrs - assert data_arr.data.min() >= test_valid_range[0] - assert data_arr.data.max() <= test_valid_range[1] - - @staticmethod - def _check_fill_value(data_arr, test_fill_value): - assert "_FillValue" not in data_arr.attrs - assert not (data_arr.data == test_fill_value).any() - - @staticmethod - def _check_attrs(data_arr, platform_name): - attrs = data_arr.attrs - assert "scale_factor" not in attrs - assert "platform_name" in attrs - assert attrs["platform_name"] == platform_name - assert attrs["start_time"] == START_TIME - assert attrs["end_time"] == END_TIME - @pytest.mark.parametrize( ("filenames", "loadable_ids", "platform_name"), [ @@ -340,18 +306,18 @@ def test_basic_load(self, filenames, loadable_ids, data_arr = data_arr.compute() var_name = data_arr.attrs["name"] if var_name not in ["latitude", "longitude"]: - self._check_area(data_arr) - self._check_fill(data_arr) - self._check_attrs(data_arr, platform_name) + _check_area(data_arr) + _check_fill(data_arr) + _check_attrs(data_arr, platform_name) input_fake_data = test_data["BT"] if "btemp" in var_name \ else test_data[var_name] if "valid_range" in input_fake_data.attrs: valid_range = input_fake_data.attrs["valid_range"] - self._check_valid_range(data_arr, valid_range) + _check_valid_range(data_arr, valid_range) if "_FillValue" in input_fake_data.attrs: fill_value = input_fake_data.attrs["_FillValue"] - self._check_fill_value(data_arr, fill_value) + _check_fill_value(data_arr, 
fill_value) sensor = data_arr.attrs["sensor"] if reader_kw.get("limb_correction", True) and sensor == "atms": @@ -359,3 +325,37 @@ def test_basic_load(self, filenames, loadable_ids, else: fd.assert_not_called() assert data_arr.attrs["units"] == DEFAULT_UNITS[var_name] + + +def _check_area(data_arr): + from pyresample.geometry import SwathDefinition + area = data_arr.attrs["area"] + assert isinstance(area, SwathDefinition) + + +def _check_fill(data_arr): + assert "_FillValue" not in data_arr.attrs + if np.issubdtype(data_arr.dtype, np.floating): + # we started with float32, it should stay that way + assert data_arr.dtype.type == np.float64 + + +def _check_valid_range(data_arr, test_valid_range): + # valid_range is popped out of data_arr.attrs when it is applied + assert "valid_range" not in data_arr.attrs + assert data_arr.data.min() >= test_valid_range[0] + assert data_arr.data.max() <= test_valid_range[1] + + +def _check_fill_value(data_arr, test_fill_value): + assert "_FillValue" not in data_arr.attrs + assert not (data_arr.data == test_fill_value).any() + + +def _check_attrs(data_arr, platform_name): + attrs = data_arr.attrs + assert "scale_factor" not in attrs + assert "platform_name" in attrs + assert attrs["platform_name"] == platform_name + assert attrs["start_time"] == START_TIME + assert attrs["end_time"] == END_TIME From 31c65986ab180633ae24a7827f05d303199aab59 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 3 Jan 2024 09:13:55 -0600 Subject: [PATCH 684/702] Refactor mirs tests --- satpy/tests/reader_tests/test_mirs.py | 200 ++++++++++++-------------- 1 file changed, 92 insertions(+), 108 deletions(-) diff --git a/satpy/tests/reader_tests/test_mirs.py b/satpy/tests/reader_tests/test_mirs.py index d12a553235..2ca3325139 100644 --- a/satpy/tests/reader_tests/test_mirs.py +++ b/satpy/tests/reader_tests/test_mirs.py @@ -16,7 +16,8 @@ # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . 
-"""Module for testing the satpy.readers.tropomi_l2 module.""" +"""Module for testing the satpy.readers.mirs module.""" +from __future__ import annotations import os from datetime import datetime @@ -26,6 +27,10 @@ import pytest import xarray as xr +from satpy._config import config_search_paths +from satpy.readers import load_reader +from satpy.readers.yaml_reader import FileYAMLReader + METOP_FILE = "IMG_SX.M2.D17037.S1601.E1607.B0000001.WE.HR.ORB.nc" NPP_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r6_npp_s201702061601000_e201702061607000_c202012201658410.nc" N20_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r4_n20_s201702061601000_e201702061607000_c202012201658410.nc" @@ -218,113 +223,92 @@ def fake_open_dataset(filename, **kwargs): return _get_datasets_with_attributes() -class TestMirsL2_NcReader: - """Test mirs Reader.""" - - yaml_file = "mirs.yaml" - - def setup_method(self): - """Read fake data.""" - from satpy._config import config_search_paths - self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) - - @pytest.mark.parametrize( - ("filenames", "expected_loadables"), - [ - ([METOP_FILE], 1), - ([NPP_MIRS_L2_SWATH], 1), - ([OTHER_MIRS_L2_SWATH], 1), - ] - ) - def test_reader_creation(self, filenames, expected_loadables): - """Test basic initialization.""" - from satpy.readers import load_reader - with mock.patch("satpy.readers.mirs.xr.open_dataset") as od: - od.side_effect = fake_open_dataset - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames(filenames) - assert len(loadables) == expected_loadables - r.create_filehandlers(loadables) - # make sure we have some files - assert r.file_handlers - - @pytest.mark.parametrize( - ("filenames", "expected_datasets"), - [ - ([METOP_FILE], DS_IDS), - ([NPP_MIRS_L2_SWATH], DS_IDS), - ([OTHER_MIRS_L2_SWATH], DS_IDS), - ] - ) - def test_available_datasets(self, filenames, expected_datasets): - """Test that variables are dynamically discovered.""" - from satpy.readers import load_reader - with mock.patch("satpy.readers.mirs.xr.open_dataset") as od: - od.side_effect = fake_open_dataset - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames(filenames) - r.create_filehandlers(loadables) - avails = list(r.available_dataset_names) - for var_name in expected_datasets: - assert var_name in avails - - @pytest.mark.parametrize( - ("filenames", "loadable_ids", "platform_name"), - [ - ([METOP_FILE], TEST_VARS, "metop-a"), - ([NPP_MIRS_L2_SWATH], TEST_VARS, "npp"), - ([N20_MIRS_L2_SWATH], TEST_VARS, "noaa-20"), - ([N21_MIRS_L2_SWATH], TEST_VARS, "noaa-21"), - ([OTHER_MIRS_L2_SWATH], TEST_VARS, "gpm"), - ] - ) - @pytest.mark.parametrize("reader_kw", [{}, {"limb_correction": False}]) - def test_basic_load(self, filenames, loadable_ids, - platform_name, reader_kw): - """Test that variables are loaded properly.""" - from satpy.readers import load_reader - with mock.patch("satpy.readers.mirs.xr.open_dataset") as od: - od.side_effect = fake_open_dataset - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames(filenames) - r.create_filehandlers(loadables, fh_kwargs=reader_kw) - with mock.patch("satpy.readers.mirs.read_atms_coeff_to_string") as \ - fd, mock.patch("satpy.readers.mirs.retrieve") as rtv: - fd.side_effect = fake_coeff_from_fn - loaded_data_arrs = r.load(loadable_ids) - if reader_kw.get("limb_correction", True) and platform_name in ("npp", "noaa-20", "noaa-21"): - suffix = f"noaa{platform_name[-2:]}" if platform_name.startswith("noaa") else "snpp" - assert 
rtv.call_count == 2 * len([var_name for var_name in loadable_ids if "btemp" in var_name]) - for calls_args in rtv.call_args_list: - assert calls_args[0][0].endswith(f"_{suffix}.txt") - else: - rtv.assert_not_called() - assert len(loaded_data_arrs) == len(loadable_ids) - - test_data = fake_open_dataset(filenames[0]) - for _data_id, data_arr in loaded_data_arrs.items(): - data_arr = data_arr.compute() - var_name = data_arr.attrs["name"] - if var_name not in ["latitude", "longitude"]: - _check_area(data_arr) - _check_fill(data_arr) - _check_attrs(data_arr, platform_name) - - input_fake_data = test_data["BT"] if "btemp" in var_name \ - else test_data[var_name] - if "valid_range" in input_fake_data.attrs: - valid_range = input_fake_data.attrs["valid_range"] - _check_valid_range(data_arr, valid_range) - if "_FillValue" in input_fake_data.attrs: - fill_value = input_fake_data.attrs["_FillValue"] - _check_fill_value(data_arr, fill_value) - - sensor = data_arr.attrs["sensor"] - if reader_kw.get("limb_correction", True) and sensor == "atms": - fd.assert_called() - else: - fd.assert_not_called() - assert data_arr.attrs["units"] == DEFAULT_UNITS[var_name] +@pytest.mark.parametrize( + ("filenames", "expected_datasets"), + [ + ([METOP_FILE], DS_IDS), + ([NPP_MIRS_L2_SWATH], DS_IDS), + ([OTHER_MIRS_L2_SWATH], DS_IDS), + ] +) +def test_available_datasets(filenames, expected_datasets): + """Test that variables are dynamically discovered.""" + r = _create_fake_reader(filenames, {}) + avails = list(r.available_dataset_names) + for var_name in expected_datasets: + assert var_name in avails + + +@pytest.mark.parametrize( + ("filenames", "loadable_ids", "platform_name"), + [ + ([METOP_FILE], TEST_VARS, "metop-a"), + ([NPP_MIRS_L2_SWATH], TEST_VARS, "npp"), + ([N20_MIRS_L2_SWATH], TEST_VARS, "noaa-20"), + ([N21_MIRS_L2_SWATH], TEST_VARS, "noaa-21"), + ([OTHER_MIRS_L2_SWATH], TEST_VARS, "gpm"), + ] +) +@pytest.mark.parametrize("reader_kw", [{}, {"limb_correction": False}]) +def test_basic_load(filenames, loadable_ids, platform_name, reader_kw): + """Test that variables are loaded properly.""" + r = _create_fake_reader(filenames, reader_kw) + with mock.patch("satpy.readers.mirs.read_atms_coeff_to_string") as \ + fd, mock.patch("satpy.readers.mirs.retrieve") as rtv: + fd.side_effect = fake_coeff_from_fn + loaded_data_arrs = r.load(loadable_ids) + if reader_kw.get("limb_correction", True) and platform_name in ("npp", "noaa-20", "noaa-21"): + suffix = f"noaa{platform_name[-2:]}" if platform_name.startswith("noaa") else "snpp" + assert rtv.call_count == 2 * len([var_name for var_name in loadable_ids if "btemp" in var_name]) + for calls_args in rtv.call_args_list: + assert calls_args[0][0].endswith(f"_{suffix}.txt") + else: + rtv.assert_not_called() + assert len(loaded_data_arrs) == len(loadable_ids) + + test_data = fake_open_dataset(filenames[0]) + for _data_id, data_arr in loaded_data_arrs.items(): + data_arr = data_arr.compute() + var_name = data_arr.attrs["name"] + if var_name not in ["latitude", "longitude"]: + _check_area(data_arr) + _check_fill(data_arr) + _check_attrs(data_arr, platform_name) + + input_fake_data = test_data["BT"] if "btemp" in var_name \ + else test_data[var_name] + if "valid_range" in input_fake_data.attrs: + valid_range = input_fake_data.attrs["valid_range"] + _check_valid_range(data_arr, valid_range) + if "_FillValue" in input_fake_data.attrs: + fill_value = input_fake_data.attrs["_FillValue"] + _check_fill_value(data_arr, fill_value) + + sensor = data_arr.attrs["sensor"] + if 
reader_kw.get("limb_correction", True) and sensor == "atms": + fd.assert_called() + else: + fd.assert_not_called() + assert data_arr.attrs["units"] == DEFAULT_UNITS[var_name] + + +def _create_fake_reader( + filenames: list[str], + reader_kwargs: dict, + exp_loadable_files: int | None = None +) -> FileYAMLReader: + exp_loadable_files = exp_loadable_files if exp_loadable_files is not None else len(filenames) + reader_configs = config_search_paths(os.path.join("readers", "mirs.yaml")) + with mock.patch("satpy.readers.mirs.xr.open_dataset") as od: + od.side_effect = fake_open_dataset + r = load_reader(reader_configs) + loadables = r.select_files_from_pathnames(filenames) + r.create_filehandlers(loadables, fh_kwargs=reader_kwargs) + + assert isinstance(r, FileYAMLReader) + assert len(loadables) == exp_loadable_files + assert r.file_handlers + return r def _check_area(data_arr): From 34e6c7022dbc005d3bc236dbaf3d976e40fdfda6 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 3 Jan 2024 09:30:56 -0600 Subject: [PATCH 685/702] Refactor mirs tests --- satpy/tests/reader_tests/test_mirs.py | 84 ++++++++++++++++----------- 1 file changed, 49 insertions(+), 35 deletions(-) diff --git a/satpy/tests/reader_tests/test_mirs.py b/satpy/tests/reader_tests/test_mirs.py index 2ca3325139..f4c6bcbed1 100644 --- a/satpy/tests/reader_tests/test_mirs.py +++ b/satpy/tests/reader_tests/test_mirs.py @@ -28,6 +28,7 @@ import xarray as xr from satpy._config import config_search_paths +from satpy.dataset import DataID from satpy.readers import load_reader from satpy.readers.yaml_reader import FileYAMLReader @@ -253,43 +254,15 @@ def test_available_datasets(filenames, expected_datasets): def test_basic_load(filenames, loadable_ids, platform_name, reader_kw): """Test that variables are loaded properly.""" r = _create_fake_reader(filenames, reader_kw) - with mock.patch("satpy.readers.mirs.read_atms_coeff_to_string") as \ - fd, mock.patch("satpy.readers.mirs.retrieve") as rtv: - fd.side_effect = fake_coeff_from_fn - loaded_data_arrs = r.load(loadable_ids) - if reader_kw.get("limb_correction", True) and platform_name in ("npp", "noaa-20", "noaa-21"): - suffix = f"noaa{platform_name[-2:]}" if platform_name.startswith("noaa") else "snpp" - assert rtv.call_count == 2 * len([var_name for var_name in loadable_ids if "btemp" in var_name]) - for calls_args in rtv.call_args_list: - assert calls_args[0][0].endswith(f"_{suffix}.txt") - else: - rtv.assert_not_called() - assert len(loaded_data_arrs) == len(loadable_ids) test_data = fake_open_dataset(filenames[0]) - for _data_id, data_arr in loaded_data_arrs.items(): - data_arr = data_arr.compute() - var_name = data_arr.attrs["name"] - if var_name not in ["latitude", "longitude"]: - _check_area(data_arr) - _check_fill(data_arr) - _check_attrs(data_arr, platform_name) - - input_fake_data = test_data["BT"] if "btemp" in var_name \ - else test_data[var_name] - if "valid_range" in input_fake_data.attrs: - valid_range = input_fake_data.attrs["valid_range"] - _check_valid_range(data_arr, valid_range) - if "_FillValue" in input_fake_data.attrs: - fill_value = input_fake_data.attrs["_FillValue"] - _check_fill_value(data_arr, fill_value) - - sensor = data_arr.attrs["sensor"] - if reader_kw.get("limb_correction", True) and sensor == "atms": - fd.assert_called() - else: - fd.assert_not_called() - assert data_arr.attrs["units"] == DEFAULT_UNITS[var_name] + exp_limb_corr = reader_kw.get("limb_correction", True) and platform_name in ("npp", "noaa-20", "noaa-21") + loaded_data_arrs = 
_load_and_check_limb_correction_variables(r, loadable_ids, platform_name, exp_limb_corr) + for _data_id, data_arr_dask in loaded_data_arrs.items(): + data_arr = data_arr_dask.compute() + assert data_arr.dtype is data_arr_dask.dtype + # assert data_arr.dtype is np.float32 + _check_metadata(data_arr, test_data, platform_name) def _create_fake_reader( @@ -311,6 +284,47 @@ def _create_fake_reader( return r +def _load_and_check_limb_correction_variables( + reader: FileYAMLReader, + loadable_ids: list[str], + platform_name: str, + exp_limb_corr: bool +) -> dict[DataID, xr.DataArray]: + with mock.patch("satpy.readers.mirs.read_atms_coeff_to_string") as \ + fd, mock.patch("satpy.readers.mirs.retrieve") as rtv: + fd.side_effect = fake_coeff_from_fn + loaded_data_arrs = reader.load(loadable_ids) + if exp_limb_corr: + fd.assert_called() + suffix = f"noaa{platform_name[-2:]}" if platform_name.startswith("noaa") else "snpp" + assert rtv.call_count == 2 * len([var_name for var_name in loadable_ids if "btemp" in var_name]) + for calls_args in rtv.call_args_list: + assert calls_args[0][0].endswith(f"_{suffix}.txt") + else: + fd.assert_not_called() + rtv.assert_not_called() + assert len(loaded_data_arrs) == len(loadable_ids) + return loaded_data_arrs + + +def _check_metadata(data_arr: xr.DataArray, test_data: xr.Dataset, platform_name: str) -> None: + var_name = data_arr.attrs["name"] + if var_name not in ["latitude", "longitude"]: + _check_area(data_arr) + _check_fill(data_arr) + _check_attrs(data_arr, platform_name) + + input_fake_data = test_data["BT"] if "btemp" in var_name else test_data[var_name] + if "valid_range" in input_fake_data.attrs: + valid_range = input_fake_data.attrs["valid_range"] + _check_valid_range(data_arr, valid_range) + if "_FillValue" in input_fake_data.attrs: + fill_value = input_fake_data.attrs["_FillValue"] + _check_fill_value(data_arr, fill_value) + + assert data_arr.attrs["units"] == DEFAULT_UNITS[var_name] + + def _check_area(data_arr): from pyresample.geometry import SwathDefinition area = data_arr.attrs["area"] From fb21a71a2999c98ce68265d86b45c0b52270b1c9 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 3 Jan 2024 09:59:30 -0600 Subject: [PATCH 686/702] Fix floating point preservation in MiRS reader --- satpy/readers/mirs.py | 8 ++-- satpy/tests/reader_tests/test_mirs.py | 53 ++++++++++++++------------- 2 files changed, 32 insertions(+), 29 deletions(-) diff --git a/satpy/readers/mirs.py b/satpy/readers/mirs.py index 362ed1371c..3ddb223ec3 100644 --- a/satpy/readers/mirs.py +++ b/satpy/readers/mirs.py @@ -336,19 +336,21 @@ def _nan_for_dtype(data_arr_dtype): return np.timedelta64("NaT") if np.issubdtype(data_arr_dtype, np.datetime64): return np.datetime64("NaT") - return np.nan + return np.float32(np.nan) @staticmethod def _scale_data(data_arr, scale_factor, add_offset): """Scale data, if needed.""" scaling_needed = not (scale_factor == 1 and add_offset == 0) if scaling_needed: - data_arr = data_arr * scale_factor + add_offset + data_arr = data_arr * np.float32(scale_factor) + np.float32(add_offset) return data_arr def _fill_data(self, data_arr, fill_value, scale_factor, add_offset): """Fill missing data with NaN.""" if fill_value is not None: + # NOTE: Sfc_type and other category products are not detected or handled properly + # and will be converted from integers to 32-bit floats in this step fill_value = self._scale_data(fill_value, scale_factor, add_offset) fill_out = self._nan_for_dtype(data_arr.dtype) data_arr = data_arr.where(data_arr != fill_value, fill_out) 
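
The mirs.py hunks around this point pin the scaling constants and the fill value to 32-bit floats so that scaled integer counts stay float32 instead of drifting to float64. The underlying promotion behaviour can be checked directly with NumPy; a quick sketch:

    import numpy as np

    # Promoting 16-bit counts against a 64-bit scale factor yields float64,
    # while a 32-bit scale factor keeps the result at float32.
    assert np.promote_types(np.int16, np.float64) == np.dtype("float64")
    assert np.promote_types(np.int16, np.float32) == np.dtype("float32")

    # A float32 NaN fill likewise avoids a second upcast when masking.
    assert np.result_type(np.float32, np.float32(np.nan)) == np.dtype("float32")
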
@@ -373,7 +375,7 @@ def apply_attributes(self, data, ds_info): """ try: - global_attr_fill = self.nc.missing_value + global_attr_fill = self.nc.attrs["missing_value"] except AttributeError: global_attr_fill = 1.0 diff --git a/satpy/tests/reader_tests/test_mirs.py b/satpy/tests/reader_tests/test_mirs.py index f4c6bcbed1..b857147e47 100644 --- a/satpy/tests/reader_tests/test_mirs.py +++ b/satpy/tests/reader_tests/test_mirs.py @@ -43,21 +43,25 @@ N_CHANNEL = 22 N_FOV = 96 N_SCANLINE = 100 -DEFAULT_FILE_DTYPE = np.float64 +DEFAULT_FILE_DTYPE = np.float32 DEFAULT_2D_SHAPE = (N_SCANLINE, N_FOV) DEFAULT_DATE = datetime(2019, 6, 19, 13, 0) DEFAULT_LAT = np.linspace(23.09356, 36.42844, N_SCANLINE * N_FOV, dtype=DEFAULT_FILE_DTYPE) DEFAULT_LON = np.linspace(127.6879, 144.5284, N_SCANLINE * N_FOV, dtype=DEFAULT_FILE_DTYPE) -FREQ = xr.DataArray([23.8, 31.4, 50.3, 51.76, 52.8, 53.596, 54.4, 54.94, 55.5, - 57.29, 57.29, 57.29, 57.29, 57.29, 57.29, 88.2, 165.5, - 183.31, 183.31, 183.31, 183.31, 183.31][:N_CHANNEL], - dims="Channel", - attrs={"description": "Central Frequencies (GHz)"}) -POLO = xr.DataArray([2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, - 3, 3, 3][:N_CHANNEL], dims="Channel", - attrs={"description": "Polarizations"}) +FREQ = xr.DataArray( + np.array([23.8, 31.4, 50.3, 51.76, 52.8, 53.596, 54.4, 54.94, 55.5, + 57.29, 57.29, 57.29, 57.29, 57.29, 57.29, 88.2, 165.5, + 183.31, 183.31, 183.31, 183.31, 183.31][:N_CHANNEL], dtype=np.float32), + dims="Channel", + attrs={"description": "Central Frequencies (GHz)"}, +) +POLO = xr.DataArray( + np.array([2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3][:N_CHANNEL], dtype=np.int16), + dims="Channel", + attrs={"description": "Polarizations"}, +) DS_IDS = ["RR", "longitude", "latitude"] TEST_VARS = ["btemp_88v", "btemp_165h", @@ -125,7 +129,7 @@ def fake_coeff_from_fn(fn): def _get_datasets_with_attributes(**kwargs): """Represent files with two resolution of variables in them (ex. OCEAN).""" - bt = xr.DataArray(np.linspace(1830, 3930, N_SCANLINE * N_FOV * N_CHANNEL). + bt = xr.DataArray(np.linspace(1830, 3930, N_SCANLINE * N_FOV * N_CHANNEL, dtype=np.int16). 
reshape(N_SCANLINE, N_FOV, N_CHANNEL), attrs={"long_name": "Channel Temperature (K)", "units": "Kelvin", @@ -134,7 +138,7 @@ def _get_datasets_with_attributes(**kwargs): "_FillValue": -999, "valid_range": [0, 50000]}, dims=("Scanline", "Field_of_view", "Channel")) - rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV)), + rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"long_name": "Rain Rate (mm/hr)", "units": "mm/hr", "coordinates": "Longitude Latitude", @@ -142,7 +146,7 @@ def _get_datasets_with_attributes(**kwargs): "_FillValue": -999, "valid_range": [0, 1000]}, dims=("Scanline", "Field_of_view")) - sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV)), + sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"description": "type of surface:0-ocean," + "1-sea ice,2-land,3-snow", "units": "1", @@ -170,7 +174,7 @@ def _get_datasets_with_attributes(**kwargs): "Longitude": longitude } - attrs = {"missing_value": -999.} + attrs = {"missing_value": -999} ds = xr.Dataset(ds_vars, attrs=attrs) ds = ds.assign_coords({"Freq": FREQ, "Latitude": latitude, "Longitude": longitude}) return ds @@ -178,17 +182,17 @@ def _get_datasets_with_attributes(**kwargs): def _get_datasets_with_less_attributes(): """Represent files with two resolution of variables in them (ex. OCEAN).""" - bt = xr.DataArray(np.linspace(1830, 3930, N_SCANLINE * N_FOV * N_CHANNEL). + bt = xr.DataArray(np.linspace(1830, 3930, N_SCANLINE * N_FOV * N_CHANNEL, dtype=np.int16). reshape(N_SCANLINE, N_FOV, N_CHANNEL), attrs={"long_name": "Channel Temperature (K)", "scale_factor": 0.01}, dims=("Scanline", "Field_of_view", "Channel")) - rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV)), + rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"long_name": "Rain Rate (mm/hr)", "scale_factor": 0.1}, dims=("Scanline", "Field_of_view")) - sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV)), + sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"description": "type of surface:0-ocean," + "1-sea ice,2-land,3-snow"}, dims=("Scanline", "Field_of_view")) @@ -260,8 +264,12 @@ def test_basic_load(filenames, loadable_ids, platform_name, reader_kw): loaded_data_arrs = _load_and_check_limb_correction_variables(r, loadable_ids, platform_name, exp_limb_corr) for _data_id, data_arr_dask in loaded_data_arrs.items(): data_arr = data_arr_dask.compute() - assert data_arr.dtype is data_arr_dask.dtype - # assert data_arr.dtype is np.float32 + assert data_arr.dtype == data_arr_dask.dtype + if np.issubdtype(data_arr.dtype, np.floating): + # we started with float32, it should stay that way + # NOTE: Sfc_type does not have enough metadata to dynamically force integer type + # even though it is a mask/category product + assert data_arr.dtype.type == np.float32 _check_metadata(data_arr, test_data, platform_name) @@ -311,7 +319,7 @@ def _check_metadata(data_arr: xr.DataArray, test_data: xr.Dataset, platform_name var_name = data_arr.attrs["name"] if var_name not in ["latitude", "longitude"]: _check_area(data_arr) - _check_fill(data_arr) + assert "_FillValue" not in data_arr.attrs _check_attrs(data_arr, platform_name) input_fake_data = test_data["BT"] if "btemp" in var_name else test_data[var_name] @@ -331,13 +339,6 @@ def _check_area(data_arr): assert isinstance(area, SwathDefinition) -def _check_fill(data_arr): - 
assert "_FillValue" not in data_arr.attrs - if np.issubdtype(data_arr.dtype, np.floating): - # we started with float32, it should stay that way - assert data_arr.dtype.type == np.float64 - - def _check_valid_range(data_arr, test_valid_range): # valid_range is popped out of data_arr.attrs when it is applied assert "valid_range" not in data_arr.attrs From 9beba3c3b5fa7c531769b73a1e14d521e3c7b490 Mon Sep 17 00:00:00 2001 From: martin-rdz Date: Wed, 19 Jul 2023 17:28:49 +0000 Subject: [PATCH 687/702] adapt viirs_sdr reader for aws s3 --- satpy/readers/hdf5_utils.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/satpy/readers/hdf5_utils.py b/satpy/readers/hdf5_utils.py index 428d64e2f1..a4a92c2ad1 100644 --- a/satpy/readers/hdf5_utils.py +++ b/satpy/readers/hdf5_utils.py @@ -27,6 +27,7 @@ from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import np2str from satpy.utils import get_legacy_chunk_size +from satpy.readers import open_file_or_filename LOG = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() @@ -43,7 +44,8 @@ def __init__(self, filename, filename_info, filetype_info): self._attrs_cache = {} try: - file_handle = h5py.File(self.filename, "r") + f_obj = open_file_or_filename(self.filename) + file_handle = h5py.File(f_obj, "r") except IOError: LOG.exception( "Failed reading file %s. Possibly corrupted file", self.filename) @@ -73,7 +75,8 @@ def _collect_attrs(self, name, attrs): def get_reference(self, name, key): """Get reference.""" - with h5py.File(self.filename, "r") as hf: + f_obj = open_file_or_filename(self.filename) + with h5py.File(f_obj, "r") as hf: return self._get_reference(hf, hf[name].attrs[key]) def _get_reference(self, hf, ref): @@ -97,7 +100,8 @@ def __getitem__(self, key): val = self.file_content[key] if isinstance(val, h5py.Dataset): # these datasets are closed and inaccessible when the file is closed, need to reopen - dset = h5py.File(self.filename, "r")[key] + f_obj = open_file_or_filename(self.filename) + dset = h5py.File(f_obj, "r")[key] dset_data = da.from_array(dset, chunks=CHUNK_SIZE) attrs = self._attrs_cache.get(key, dset.attrs) if dset.ndim == 2: From 130361bc894c36fccb4131e17466abc4d52c80f5 Mon Sep 17 00:00:00 2001 From: martin-rdz Date: Tue, 12 Sep 2023 11:49:30 +0000 Subject: [PATCH 688/702] fix issue where open_file_or_filename tries to open pathlib path --- satpy/readers/__init__.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index c8fc0a8b69..5eeed7ca46 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -22,6 +22,7 @@ import os import pickle # nosec B403 import warnings +import pathlib from datetime import datetime, timedelta from functools import total_ordering @@ -779,8 +780,11 @@ def _get_compression(file): def open_file_or_filename(unknown_file_thing): """Try to open the *unknown_file_thing*, otherwise return the filename.""" - try: - f_obj = unknown_file_thing.open() - except AttributeError: + if isinstance(unknown_file_thing, (pathlib.WindowsPath, pathlib.PosixPath)): f_obj = unknown_file_thing + else: + try: + f_obj = unknown_file_thing.open() + except AttributeError: + f_obj = unknown_file_thing return f_obj From d3fe3fe5f71479ee5f4fae30dfe3728932a8cd2e Mon Sep 17 00:00:00 2001 From: martin-rdz Date: Tue, 3 Oct 2023 09:51:23 +0000 Subject: [PATCH 689/702] replaced PosixPath and WindowsPath with pathlib.Path following suggestion by @djhoese --- 
satpy/readers/__init__.py | 4 ++-- satpy/readers/hdf5_utils.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 5eeed7ca46..7cac7edb5d 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -20,9 +20,9 @@ import logging import os +import pathlib import pickle # nosec B403 import warnings -import pathlib from datetime import datetime, timedelta from functools import total_ordering @@ -780,7 +780,7 @@ def _get_compression(file): def open_file_or_filename(unknown_file_thing): """Try to open the *unknown_file_thing*, otherwise return the filename.""" - if isinstance(unknown_file_thing, (pathlib.WindowsPath, pathlib.PosixPath)): + if isinstance(unknown_file_thing, pathlib.Path): f_obj = unknown_file_thing else: try: diff --git a/satpy/readers/hdf5_utils.py b/satpy/readers/hdf5_utils.py index a4a92c2ad1..cfce968cf3 100644 --- a/satpy/readers/hdf5_utils.py +++ b/satpy/readers/hdf5_utils.py @@ -24,10 +24,10 @@ import numpy as np import xarray as xr +from satpy.readers import open_file_or_filename from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import np2str from satpy.utils import get_legacy_chunk_size -from satpy.readers import open_file_or_filename LOG = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() From 1c65aaf8f42257cae0e58661a744b14d4ab37151 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 4 Jan 2024 14:01:37 -0600 Subject: [PATCH 690/702] Switch FSFile tests to pytest and fixtures --- satpy/tests/test_readers.py | 156 +++++++++++++++++++++--------------- 1 file changed, 91 insertions(+), 65 deletions(-) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index d91e2b6fed..b7144391cf 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -18,11 +18,14 @@ """Test classes and functions in the readers/__init__.py module.""" import builtins +import contextlib import os import sys import unittest import warnings from contextlib import suppress +from pathlib import Path +from typing import Iterator from unittest import mock import pytest @@ -954,126 +957,149 @@ def _posixify_path(filename): return driveless_name.replace("\\", "/") -class TestFSFile(unittest.TestCase): - """Test the FSFile class.""" +@pytest.fixture(scope="module") +def random_string(): + """Random string to be used as fake file content.""" + return _generate_random_string() - def setUp(self): - """Set up the instance.""" - import tempfile - import zipfile - from pathlib import Path - import fsspec - self.random_string = _generate_random_string() - self.local_filename = os.path.join(tempfile.gettempdir(), self.random_string) - Path(self.local_filename).touch() - self.local_file = fsspec.open(self.local_filename) - - self.random_string2 = _generate_random_string() - self.local_filename2 = os.path.join(tempfile.gettempdir(), self.random_string2) - Path(self.local_filename2).touch() - self.zip_name = os.path.join(tempfile.gettempdir(), self.random_string2 + ".zip") - zip_file = zipfile.ZipFile(self.zip_name, "w", zipfile.ZIP_DEFLATED) - zip_file.write(self.local_filename2) - zip_file.close() - os.remove(self.local_filename2) +@pytest.fixture(scope="module") +def local_filename(tmp_path_factory, random_string): + """Create simple on-disk file.""" + with _local_file(tmp_path_factory, random_string) as local_path: + yield local_path - def tearDown(self): - """Destroy the instance.""" - os.remove(self.local_filename) - with 
suppress(PermissionError): - os.remove(self.zip_name) - def test_regular_filename_is_returned_with_str(self): +@contextlib.contextmanager +def _local_file(tmp_path_factory, filename: str) -> Iterator[Path]: + tmp_path = tmp_path_factory.mktemp("local_files") + local_filename = tmp_path / filename + local_filename.touch() + yield local_filename + local_filename.unlink() + + +@pytest.fixture(scope="module") +def local_file(local_filename): + """Open local file with fsspec.""" + import fsspec + + return fsspec.open(local_filename) + + +@pytest.fixture(scope="module") +def local_filename2(tmp_path_factory): + """Create a second local file.""" + random_string2 = _generate_random_string() + with _local_file(tmp_path_factory, random_string2) as local_path: + yield local_path + + +@pytest.fixture(scope="module") +def local_zip_file(local_filename2): + """Create local zip file containing one local file.""" + import zipfile + + zip_name = Path(str(local_filename2) + ".zip") + zip_file = zipfile.ZipFile(zip_name, "w", zipfile.ZIP_DEFLATED) + zip_file.write(local_filename2) + zip_file.close() + yield zip_name + with suppress(PermissionError): + zip_name.unlink() + + +class TestFSFile: + """Test the FSFile class.""" + + def test_regular_filename_is_returned_with_str(self, random_string): """Test that str give the filename.""" from satpy.readers import FSFile - assert str(FSFile(self.random_string)) == self.random_string + assert str(FSFile(random_string)) == random_string - def test_fsfile_with_regular_filename_abides_pathlike(self): + def test_fsfile_with_regular_filename_abides_pathlike(self, random_string): """Test that FSFile abides PathLike for regular filenames.""" from satpy.readers import FSFile - assert os.fspath(FSFile(self.random_string)) == self.random_string + assert os.fspath(FSFile(random_string)) == random_string - def test_fsfile_with_regular_filename_and_fs_spec_abides_pathlike(self): + def test_fsfile_with_regular_filename_and_fs_spec_abides_pathlike(self, random_string): """Test that FSFile abides PathLike for filename+fs instances.""" from satpy.readers import FSFile - assert os.fspath(FSFile(self.random_string, fs=None)) == self.random_string + assert os.fspath(FSFile(random_string, fs=None)) == random_string - def test_fsfile_with_pathlike(self): + def test_fsfile_with_pathlike(self, local_filename): """Test FSFile with path-like object.""" from pathlib import Path from satpy.readers import FSFile - f = FSFile(Path(self.local_filename)) - assert str(f) == os.fspath(f) == self.local_filename + f = FSFile(Path(local_filename)) + assert str(f) == os.fspath(f) == str(local_filename) - def test_fsfile_with_fs_open_file_abides_pathlike(self): + def test_fsfile_with_fs_open_file_abides_pathlike(self, local_file, random_string): """Test that FSFile abides PathLike for fsspec OpenFile instances.""" from satpy.readers import FSFile - assert os.fspath(FSFile(self.local_file)).endswith(self.random_string) + assert os.fspath(FSFile(local_file)).endswith(random_string) - def test_repr_includes_filename(self): + def test_repr_includes_filename(self, local_file, random_string): """Test that repr includes the filename.""" from satpy.readers import FSFile - assert self.random_string in repr(FSFile(self.local_file)) + assert random_string in repr(FSFile(local_file)) - def test_open_regular_file(self): + def test_open_regular_file(self, local_filename): """Test opening a regular file.""" from satpy.readers import FSFile - _assert_is_open_file_and_close(FSFile(self.local_filename).open()) + 
_assert_is_open_file_and_close(FSFile(local_filename).open()) - def test_open_local_fs_file(self): + def test_open_local_fs_file(self, local_file): """Test opening a localfs file.""" from satpy.readers import FSFile - _assert_is_open_file_and_close(FSFile(self.local_file).open()) + _assert_is_open_file_and_close(FSFile(local_file).open()) - def test_open_zip_fs_regular_filename(self): + def test_open_zip_fs_regular_filename(self, local_filename2, local_zip_file): """Test opening a zipfs with a regular filename provided.""" from fsspec.implementations.zip import ZipFileSystem from satpy.readers import FSFile - zip_fs = ZipFileSystem(self.zip_name) - file = FSFile(_posixify_path(self.local_filename2), zip_fs) + zip_fs = ZipFileSystem(local_zip_file) + file = FSFile(_posixify_path(local_filename2), zip_fs) _assert_is_open_file_and_close(file.open()) - def test_open_zip_fs_openfile(self): + def test_open_zip_fs_openfile(self, local_filename2, local_zip_file): """Test opening a zipfs openfile.""" import fsspec from satpy.readers import FSFile - open_file = fsspec.open("zip:/" + _posixify_path(self.local_filename2) + "::file://" + self.zip_name) + open_file = fsspec.open("zip:/" + _posixify_path(local_filename2) + "::file://" + str(local_zip_file)) file = FSFile(open_file) _assert_is_open_file_and_close(file.open()) - def test_sorting_fsfiles(self): + def test_sorting_fsfiles(self, local_filename, local_filename2, local_zip_file): """Test sorting FSFiles.""" from fsspec.implementations.zip import ZipFileSystem from satpy.readers import FSFile - zip_fs = ZipFileSystem(self.zip_name) - file1 = FSFile(self.local_filename2, zip_fs) + zip_fs = ZipFileSystem(local_zip_file) + file1 = FSFile(local_filename2, zip_fs) - file2 = FSFile(self.local_filename) + file2 = FSFile(local_filename) extra_file = os.path.normpath("/somedir/bla") sorted_filenames = [os.fspath(file) for file in sorted([file1, file2, extra_file])] expected_filenames = sorted([extra_file, os.fspath(file1), os.fspath(file2)]) assert sorted_filenames == expected_filenames - def test_equality(self): + def test_equality(self, local_filename, local_filename2, local_zip_file): """Test that FSFile compares equal when it should.""" from fsspec.implementations.zip import ZipFileSystem from satpy.readers import FSFile - zip_fs = ZipFileSystem(self.zip_name) - assert FSFile(self.local_filename) == FSFile(self.local_filename) - assert (FSFile(self.local_filename, zip_fs) == - FSFile(self.local_filename, zip_fs)) - assert (FSFile(self.local_filename, zip_fs) != - FSFile(self.local_filename)) - assert FSFile(self.local_filename) != FSFile(self.local_filename2) - - def test_hash(self): + zip_fs = ZipFileSystem(local_zip_file) + assert FSFile(local_filename) == FSFile(local_filename) + assert (FSFile(local_filename, zip_fs) == FSFile(local_filename, zip_fs)) + assert (FSFile(local_filename, zip_fs) != FSFile(local_filename)) + assert FSFile(local_filename) != FSFile(local_filename2) + + def test_hash(self, local_filename, local_filename2, local_zip_file): """Test that FSFile hashing behaves sanely.""" from fsspec.implementations.cached import CachingFileSystem from fsspec.implementations.local import LocalFileSystem @@ -1082,9 +1108,9 @@ def test_hash(self): from satpy.readers import FSFile lfs = LocalFileSystem() - zfs = ZipFileSystem(self.zip_name) + zfs = ZipFileSystem(local_zip_file) cfs = CachingFileSystem(fs=lfs) # make sure each name/fs-combi has its own hash assert len({hash(FSFile(fn, fs)) - for fn in {self.local_filename, self.local_filename2} 
- for fs in [None, lfs, zfs, cfs]}) == 2*4 + for fn in {local_filename, local_filename2} + for fs in [None, lfs, zfs, cfs]}) == 2 * 4 From 362c2e9d2a46f7a91e9ebc9e18fd6fc4b06274f3 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 4 Jan 2024 14:15:34 -0600 Subject: [PATCH 691/702] Add more details to open_file_or_filename docstring --- satpy/readers/__init__.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 7cac7edb5d..21554ba465 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -779,7 +779,15 @@ def _get_compression(file): def open_file_or_filename(unknown_file_thing): - """Try to open the *unknown_file_thing*, otherwise return the filename.""" + """Try to open the provided file "thing" if needed, otherwise return the filename or Path. + + This wraps the logic of getting something like an fsspec OpenFile object + that is not directly supported by most reading libraries and making it + usable. If a :class:`pathlib.Path` object or something that is not + open-able is provided then that object is passed along. In the case of + fsspec OpenFiles their ``.open()`` method is called and the result returned. + + """ if isinstance(unknown_file_thing, pathlib.Path): f_obj = unknown_file_thing else: From c5d4f6b7b1f3b21afd506e5cbdd5eb4ddf7515b2 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 4 Jan 2024 14:32:43 -0600 Subject: [PATCH 692/702] Add initial open_file_or_filename tests --- satpy/tests/test_readers.py | 121 ++++++++++++++++++++++++++++++++++++ 1 file changed, 121 insertions(+) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index b7144391cf..0dad685694 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -28,7 +28,10 @@ from typing import Iterator from unittest import mock +import numpy as np import pytest +import xarray as xr +from pytest_lazyfixture import lazy_fixture from satpy.dataset.data_dict import get_key from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange @@ -1114,3 +1117,121 @@ def test_hash(self, local_filename, local_filename2, local_zip_file): assert len({hash(FSFile(fn, fs)) for fn in {local_filename, local_filename2} for fs in [None, lfs, zfs, cfs]}) == 2 * 4 + + +@pytest.fixture(scope="module") +def local_netcdf_filename(tmp_path_factory): + """Create a simple local NetCDF file.""" + filename = tmp_path_factory.mktemp("fake_netcdfs") / "test.nc" + ds = xr.Dataset() + ds.attrs = { + "attr1": "a", + "attr2": 2, + } + ds["var1"] = xr.DataArray(np.zeros((10, 10), dtype=np.int16), dims=("y", "x")) + ds.to_netcdf(filename) + + yield str(filename) + filename.unlink() + + +@pytest.fixture(scope="module") +def local_netcdf_path(local_netcdf_filename): + """Get Path object pointing to local netcdf file.""" + return Path(local_netcdf_filename) + + +@pytest.fixture(scope="module") +def local_netcdf_fsspec(local_netcdf_filename): + """Get fsspec OpenFile object pointing to local netcdf file.""" + import fsspec + + return fsspec.open(local_netcdf_filename) + + +@pytest.fixture(scope="module") +def local_netcdf_fsfile(local_netcdf_fsspec): + """Get FSFile object wrapping an fsspec OpenFile pointing to local netcdf file.""" + from satpy.readers import FSFile + + return FSFile(local_netcdf_fsspec) + + +def _open_xarray_netcdf4(): + from functools import partial + + pytest.importorskip("netCDF4") + return partial(xr.open_dataset, engine="netcdf4") + + +def _open_xarray_h5netcdf(): + from functools import 
partial + + pytest.importorskip("h5netcdf") + return partial(xr.open_dataset, engine="h5netcdf") + + +def _open_xarray_default(): + pytest.importorskip("netCDF4") + pytest.importorskip("h5netcdf") + return xr.open_dataset + + +@pytest.fixture(scope="module") +def local_hdf5_filename(tmp_path_factory): + """Create on-disk HDF5 file.""" + import h5py + + filename = tmp_path_factory.mktemp("fake_hdf5s") / "test.h5" + h = h5py.File(filename, "w") + h.create_dataset("var1", data=np.zeros((10, 10), dtype=np.int16)) + h.close() + + yield str(filename) + filename.unlink() + + +@pytest.fixture(scope="module") +def local_hdf5_path(local_hdf5_filename): + """Get Path object pointing to local HDF5 file.""" + return Path(local_hdf5_filename) + + +@pytest.fixture(scope="module") +def local_hdf5_fsspec(local_hdf5_filename): + """Get fsspec OpenFile pointing to local HDF5 file.""" + import fsspec + + return fsspec.open(local_hdf5_filename) + + +def _open_h5py(): + h5py = pytest.importorskip("h5py") + return h5py.File + + +@pytest.mark.parametrize( + ("file_thing", "create_read_func"), + [ + (lazy_fixture("local_netcdf_filename"), _open_xarray_default), + (lazy_fixture("local_netcdf_filename"), _open_xarray_netcdf4), + (lazy_fixture("local_netcdf_filename"), _open_xarray_h5netcdf), + (lazy_fixture("local_netcdf_path"), _open_xarray_default), + (lazy_fixture("local_netcdf_path"), _open_xarray_netcdf4), + (lazy_fixture("local_netcdf_path"), _open_xarray_h5netcdf), + (lazy_fixture("local_netcdf_fsspec"), _open_xarray_default), + (lazy_fixture("local_netcdf_fsspec"), _open_xarray_h5netcdf), + (lazy_fixture("local_netcdf_fsfile"), _open_xarray_default), + (lazy_fixture("local_netcdf_fsfile"), _open_xarray_h5netcdf), + (lazy_fixture("local_hdf5_filename"), _open_h5py), + (lazy_fixture("local_hdf5_path"), _open_h5py), + (lazy_fixture("local_hdf5_fsspec"), _open_h5py), + ], +) +def test_open_file_or_filename(file_thing, create_read_func): + """Test various combinations of file-like things and opening them with various libraries.""" + from satpy.readers import open_file_or_filename + + read_func = create_read_func() + open_thing = open_file_or_filename(file_thing) + read_func(open_thing) From 180a0b8bdb416862b71d6e0bc51b07841c397393 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 4 Jan 2024 15:01:08 -0600 Subject: [PATCH 693/702] Allow Windows temporary files to fail to be removed --- satpy/tests/test_readers.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index 0dad685694..beeb16b972 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -1132,7 +1132,8 @@ def local_netcdf_filename(tmp_path_factory): ds.to_netcdf(filename) yield str(filename) - filename.unlink() + with suppress(PermissionError): + filename.unlink() @pytest.fixture(scope="module") @@ -1188,7 +1189,8 @@ def local_hdf5_filename(tmp_path_factory): h.close() yield str(filename) - filename.unlink() + with suppress(PermissionError): + filename.unlink() @pytest.fixture(scope="module") From 2185a19a797cd8e56f56e3cfceb756da833d1664 Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Fri, 5 Jan 2024 16:07:34 +0100 Subject: [PATCH 694/702] add config option to turn off mitiff corner corretion --- satpy/tests/writer_tests/test_mitiff.py | 51 +++++++++++++++++++++++++ satpy/writers/mitiff.py | 24 ++++++++---- 2 files changed, 67 insertions(+), 8 deletions(-) diff --git a/satpy/tests/writer_tests/test_mitiff.py 
b/satpy/tests/writer_tests/test_mitiff.py index 4e8878687a..1a9e8850d6 100644 --- a/satpy/tests/writer_tests/test_mitiff.py +++ b/satpy/tests/writer_tests/test_mitiff.py @@ -879,6 +879,57 @@ def test_convert_proj4_string(self): proj4_string = w._add_proj4_string(ds1, ds1) assert proj4_string == check["proj4"] + def test_correction_proj4_string(self): + """Test correction of proj4 lower left coordinate.""" + import dask.array as da + import xarray as xr + from pyresample.geometry import AreaDefinition + + from satpy.writers.mitiff import MITIFFWriter + area_def = AreaDefinition( + "test", + "test", + "test", + "+proj=merc", + 100, + 200, + (-1000., -1500., 1000., 1500.), + ) + + ds1 = xr.DataArray( + da.zeros((10, 20), chunks=20), + dims=("y", "x"), + attrs={"area": area_def} + ) + default_expected_proj4_string = ' Proj string: +init=EPSG:3395 +towgs84=0,0,0 +units=km +x_0=1020.000000 +y_0=1515.000000\n' + w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir) + proj4_string = w._add_proj4_string(ds1, ds1) + assert proj4_string == default_expected_proj4_string + + kwargs = {'mitiff_pixel_adjustment': False} + new_expected_proj4_string = ' Proj string: +init=EPSG:3395 +towgs84=0,0,0 +units=km +x_0=1000.000000 +y_0=1500.000000\n' + w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir) + proj4_string = w._add_proj4_string(ds1, ds1, **kwargs) + assert proj4_string == new_expected_proj4_string + + area_def2 = AreaDefinition( + "test", + "test", + "test", + "+proj=merc +x_0=0 +y_0=0", + 100, + 200, + (-1000., -1500., 1000., 1500.), + ) + ds2 = xr.DataArray( + da.zeros((10, 20), chunks=20), + dims=("y", "x"), + attrs={"area": area_def2} + ) + w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir) + proj4_string = w._add_proj4_string(ds2, ds2, **kwargs) + assert proj4_string == new_expected_proj4_string + def test_save_dataset_palette(self): """Test writer operation as palette.""" from satpy.writers.mitiff import MITIFFWriter diff --git a/satpy/writers/mitiff.py b/satpy/writers/mitiff.py index 3658ac16b7..a97fe5de24 100644 --- a/satpy/writers/mitiff.py +++ b/satpy/writers/mitiff.py @@ -220,7 +220,7 @@ def _add_sizes(self, datasets, first_dataset): return _image_description - def _add_proj4_string(self, datasets, first_dataset): + def _add_proj4_string(self, datasets, first_dataset, **kwargs): import warnings proj4_string = " Proj string: " @@ -259,31 +259,39 @@ def _add_proj4_string(self, datasets, first_dataset): if "units" not in proj4_string: proj4_string += " +units=km" - proj4_string = self._append_projection_center(proj4_string, datasets, first_dataset, x_0, y_0) + proj4_string = self._append_projection_center(proj4_string, datasets, first_dataset, x_0, y_0, **kwargs) LOG.debug("proj4_string: %s", proj4_string) proj4_string += "\n" return proj4_string - def _append_projection_center(self, proj4_string, datasets, first_dataset, x_0, y_0): + def _append_projection_center(self, proj4_string, datasets, first_dataset, x_0, y_0, **kwargs): if isinstance(datasets, list): dataset = first_dataset else: dataset = datasets + corner_correction_x = dataset.attrs["area"].pixel_size_x + corner_correction_y = dataset.attrs["area"].pixel_size_y + try: + if kwargs['mitiff_pixel_adjustment'] is False: + corner_correction_x = 0 + corner_correction_y = 0 + except KeyError: + pass if "x_0" not in proj4_string: proj4_string += " +x_0=%.6f" % ( (-dataset.attrs["area"].area_extent[0] + - dataset.attrs["area"].pixel_size_x) + x_0) + corner_correction_x) + x_0) proj4_string += " +y_0=%.6f" % ( 
(-dataset.attrs["area"].area_extent[1] + - dataset.attrs["area"].pixel_size_y) + y_0) + corner_correction_y) + y_0) elif "+x_0=0" in proj4_string and "+y_0=0" in proj4_string: proj4_string = proj4_string.replace("+x_0=0", "+x_0=%.6f" % ( (-dataset.attrs["area"].area_extent[0] + - dataset.attrs["area"].pixel_size_x) + x_0)) + corner_correction_x) + x_0)) proj4_string = proj4_string.replace("+y_0=0", "+y_0=%.6f" % ( (-dataset.attrs["area"].area_extent[1] + - dataset.attrs["area"].pixel_size_y) + y_0)) + corner_correction_y) + y_0)) return proj4_string def _convert_epsg_to_proj(self, proj4_string, x_0): @@ -563,7 +571,7 @@ def _make_image_description(self, datasets, **kwargs): _image_description += " Map projection: Stereographic\n" - _image_description += self._add_proj4_string(datasets, first_dataset) + _image_description += self._add_proj4_string(datasets, first_dataset, **kwargs) _image_description += " TrueLat: 60N\n" _image_description += " GridRot: 0\n" From c3eb0ecb7dbee225db54764bea064aac88d392e4 Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Fri, 5 Jan 2024 16:16:00 +0100 Subject: [PATCH 695/702] refactor a bit --- satpy/writers/mitiff.py | 32 ++++++++++++++++++-------------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/satpy/writers/mitiff.py b/satpy/writers/mitiff.py index a97fe5de24..bf27d19840 100644 --- a/satpy/writers/mitiff.py +++ b/satpy/writers/mitiff.py @@ -259,25 +259,18 @@ def _add_proj4_string(self, datasets, first_dataset, **kwargs): if "units" not in proj4_string: proj4_string += " +units=km" - proj4_string = self._append_projection_center(proj4_string, datasets, first_dataset, x_0, y_0, **kwargs) + if isinstance(datasets, list): + _dataset = first_dataset + else: + _dataset = datasets + proj4_string = self._append_projection_center(proj4_string, _dataset, x_0, y_0, **kwargs) LOG.debug("proj4_string: %s", proj4_string) proj4_string += "\n" return proj4_string - def _append_projection_center(self, proj4_string, datasets, first_dataset, x_0, y_0, **kwargs): - if isinstance(datasets, list): - dataset = first_dataset - else: - dataset = datasets - corner_correction_x = dataset.attrs["area"].pixel_size_x - corner_correction_y = dataset.attrs["area"].pixel_size_y - try: - if kwargs['mitiff_pixel_adjustment'] is False: - corner_correction_x = 0 - corner_correction_y = 0 - except KeyError: - pass + def _append_projection_center(self, proj4_string, dataset, x_0, y_0, **kwargs): + corner_correction_x, corner_correction_y = self._set_correction_size(dataset, kwargs) if "x_0" not in proj4_string: proj4_string += " +x_0=%.6f" % ( (-dataset.attrs["area"].area_extent[0] + @@ -294,6 +287,17 @@ def _append_projection_center(self, proj4_string, datasets, first_dataset, x_0, corner_correction_y) + y_0)) return proj4_string + def _set_correction_size(self, dataset, kwargs): + corner_correction_x = dataset.attrs["area"].pixel_size_x + corner_correction_y = dataset.attrs["area"].pixel_size_y + try: + if kwargs['mitiff_pixel_adjustment'] is False: + corner_correction_x = 0 + corner_correction_y = 0 + except KeyError: + pass + return corner_correction_x,corner_correction_y + def _convert_epsg_to_proj(self, proj4_string, x_0): if "EPSG:32631" in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32631", From 33aeb191747451393c195f71bc04a22b5c8b0413 Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Fri, 5 Jan 2024 16:27:19 +0100 Subject: [PATCH 696/702] refactor a bit more --- satpy/writers/mitiff.py | 36 +++++++++++++++++++----------------- 1 file changed, 
19 insertions(+), 17 deletions(-) diff --git a/satpy/writers/mitiff.py b/satpy/writers/mitiff.py index bf27d19840..2e3cd6d600 100644 --- a/satpy/writers/mitiff.py +++ b/satpy/writers/mitiff.py @@ -245,6 +245,20 @@ def _add_proj4_string(self, datasets, first_dataset, **kwargs): # FUTURE: Use pyproj 2.0+ to convert EPSG to PROJ4 if possible proj4_string, x_0 = self._convert_epsg_to_proj(proj4_string, x_0) + proj4_string = self._special_correction_of_proj_string(proj4_string) + + if isinstance(datasets, list): + _dataset = first_dataset + else: + _dataset = datasets + mitiff_pixel_adjustment = kwargs.get("mitiff_pixel_adjustment", False) + proj4_string = self._append_projection_center(proj4_string, _dataset, x_0, y_0, mitiff_pixel_adjustment) + LOG.debug("proj4_string: %s", proj4_string) + proj4_string += "\n" + + return proj4_string + + def _special_correction_of_proj_string(self, proj4_string): if "geos" in proj4_string: proj4_string = proj4_string.replace("+sweep=x ", "") if "+a=6378137.0 +b=6356752.31414" in proj4_string: @@ -258,19 +272,10 @@ def _add_proj4_string(self, datasets, first_dataset, **kwargs): if "units" not in proj4_string: proj4_string += " +units=km" - - if isinstance(datasets, list): - _dataset = first_dataset - else: - _dataset = datasets - proj4_string = self._append_projection_center(proj4_string, _dataset, x_0, y_0, **kwargs) - LOG.debug("proj4_string: %s", proj4_string) - proj4_string += "\n" - return proj4_string - def _append_projection_center(self, proj4_string, dataset, x_0, y_0, **kwargs): - corner_correction_x, corner_correction_y = self._set_correction_size(dataset, kwargs) + def _append_projection_center(self, proj4_string, dataset, x_0, y_0, mitiff_pixel_adjustment): + corner_correction_x, corner_correction_y = self._set_correction_size(dataset, mitiff_pixel_adjustment) if "x_0" not in proj4_string: proj4_string += " +x_0=%.6f" % ( (-dataset.attrs["area"].area_extent[0] + @@ -290,12 +295,9 @@ def _append_projection_center(self, proj4_string, dataset, x_0, y_0, **kwargs): def _set_correction_size(self, dataset, kwargs): corner_correction_x = dataset.attrs["area"].pixel_size_x corner_correction_y = dataset.attrs["area"].pixel_size_y - try: - if kwargs['mitiff_pixel_adjustment'] is False: - corner_correction_x = 0 - corner_correction_y = 0 - except KeyError: - pass + if kwargs.get("mitiff_pixel_adjustment", False): + corner_correction_x = 0 + corner_correction_y = 0 return corner_correction_x,corner_correction_y def _convert_epsg_to_proj(self, proj4_string, x_0): From c189f214342d516cb7e0b762e4311faea78f53ac Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Fri, 5 Jan 2024 16:43:39 +0100 Subject: [PATCH 697/702] Update tests after refactor --- satpy/tests/writer_tests/test_mitiff.py | 33 ++++++------------------- satpy/writers/mitiff.py | 6 ++--- 2 files changed, 11 insertions(+), 28 deletions(-) diff --git a/satpy/tests/writer_tests/test_mitiff.py b/satpy/tests/writer_tests/test_mitiff.py index 1a9e8850d6..2dafdd5896 100644 --- a/satpy/tests/writer_tests/test_mitiff.py +++ b/satpy/tests/writer_tests/test_mitiff.py @@ -901,34 +901,17 @@ def test_correction_proj4_string(self): dims=("y", "x"), attrs={"area": area_def} ) - default_expected_proj4_string = ' Proj string: +init=EPSG:3395 +towgs84=0,0,0 +units=km +x_0=1020.000000 +y_0=1515.000000\n' + default_expected_correction = (20.0, 15.0) w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir) - proj4_string = w._add_proj4_string(ds1, ds1) - assert proj4_string == default_expected_proj4_string + 
mitiff_pixel_adjustment = True + correction = w._set_correction_size(ds1, mitiff_pixel_adjustment) + assert correction == default_expected_correction - kwargs = {'mitiff_pixel_adjustment': False} - new_expected_proj4_string = ' Proj string: +init=EPSG:3395 +towgs84=0,0,0 +units=km +x_0=1000.000000 +y_0=1500.000000\n' + mitiff_pixel_adjustment = False + new_expected_correction = (0, 0) w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir) - proj4_string = w._add_proj4_string(ds1, ds1, **kwargs) - assert proj4_string == new_expected_proj4_string - - area_def2 = AreaDefinition( - "test", - "test", - "test", - "+proj=merc +x_0=0 +y_0=0", - 100, - 200, - (-1000., -1500., 1000., 1500.), - ) - ds2 = xr.DataArray( - da.zeros((10, 20), chunks=20), - dims=("y", "x"), - attrs={"area": area_def2} - ) - w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir) - proj4_string = w._add_proj4_string(ds2, ds2, **kwargs) - assert proj4_string == new_expected_proj4_string + correction = w._set_correction_size(ds1, mitiff_pixel_adjustment) + assert correction == new_expected_correction def test_save_dataset_palette(self): """Test writer operation as palette.""" diff --git a/satpy/writers/mitiff.py b/satpy/writers/mitiff.py index 2e3cd6d600..b005d37ec5 100644 --- a/satpy/writers/mitiff.py +++ b/satpy/writers/mitiff.py @@ -251,7 +251,7 @@ def _add_proj4_string(self, datasets, first_dataset, **kwargs): _dataset = first_dataset else: _dataset = datasets - mitiff_pixel_adjustment = kwargs.get("mitiff_pixel_adjustment", False) + mitiff_pixel_adjustment = kwargs.get("mitiff_pixel_adjustment", True) proj4_string = self._append_projection_center(proj4_string, _dataset, x_0, y_0, mitiff_pixel_adjustment) LOG.debug("proj4_string: %s", proj4_string) proj4_string += "\n" @@ -292,10 +292,10 @@ def _append_projection_center(self, proj4_string, dataset, x_0, y_0, mitiff_pixe corner_correction_y) + y_0)) return proj4_string - def _set_correction_size(self, dataset, kwargs): + def _set_correction_size(self, dataset, mitiff_pixel_adjustment): corner_correction_x = dataset.attrs["area"].pixel_size_x corner_correction_y = dataset.attrs["area"].pixel_size_y - if kwargs.get("mitiff_pixel_adjustment", False): + if not mitiff_pixel_adjustment: corner_correction_x = 0 corner_correction_y = 0 return corner_correction_x,corner_correction_y From afe843b3077959b209a3b02dc24f738c34d9a76e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 5 Jan 2024 14:02:32 -0600 Subject: [PATCH 698/702] Add QC-based filtering to the VIIRS EDR AOD550 product --- satpy/etc/readers/viirs_edr.yaml | 2 +- satpy/readers/viirs_edr.py | 31 ++++++++++++++++++++ satpy/tests/reader_tests/test_viirs_edr.py | 34 +++++++++++++++++++++- 3 files changed, 65 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index c078e754aa..37f36934b8 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -25,7 +25,7 @@ file_types: file_patterns: - 'JRR-CloudHeight_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_aod: - file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSAODHandler file_patterns: - 'JRR-AOD_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_lst: diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index eaf2f53d42..de89a3dc0d 100644 --- 
a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -56,6 +56,21 @@ scene = satpy.Scene(filenames, reader='viirs_edr', reader_kwargs={"filter_veg": False}) +AOD Filtering +^^^^^^^^^^^^^ + +The AOD (Aerosol Optical Depth) product can be optionally filtered based on +Quality Control (QC) values in the file. By default no filtering is performed. +By providing the ``aod_qc_filter`` keyword argument and specifying the maximum +value of the ``QCAll`` variable to include (not mask). For example:: + + scene = satpy.Scene(filenames, reader='viirs_edr', reader_kwargs={"aod_qc_filter": 1}) + +will only preserve AOD550 values where the quality is 0 ("high") or +1 ("medium"). At the time of writing the ``QCAll`` variable has 1 ("medium"), +2 ("low"), and 3 ("no retrieval"). + + """ from __future__ import annotations @@ -343,3 +358,19 @@ def _scale_data(self): add_offset = self.nc[self._manual_scalings[var_name][1]] data_arr.data = data_arr.data * scale_factor.data + add_offset.data self.nc[var_name] = data_arr + + +class VIIRSAODHandler(VIIRSJRRFileHandler): + """File handler for AOD data files.""" + + def __init__(self, *args, aod_qc_filter: int | None = None, **kwargs) -> None: + """Initialize file handler and keep track of QC filtering.""" + super().__init__(*args, **kwargs) + self._aod_qc_filter = aod_qc_filter + + def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: + new_data_arr = super()._mask_invalid(data_arr, ds_info) + if self._aod_qc_filter is None or ds_info["name"] != "AOD550": + return new_data_arr + qc_all = self.nc["QCAll"] + return new_data_arr.where(qc_all <= self._aod_qc_filter) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index d042576d05..e61718e9db 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -220,6 +220,14 @@ def aod_file(tmp_path_factory: TempPathFactory) -> Path: data_vars = _create_continuous_variables( ("AOD550",) ) + qc_data = np.zeros(data_vars["AOD550"].shape, dtype=np.int8) + qc_data[-1, -1] = 2 + data_vars["QCAll"] = xr.DataArray( + qc_data, + dims=data_vars["AOD550"].dims, + attrs={"valid_range": [0, 3]}, + ) + data_vars["QCAll"].encoding["_FillValue"] = -128 return _create_fake_file(tmp_path_factory, fn, data_vars) @@ -371,7 +379,6 @@ def test_get_dataset_surf_refl_with_veg_idx( ("var_names", "data_file"), [ (("CldTopTemp", "CldTopHght", "CldTopPres"), lazy_fixture("cloud_height_file")), - (("AOD550",), lazy_fixture("aod_file")), (("VLST",), lazy_fixture("lst_file")), ] ) @@ -385,6 +392,31 @@ def test_get_dataset_generic(self, var_names, data_file): for var_name in var_names: _check_continuous_data_arr(scn[var_name]) + @pytest.mark.parametrize( + ("aod_qc_filter", "exp_masked_pixel"), + [ + (None, False), + (0, True), + (2, False) + ], + ) + def test_get_aod_filtered(self, aod_file, aod_qc_filter, exp_masked_pixel): + """Test that the AOD product can be loaded and filtered.""" + from satpy import Scene + bytes_in_m_row = 4 * 3200 + with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): + scn = Scene(reader="viirs_edr", filenames=[aod_file], reader_kwargs={"aod_qc_filter": aod_qc_filter}) + scn.load(["AOD550"]) + _check_continuous_data_arr(scn["AOD550"]) + data_np = scn["AOD550"].data.compute() + pixel_is_nan = np.isnan(data_np[-1, -1]) + assert pixel_is_nan if exp_masked_pixel else not pixel_is_nan + + # filtering should never affect geolocation + lons, lats = 
scn["AOD550"].attrs["area"].get_lonlats() + assert not np.isnan(lons[-1, -1].compute()) + assert not np.isnan(lats[-1, -1].compute()) + @pytest.mark.parametrize( ("data_file", "exp_available"), [ From ea5aa2298905a56bb8eed6408c5d91d9dba0c4f0 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 9 Jan 2024 20:03:50 -0600 Subject: [PATCH 699/702] Fix wildcard kwargs breaking VIIRS EDR reader --- satpy/readers/viirs_edr.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index de89a3dc0d..36d9ed5ac0 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -91,7 +91,7 @@ class VIIRSJRRFileHandler(BaseFileHandler): """NetCDF4 reader for VIIRS Active Fires.""" - def __init__(self, filename, filename_info, filetype_info): + def __init__(self, filename, filename_info, filetype_info, **kwargs): """Initialize the geo filehandler.""" super(VIIRSJRRFileHandler, self).__init__(filename, filename_info, filetype_info) From 64eedbc9259beb6357ab206a477896e4e0a952da Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 9 Jan 2024 20:08:10 -0600 Subject: [PATCH 700/702] Remove unnecessary test file removal --- satpy/tests/test_readers.py | 16 ++++------------ 1 file changed, 4 insertions(+), 12 deletions(-) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index beeb16b972..3e54a93592 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -23,7 +23,6 @@ import sys import unittest import warnings -from contextlib import suppress from pathlib import Path from typing import Iterator from unittest import mock @@ -978,8 +977,7 @@ def _local_file(tmp_path_factory, filename: str) -> Iterator[Path]: tmp_path = tmp_path_factory.mktemp("local_files") local_filename = tmp_path / filename local_filename.touch() - yield local_filename - local_filename.unlink() + return local_filename @pytest.fixture(scope="module") @@ -1007,9 +1005,7 @@ def local_zip_file(local_filename2): zip_file = zipfile.ZipFile(zip_name, "w", zipfile.ZIP_DEFLATED) zip_file.write(local_filename2) zip_file.close() - yield zip_name - with suppress(PermissionError): - zip_name.unlink() + return zip_name class TestFSFile: @@ -1131,9 +1127,7 @@ def local_netcdf_filename(tmp_path_factory): ds["var1"] = xr.DataArray(np.zeros((10, 10), dtype=np.int16), dims=("y", "x")) ds.to_netcdf(filename) - yield str(filename) - with suppress(PermissionError): - filename.unlink() + return str(filename) @pytest.fixture(scope="module") @@ -1188,9 +1182,7 @@ def local_hdf5_filename(tmp_path_factory): h.create_dataset("var1", data=np.zeros((10, 10), dtype=np.int16)) h.close() - yield str(filename) - with suppress(PermissionError): - filename.unlink() + return str(filename) @pytest.fixture(scope="module") From 9d8b2fd0f4445daa3de30b7deff738c0d14a9fa7 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 9 Jan 2024 21:09:55 -0600 Subject: [PATCH 701/702] Add debug log message to know when AOD QC filtering is being performed --- satpy/readers/viirs_edr.py | 1 + 1 file changed, 1 insertion(+) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 36d9ed5ac0..b5ba999b13 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -372,5 +372,6 @@ def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: new_data_arr = super()._mask_invalid(data_arr, ds_info) if self._aod_qc_filter is None or ds_info["name"] != "AOD550": return new_data_arr + LOG.debug(f"Filtering AOD data to include quality 
<= {self._aod_qc_filter}") qc_all = self.nc["QCAll"] return new_data_arr.where(qc_all <= self._aod_qc_filter) From 529de8fe47bfd1f290a602cffcd89d8f02031849 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 9 Jan 2024 21:12:24 -0600 Subject: [PATCH 702/702] Fix contextmanager definition in test_readers.py --- satpy/tests/test_readers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index 3e54a93592..db3d1ccb1d 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -977,7 +977,7 @@ def _local_file(tmp_path_factory, filename: str) -> Iterator[Path]: tmp_path = tmp_path_factory.mktemp("local_files") local_filename = tmp_path / filename local_filename.touch() - return local_filename + yield local_filename @pytest.fixture(scope="module")