diff --git a/doc/source/conf.py b/doc/source/conf.py
index 020544ee4a..4bf20b0e38 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -14,9 +14,9 @@
from __future__ import annotations
+import datetime as dt
import os
import sys
-from datetime import datetime
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
@@ -117,7 +117,7 @@ def __getattr__(cls, name):
# General information about the project.
project = u"Satpy"
-copyright = u"2009-{}, The PyTroll Team".format(datetime.utcnow().strftime("%Y")) # noqa: A001
+copyright = u"2009-{}, The PyTroll Team".format(dt.datetime.utcnow().strftime("%Y")) # noqa: A001
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/satpy/cf/decoding.py b/satpy/cf/decoding.py
index 0d7a9d22be..2515f6bd38 100644
--- a/satpy/cf/decoding.py
+++ b/satpy/cf/decoding.py
@@ -15,10 +15,12 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""CF decoding."""
+
import copy
+import datetime as dt
import json
-from datetime import datetime
def decode_attrs(attrs):
@@ -69,6 +71,6 @@ def _datetime_parser_json(json_dict):
def _str2datetime(string):
"""Convert string to datetime object."""
try:
- return datetime.fromisoformat(string)
+ return dt.datetime.fromisoformat(string)
except (TypeError, ValueError):
return None
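
For illustration, a standalone sketch of the fallback behaviour implemented by _str2datetime above, using made-up inputs:

import datetime as dt

def str_to_datetime_or_none(string):
    # Same pattern as _str2datetime: ISO 8601 strings parse, anything else
    # (including None) yields None instead of raising.
    try:
        return dt.datetime.fromisoformat(string)
    except (TypeError, ValueError):
        return None

print(str_to_datetime_or_none("2024-01-01T12:30:00"))  # 2024-01-01 12:30:00
print(str_to_datetime_or_none("n/a"))                  # None
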
diff --git a/satpy/composites/viirs.py b/satpy/composites/viirs.py
index 5df2d482af..a9a047fd21 100644
--- a/satpy/composites/viirs.py
+++ b/satpy/composites/viirs.py
@@ -15,12 +15,14 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Composite classes for the VIIRS instrument."""
+
from __future__ import annotations
+import datetime as dt
import logging
import math
-from datetime import datetime
import dask
import dask.array as da
@@ -842,7 +844,7 @@ def _linear_normalization_from_0to1(
data[mask] = data[mask] / theoretical_max
-def _check_moon_phase(moon_datasets: list[xr.DataArray], start_time: datetime) -> float:
+def _check_moon_phase(moon_datasets: list[xr.DataArray], start_time: dt.datetime) -> float:
"""Check if we have Moon phase as an input dataset and, if not, calculate it."""
if moon_datasets:
# convert to decimal instead of %
diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py
index a328402e0a..03208ebc50 100644
--- a/satpy/dataset/metadata.py
+++ b/satpy/dataset/metadata.py
@@ -15,11 +15,12 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Utilities for merging metadata from various sources."""
+import datetime as dt
import warnings
from collections.abc import Collection
-from datetime import datetime
from functools import partial, reduce
from operator import eq, is_
@@ -135,7 +136,7 @@ def _combine_time_parameters(values):
def _filter_time_values(values):
"""Remove values that are not datetime objects."""
- return [v for v in values if isinstance(v, datetime)]
+ return [v for v in values if isinstance(v, dt.datetime)]
def average_datetimes(datetime_list):
@@ -152,8 +153,8 @@ def average_datetimes(datetime_list):
Returns: Average datetime as a datetime object
"""
- total = [datetime.timestamp(dt) for dt in datetime_list]
- return datetime.fromtimestamp(sum(total) / len(total))
+ total = [dt.datetime.timestamp(d) for d in datetime_list]
+ return dt.datetime.fromtimestamp(sum(total) / len(total))
def _are_values_combinable(values):
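
As a quick sanity check of the timestamp-averaging approach in average_datetimes above (made-up naive datetimes; timestamp() and fromtimestamp() round-trip through local time):

import datetime as dt

times = [dt.datetime(2024, 1, 1, 12, 0), dt.datetime(2024, 1, 1, 14, 0)]
total = [dt.datetime.timestamp(t) for t in times]
print(dt.datetime.fromtimestamp(sum(total) / len(total)))  # 2024-01-01 13:00:00
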
diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py
index 1471ba3669..5ea8530612 100644
--- a/satpy/modifiers/angles.py
+++ b/satpy/modifiers/angles.py
@@ -18,11 +18,11 @@
"""Utilties for getting various angles for a dataset.."""
from __future__ import annotations
+import datetime as dt
import hashlib
import os
import shutil
import warnings
-from datetime import datetime
from functools import update_wrapper
from glob import glob
from typing import Any, Callable, Optional, Union
@@ -45,7 +45,7 @@
# pyorbital's get_observer_look function.
# The difference is on the order of 1e-10 at most as time changes so we force
# it to a single time for easier caching. It is *only* used if caching.
-STATIC_EARTH_INERTIAL_DATETIME = datetime(2000, 1, 1, 12, 0, 0)
+STATIC_EARTH_INERTIAL_DATETIME = dt.datetime(2000, 1, 1, 12, 0, 0)
DEFAULT_UNCACHE_TYPES = (SwathDefinition, xr.DataArray, da.Array)
HASHABLE_GEOMETRIES = (AreaDefinition, StackedAreaDefinition)
@@ -263,7 +263,7 @@ def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES):
raise TypeError(f"Unhashable type ({type(arg)}).")
if isinstance(arg, HASHABLE_GEOMETRIES):
arg = hash(arg)
- elif isinstance(arg, datetime):
+ elif isinstance(arg, dt.datetime):
arg = arg.isoformat(" ")
hashable_args.append(arg)
arg_hash = hashlib.sha1() # nosec
@@ -274,7 +274,7 @@ def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES):
def _sanitize_observer_look_args(*args):
new_args = []
for arg in args:
- if isinstance(arg, datetime):
+ if isinstance(arg, dt.datetime):
new_args.append(STATIC_EARTH_INERTIAL_DATETIME)
elif isinstance(arg, (float, np.float64, np.float32)):
# Round floating point numbers to nearest tenth. Numpy types don't
@@ -448,7 +448,7 @@ def _cos_zen_ndarray(lons, lats, utc_time):
return pyob_cos_zen(utc_time, lons, lats)
-def _get_sun_azimuth_ndarray(lons: np.ndarray, lats: np.ndarray, start_time: datetime) -> np.ndarray:
+def _get_sun_azimuth_ndarray(lons: np.ndarray, lats: np.ndarray, start_time: dt.datetime) -> np.ndarray:
with ignore_invalid_float_warnings():
suna = get_alt_az(start_time, lons, lats)[1]
suna = np.rad2deg(suna)
diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py
index ca131b101f..7835292eff 100644
--- a/satpy/readers/__init__.py
+++ b/satpy/readers/__init__.py
@@ -15,15 +15,17 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Shared objects of the various reader classes."""
+
from __future__ import annotations
+import datetime as dt
import logging
import os
import pathlib
import pickle # nosec B403
import warnings
-from datetime import datetime, timedelta
from functools import total_ordering
import yaml
@@ -213,7 +215,7 @@ def _get_sorted_file_groups(all_file_keys, time_threshold): # noqa: D417
# interest of sorting
flat_keys = ((v[0], rn, v[1]) for (rn, vL) in all_file_keys.items() for v in vL)
prev_key = None
- threshold = timedelta(seconds=time_threshold)
+ threshold = dt.timedelta(seconds=time_threshold)
# file_groups is sorted, because dictionaries are sorted by insertion
# order in Python 3.7+
file_groups = {}
@@ -222,7 +224,7 @@ def _get_sorted_file_groups(all_file_keys, time_threshold): # noqa: D417
if prev_key is None:
is_new_group = True
prev_key = gk
- elif isinstance(gk[0], datetime):
+ elif isinstance(gk[0], dt.datetime):
# datetimes within threshold difference are "the same time"
is_new_group = (gk[0] - prev_key[0]) > threshold
else:
diff --git a/satpy/readers/aapp_l1b.py b/satpy/readers/aapp_l1b.py
index e502a9da64..6e3072b4d0 100644
--- a/satpy/readers/aapp_l1b.py
+++ b/satpy/readers/aapp_l1b.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Reader for aapp level 1b data.
Options for loading:
@@ -24,9 +25,10 @@
https://nwp-saf.eumetsat.int/site/download/documentation/aapp/NWPSAF-MF-UD-003_Formats_v8.0.pdf
"""
+
+import datetime as dt
import functools
import logging
-from datetime import datetime, timedelta
import dask.array as da
import numpy as np
@@ -102,14 +104,14 @@ def _set_filedata_layout(self):
@property
def start_time(self):
"""Get the time of the first observation."""
- return datetime(self._data["scnlinyr"][0], 1, 1) + timedelta(
+ return dt.datetime(self._data["scnlinyr"][0], 1, 1) + dt.timedelta(
days=int(self._data["scnlindy"][0]) - 1,
milliseconds=int(self._data["scnlintime"][0]))
@property
def end_time(self):
"""Get the time of the final observation."""
- return datetime(self._data["scnlinyr"][-1], 1, 1) + timedelta(
+ return dt.datetime(self._data["scnlinyr"][-1], 1, 1) + dt.timedelta(
days=int(self._data["scnlindy"][-1]) - 1,
milliseconds=int(self._data["scnlintime"][-1]))
@@ -129,10 +131,10 @@ def _get_platform_name(self, platform_names_lookup):
def read(self):
"""Read the data."""
- tic = datetime.now()
+ tic = dt.datetime.now()
header = np.memmap(self.filename, dtype=self._header_type, mode="r", shape=(1, ))
data = np.memmap(self.filename, dtype=self._scan_type, offset=self._header_offset, mode="r")
- logger.debug("Reading time %s", str(datetime.now() - tic))
+ logger.debug("Reading time %s", str(dt.datetime.now() - tic))
self._header = header
self._data = data
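
For reference, the epoch arithmetic used by start_time/end_time above, shown with made-up scan-line values (year 2024, day-of-year 37, 43 200 000 ms of day):

import datetime as dt

scan_time = dt.datetime(2024, 1, 1) + dt.timedelta(days=37 - 1, milliseconds=43_200_000)
print(scan_time)  # 2024-02-06 12:00:00
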
diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py
index 107382d7ba..ecfd20a830 100644
--- a/satpy/readers/abi_base.py
+++ b/satpy/readers/abi_base.py
@@ -15,12 +15,13 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Advance Baseline Imager reader base class for the Level 1b and l2+ reader."""
+import datetime as dt
import logging
import math
from contextlib import suppress
-from datetime import datetime
import dask
import numpy as np
@@ -291,12 +292,12 @@ def _get_areadef_fixedgrid(self, key):
@property
def start_time(self):
"""Start time of the current file's observations."""
- return datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%S.%fZ")
+ return dt.datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%S.%fZ")
@property
def end_time(self):
"""End time of the current file's observations."""
- return datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%S.%fZ")
+ return dt.datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%S.%fZ")
def spatial_resolution_to_number(self):
"""Convert the 'spatial_resolution' global attribute to meters."""
diff --git a/satpy/readers/acspo.py b/satpy/readers/acspo.py
index 8a8262af33..90356f46e2 100644
--- a/satpy/readers/acspo.py
+++ b/satpy/readers/acspo.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""ACSPO SST Reader.
See the following page for more information:
@@ -22,8 +23,9 @@
https://podaac.jpl.nasa.gov/dataset/VIIRS_NPP-OSPO-L2P-v2.3
"""
+
+import datetime as dt
import logging
-from datetime import datetime
import numpy as np
@@ -83,7 +85,7 @@ def get_shape(self, ds_id, ds_info):
@staticmethod
def _parse_datetime(datestr):
- return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ")
+ return dt.datetime.strptime(datestr, "%Y%m%dT%H%M%SZ")
@property
def start_time(self):
diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py
index bf2ab09e79..7ea83a6820 100644
--- a/satpy/readers/ahi_hsd.py
+++ b/satpy/readers/ahi_hsd.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Advanced Himawari Imager (AHI) standard format data reader.
References:
@@ -58,10 +59,10 @@
"""
+import datetime as dt
import logging
import os
import warnings
-from datetime import datetime, timedelta
import dask.array as da
import numpy as np
@@ -419,12 +420,12 @@ def end_time(self):
@property
def observation_start_time(self):
"""Get the observation start time."""
- return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_start_time"].item()))
+ return dt.datetime(1858, 11, 17) + dt.timedelta(days=float(self.basic_info["observation_start_time"].item()))
@property
def observation_end_time(self):
"""Get the observation end time."""
- return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_end_time"].item()))
+ return dt.datetime(1858, 11, 17) + dt.timedelta(days=float(self.basic_info["observation_end_time"].item()))
@property
def _timeline(self):
@@ -760,7 +761,7 @@ def __init__(self, timeline, area):
def _parse_timeline(self, timeline):
try:
- return datetime.strptime(timeline, "%H%M").time()
+ return dt.datetime.strptime(timeline, "%H%M").time()
except ValueError:
return None
@@ -771,8 +772,8 @@ def get_nominal_start_time(self, observation_start_time):
def get_nominal_end_time(self, nominal_start_time):
"""Get nominal end time of the scan."""
freq = self._observation_frequency
- return nominal_start_time + timedelta(minutes=freq // 60,
- seconds=freq % 60)
+ return nominal_start_time + dt.timedelta(minutes=freq // 60,
+ seconds=freq % 60)
def _modify_observation_time_for_nominal(self, observation_time):
"""Round observation time to a nominal time based on known observation frequency.
@@ -793,8 +794,8 @@ def _modify_observation_time_for_nominal(self, observation_time):
)
return observation_time
timeline = self._get_closest_timeline(observation_time)
- dt = self._get_offset_relative_to_timeline()
- return timeline + timedelta(minutes=dt//60, seconds=dt % 60)
+        offset = self._get_offset_relative_to_timeline()
+        return timeline + dt.timedelta(minutes=offset//60, seconds=offset % 60)
def _get_closest_timeline(self, observation_time):
"""Find the closest timeline for the given observation time.
@@ -808,11 +809,11 @@ def _get_closest_timeline(self, observation_time):
"""
delta_days = [-1, 0, 1]
surrounding_dates = [
- (observation_time + timedelta(days=delta)).date()
+ (observation_time + dt.timedelta(days=delta)).date()
for delta in delta_days
]
timelines = [
- datetime.combine(date, self.timeline)
+ dt.datetime.combine(date, self.timeline)
for date in surrounding_dates
]
diffs = [
diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py
index d6e6caa887..92c2915a1e 100644
--- a/satpy/readers/ahi_l2_nc.py
+++ b/satpy/readers/ahi_l2_nc.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Reader for Himawari L2 cloud products from NOAA's big data programme.
For more information about the data, see: .
@@ -43,8 +44,8 @@
supported. These include the CldHgtFlag and the CloudMaskPacked variables.
"""
+import datetime as dt
import logging
-from datetime import datetime
import xarray as xr
@@ -82,14 +83,14 @@ def __init__(self, filename, filename_info, filetype_info):
@property
def start_time(self):
"""Start timestamp of the dataset."""
- dt = self.nc.attrs["time_coverage_start"]
- return datetime.strptime(dt, "%Y-%m-%dT%H:%M:%SZ")
+ date = self.nc.attrs["time_coverage_start"]
+ return dt.datetime.strptime(date, "%Y-%m-%dT%H:%M:%SZ")
@property
def end_time(self):
"""End timestamp of the dataset."""
- dt = self.nc.attrs["time_coverage_end"]
- return datetime.strptime(dt, "%Y-%m-%dT%H:%M:%SZ")
+ date = self.nc.attrs["time_coverage_end"]
+ return dt.datetime.strptime(date, "%Y-%m-%dT%H:%M:%SZ")
def get_dataset(self, key, info):
"""Load a dataset."""
diff --git a/satpy/readers/ami_l1b.py b/satpy/readers/ami_l1b.py
index db8c8444d8..6841189eef 100644
--- a/satpy/readers/ami_l1b.py
+++ b/satpy/readers/ami_l1b.py
@@ -15,10 +15,11 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Advanced Meteorological Imager reader for the Level 1b NetCDF4 format."""
+import datetime as dt
import logging
-from datetime import datetime, timedelta
import dask.array as da
import numpy as np
@@ -117,14 +118,14 @@ def __init__(self, filename, filename_info, filetype_info,
@property
def start_time(self):
"""Get observation start time."""
- base = datetime(2000, 1, 1, 12, 0, 0)
- return base + timedelta(seconds=self.nc.attrs["observation_start_time"])
+ base = dt.datetime(2000, 1, 1, 12, 0, 0)
+ return base + dt.timedelta(seconds=self.nc.attrs["observation_start_time"])
@property
def end_time(self):
"""Get observation end time."""
- base = datetime(2000, 1, 1, 12, 0, 0)
- return base + timedelta(seconds=self.nc.attrs["observation_end_time"])
+ base = dt.datetime(2000, 1, 1, 12, 0, 0)
+ return base + dt.timedelta(seconds=self.nc.attrs["observation_end_time"])
def get_area_def(self, dsid):
"""Get area definition for this file."""
diff --git a/satpy/readers/amsr2_l2_gaasp.py b/satpy/readers/amsr2_l2_gaasp.py
index 54a3769747..21442f6f3a 100644
--- a/satpy/readers/amsr2_l2_gaasp.py
+++ b/satpy/readers/amsr2_l2_gaasp.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""GCOM-W1 AMSR2 Level 2 files from the GAASP software.
GAASP output files are in the NetCDF4 format. Software is provided by NOAA
@@ -36,8 +37,8 @@
"""
+import datetime as dt
import logging
-from datetime import datetime
from typing import Tuple
import numpy as np
@@ -94,7 +95,7 @@ def start_time(self):
return self.filename_info["start_time"]
except KeyError:
time_str = self.nc.attrs["time_coverage_start"]
- return datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ")
+ return dt.datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ")
@property
def end_time(self):
@@ -103,7 +104,7 @@ def end_time(self):
return self.filename_info["end_time"]
except KeyError:
time_str = self.nc.attrs["time_coverage_end"]
- return datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ")
+ return dt.datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ")
@property
def sensor_names(self):
diff --git a/satpy/readers/ascat_l2_soilmoisture_bufr.py b/satpy/readers/ascat_l2_soilmoisture_bufr.py
index a5f77fd7eb..9619977e89 100644
--- a/satpy/readers/ascat_l2_soilmoisture_bufr.py
+++ b/satpy/readers/ascat_l2_soilmoisture_bufr.py
@@ -15,14 +15,15 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""ASCAT Soil moisture product reader for BUFR messages.
Based on the IASI L2 SO2 BUFR reader.
"""
+import datetime as dt
import logging
-from datetime import datetime
import dask.array as da
import numpy as np
@@ -82,7 +83,7 @@ def extract_msg_date_extremes(self, bufr, date_min=None, date_max=None):
minutes = np.resize(ec.codes_get_array(bufr, "minute"), size)
seconds = np.resize(ec.codes_get_array(bufr, "second"), size)
for year, month, day, hour, minute, second in zip(years, months, days, hours, minutes, seconds):
- time_stamp = datetime(year, month, day, hour, minute, second)
+ time_stamp = dt.datetime(year, month, day, hour, minute, second)
date_min = time_stamp if not date_min else min(date_min, time_stamp)
date_max = time_stamp if not date_max else max(date_max, time_stamp)
return date_min, date_max
diff --git a/satpy/readers/atms_l1b_nc.py b/satpy/readers/atms_l1b_nc.py
index 95d48b81cd..4b8587c824 100644
--- a/satpy/readers/atms_l1b_nc.py
+++ b/satpy/readers/atms_l1b_nc.py
@@ -12,6 +12,7 @@
#
# You should have received a copy of the GNU General Public License
# along with satpy. If not, see .
+
"""Advanced Technology Microwave Sounder (ATMS) Level 1B product reader.
The format is explained in the `ATMS L1B Product User Guide`_
@@ -21,8 +22,8 @@
"""
+import datetime as dt
import logging
-from datetime import datetime
from satpy.readers.netcdf_utils import NetCDF4FileHandler
@@ -43,12 +44,12 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs):
@property
def start_time(self):
"""Get observation start time."""
- return datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT)
+ return dt.datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT)
@property
def end_time(self):
"""Get observation end time."""
- return datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT)
+ return dt.datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT)
@property
def platform_name(self):
diff --git a/satpy/readers/avhrr_l1b_gaclac.py b/satpy/readers/avhrr_l1b_gaclac.py
index 96a13449f7..47f0d97283 100644
--- a/satpy/readers/avhrr_l1b_gaclac.py
+++ b/satpy/readers/avhrr_l1b_gaclac.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Reading and calibrating GAC and LAC AVHRR data.
Uses Pygac under the hood. See the `Pygac Documentation`_ for supported data
@@ -29,8 +30,8 @@
https://pygac.readthedocs.io/en/stable
"""
+import datetime as dt
import logging
-from datetime import date, datetime, timedelta
import dask.array as da
import numpy as np
@@ -93,14 +94,14 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417
self.first_valid_lat = None
self.last_valid_lat = None
self._start_time = filename_info["start_time"]
- self._end_time = datetime.combine(filename_info["start_time"].date(),
- filename_info["end_time"].time())
+ self._end_time = dt.datetime.combine(filename_info["start_time"].date(),
+ filename_info["end_time"].time())
if self._end_time < self._start_time:
- self._end_time += timedelta(days=1)
+ self._end_time += dt.timedelta(days=1)
self.platform_id = filename_info["platform_id"]
if len(self.platform_id) == 3:
- self.reader_kwargs["header_date"] = date(2000, 1, 1)
+ self.reader_kwargs["header_date"] = dt.date(2000, 1, 1)
if self._is_avhrr3():
if filename_info.get("transfer_mode") == "GHRR":
@@ -184,8 +185,8 @@ def get_dataset(self, key, info):
# Update start/end time using the actual scanline timestamps
times = self.reader.get_times()
- self._start_time = times[0].astype(datetime)
- self._end_time = times[-1].astype(datetime)
+ self._start_time = times[0].astype(dt.datetime)
+ self._end_time = times[-1].astype(dt.datetime)
# Select user-defined scanlines and/or strip invalid coordinates
if (self.start_line is not None or self.end_line is not None
@@ -223,8 +224,8 @@ def slice(self, data, times): # noqa: A003
"""
sliced = self._slice(data)
times = self._slice(times)
- self._start_time = times[0].astype(datetime)
- self._end_time = times[-1].astype(datetime)
+ self._start_time = times[0].astype(dt.datetime)
+ self._end_time = times[-1].astype(dt.datetime)
return sliced, times
def _slice(self, data):
diff --git a/satpy/readers/caliop_l2_cloud.py b/satpy/readers/caliop_l2_cloud.py
index 54dd100ffc..e088dfd853 100644
--- a/satpy/readers/caliop_l2_cloud.py
+++ b/satpy/readers/caliop_l2_cloud.py
@@ -16,12 +16,13 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
# type: ignore
+
"""Interface to CALIOP L2 HDF4 cloud products."""
+import datetime as dt
import logging
import os.path
import re
-from datetime import datetime
from pyhdf.SD import SD, SDC
@@ -56,7 +57,7 @@ def get_end_time(self):
mda_dict = self.filehandle.attributes()
core_mda = mda_dict["coremetadata"]
end_time_str = self.parse_metadata_string(core_mda)
- self._end_time = datetime.strptime(end_time_str, "%Y-%m-%dT%H:%M:%SZ")
+ self._end_time = dt.datetime.strptime(end_time_str, "%Y-%m-%dT%H:%M:%SZ")
@staticmethod
def parse_metadata_string(metadata_string):
diff --git a/satpy/readers/electrol_hrit.py b/satpy/readers/electrol_hrit.py
index c773850a73..62f99fb0a4 100644
--- a/satpy/readers/electrol_hrit.py
+++ b/satpy/readers/electrol_hrit.py
@@ -24,8 +24,8 @@
"""
+import datetime as dt
import logging
-from datetime import datetime
import numpy as np
import xarray as xr
@@ -299,7 +299,7 @@ def get_dataset(self, key, info):
def calibrate(self, data, calibration):
"""Calibrate the data."""
- tic = datetime.now()
+ tic = dt.datetime.now()
if calibration == "counts":
res = data
elif calibration in ["radiance", "brightness_temperature"]:
@@ -311,7 +311,7 @@ def calibrate(self, data, calibration):
res.attrs["standard_name"] = calibration
res.attrs["calibration"] = calibration
- logger.debug("Calibration time " + str(datetime.now() - tic))
+ logger.debug("Calibration time " + str(dt.datetime.now() - tic))
return res
@staticmethod
diff --git a/satpy/readers/epic_l1b_h5.py b/satpy/readers/epic_l1b_h5.py
index 3fb8f69c01..0d993b0b6c 100644
--- a/satpy/readers/epic_l1b_h5.py
+++ b/satpy/readers/epic_l1b_h5.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""File handler for DSCOVR EPIC L1B data in hdf5 format.
The ``epic_l1b_h5`` reader reads and calibrates EPIC L1B image data in hdf5 format.
@@ -37,8 +38,8 @@
"""
+import datetime as dt
import logging
-from datetime import datetime
import dask.array as da
import numpy as np
@@ -74,13 +75,13 @@ def __init__(self, filename, filename_info, filetype_info):
@property
def start_time(self):
"""Get the start time."""
- start_time = datetime.strptime(self.file_content["/attr/begin_time"], "%Y-%m-%d %H:%M:%S")
+ start_time = dt.datetime.strptime(self.file_content["/attr/begin_time"], "%Y-%m-%d %H:%M:%S")
return start_time
@property
def end_time(self):
"""Get the end time."""
- end_time = datetime.strptime(self.file_content["/attr/end_time"], "%Y-%m-%d %H:%M:%S")
+ end_time = dt.datetime.strptime(self.file_content["/attr/end_time"], "%Y-%m-%d %H:%M:%S")
return end_time
@staticmethod
diff --git a/satpy/readers/eum_base.py b/satpy/readers/eum_base.py
index 3cbbb46433..fe4579301d 100644
--- a/satpy/readers/eum_base.py
+++ b/satpy/readers/eum_base.py
@@ -17,7 +17,7 @@
# satpy. If not, see .
"""Utilities for EUMETSAT satellite data."""
-from datetime import datetime, timedelta
+import datetime as dt
import numpy as np
@@ -44,9 +44,9 @@ def timecds2datetime(tcds):
except (KeyError, ValueError):
pass
- reference = datetime(1958, 1, 1)
- delta = timedelta(days=days, milliseconds=milliseconds,
- microseconds=microseconds)
+ reference = dt.datetime(1958, 1, 1)
+ delta = dt.timedelta(days=days, milliseconds=milliseconds,
+ microseconds=microseconds)
return reference + delta
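
A worked example of the CDS conversion above, using a hypothetical record of 366 days and 3 600 000 ms past the 1958-01-01 epoch:

import datetime as dt

reference = dt.datetime(1958, 1, 1)
delta = dt.timedelta(days=366, milliseconds=3_600_000)
print(reference + delta)  # 1959-01-02 01:00:00
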
diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py
index 0c7b9fb8cc..1344549fa9 100644
--- a/satpy/readers/fci_l1c_nc.py
+++ b/satpy/readers/fci_l1c_nc.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Interface to MTG-FCI L1c NetCDF files.
This module defines the :class:`FCIL1cNCFileHandler` file handler, to
@@ -111,8 +112,8 @@
from __future__ import absolute_import, division, print_function, unicode_literals
+import datetime as dt
import logging
-from datetime import timedelta
from functools import cached_property
import dask.array as da
@@ -227,12 +228,13 @@ def rc_period_min(self):
def nominal_start_time(self):
"""Get nominal start time."""
rc_date = self.observation_start_time.replace(hour=0, minute=0, second=0, microsecond=0)
- return rc_date + timedelta(minutes=(self.filename_info["repeat_cycle_in_day"]-1)*self.rc_period_min)
+ return rc_date + dt.timedelta(
+ minutes=(self.filename_info["repeat_cycle_in_day"]-1)*self.rc_period_min)
@property
def nominal_end_time(self):
"""Get nominal end time."""
- return self.nominal_start_time + timedelta(minutes=self.rc_period_min)
+ return self.nominal_start_time + dt.timedelta(minutes=self.rc_period_min)
@property
def observation_start_time(self):
diff --git a/satpy/readers/fy4_base.py b/satpy/readers/fy4_base.py
index b0452a5735..160b5795dd 100644
--- a/satpy/readers/fy4_base.py
+++ b/satpy/readers/fy4_base.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Base reader for the L1 HDF data from the AGRI and GHI instruments aboard the FengYun-4A/B satellites.
The files read by this reader are described in the official Real Time Data Service:
@@ -23,8 +24,8 @@
"""
+import datetime as dt
import logging
-from datetime import datetime
import dask.array as da
import numpy as np
@@ -200,20 +201,20 @@ def start_time(self):
"""Get the start time."""
start_time = self["/attr/Observing Beginning Date"] + "T" + self["/attr/Observing Beginning Time"] + "Z"
try:
- return datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ")
+ return dt.datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ")
except ValueError:
# For some data there is no sub-second component
- return datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%SZ")
+ return dt.datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%SZ")
@property
def end_time(self):
"""Get the end time."""
end_time = self["/attr/Observing Ending Date"] + "T" + self["/attr/Observing Ending Time"] + "Z"
try:
- return datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ")
+ return dt.datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ")
except ValueError:
# For some data there is no sub-second component
- return datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%SZ")
+ return dt.datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%SZ")
def get_area_def(self, key):
"""Get the area definition."""
diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py
index 4f34c1fde8..6b3ceb5e0a 100644
--- a/satpy/readers/gerb_l2_hr_h5.py
+++ b/satpy/readers/gerb_l2_hr_h5.py
@@ -16,16 +16,14 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
-
"""GERB L2 HR HDF5 reader.
A reader for the Top of Atmosphere outgoing fluxes from the Geostationary Earth Radiation
Budget instrument aboard the Meteosat Second Generation satellites.
"""
-
+import datetime as dt
import logging
-from datetime import timedelta
from satpy.readers.hdf5_utils import HDF5FileHandler
from satpy.resample import get_area_def
@@ -55,7 +53,7 @@ class GERB_HR_FileHandler(HDF5FileHandler):
@property
def end_time(self):
"""Get end time."""
- return self.start_time + timedelta(minutes=15)
+ return self.start_time + dt.timedelta(minutes=15)
@property
def start_time(self):
diff --git a/satpy/readers/ghrsst_l2.py b/satpy/readers/ghrsst_l2.py
index 6c4005623e..d407d49f14 100644
--- a/satpy/readers/ghrsst_l2.py
+++ b/satpy/readers/ghrsst_l2.py
@@ -14,12 +14,13 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Reader for the GHRSST level-2 formatted data."""
+import datetime as dt
import os
import tarfile
from contextlib import suppress
-from datetime import datetime
from functools import cached_property
import xarray as xr
@@ -39,9 +40,9 @@ def __init__(self, filename, filename_info, filetype_info, engine=None):
self._engine = engine
self._tarfile = None
- self.filename_info["start_time"] = datetime.strptime(
+ self.filename_info["start_time"] = dt.datetime.strptime(
self.nc.start_time, "%Y%m%dT%H%M%SZ")
- self.filename_info["end_time"] = datetime.strptime(
+ self.filename_info["end_time"] = dt.datetime.strptime(
self.nc.stop_time, "%Y%m%dT%H%M%SZ")
@cached_property
diff --git a/satpy/readers/ghrsst_l3c_sst.py b/satpy/readers/ghrsst_l3c_sst.py
index ef1dd220a9..8960275995 100644
--- a/satpy/readers/ghrsst_l3c_sst.py
+++ b/satpy/readers/ghrsst_l3c_sst.py
@@ -16,10 +16,11 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
# type: ignore
+
"""An OSISAF SST reader for the netCDF GHRSST format."""
+import datetime as dt
import logging
-from datetime import datetime
import numpy as np
@@ -37,7 +38,7 @@ class GHRSST_OSISAFL2(NetCDF4FileHandler):
"""Reader for the OSISAF SST GHRSST format."""
def _parse_datetime(self, datestr):
- return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ")
+ return dt.datetime.strptime(datestr, "%Y%m%dT%H%M%SZ")
def get_area_def(self, area_id, area_info):
"""Override abstract baseclass method."""
diff --git a/satpy/readers/glm_l2.py b/satpy/readers/glm_l2.py
index ceb11a33bc..7f1e77cd50 100644
--- a/satpy/readers/glm_l2.py
+++ b/satpy/readers/glm_l2.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Geostationary Lightning Mapper reader for the Level 2 format from glmtools.
More information about `glmtools` and the files it produces can be found on
@@ -23,8 +24,9 @@
https://github.com/deeplycloudy/glmtools
"""
+
+import datetime as dt
import logging
-from datetime import datetime
import numpy as np
@@ -52,12 +54,12 @@ def sensor(self):
@property
def start_time(self):
"""Start time of the current file's observations."""
- return datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%SZ")
+ return dt.datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%SZ")
@property
def end_time(self):
"""End time of the current file's observations."""
- return datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%SZ")
+ return dt.datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%SZ")
def _is_category_product(self, data_arr):
# if after autoscaling we still have an integer
diff --git a/satpy/readers/goci2_l2_nc.py b/satpy/readers/goci2_l2_nc.py
index a79d582544..b60a3e3876 100644
--- a/satpy/readers/goci2_l2_nc.py
+++ b/satpy/readers/goci2_l2_nc.py
@@ -15,13 +15,14 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Reader for GK-2B GOCI-II L2 products from NOSC.
For more information about the data, see:
"""
+import datetime as dt
import logging
-from datetime import datetime
import xarray as xr
@@ -65,14 +66,14 @@ def _merge_navigation_data(self, filetype):
@property
def start_time(self):
"""Start timestamp of the dataset."""
- dt = self.attrs["observation_start_time"]
- return datetime.strptime(dt, "%Y%m%d_%H%M%S")
+ date = self.attrs["observation_start_time"]
+ return dt.datetime.strptime(date, "%Y%m%d_%H%M%S")
@property
def end_time(self):
"""End timestamp of the dataset."""
- dt = self.attrs["observation_end_time"]
- return datetime.strptime(dt, "%Y%m%d_%H%M%S")
+ date = self.attrs["observation_end_time"]
+ return dt.datetime.strptime(date, "%Y%m%d_%H%M%S")
def get_dataset(self, key, info):
"""Load a dataset."""
diff --git a/satpy/readers/goes_imager_hrit.py b/satpy/readers/goes_imager_hrit.py
index d90ebb4a72..401274debb 100644
--- a/satpy/readers/goes_imager_hrit.py
+++ b/satpy/readers/goes_imager_hrit.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""GOES HRIT format reader.
References:
@@ -24,8 +25,8 @@
"""
+import datetime as dt
import logging
-from datetime import datetime, timedelta
import dask.array as da
import numpy as np
@@ -116,21 +117,21 @@ class CalibrationError(Exception):
("msecs", "u1")])
-def make_sgs_time(sgs_time_array: ArrayLike) -> datetime:
+def make_sgs_time(sgs_time_array: ArrayLike) -> dt.datetime:
"""Make sgs time."""
epoch_year = _epoch_year_from_sgs_time(sgs_time_array)
doy_offset = _epoch_doy_offset_from_sgs_time(sgs_time_array)
return epoch_year + doy_offset
-def _epoch_year_from_sgs_time(sgs_time_array: ArrayLike) -> datetime:
+def _epoch_year_from_sgs_time(sgs_time_array: ArrayLike) -> dt.datetime:
century = sgs_time_array["century"].astype(np.int64)
year = sgs_time_array["year"].astype(np.int64)
year = ((century >> 4) * 1000 + (century & 15) * 100 + (year >> 4) * 10 + (year & 15))
- return datetime(int(year), 1, 1)
+ return dt.datetime(int(year), 1, 1)
-def _epoch_doy_offset_from_sgs_time(sgs_time_array: ArrayLike) -> timedelta:
+def _epoch_doy_offset_from_sgs_time(sgs_time_array: ArrayLike) -> dt.timedelta:
doy1 = sgs_time_array["doy1"].astype(np.int64)
doy_hours = sgs_time_array["doy_hours"].astype(np.int64)
hours_mins = sgs_time_array["hours_mins"].astype(np.int64)
@@ -143,7 +144,7 @@ def _epoch_doy_offset_from_sgs_time(sgs_time_array: ArrayLike) -> timedelta:
mins = ((hours_mins & 15) * 10 + (mins_secs >> 4))
secs = ((mins_secs & 15) * 10 + (secs_msecs >> 4))
msecs = ((secs_msecs & 15) * 100 + (msecs >> 4) * 10 + (msecs & 15))
- return timedelta(
+ return dt.timedelta(
days=int(doy - 1),
hours=int(hours),
minutes=int(mins),
@@ -426,7 +427,7 @@ def _get_calibration_params(self):
def calibrate(self, data, calibration):
"""Calibrate the data."""
logger.debug("Calibration")
- tic = datetime.now()
+ tic = dt.datetime.now()
if calibration == "counts":
return data
if calibration == "reflectance":
@@ -437,7 +438,7 @@ def calibrate(self, data, calibration):
raise NotImplementedError("Don't know how to calibrate to " +
str(calibration))
- logger.debug("Calibration time " + str(datetime.now() - tic))
+ logger.debug("Calibration time " + str(dt.datetime.now() - tic))
return res
def _calibrate(self, data):
diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py
index 1b88919886..2916a36436 100644
--- a/satpy/readers/goes_imager_nc.py
+++ b/satpy/readers/goes_imager_nc.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Reader for GOES 8-15 imager data in netCDF format.
Supports netCDF files from both NOAA-CLASS and EUMETSAT.
@@ -223,10 +224,10 @@
.. _[SCHED-E]: http://www.ospo.noaa.gov/Operations/GOES/east/imager-routine.html
"""
+import datetime as dt
import logging
import re
from abc import abstractmethod
-from datetime import datetime, timedelta
import numpy as np
import pyresample.geometry
@@ -593,11 +594,11 @@
} # (nlines, ncols)
SCAN_DURATION = {
- FULL_DISC: timedelta(minutes=26),
- NORTH_HEMIS_WEST: timedelta(minutes=10, seconds=5),
- SOUTH_HEMIS_WEST: timedelta(minutes=6, seconds=54),
- NORTH_HEMIS_EAST: timedelta(minutes=14, seconds=15),
- SOUTH_HEMIS_EAST: timedelta(minutes=4, seconds=49)
+ FULL_DISC: dt.timedelta(minutes=26),
+ NORTH_HEMIS_WEST: dt.timedelta(minutes=10, seconds=5),
+ SOUTH_HEMIS_WEST: dt.timedelta(minutes=6, seconds=54),
+ NORTH_HEMIS_EAST: dt.timedelta(minutes=14, seconds=15),
+ SOUTH_HEMIS_EAST: dt.timedelta(minutes=4, seconds=49)
} # Source: [SCHED-W], [SCHED-E]
@@ -730,10 +731,15 @@ def _get_area_def_uniform_sampling(self, lon0, channel):
@property
def start_time(self):
"""Start timestamp of the dataset."""
- dt = self.nc["time"].dt
- return datetime(year=int(dt.year.item()), month=int(dt.month.item()), day=int(dt.day.item()),
- hour=int(dt.hour.item()), minute=int(dt.minute.item()),
- second=int(dt.second.item()), microsecond=int(dt.microsecond.item()))
+ date = self.nc["time"].dt
+ return dt.datetime(
+ year=int(date.year.item()),
+ month=int(date.month.item()),
+ day=int(date.day.item()),
+ hour=int(date.hour.item()),
+ minute=int(date.minute.item()),
+ second=int(date.second.item()),
+ microsecond=int(date.microsecond.item()))
@property
def end_time(self):
@@ -1018,11 +1024,11 @@ def get_dataset(self, key, info):
elif "latitude" in key["name"]:
data = self.geo_data["lat"]
else:
- tic = datetime.now()
+ tic = dt.datetime.now()
data = self.calibrate(self.nc["data"].isel(time=0),
calibration=key["calibration"],
channel=key["name"])
- logger.debug("Calibration time: {}".format(datetime.now() - tic))
+ logger.debug("Calibration time: {}".format(dt.datetime.now() - tic))
# Mask space pixels
data = data.where(self.meta["earth_mask"])
@@ -1076,11 +1082,11 @@ def get_dataset(self, key, info):
"""Load dataset designated by the given key from file."""
logger.debug("Reading dataset {}".format(key["name"]))
- tic = datetime.now()
+ tic = dt.datetime.now()
data = self.calibrate(self.nc["data"].isel(time=0),
calibration=key["calibration"],
channel=key["name"])
- logger.debug("Calibration time: {}".format(datetime.now() - tic))
+ logger.debug("Calibration time: {}".format(dt.datetime.now() - tic))
# Mask space pixels
data = data.where(self.meta["earth_mask"])
diff --git a/satpy/readers/gpm_imerg.py b/satpy/readers/gpm_imerg.py
index 7bc65ac4c6..4463be31b9 100644
--- a/satpy/readers/gpm_imerg.py
+++ b/satpy/readers/gpm_imerg.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Reader for GPM imerg data on half-hourly timesteps.
References:
@@ -23,8 +24,8 @@
"""
+import datetime as dt
import logging
-from datetime import datetime
import dask.array as da
import h5py
@@ -49,22 +50,22 @@ def __init__(self, filename, filename_info, filetype_info):
@property
def start_time(self):
"""Find the start time from filename info."""
- return datetime(self.finfo["date"].year,
- self.finfo["date"].month,
- self.finfo["date"].day,
- self.finfo["start_time"].hour,
- self.finfo["start_time"].minute,
- self.finfo["start_time"].second)
+ return dt.datetime(self.finfo["date"].year,
+ self.finfo["date"].month,
+ self.finfo["date"].day,
+ self.finfo["start_time"].hour,
+ self.finfo["start_time"].minute,
+ self.finfo["start_time"].second)
@property
def end_time(self):
"""Find the end time from filename info."""
- return datetime(self.finfo["date"].year,
- self.finfo["date"].month,
- self.finfo["date"].day,
- self.finfo["end_time"].hour,
- self.finfo["end_time"].minute,
- self.finfo["end_time"].second)
+ return dt.datetime(self.finfo["date"].year,
+ self.finfo["date"].month,
+ self.finfo["date"].day,
+ self.finfo["end_time"].hour,
+ self.finfo["end_time"].minute,
+ self.finfo["end_time"].second)
def get_dataset(self, dataset_id, ds_info):
"""Load a dataset."""
diff --git a/satpy/readers/grib.py b/satpy/readers/grib.py
index dadccce77a..4372226c12 100644
--- a/satpy/readers/grib.py
+++ b/satpy/readers/grib.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Generic Reader for GRIB2 files.
Currently this reader depends on the `pygrib` python package. The `eccodes`
@@ -22,8 +23,9 @@
of writing.
"""
+
+import datetime as dt
import logging
-from datetime import datetime
import dask.array as da
import numpy as np
@@ -105,7 +107,7 @@ def _create_dataset_ids(self, keys):
@staticmethod
def _convert_datetime(msg, date_key, time_key, date_format="%Y%m%d%H%M"):
date_str = "{:d}{:04d}".format(msg[date_key], msg[time_key])
- return datetime.strptime(date_str, date_format)
+ return dt.datetime.strptime(date_str, date_format)
@property
def start_time(self):
diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py
index 37fe714435..3fd920c01f 100644
--- a/satpy/readers/hdfeos_base.py
+++ b/satpy/readers/hdfeos_base.py
@@ -15,15 +15,16 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Base HDF-EOS reader."""
from __future__ import annotations
+import datetime as dt
import logging
import re
from ast import literal_eval
from contextlib import suppress
-from datetime import datetime
import numpy as np
import xarray as xr
@@ -182,7 +183,7 @@ def start_time(self):
try:
date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGDATE"]["VALUE"] + " " +
self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGTIME"]["VALUE"])
- return datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f")
+ return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f")
except KeyError:
return self._start_time_from_filename()
@@ -195,7 +196,7 @@ def end_time(self):
try:
date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGDATE"]["VALUE"] + " " +
self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGTIME"]["VALUE"])
- return datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f")
+ return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f")
except KeyError:
return self.start_time
diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py
index bf53d84a65..d0b9ee44db 100644
--- a/satpy/readers/hrit_base.py
+++ b/satpy/readers/hrit_base.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""HRIT/LRIT format reader.
This module is the base module for all HRIT-based formats. Here, you will find
@@ -28,10 +29,10 @@
"""
+import datetime as dt
import logging
import os
from contextlib import contextmanager, nullcontext
-from datetime import timedelta
from io import BytesIO
from subprocess import PIPE, Popen # nosec B404
@@ -176,7 +177,7 @@ def __init__(self, filename, filename_info, filetype_info, hdr_info):
self.hdr_info = hdr_info
self._get_hd(self.hdr_info)
self._start_time = filename_info["start_time"]
- self._end_time = self._start_time + timedelta(minutes=15)
+ self._end_time = self._start_time + dt.timedelta(minutes=15)
def _get_hd(self, hdr_info):
"""Open the file, read and get the basic file header info and set the mda dictionary."""
diff --git a/satpy/readers/hrit_jma.py b/satpy/readers/hrit_jma.py
index 0c88faf46b..bfdd5da93b 100644
--- a/satpy/readers/hrit_jma.py
+++ b/satpy/readers/hrit_jma.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""HRIT format reader for JMA data.
Introduction
@@ -107,8 +108,8 @@
.. _AHI sample data: https://www.data.jma.go.jp/mscweb/en/himawari89/space_segment/sample_hrit.html
"""
+import datetime as dt
import logging
-from datetime import datetime
import numpy as np
import xarray as xr
@@ -453,7 +454,7 @@ def _interp(arr, cal):
def calibrate(self, data, calibration):
"""Calibrate the data."""
- tic = datetime.now()
+ tic = dt.datetime.now()
if calibration == "counts":
return data
@@ -466,17 +467,17 @@ def calibrate(self, data, calibration):
dims=data.dims, attrs=data.attrs,
coords=data.coords)
res = res.where(data < 65535)
- logger.debug("Calibration time " + str(datetime.now() - tic))
+ logger.debug("Calibration time " + str(dt.datetime.now() - tic))
return res
@property
def start_time(self):
"""Get start time of the scan."""
if self._use_acquisition_time_as_start_time:
- return self.acq_time[0].astype(datetime)
+ return self.acq_time[0].astype(dt.datetime)
return self._start_time
@property
def end_time(self):
"""Get end time of the scan."""
- return self.acq_time[-1].astype(datetime)
+ return self.acq_time[-1].astype(dt.datetime)
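
The astype(dt.datetime) calls above turn numpy datetime64 acquisition times back into stdlib datetimes; a small sketch of that conversion (with the caveat that nanosecond-resolution values cast to integers rather than datetime objects):

import datetime as dt
import numpy as np

acq = np.datetime64("2024-01-01T12:00:00", "us")
print(acq.astype(dt.datetime))  # 2024-01-01 12:00:00
# Nanosecond resolution cannot be represented by datetime, so the cast
# falls back to an integer count of nanoseconds since the Unix epoch:
print(np.datetime64("2024-01-01T12:00:00", "ns").astype(dt.datetime))  # 1704110400000000000
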
diff --git a/satpy/readers/hrpt.py b/satpy/readers/hrpt.py
index c4862e8169..cac8b9cd3d 100644
--- a/satpy/readers/hrpt.py
+++ b/satpy/readers/hrpt.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Reading and calibrating hrpt avhrr data.
Todo:
@@ -29,8 +30,8 @@
"""
+import datetime as dt
import logging
-from datetime import datetime
import dask.array as da
import numpy as np
@@ -130,7 +131,7 @@ def __init__(self, filename, filename_info, filetype_info):
self.channels = {i: None for i in AVHRR_CHANNEL_NAMES}
self.units = {i: "counts" for i in AVHRR_CHANNEL_NAMES}
- self.year = filename_info.get("start_time", datetime.utcnow()).year
+ self.year = filename_info.get("start_time", dt.datetime.utcnow()).year
@cached_property
def times(self):
@@ -272,10 +273,10 @@ def _get_avhrr_tiepoints(self, scan_points, scanline_nb):
def start_time(self):
"""Get the start time."""
return time_seconds(self._data["timecode"][0, np.newaxis, :],
- self.year).astype(datetime)[0]
+ self.year).astype(dt.datetime)[0]
@property
def end_time(self):
"""Get the end time."""
return time_seconds(self._data["timecode"][-1, np.newaxis, :],
- self.year).astype(datetime)[0]
+ self.year).astype(dt.datetime)[0]
diff --git a/satpy/readers/hsaf_grib.py b/satpy/readers/hsaf_grib.py
index a041bf0c73..b8238f17a5 100644
--- a/satpy/readers/hsaf_grib.py
+++ b/satpy/readers/hsaf_grib.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""A reader for files produced by the Hydrology SAF.
Currently this reader depends on the `pygrib` python package. The `eccodes`
@@ -22,8 +23,9 @@
of writing.
"""
+
+import datetime as dt
import logging
-from datetime import datetime, timedelta
import dask.array as da
import numpy as np
@@ -68,7 +70,7 @@ def __init__(self, filename, filename_info, filetype_info):
@staticmethod
def _get_datetime(msg):
dtstr = str(msg["dataDate"]) + str(msg["dataTime"]).zfill(4)
- return datetime.strptime(dtstr, "%Y%m%d%H%M")
+ return dt.datetime.strptime(dtstr, "%Y%m%d%H%M")
@property
def analysis_time(self):
@@ -151,7 +153,7 @@ def get_dataset(self, ds_id, ds_info):
flen = len(self.filename)
timedelt = self.filename[flen-10:flen-8]
ds_info["start_time"] = (ds_info["end_time"] -
- timedelta(hours=int(timedelt)))
+ dt.timedelta(hours=int(timedelt)))
else:
ds_info["start_time"] = ds_info["end_time"]
fill = msg["missingValue"]
diff --git a/satpy/readers/hsaf_h5.py b/satpy/readers/hsaf_h5.py
index 478b91ce2d..25b42ec6a5 100644
--- a/satpy/readers/hsaf_h5.py
+++ b/satpy/readers/hsaf_h5.py
@@ -15,9 +15,11 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""A reader for HDF5 Snow Cover (SC) file produced by the Hydrology SAF."""
+
+import datetime as dt
import logging
-from datetime import timedelta
import dask.array as da
import h5py
@@ -47,7 +49,7 @@ def __init__(self, filename, filename_info, filetype_info):
@property
def end_time(self):
"""Get end time."""
- return self.start_time + timedelta(hours=23, minutes=59, seconds=59)
+ return self.start_time + dt.timedelta(hours=23, minutes=59, seconds=59)
@property
def start_time(self):
diff --git a/satpy/readers/hy2_scat_l2b_h5.py b/satpy/readers/hy2_scat_l2b_h5.py
index 929d7dc934..dae6e44bf6 100644
--- a/satpy/readers/hy2_scat_l2b_h5.py
+++ b/satpy/readers/hy2_scat_l2b_h5.py
@@ -21,7 +21,7 @@
Also handle the HDF5 files from NSOAS, based on a file example.
"""
-from datetime import datetime
+import datetime as dt
import numpy as np
import xarray as xr
@@ -35,14 +35,14 @@ class HY2SCATL2BH5FileHandler(HDF5FileHandler):
@property
def start_time(self):
"""Time for first observation."""
- return datetime.strptime(self["/attr/Range_Beginning_Time"],
- "%Y%m%dT%H:%M:%S")
+ return dt.datetime.strptime(self["/attr/Range_Beginning_Time"],
+ "%Y%m%dT%H:%M:%S")
@property
def end_time(self):
"""Time for final observation."""
- return datetime.strptime(self["/attr/Range_Ending_Time"],
- "%Y%m%dT%H:%M:%S")
+ return dt.datetime.strptime(self["/attr/Range_Ending_Time"],
+ "%Y%m%dT%H:%M:%S")
@property
def platform_name(self):
diff --git a/satpy/readers/iasi_l2_so2_bufr.py b/satpy/readers/iasi_l2_so2_bufr.py
index b5088aa041..d66edb7995 100644
--- a/satpy/readers/iasi_l2_so2_bufr.py
+++ b/satpy/readers/iasi_l2_so2_bufr.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
r"""IASI L2 SO2 BUFR format reader.
Introduction
@@ -84,8 +85,8 @@
# TDB: this reader is based on iasi_l2.py and seviri_l2_bufr.py
+import datetime as dt
import logging
-from datetime import datetime
import dask.array as da
import numpy as np
@@ -154,7 +155,7 @@ def get_start_end_date(self):
minute = ec.codes_get(bufr, "minute")
second = ec.codes_get(bufr, "second")
- obs_time = datetime(year=year, month=month, day=day, hour=hour, minute=minute, second=second)
+ obs_time = dt.datetime(year=year, month=month, day=day, hour=hour, minute=minute, second=second)
if i == 0:
start_time = obs_time
diff --git a/satpy/readers/ici_l1b_nc.py b/satpy/readers/ici_l1b_nc.py
index 7adef62e4b..a5fd23c23b 100644
--- a/satpy/readers/ici_l1b_nc.py
+++ b/satpy/readers/ici_l1b_nc.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License
# along with satpy. If not, see .
+
"""EUMETSAT EPS-SG Ice Cloud Imager (ICI) Level 1B products reader.
The format is explained in the
@@ -26,8 +27,8 @@
"""
+import datetime as dt
import logging
-from datetime import datetime
from enum import Enum
from functools import cached_property
@@ -77,12 +78,12 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs):
def start_time(self):
"""Get observation start time."""
try:
- start_time = datetime.strptime(
+ start_time = dt.datetime.strptime(
self["/attr/sensing_start_time_utc"],
"%Y%m%d%H%M%S.%f",
)
except ValueError:
- start_time = datetime.strptime(
+ start_time = dt.datetime.strptime(
self["/attr/sensing_start_time_utc"],
"%Y-%m-%d %H:%M:%S.%f",
)
@@ -92,12 +93,12 @@ def start_time(self):
def end_time(self):
"""Get observation end time."""
try:
- end_time = datetime.strptime(
+ end_time = dt.datetime.strptime(
self["/attr/sensing_end_time_utc"],
"%Y%m%d%H%M%S.%f",
)
except ValueError:
- end_time = datetime.strptime(
+ end_time = dt.datetime.strptime(
self["/attr/sensing_end_time_utc"],
"%Y-%m-%d %H:%M:%S.%f",
)
diff --git a/satpy/readers/insat3d_img_l1b_h5.py b/satpy/readers/insat3d_img_l1b_h5.py
index 205f4d17b2..41ddee5df6 100644
--- a/satpy/readers/insat3d_img_l1b_h5.py
+++ b/satpy/readers/insat3d_img_l1b_h5.py
@@ -1,6 +1,7 @@
"""File handler for Insat 3D L1B data in hdf5 format."""
+
+import datetime as dt
from contextlib import suppress
-from datetime import datetime
from functools import cached_property
import dask.array as da
@@ -120,13 +121,15 @@ class Insat3DIMGL1BH5FileHandler(BaseFileHandler):
@property
def start_time(self):
"""Get the start time."""
- start_time = datetime.strptime(self.datatree.attrs["Acquisition_Start_Time"], "%d-%b-%YT%H:%M:%S")
+ start_time = dt.datetime.strptime(
+ self.datatree.attrs["Acquisition_Start_Time"], "%d-%b-%YT%H:%M:%S")
return start_time
@property
def end_time(self):
"""Get the end time."""
- end_time = datetime.strptime(self.datatree.attrs["Acquisition_End_Time"], "%d-%b-%YT%H:%M:%S")
+ end_time = dt.datetime.strptime(
+ self.datatree.attrs["Acquisition_End_Time"], "%d-%b-%YT%H:%M:%S")
return end_time
@cached_property
diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py
index 7675bd1624..1ccb895c25 100644
--- a/satpy/readers/mersi_l1b.py
+++ b/satpy/readers/mersi_l1b.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Reader for the FY-3D MERSI-2 L1B file format.
The files for this reader are HDF5 and come in four varieties; band data
@@ -24,7 +25,8 @@
platforms as well assuming no file format changes.
"""
-from datetime import datetime
+
+import datetime as dt
import dask.array as da
import numpy as np
@@ -44,7 +46,7 @@ def _strptime(self, date_attr, time_attr):
time = self[time_attr] # "18:27:39.720"
# cuts off microseconds because of unknown meaning
# is .720 == 720 microseconds or 720000 microseconds
- return datetime.strptime(date + " " + time.split(".")[0], "%Y-%m-%d %H:%M:%S")
+ return dt.datetime.strptime(date + " " + time.split(".")[0], "%Y-%m-%d %H:%M:%S")
@property
def start_time(self):
diff --git a/satpy/readers/msu_gsa_l1b.py b/satpy/readers/msu_gsa_l1b.py
index c4e45aa333..4a4ff3518f 100644
--- a/satpy/readers/msu_gsa_l1b.py
+++ b/satpy/readers/msu_gsa_l1b.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Reader for the Arctica-M1 MSU-GS/A data.
The files for this reader are HDF5 and contain channel data at 1km resolution
@@ -24,7 +25,8 @@
This reader was tested on sample data provided by EUMETSAT.
"""
-from datetime import datetime
+
+import datetime as dt
import numpy as np
@@ -38,7 +40,7 @@ class MSUGSAFileHandler(HDF5FileHandler):
def start_time(self):
"""Time for timeslot scan start."""
dtstr = self["/attr/timestamp_without_timezone"]
- return datetime.strptime(dtstr, "%Y-%m-%dT%H:%M:%S")
+ return dt.datetime.strptime(dtstr, "%Y-%m-%dT%H:%M:%S")
@property
def satellite_altitude(self):
diff --git a/satpy/readers/mws_l1b.py b/satpy/readers/mws_l1b.py
index 372a59ac37..1dc076e68f 100644
--- a/satpy/readers/mws_l1b.py
+++ b/satpy/readers/mws_l1b.py
@@ -1,24 +1,25 @@
# Copyright (c) 2022 Pytroll Developers
-
+#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
-
+#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
-
+#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
+
"""Reader for the EPS-SG Microwave Sounder (MWS) level-1b data.
Documentation: https://www.eumetsat.int/media/44139
"""
+import datetime as dt
import logging
-from datetime import datetime
import dask.array as da
import numpy as np
@@ -101,13 +102,13 @@ def __init__(self, filename, filename_info, filetype_info):
@property
def start_time(self):
"""Get start time."""
- return datetime.strptime(self["/attr/sensing_start_time_utc"],
+ return dt.datetime.strptime(self["/attr/sensing_start_time_utc"],
"%Y-%m-%d %H:%M:%S.%f")
@property
def end_time(self):
"""Get end time."""
- return datetime.strptime(self["/attr/sensing_end_time_utc"],
+ return dt.datetime.strptime(self["/attr/sensing_end_time_utc"],
"%Y-%m-%d %H:%M:%S.%f")
@property
diff --git a/satpy/readers/nwcsaf_msg2013_hdf5.py b/satpy/readers/nwcsaf_msg2013_hdf5.py
index 40a6441655..a3bc9ca168 100644
--- a/satpy/readers/nwcsaf_msg2013_hdf5.py
+++ b/satpy/readers/nwcsaf_msg2013_hdf5.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Reader for the old NWCSAF/Geo (v2013 and earlier) cloud product format.
References:
@@ -27,8 +28,8 @@
"""
+import datetime as dt
import logging
-from datetime import datetime
import h5py
import numpy as np
@@ -127,7 +128,7 @@ def get_area_def(self, dsid):
@property
def start_time(self):
"""Return the start time of the object."""
- return datetime.strptime(self.file_content["/attr/IMAGE_ACQUISITION_TIME"], "%Y%m%d%H%M")
+ return dt.datetime.strptime(self.file_content["/attr/IMAGE_ACQUISITION_TIME"], "%Y%m%d%H%M")
def get_area_extent(cfac, lfac, coff, loff, numcols, numlines):
diff --git a/satpy/readers/nwcsaf_nc.py b/satpy/readers/nwcsaf_nc.py
index e9809bdce5..64a284200d 100644
--- a/satpy/readers/nwcsaf_nc.py
+++ b/satpy/readers/nwcsaf_nc.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Nowcasting SAF common PPS&MSG NetCDF/CF format reader.
References:
@@ -22,11 +23,11 @@
"""
+import datetime as dt
import functools
import logging
import os
from contextlib import suppress
-from datetime import datetime
import dask.array as da
import numpy as np
@@ -435,9 +436,9 @@ def read_nwcsaf_time(time_value):
try:
# MSG:
try:
- return datetime.strptime(time_value, "%Y-%m-%dT%H:%M:%SZ")
+ return dt.datetime.strptime(time_value, "%Y-%m-%dT%H:%M:%SZ")
except TypeError: # Remove this in summer 2024 (this is not needed since h5netcdf 0.14)
- return datetime.strptime(time_value.astype(str), "%Y-%m-%dT%H:%M:%SZ")
+ return dt.datetime.strptime(time_value.astype(str), "%Y-%m-%dT%H:%M:%SZ")
except ValueError:
# PPS:
- return datetime.strptime(time_value, "%Y%m%dT%H%M%S%fZ")
+ return dt.datetime.strptime(time_value, "%Y%m%dT%H%M%S%fZ")
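A standalone sketch of the fallback chain above, using sample MSG- and PPS-style strings; the TypeError branch covers the bytes-like scalars that older h5netcdf versions handed back:

    import datetime as dt

    import numpy as np


    def read_time(value):
        """Illustrative re-implementation of read_nwcsaf_time's fallback chain."""
        try:
            try:
                # MSG products store an ISO-like string, e.g. "2019-01-01T12:00:00Z".
                return dt.datetime.strptime(value, "%Y-%m-%dT%H:%M:%SZ")
            except TypeError:
                # Bytes-like scalar from older h5netcdf: convert to str first.
                return dt.datetime.strptime(value.astype(str), "%Y-%m-%dT%H:%M:%SZ")
        except ValueError:
            # PPS products use a compact form with a fractional-seconds field.
            return dt.datetime.strptime(value, "%Y%m%dT%H%M%S%fZ")


    print(read_time("2019-01-01T12:00:00Z"))
    print(read_time(np.bytes_("2019-01-01T12:00:00Z")))
    print(read_time("20190101T120000123Z"))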
diff --git a/satpy/readers/oceancolorcci_l3_nc.py b/satpy/readers/oceancolorcci_l3_nc.py
index 075e885b36..d38e91c9e6 100644
--- a/satpy/readers/oceancolorcci_l3_nc.py
+++ b/satpy/readers/oceancolorcci_l3_nc.py
@@ -23,8 +23,9 @@
are supported and both the merged product files (OC_PRODUCTS) and single product (RRS, CHLOR_A, IOP, K_490) are
supported.
"""
+
+import datetime as dt
import logging
-from datetime import datetime
import dask.array as da
import numpy as np
@@ -41,7 +42,7 @@ class OCCCIFileHandler(NetCDF4FileHandler):
@staticmethod
def _parse_datetime(datestr):
"""Parse datetime."""
- return datetime.strptime(datestr, "%Y%m%d%H%MZ")
+ return dt.datetime.strptime(datestr, "%Y%m%d%H%MZ")
@property
def start_time(self):
diff --git a/satpy/readers/omps_edr.py b/satpy/readers/omps_edr.py
index 5421ae2cd2..12ef7d0ce4 100644
--- a/satpy/readers/omps_edr.py
+++ b/satpy/readers/omps_edr.py
@@ -15,16 +15,18 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Interface to OMPS EDR format."""
+
+import datetime as dt
import logging
-from datetime import datetime, timedelta
import numpy as np
from satpy.readers.hdf5_utils import HDF5FileHandler
-NO_DATE = datetime(1958, 1, 1)
-EPSILON_TIME = timedelta(days=2)
+NO_DATE = dt.datetime(1958, 1, 1)
+EPSILON_TIME = dt.timedelta(days=2)
LOG = logging.getLogger(__name__)
diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py
index 56d4773a43..1356471524 100644
--- a/satpy/readers/osisaf_l3_nc.py
+++ b/satpy/readers/osisaf_l3_nc.py
@@ -15,8 +15,8 @@
# satpy. If not, see .
"""A reader for OSI-SAF level 3 products in netCDF format."""
+import datetime as dt
import logging
-from datetime import datetime
from satpy.readers.netcdf_utils import NetCDF4FileHandler
@@ -197,7 +197,7 @@ def _get_platname(self):
def _parse_datetime(datestr):
for dt_format in ("%Y-%m-%d %H:%M:%S","%Y%m%dT%H%M%SZ", "%Y-%m-%dT%H:%M:%SZ"):
try:
- return datetime.strptime(datestr, dt_format)
+ return dt.datetime.strptime(datestr, dt_format)
except ValueError:
continue
raise ValueError(f"Unsupported date format: {datestr}")
diff --git a/satpy/readers/scatsat1_l2b.py b/satpy/readers/scatsat1_l2b.py
index 886ce458b3..d14665759d 100644
--- a/satpy/readers/scatsat1_l2b.py
+++ b/satpy/readers/scatsat1_l2b.py
@@ -17,7 +17,7 @@
# type: ignore
"""ScatSat-1 L2B Reader, distributed by Eumetsat in HDF5 format."""
-from datetime import datetime
+import datetime as dt
import h5py
@@ -34,8 +34,10 @@ def __init__(self, filename, filename_info, filetype_info):
self.h5f = h5py.File(self.filename, "r")
h5data = self.h5f["science_data"]
- self.filename_info["start_time"] = datetime.strptime(h5data.attrs["Range Beginning Date"], "%Y-%jT%H:%M:%S.%f")
- self.filename_info["end_time"] = datetime.strptime(h5data.attrs["Range Ending Date"], "%Y-%jT%H:%M:%S.%f")
+ self.filename_info["start_time"] = dt.datetime.strptime(
+ h5data.attrs["Range Beginning Date"], "%Y-%jT%H:%M:%S.%f")
+ self.filename_info["end_time"] = dt.datetime.strptime(
+ h5data.attrs["Range Ending Date"], "%Y-%jT%H:%M:%S.%f")
self.lons = None
self.lats = None
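The ScatSat-1 attributes use an ordinal-date layout; a short standalone sketch (the sample value is invented) of what %j resolves to:

    import datetime as dt

    # Hypothetical attribute value in the "%Y-%jT%H:%M:%S.%f" layout used above;
    # %j is the day of the year, so day 046 of 2017 is 15 February.
    range_beginning = "2017-046T09:12:30.500000"

    print(dt.datetime.strptime(range_beginning, "%Y-%jT%H:%M:%S.%f"))
    # 2017-02-15 09:12:30.500000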
diff --git a/satpy/readers/scmi.py b/satpy/readers/scmi.py
index a4b8620f8b..fe19c63d8d 100644
--- a/satpy/readers/scmi.py
+++ b/satpy/readers/scmi.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""SCMI NetCDF4 Reader.
SCMI files are typically used for data for the ABI instrument onboard the
@@ -40,9 +41,9 @@
"""
+import datetime as dt
import logging
import os
-from datetime import datetime
import numpy as np
import xarray as xr
@@ -273,7 +274,7 @@ def get_area_def(self, key):
@property
def start_time(self):
"""Get the start time."""
- return datetime.strptime(self.nc.attrs["start_date_time"], "%Y%j%H%M%S")
+ return dt.datetime.strptime(self.nc.attrs["start_date_time"], "%Y%j%H%M%S")
@property
def end_time(self):
diff --git a/satpy/readers/seadas_l2.py b/satpy/readers/seadas_l2.py
index 03fa648330..24ee429fda 100644
--- a/satpy/readers/seadas_l2.py
+++ b/satpy/readers/seadas_l2.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Reader for SEADAS L2 products.
This reader currently only supports MODIS and VIIRS Chlorophyll A from SEADAS.
@@ -28,7 +29,7 @@
"""
-from datetime import datetime
+import datetime as dt
from .hdf4_utils import HDF4FileHandler
from .netcdf_utils import NetCDF4FileHandler
@@ -66,13 +67,13 @@ def _platform_name(self):
def start_time(self):
"""Get the starting observation time of this file's data."""
start_time = self[self.start_time_attr_name]
- return datetime.strptime(start_time[:-3], self.time_format)
+ return dt.datetime.strptime(start_time[:-3], self.time_format)
@property
def end_time(self):
"""Get the ending observation time of this file's data."""
end_time = self[self.end_time_attr_name]
- return datetime.strptime(end_time[:-3], self.time_format)
+ return dt.datetime.strptime(end_time[:-3], self.time_format)
@property
def sensor_names(self):
diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py
index 5b19e56833..ace63e3f12 100644
--- a/satpy/readers/seviri_base.py
+++ b/satpy/readers/seviri_base.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Common functionality for SEVIRI L1.5 data readers.
Introduction
@@ -186,8 +187,8 @@
"""
from __future__ import annotations
+import datetime as dt
import warnings
-from datetime import datetime, timedelta
import dask.array as da
import numpy as np
@@ -387,7 +388,7 @@
# To obtain the slope for the calibration, one should use the routine get_seviri_meirink_slope
# Epoch for the MEIRINK re-calibration
-MEIRINK_EPOCH = datetime(2000, 1, 1)
+MEIRINK_EPOCH = dt.datetime(2000, 1, 1)
MEIRINK_COEFS: dict[str, dict[int, dict[str, tuple[float, float]]]] = {}
MEIRINK_COEFS["2023"] = {}
@@ -1093,17 +1094,17 @@ def mask_bad_quality(data, line_validity, line_geometric_quality, line_radiometr
return data
-def round_nom_time(dt, time_delta):
+def round_nom_time(date, time_delta):
"""Round a datetime object to a multiple of a timedelta.
- dt : datetime.datetime object, default now.
+ date : datetime.datetime object, default now.
time_delta : timedelta object, we round to a multiple of this, default 1 minute.
adapted for SEVIRI from:
https://stackoverflow.com/questions/3463930/how-to-round-the-minute-of-a-datetime-object-python
"""
- seconds = (dt - dt.min).seconds
+ seconds = (date - date.min).seconds
round_to = time_delta.total_seconds()
rounding = (seconds + round_to / 2) // round_to * round_to
- return dt + timedelta(0, rounding - seconds, - dt.microsecond)
+ return date + dt.timedelta(0, rounding - seconds, - date.microsecond)
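To make the renamed helper concrete, a minimal reproduction of the same rounding arithmetic with two arbitrarily chosen time slots and a 15-minute repeat cycle:

    import datetime as dt


    def round_nom_time(date, time_delta):
        """Same rounding arithmetic as above, repeated here for a standalone demo."""
        seconds = (date - date.min).seconds
        round_to = time_delta.total_seconds()
        rounding = (seconds + round_to / 2) // round_to * round_to
        return date + dt.timedelta(0, rounding - seconds, -date.microsecond)


    cycle = dt.timedelta(minutes=15)
    print(round_nom_time(dt.datetime(2023, 1, 16, 11, 7, 59, 123456), cycle))  # 11:15:00
    print(round_nom_time(dt.datetime(2023, 1, 16, 11, 7, 29), cycle))          # 11:00:00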
diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py
index 804198da0f..f65faa8ecc 100644
--- a/satpy/readers/seviri_l1b_hrit.py
+++ b/satpy/readers/seviri_l1b_hrit.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
r"""SEVIRI Level 1.5 HRIT format reader.
Introduction
@@ -213,8 +214,8 @@
from __future__ import division
import copy
+import datetime as dt
import logging
-from datetime import timedelta
import dask.array as da
import numpy as np
@@ -528,14 +529,14 @@ def nominal_start_time(self):
"""Get the start time and round it according to scan law."""
tm = self.prologue["ImageAcquisition"][
"PlannedAcquisitionTime"]["TrueRepeatCycleStart"]
- return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration))
+ return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration))
@property
def nominal_end_time(self):
"""Get the end time and round it according to scan law."""
tm = self.prologue["ImageAcquisition"][
"PlannedAcquisitionTime"]["PlannedRepeatCycleEnd"]
- return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration))
+ return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration))
@property
def observation_start_time(self):
diff --git a/satpy/readers/seviri_l1b_icare.py b/satpy/readers/seviri_l1b_icare.py
index 2024c46532..4d3243f5c8 100644
--- a/satpy/readers/seviri_l1b_icare.py
+++ b/satpy/readers/seviri_l1b_icare.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
r"""Interface to SEVIRI L1B data from ICARE (Lille).
Introduction
@@ -69,7 +70,8 @@
ancillary_variables: []
"""
-from datetime import datetime
+
+import datetime as dt
import numpy as np
@@ -169,9 +171,9 @@ def end_time(self):
attr = str(attr.astype(str))
# In some versions milliseconds are present, sometimes not.
try:
- endacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%SZ")
+ endacq = dt.datetime.strptime(attr, "%Y-%m-%dT%H:%M:%SZ")
except ValueError:
- endacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%S.%fZ")
+ endacq = dt.datetime.strptime(attr, "%Y-%m-%dT%H:%M:%S.%fZ")
return endacq
@property
@@ -182,9 +184,9 @@ def start_time(self):
attr = str(attr.astype(str))
# In some versions milliseconds are present, sometimes not.
try:
- stacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%SZ")
+ stacq = dt.datetime.strptime(attr, "%Y-%m-%dT%H:%M:%SZ")
except ValueError:
- stacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%S.%fZ")
+ stacq = dt.datetime.strptime(attr, "%Y-%m-%dT%H:%M:%S.%fZ")
return stacq
@property
diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py
index 361dd1bb50..976cb7c338 100644
--- a/satpy/readers/seviri_l1b_native.py
+++ b/satpy/readers/seviri_l1b_native.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
r"""SEVIRI Level 1.5 native format reader.
Introduction
@@ -97,9 +98,9 @@
https://www-cdn.eumetsat.int/files/2020-04/pdf_fg15_msg-native-format-15.pdf
"""
+import datetime as dt
import logging
import warnings
-from datetime import datetime, timedelta
import dask.array as da
import numpy as np
@@ -207,13 +208,13 @@ def _repeat_cycle_duration(self):
def nominal_start_time(self):
"""Get the repeat cycle nominal start time from file header and round it to expected nominal time slot."""
tm = self.header["15_DATA_HEADER"]["ImageAcquisition"]["PlannedAcquisitionTime"]["TrueRepeatCycleStart"]
- return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration))
+ return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration))
@property
def nominal_end_time(self):
"""Get the repeat cycle nominal end time from file header and round it to expected nominal time slot."""
tm = self.header["15_DATA_HEADER"]["ImageAcquisition"]["PlannedAcquisitionTime"]["PlannedRepeatCycleEnd"]
- return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration))
+ return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration))
@property
def observation_start_time(self):
@@ -609,7 +610,7 @@ def _get_hrv_channel(self):
def calibrate(self, data, dataset_id):
"""Calibrate the data."""
- tic = datetime.now()
+ tic = dt.datetime.now()
channel_name = dataset_id["name"]
calib = SEVIRICalibrationHandler(
platform_id=self.platform_id,
@@ -619,7 +620,7 @@ def calibrate(self, data, dataset_id):
scan_time=self.observation_start_time
)
res = calib.calibrate(data, dataset_id["calibration"])
- logger.debug("Calibration time " + str(datetime.now() - tic))
+ logger.debug("Calibration time " + str(dt.datetime.now() - tic))
return res
def _get_calib_coefs(self, channel_name):
diff --git a/satpy/readers/seviri_l1b_nc.py b/satpy/readers/seviri_l1b_nc.py
index 22b55eceda..fd19634fda 100644
--- a/satpy/readers/seviri_l1b_nc.py
+++ b/satpy/readers/seviri_l1b_nc.py
@@ -17,9 +17,8 @@
# satpy. If not, see .
"""SEVIRI netcdf format reader."""
-import datetime
+import datetime as dt
import logging
-from datetime import timedelta
import numpy as np
@@ -67,7 +66,7 @@ def __init__(self, filename, filename_info, filetype_info,
self.ext_calib_coefs = ext_calib_coefs or {}
self.mask_bad_quality_scan_lines = mask_bad_quality_scan_lines
self.mda = {}
- self.reference = datetime.datetime(1958, 1, 1)
+ self.reference = dt.datetime(1958, 1, 1)
self.get_metadata()
@property
@@ -82,13 +81,13 @@ def _repeat_cycle_duration(self):
def nominal_start_time(self):
"""Read the repeat cycle nominal start time from metadata and round it to expected nominal time slot."""
tm = self.deltaSt
- return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration))
+ return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration))
@property
def nominal_end_time(self):
"""Read the repeat cycle nominal end time from metadata and round it to expected nominal time slot."""
tm = self.deltaEnd
- return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration))
+ return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration))
@property
def observation_start_time(self):
@@ -146,11 +145,11 @@ def get_metadata(self):
# self.mda['hrv_number_of_lines'] = int(self.nc.dims['num_rows_hrv'])
# self.mda['hrv_number_of_columns'] = int(self.nc.dims['num_columns_hrv'])
- self.deltaSt = self.reference + datetime.timedelta(
+ self.deltaSt = self.reference + dt.timedelta(
days=int(self.nc.attrs["true_repeat_cycle_start_day"]),
milliseconds=int(self.nc.attrs["true_repeat_cycle_start_mi_sec"]))
- self.deltaEnd = self.reference + datetime.timedelta(
+ self.deltaEnd = self.reference + dt.timedelta(
days=int(self.nc.attrs["planned_repeat_cycle_end_day"]),
milliseconds=int(self.nc.attrs["planned_repeat_cycle_end_mi_sec"]))
diff --git a/satpy/readers/seviri_l2_bufr.py b/satpy/readers/seviri_l2_bufr.py
index 02aa0c2767..a48a7e00d6 100644
--- a/satpy/readers/seviri_l2_bufr.py
+++ b/satpy/readers/seviri_l2_bufr.py
@@ -23,8 +23,9 @@
https://navigator.eumetsat.int/
"""
+
+import datetime as dt
import logging
-from datetime import datetime, timedelta
import dask.array as da
import numpy as np
@@ -95,7 +96,7 @@ def __init__(self, filename, filename_info, filetype_info, with_area_definition=
else:
# Product was retrieved from the EUMETSAT Data Center
timeStr = self.get_attribute("typicalDate")+self.get_attribute("typicalTime")
- buf_start_time = datetime.strptime(timeStr, "%Y%m%d%H%M%S")
+ buf_start_time = dt.datetime.strptime(timeStr, "%Y%m%d%H%M%S")
sc_id = self.get_attribute("satelliteIdentifier")
self.mpef_header = {}
self.mpef_header["NominalTime"] = buf_start_time
@@ -120,7 +121,7 @@ def start_time(self):
@property
def end_time(self):
"""Return the repeat cycle end time."""
- return self.start_time + timedelta(minutes=15)
+ return self.start_time + dt.timedelta(minutes=15)
@property
def platform_name(self):
diff --git a/satpy/readers/seviri_l2_grib.py b/satpy/readers/seviri_l2_grib.py
index b69c60e7ac..d178d6b716 100644
--- a/satpy/readers/seviri_l2_grib.py
+++ b/satpy/readers/seviri_l2_grib.py
@@ -22,8 +22,8 @@
https://navigator.eumetsat.int/
"""
+import datetime as dt
import logging
-from datetime import timedelta
import dask.array as da
import numpy as np
@@ -62,7 +62,7 @@ def start_time(self):
@property
def end_time(self):
"""Return the sensing end time."""
- return self.start_time + timedelta(minutes=REPEAT_CYCLE_DURATION)
+ return self.start_time + dt.timedelta(minutes=REPEAT_CYCLE_DURATION)
def get_area_def(self, dataset_id):
"""Return the area definition for a dataset."""
diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py
index 1e2a64f783..079f93d2f3 100644
--- a/satpy/readers/sgli_l1b.py
+++ b/satpy/readers/sgli_l1b.py
@@ -13,6 +13,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""GCOM-C SGLI L1b reader.
GCOM-C has an imager instrument: SGLI
@@ -27,8 +28,8 @@
"""
+import datetime as dt
import logging
-from datetime import datetime
import dask.array as da
import h5py
@@ -63,13 +64,13 @@ def __init__(self, filename, filename_info, filetype_info):
def start_time(self):
"""Get the start time."""
the_time = self.h5file["Global_attributes"].attrs["Scene_start_time"].item()
- return datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f")
+ return dt.datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f")
@property
def end_time(self):
"""Get the end time."""
the_time = self.h5file["Global_attributes"].attrs["Scene_end_time"].item()
- return datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f")
+ return dt.datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f")
def get_dataset(self, key, info):
"""Get the dataset from the file."""
diff --git a/satpy/readers/slstr_l1b.py b/satpy/readers/slstr_l1b.py
index 02aae9f72b..3353ade4d3 100644
--- a/satpy/readers/slstr_l1b.py
+++ b/satpy/readers/slstr_l1b.py
@@ -15,13 +15,14 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""SLSTR L1b reader."""
+import datetime as dt
import logging
import os
import re
import warnings
-from datetime import datetime
import dask.array as da
import numpy as np
@@ -95,12 +96,12 @@ def get_dataset(self, key, info):
@property
def start_time(self):
"""Get the start time."""
- return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ")
+ return dt.datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ")
@property
def end_time(self):
"""Get the end time."""
- return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ")
+ return dt.datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ")
class NCSLSTR1B(BaseFileHandler):
@@ -224,12 +225,12 @@ def get_dataset(self, key, info):
@property
def start_time(self):
"""Get the start time."""
- return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ")
+ return dt.datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ")
@property
def end_time(self):
"""Get the end time."""
- return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ")
+ return dt.datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ")
class NCSLSTRAngles(BaseFileHandler):
@@ -326,12 +327,12 @@ def get_dataset(self, key, info):
@property
def start_time(self):
"""Get the start time."""
- return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ")
+ return dt.datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ")
@property
def end_time(self):
"""Get the end time."""
- return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ")
+ return dt.datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ")
class NCSLSTRFlag(BaseFileHandler):
@@ -376,9 +377,9 @@ def get_dataset(self, key, info):
@property
def start_time(self):
"""Get the start time."""
- return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ")
+ return dt.datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ")
@property
def end_time(self):
"""Get the end time."""
- return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ")
+ return dt.datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ")
diff --git a/satpy/readers/smos_l2_wind.py b/satpy/readers/smos_l2_wind.py
index 4a909ee2e4..c4d349ea67 100644
--- a/satpy/readers/smos_l2_wind.py
+++ b/satpy/readers/smos_l2_wind.py
@@ -24,8 +24,8 @@
SMOS_WIND_DS_PDD_20191107_signed.pdf
"""
+import datetime as dt
import logging
-from datetime import datetime
import numpy as np
from pyresample.geometry import AreaDefinition
@@ -41,12 +41,12 @@ class SMOSL2WINDFileHandler(NetCDF4FileHandler):
@property
def start_time(self):
"""Get start time."""
- return datetime.strptime(self["/attr/time_coverage_start"], "%Y-%m-%dT%H:%M:%S Z")
+ return dt.datetime.strptime(self["/attr/time_coverage_start"], "%Y-%m-%dT%H:%M:%S Z")
@property
def end_time(self):
"""Get end time."""
- return datetime.strptime(self["/attr/time_coverage_end"], "%Y-%m-%dT%H:%M:%S Z")
+ return dt.datetime.strptime(self["/attr/time_coverage_end"], "%Y-%m-%dT%H:%M:%S Z")
@property
def platform_shortname(self):
diff --git a/satpy/readers/tropomi_l2.py b/satpy/readers/tropomi_l2.py
index 768ca70948..2d571e2f12 100644
--- a/satpy/readers/tropomi_l2.py
+++ b/satpy/readers/tropomi_l2.py
@@ -29,8 +29,8 @@
"""
+import datetime as dt
import logging
-from datetime import datetime
import dask.array as da
import numpy as np
@@ -65,12 +65,12 @@ def platform_shortname(self):
@property
def time_coverage_start(self):
"""Get time_coverage_start."""
- return datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT)
+ return dt.datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT)
@property
def time_coverage_end(self):
"""Get time_coverage_end."""
- return datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT)
+ return dt.datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT)
@property
def sensor(self):
diff --git a/satpy/readers/vii_base_nc.py b/satpy/readers/vii_base_nc.py
index 83056189dc..07c5f6749a 100644
--- a/satpy/readers/vii_base_nc.py
+++ b/satpy/readers/vii_base_nc.py
@@ -19,8 +19,8 @@
"""EUMETSAT EPS-SG Visible/Infrared Imager (VII) readers base class."""
+import datetime as dt
import logging
-from datetime import datetime
from geotiepoints.viiinterpolator import tie_points_geo_interpolation, tie_points_interpolation
@@ -213,18 +213,18 @@ def _get_global_attributes(self):
def start_time(self):
"""Get observation start time."""
try:
- start_time = datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y%m%d%H%M%S.%f")
+ start_time = dt.datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y%m%d%H%M%S.%f")
except ValueError:
- start_time = datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y-%m-%d %H:%M:%S.%f")
+ start_time = dt.datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y-%m-%d %H:%M:%S.%f")
return start_time
@property
def end_time(self):
"""Get observation end time."""
try:
- end_time = datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y%m%d%H%M%S.%f")
+ end_time = dt.datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y%m%d%H%M%S.%f")
except ValueError:
- end_time = datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y-%m-%d %H:%M:%S.%f")
+ end_time = dt.datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y-%m-%d %H:%M:%S.%f")
return end_time
@property
diff --git a/satpy/readers/viirs_atms_sdr_base.py b/satpy/readers/viirs_atms_sdr_base.py
index 159a84a070..b55368e92b 100644
--- a/satpy/readers/viirs_atms_sdr_base.py
+++ b/satpy/readers/viirs_atms_sdr_base.py
@@ -18,8 +18,8 @@
"""Common utilities for reading VIIRS and ATMS SDR data."""
+import datetime as dt
import logging
-from datetime import datetime, timedelta
import dask.array as da
import numpy as np
@@ -27,8 +27,8 @@
from satpy.readers.hdf5_utils import HDF5FileHandler
-NO_DATE = datetime(1958, 1, 1)
-EPSILON_TIME = timedelta(days=2)
+NO_DATE = dt.datetime(1958, 1, 1)
+EPSILON_TIME = dt.timedelta(days=2)
LOG = logging.getLogger(__name__)
@@ -106,7 +106,7 @@ def _parse_datetime(self, datestr, timestr):
timestr = str(timestr.data.compute().astype(str))
datetime_str = datestr + timestr
- time_val = datetime.strptime(datetime_str, "%Y%m%d%H%M%S.%fZ")
+ time_val = dt.datetime.strptime(datetime_str, "%Y%m%d%H%M%S.%fZ")
if abs(time_val - NO_DATE) < EPSILON_TIME:
# catch rare case when SDR files have incorrect date
raise ValueError("Datetime invalid {}".format(time_val))
diff --git a/satpy/readers/viirs_compact.py b/satpy/readers/viirs_compact.py
index af3a4ce766..bb3bd83b71 100644
--- a/satpy/readers/viirs_compact.py
+++ b/satpy/readers/viirs_compact.py
@@ -29,9 +29,9 @@
"""
+import datetime as dt
import logging
from contextlib import suppress
-from datetime import datetime, timedelta
import dask.array as da
import h5py
@@ -173,10 +173,10 @@ def start_time(self):
@property
def end_time(self):
"""Get the end time."""
- end_time = datetime.combine(self.start_time.date(),
+ end_time = dt.datetime.combine(self.start_time.date(),
self.finfo["end_time"].time())
if end_time < self.start_time:
- end_time += timedelta(days=1)
+ end_time += dt.timedelta(days=1)
return end_time
def read_geo(self, key, info):
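The compact VIIRS end-time attribute only carries a time of day, hence the rollover handling above; a small sketch with invented values:

    import datetime as dt

    start_time = dt.datetime(2023, 3, 11, 23, 50)
    end_time_of_day = dt.time(0, 5)  # granule ends five minutes past midnight

    end_time = dt.datetime.combine(start_time.date(), end_time_of_day)
    if end_time < start_time:
        # The granule crossed midnight, so the end date is the following day.
        end_time += dt.timedelta(days=1)

    print(end_time)  # 2023-03-12 00:05:00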
diff --git a/satpy/readers/viirs_l1b.py b/satpy/readers/viirs_l1b.py
index 510a37165d..7dd3079dbb 100644
--- a/satpy/readers/viirs_l1b.py
+++ b/satpy/readers/viirs_l1b.py
@@ -17,8 +17,8 @@
# satpy. If not, see .
"""Interface to VIIRS L1B format."""
+import datetime as dt
import logging
-from datetime import datetime
import numpy as np
@@ -32,7 +32,7 @@ class VIIRSL1BFileHandler(NetCDF4FileHandler):
def _parse_datetime(self, datestr):
"""Parse datetime."""
- return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S.000Z")
+ return dt.datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S.000Z")
@property
def start_orbit_number(self):
diff --git a/satpy/readers/viirs_l2.py b/satpy/readers/viirs_l2.py
index 9277620320..7a54b3e10c 100644
--- a/satpy/readers/viirs_l2.py
+++ b/satpy/readers/viirs_l2.py
@@ -9,8 +9,9 @@
3. Cloud Top Height
4. Deep Blue Aerosol Optical Thickness (Land and Ocean)
"""
+
+import datetime as dt
import logging
-from datetime import datetime
import numpy as np
@@ -23,7 +24,7 @@ class VIIRSL2FileHandler(NetCDF4FileHandler):
"""NetCDF File Handler for VIIRS L2 Products."""
def _parse_datetime(self, datestr):
"""Parse datetime."""
- return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S.000Z")
+ return dt.datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S.000Z")
@property
def start_time(self):
diff --git a/satpy/readers/viirs_sdr.py b/satpy/readers/viirs_sdr.py
index eef02f7777..28854b185d 100644
--- a/satpy/readers/viirs_sdr.py
+++ b/satpy/readers/viirs_sdr.py
@@ -28,10 +28,11 @@
- http://npp.gsfc.nasa.gov/science/sciencedocuments/082012/474-00001-03_CDFCBVolIII_RevC.pdf
"""
+
+import datetime as dt
import logging
import os.path
from contextlib import suppress
-from datetime import datetime, timedelta
from glob import glob
import numpy as np
@@ -39,8 +40,8 @@
from satpy.readers.viirs_atms_sdr_base import ATMS_DATASET_KEYS, DATASET_KEYS, VIIRS_DATASET_KEYS, JPSS_SDR_FileHandler
from satpy.readers.yaml_reader import FileYAMLReader
-NO_DATE = datetime(1958, 1, 1)
-EPSILON_TIME = timedelta(days=2)
+NO_DATE = dt.datetime(1958, 1, 1)
+EPSILON_TIME = dt.timedelta(days=2)
LOG = logging.getLogger(__name__)
diff --git a/satpy/readers/viirs_vgac_l1c_nc.py b/satpy/readers/viirs_vgac_l1c_nc.py
index 578e19bed3..2f43ffd2a2 100644
--- a/satpy/readers/viirs_vgac_l1c_nc.py
+++ b/satpy/readers/viirs_vgac_l1c_nc.py
@@ -15,8 +15,8 @@
# satpy. If not, see .
"""Reading VIIRS VGAC data."""
+import datetime as dt
import logging
-from datetime import datetime
import numpy as np
import xarray as xr
@@ -68,20 +68,20 @@ def fix_radiances_not_in_percent(self, data):
def set_time_attrs(self, data):
"""Set time from attributes."""
if "StartTime" in data.attrs:
- data.attrs["start_time"] = datetime.strptime(data.attrs["StartTime"], "%Y-%m-%dT%H:%M:%S")
- data.attrs["end_time"] = datetime.strptime(data.attrs["EndTime"], "%Y-%m-%dT%H:%M:%S")
+ data.attrs["start_time"] = dt.datetime.strptime(data.attrs["StartTime"], "%Y-%m-%dT%H:%M:%S")
+ data.attrs["end_time"] = dt.datetime.strptime(data.attrs["EndTime"], "%Y-%m-%dT%H:%M:%S")
self._end_time = data.attrs["end_time"]
self._start_time = data.attrs["start_time"]
def dt64_to_datetime(self, dt64):
"""Conversion of numpy.datetime64 to datetime objects."""
if isinstance(dt64, np.datetime64):
- return dt64.astype(datetime)
+ return dt64.astype(dt.datetime)
return dt64
def extract_time_data(self, data, nc):
"""Decode time data."""
- reference_time = np.datetime64(datetime.strptime(nc["proj_time0"].attrs["units"],
+ reference_time = np.datetime64(dt.datetime.strptime(nc["proj_time0"].attrs["units"],
"days since %d/%m/%YT%H:%M:%S"))
delta_part_of_day, delta_full_days = np.modf(nc["proj_time0"].values)
delta_full_days = np.timedelta64(delta_full_days.astype(np.int64), "D").astype("timedelta64[us]")
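The VGAC reference time is parsed from a "days since ..." unit string and the offsets are fractional days; a simplified standalone sketch with made-up values (the array handling differs slightly from the reader's):

    import datetime as dt

    import numpy as np

    # Hypothetical metadata: fractional days counted from the reference in "units".
    units = "days since 01/01/2010T00:00:00"
    proj_time0 = np.array([4752.5, 4752.75])  # .5 day = 12:00 UTC, .75 day = 18:00 UTC

    reference_time = np.datetime64(dt.datetime.strptime(units, "days since %d/%m/%YT%H:%M:%S"))
    delta_part_of_day, delta_full_days = np.modf(proj_time0)
    one_day_us = np.timedelta64(1, "D").astype("timedelta64[us]")
    times = reference_time + one_day_us * delta_full_days.astype(np.int64) + one_day_us * delta_part_of_day

    print(times)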
diff --git a/satpy/readers/virr_l1b.py b/satpy/readers/virr_l1b.py
index 260666ff8b..23e0339c93 100644
--- a/satpy/readers/virr_l1b.py
+++ b/satpy/readers/virr_l1b.py
@@ -40,8 +40,8 @@
"""
+import datetime as dt
import logging
-from datetime import datetime
import dask.array as da
import numpy as np
@@ -162,10 +162,10 @@ def _correct_slope(self, slope):
def start_time(self):
"""Get starting observation time."""
start_time = self["/attr/Observing Beginning Date"] + "T" + self["/attr/Observing Beginning Time"] + "Z"
- return datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ")
+ return dt.datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ")
@property
def end_time(self):
"""Get ending observation time."""
end_time = self["/attr/Observing Ending Date"] + "T" + self["/attr/Observing Ending Time"] + "Z"
- return datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ")
+ return dt.datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ")
diff --git a/satpy/tests/cf_tests/test_decoding.py b/satpy/tests/cf_tests/test_decoding.py
index c20cddf6da..51c1bfecaf 100644
--- a/satpy/tests/cf_tests/test_decoding.py
+++ b/satpy/tests/cf_tests/test_decoding.py
@@ -15,8 +15,10 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Tests for CF decoding."""
-from datetime import datetime
+
+import datetime as dt
import pytest
@@ -46,11 +48,11 @@ def expected(self):
"my_integer": 0,
"my_float": 0.0,
"my_list": [1, 2, 3],
- "my_timestamp1": datetime(2000, 1, 1),
- "my_timestamp2": datetime(2000, 1, 1, 12, 15, 33),
- "my_timestamp3": datetime(2000, 1, 1, 12, 15, 33, 123456),
+ "my_timestamp1": dt.datetime(2000, 1, 1),
+ "my_timestamp2": dt.datetime(2000, 1, 1, 12, 15, 33),
+ "my_timestamp3": dt.datetime(2000, 1, 1, 12, 15, 33, 123456),
"my_dict": {"a": {"b": [1, 2, 3]},
- "c": {"d": datetime(2000, 1, 1, 12, 15, 33, 123456)}}
+ "c": {"d": dt.datetime(2000, 1, 1, 12, 15, 33, 123456)}}
}
def test_decoding(self, attrs, expected):
diff --git a/satpy/tests/compositor_tests/test_viirs.py b/satpy/tests/compositor_tests/test_viirs.py
index 1641e4248b..95ff3e0d39 100644
--- a/satpy/tests/compositor_tests/test_viirs.py
+++ b/satpy/tests/compositor_tests/test_viirs.py
@@ -15,9 +15,10 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Tests for VIIRS compositors."""
-from datetime import datetime
+import datetime as dt
import dask.array as da
import numpy as np
@@ -52,7 +53,7 @@ def dnb(self, area):
c01 = xr.DataArray(dnb,
dims=("y", "x"),
attrs={"name": "DNB", "area": area,
- "start_time": datetime(2020, 1, 1, 12, 0, 0)})
+ "start_time": dt.datetime(2020, 1, 1, 12, 0, 0)})
return c01
@pytest.fixture()
@@ -66,7 +67,7 @@ def sza(self, area):
c02 = xr.DataArray(sza,
dims=("y", "x"),
attrs={"name": "solar_zenith_angle", "area": area,
- "start_time": datetime(2020, 1, 1, 12, 0, 0)})
+ "start_time": dt.datetime(2020, 1, 1, 12, 0, 0)})
return c02
@pytest.fixture()
@@ -79,7 +80,7 @@ def lza(self, area):
c03 = xr.DataArray(lza,
dims=("y", "x"),
attrs={"name": "lunar_zenith_angle", "area": area,
- "start_time": datetime(2020, 1, 1, 12, 0, 0)
+ "start_time": dt.datetime(2020, 1, 1, 12, 0, 0)
})
return c03
diff --git a/satpy/tests/features/steps/steps-load.py b/satpy/tests/features/steps/steps-load.py
index 7e2d1829a2..d83ac24754 100644
--- a/satpy/tests/features/steps/steps-load.py
+++ b/satpy/tests/features/steps/steps-load.py
@@ -45,13 +45,13 @@ def step_impl_data_available(context):
@when(u"user loads the data without providing a config file")
def step_impl_user_loads_no_config(context):
"""Load the data without a config."""
- from datetime import datetime
+ import datetime as dt
from satpy import Scene, find_files_and_readers
os.chdir("/tmp/")
readers_files = find_files_and_readers(sensor="viirs",
- start_time=datetime(2015, 3, 11, 11, 20),
- end_time=datetime(2015, 3, 11, 11, 26))
+ start_time=dt.datetime(2015, 3, 11, 11, 20),
+ end_time=dt.datetime(2015, 3, 11, 11, 26))
scn = Scene(filenames=readers_files)
scn.load(["M02"])
context.scene = scn
@@ -73,13 +73,13 @@ def step_impl_items_not_available(context):
@when(u"user wants to know what data is available")
def step_impl_user_checks_availability(context):
"""Check availability."""
- from datetime import datetime
+ import datetime as dt
from satpy import Scene, find_files_and_readers
os.chdir("/tmp/")
reader_files = find_files_and_readers(sensor="viirs",
- start_time=datetime(2015, 3, 11, 11, 20),
- end_time=datetime(2015, 3, 11, 11, 26))
+ start_time=dt.datetime(2015, 3, 11, 11, 20),
+ end_time=dt.datetime(2015, 3, 11, 11, 26))
scn = Scene(filenames=reader_files)
context.available_dataset_ids = scn.available_dataset_ids()
diff --git a/satpy/tests/modifier_tests/test_angles.py b/satpy/tests/modifier_tests/test_angles.py
index ecf0805ca8..b4f5430436 100644
--- a/satpy/tests/modifier_tests/test_angles.py
+++ b/satpy/tests/modifier_tests/test_angles.py
@@ -14,10 +14,11 @@
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
"""Tests for the angles in modifiers."""
+
import contextlib
+import datetime as dt
import warnings
from copy import deepcopy
-from datetime import datetime, timedelta
from glob import glob
from typing import Optional, Union
from unittest import mock
@@ -74,7 +75,7 @@ def _get_angle_test_data(area_def: Optional[Union[AreaDefinition, StackedAreaDef
"satellite_nominal_longitude": 10.0,
"satellite_nominal_latitude": 0.0,
}
- stime = datetime(2020, 1, 1, 12, 0, 0)
+ stime = dt.datetime(2020, 1, 1, 12, 0, 0)
data = da.zeros(shape, chunks=chunks)
vis = xr.DataArray(data,
dims=dims,
@@ -113,7 +114,7 @@ def _similar_sat_pos_datetime(orig_data, lon_offset=0.04):
new_data = orig_data.copy()
old_lon = new_data.attrs["orbital_parameters"]["satellite_nominal_longitude"]
new_data.attrs["orbital_parameters"]["satellite_nominal_longitude"] = old_lon + lon_offset
- new_data.attrs["start_time"] = new_data.attrs["start_time"] + timedelta(hours=36)
+ new_data.attrs["start_time"] = new_data.attrs["start_time"] + dt.timedelta(hours=36)
return new_data
@@ -372,15 +373,13 @@ def test_relative_azimuth_calculation(self):
def test_solazi_correction(self):
"""Test that solar azimuth angles are corrected into the right range."""
- from datetime import datetime
-
from satpy.modifiers.angles import _get_sun_azimuth_ndarray
lats = np.array([-80, 40, 0, 40, 80])
lons = np.array([-80, 40, 0, 40, 80])
- dt = datetime(2022, 1, 5, 12, 50, 0)
+ date = dt.datetime(2022, 1, 5, 12, 50, 0)
- azi = _get_sun_azimuth_ndarray(lats, lons, dt)
+ azi = _get_sun_azimuth_ndarray(lats, lons, date)
assert np.all(azi > 0)
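Several hunks in this patch (here, and e.g. in seviri_base.round_nom_time and the ACSPO test fake) rename locals that were called `dt`, because the new module alias would otherwise be shadowed; a tiny sketch of the failure mode being avoided (both functions are hypothetical):

    import datetime as dt


    def shadowed():
        # Assigning to "dt" makes it local for the whole function body, so the
        # right-hand side reference raises UnboundLocalError before assignment.
        dt = dt.datetime(2022, 1, 5, 12, 50)
        return dt


    def renamed():
        # Renaming the local (as the patch does with "date") keeps the alias usable.
        date = dt.datetime(2022, 1, 5, 12, 50)
        return date + dt.timedelta(hours=1)


    print(renamed())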
diff --git a/satpy/tests/modifier_tests/test_crefl.py b/satpy/tests/modifier_tests/test_crefl.py
index dc9f4a232a..27c9847030 100644
--- a/satpy/tests/modifier_tests/test_crefl.py
+++ b/satpy/tests/modifier_tests/test_crefl.py
@@ -13,8 +13,8 @@
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
"""Tests for the CREFL ReflectanceCorrector modifier."""
+import datetime as dt
from contextlib import contextmanager
-from datetime import datetime
from unittest import mock
import numpy as np
@@ -82,8 +82,8 @@ def _make_viirs_xarray(data, area, name, standard_name, wavelength=None, units="
"resolution": 371, "name": name,
"standard_name": standard_name, "platform_name": "Suomi-NPP",
"polarization": None, "sensor": "viirs", "units": units,
- "start_time": datetime(2012, 2, 25, 18, 1, 24, 570942),
- "end_time": datetime(2012, 2, 25, 18, 11, 21, 175760), "area": area,
+ "start_time": dt.datetime(2012, 2, 25, 18, 1, 24, 570942),
+ "end_time": dt.datetime(2012, 2, 25, 18, 11, 21, 175760), "area": area,
"ancillary_variables": []
})
@@ -259,8 +259,8 @@ def test_reflectance_corrector_viirs(self, tmpdir, url, dem_mock_cm, dem_sds):
assert res.attrs["platform_name"] == "Suomi-NPP"
assert res.attrs["sensor"] == "viirs"
assert res.attrs["units"] == "%"
- assert res.attrs["start_time"] == datetime(2012, 2, 25, 18, 1, 24, 570942)
- assert res.attrs["end_time"] == datetime(2012, 2, 25, 18, 11, 21, 175760)
+ assert res.attrs["start_time"] == dt.datetime(2012, 2, 25, 18, 1, 24, 570942)
+ assert res.attrs["end_time"] == dt.datetime(2012, 2, 25, 18, 11, 21, 175760)
assert res.attrs["area"] == area
assert res.attrs["ancillary_variables"] == []
data = res.values
@@ -304,8 +304,8 @@ def make_xarray(name, calibration, wavelength=None, modifiers=None, resolution=1
"calibration": calibration, "resolution": resolution,
"name": name, "coordinates": ["longitude", "latitude"],
"platform_name": "EOS-Aqua", "polarization": None, "sensor": "modis",
- "units": "%", "start_time": datetime(2012, 8, 13, 18, 46, 1, 439838),
- "end_time": datetime(2012, 8, 13, 18, 57, 47, 746296), "area": area,
+ "units": "%", "start_time": dt.datetime(2012, 8, 13, 18, 46, 1, 439838),
+ "end_time": dt.datetime(2012, 8, 13, 18, 57, 47, 746296), "area": area,
"ancillary_variables": []
})
@@ -327,8 +327,8 @@ def make_xarray(name, calibration, wavelength=None, modifiers=None, resolution=1
assert res.attrs["platform_name"] == "EOS-Aqua"
assert res.attrs["sensor"] == "modis"
assert res.attrs["units"] == "%"
- assert res.attrs["start_time"] == datetime(2012, 8, 13, 18, 46, 1, 439838)
- assert res.attrs["end_time"] == datetime(2012, 8, 13, 18, 57, 47, 746296)
+ assert res.attrs["start_time"] == dt.datetime(2012, 8, 13, 18, 46, 1, 439838)
+ assert res.attrs["end_time"] == dt.datetime(2012, 8, 13, 18, 57, 47, 746296)
assert res.attrs["area"] == area
assert res.attrs["ancillary_variables"] == []
data = res.values
diff --git a/satpy/tests/multiscene_tests/test_blend.py b/satpy/tests/multiscene_tests/test_blend.py
index c964501225..c003106dea 100644
--- a/satpy/tests/multiscene_tests/test_blend.py
+++ b/satpy/tests/multiscene_tests/test_blend.py
@@ -19,7 +19,7 @@
"""Unit tests for blending datasets with the Multiscene object."""
-from datetime import datetime
+import datetime as dt
import dask.array as da
import numpy as np
@@ -101,8 +101,8 @@ def cloud_type_data_array1(test_area, data_type, image_mode):
"satellite_nominal_longitude": 0.0,
"satellite_nominal_latitude": 0,
}
- data_arr.attrs["start_time"] = datetime(2023, 1, 16, 11, 9, 17)
- data_arr.attrs["end_time"] = datetime(2023, 1, 16, 11, 12, 22)
+ data_arr.attrs["start_time"] = dt.datetime(2023, 1, 16, 11, 9, 17)
+ data_arr.attrs["end_time"] = dt.datetime(2023, 1, 16, 11, 12, 22)
data_arr.attrs["_satpy_id"] = dsid1
return data_arr
@@ -127,8 +127,8 @@ def cloud_type_data_array2(test_area, data_type, image_mode):
data_arr.attrs["sensor"] = {"avhrr-3"}
data_arr.attrs["units"] = "1"
data_arr.attrs["long_name"] = "SAFNWC PPS CT Cloud Type"
- data_arr.attrs["start_time"] = datetime(2023, 1, 16, 11, 12, 57, 500000)
- data_arr.attrs["end_time"] = datetime(2023, 1, 16, 11, 28, 1, 900000)
+ data_arr.attrs["start_time"] = dt.datetime(2023, 1, 16, 11, 12, 57, 500000)
+ data_arr.attrs["end_time"] = dt.datetime(2023, 1, 16, 11, 28, 1, 900000)
data_arr.attrs["_satpy_id"] = dsid1
return data_arr
@@ -152,8 +152,8 @@ def scene1_with_weights(cloud_type_data_array1, test_area):
modifiers=()
)
scene[dsid2] = _create_test_int8_dataset(name="geo-cma", area=test_area, values=2)
- scene[dsid2].attrs["start_time"] = datetime(2023, 1, 16, 11, 9, 17)
- scene[dsid2].attrs["end_time"] = datetime(2023, 1, 16, 11, 12, 22)
+ scene[dsid2].attrs["start_time"] = dt.datetime(2023, 1, 16, 11, 9, 17)
+ scene[dsid2].attrs["end_time"] = dt.datetime(2023, 1, 16, 11, 12, 22)
wgt2 = _create_test_dataset(name="geo-cma-wgt", area=test_area, values=0)
@@ -176,8 +176,8 @@ def scene2_with_weights(cloud_type_data_array2, test_area):
modifiers=()
)
scene[dsid2] = _create_test_int8_dataset(name="polar-cma", area=test_area, values=4)
- scene[dsid2].attrs["start_time"] = datetime(2023, 1, 16, 11, 12, 57, 500000)
- scene[dsid2].attrs["end_time"] = datetime(2023, 1, 16, 11, 28, 1, 900000)
+ scene[dsid2].attrs["start_time"] = dt.datetime(2023, 1, 16, 11, 12, 57, 500000)
+ scene[dsid2].attrs["end_time"] = dt.datetime(2023, 1, 16, 11, 28, 1, 900000)
wgt2 = _create_test_dataset(name="polar-cma-wgt", area=test_area, values=1)
return scene, [wgt1, wgt2]
@@ -223,8 +223,8 @@ def test_blend_two_scenes_using_stack(self, multi_scene_and_weights, groups,
xr.testing.assert_equal(result, expected.compute())
_check_stacked_metadata(result, "CloudType")
- assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 9, 17)
- assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 28, 1, 900000)
+ assert result.attrs["start_time"] == dt.datetime(2023, 1, 16, 11, 9, 17)
+ assert result.attrs["end_time"] == dt.datetime(2023, 1, 16, 11, 28, 1, 900000)
def test_blend_two_scenes_bad_blend_type(self, multi_scene_and_weights, groups):
"""Test exception is raised when bad 'blend_type' is used."""
@@ -274,8 +274,8 @@ def test_blend_two_scenes_using_stack_weighted(self, multi_scene_and_weights, gr
np.testing.assert_allclose(result.data, expected.data)
_check_stacked_metadata(result, "CloudType")
- assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 9, 17)
- assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 28, 1, 900000)
+ assert result.attrs["start_time"] == dt.datetime(2023, 1, 16, 11, 9, 17)
+ assert result.attrs["end_time"] == dt.datetime(2023, 1, 16, 11, 28, 1, 900000)
@pytest.fixture()
def datasets_and_weights(self):
@@ -286,23 +286,23 @@ def datasets_and_weights(self):
shape[1], shape[0], [-200, -200, 200, 200])
ds1 = xr.DataArray(da.ones(shape, chunks=-1), dims=("y", "x"),
- attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area})
+ attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area})
ds2 = xr.DataArray(da.ones(shape, chunks=-1) * 2, dims=("y", "x"),
- attrs={"start_time": datetime(2018, 1, 1, 1, 0, 0), "area": area})
+ attrs={"start_time": dt.datetime(2018, 1, 1, 1, 0, 0), "area": area})
ds3 = xr.DataArray(da.ones(shape, chunks=-1) * 3, dims=("y", "x"),
- attrs={"start_time": datetime(2018, 1, 1, 1, 0, 0), "area": area})
+ attrs={"start_time": dt.datetime(2018, 1, 1, 1, 0, 0), "area": area})
ds4 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "time"),
- attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area})
+ attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area})
ds5 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "time"),
- attrs={"start_time": datetime(2018, 1, 1, 1, 0, 0), "area": area})
+ attrs={"start_time": dt.datetime(2018, 1, 1, 1, 0, 0), "area": area})
wgt1 = xr.DataArray(da.ones(shape, chunks=-1), dims=("y", "x"),
- attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area})
+ attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area})
wgt2 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "x"),
- attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area})
+ attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area})
wgt3 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "x"),
- attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area})
+ attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area})
datastruct = {"shape": shape,
"area": area,
@@ -392,9 +392,9 @@ class TestTemporalRGB:
@pytest.fixture()
def nominal_data(self):
"""Return the input arrays for the nominal use case."""
- da1 = xr.DataArray([1, 0, 0], attrs={"start_time": datetime(2023, 5, 22, 9, 0, 0)})
- da2 = xr.DataArray([0, 1, 0], attrs={"start_time": datetime(2023, 5, 22, 10, 0, 0)})
- da3 = xr.DataArray([0, 0, 1], attrs={"start_time": datetime(2023, 5, 22, 11, 0, 0)})
+ da1 = xr.DataArray([1, 0, 0], attrs={"start_time": dt.datetime(2023, 5, 22, 9, 0, 0)})
+ da2 = xr.DataArray([0, 1, 0], attrs={"start_time": dt.datetime(2023, 5, 22, 10, 0, 0)})
+ da3 = xr.DataArray([0, 0, 1], attrs={"start_time": dt.datetime(2023, 5, 22, 11, 0, 0)})
return [da1, da2, da3]
@@ -422,7 +422,7 @@ def test_extra_datasets(self, nominal_data, expected_result):
"""Test that only the first three arrays affect the usage."""
from satpy.multiscene import temporal_rgb
- da4 = xr.DataArray([0, 0, 1], attrs={"start_time": datetime(2023, 5, 22, 12, 0, 0)})
+ da4 = xr.DataArray([0, 0, 1], attrs={"start_time": dt.datetime(2023, 5, 22, 12, 0, 0)})
res = temporal_rgb(nominal_data + [da4,])
diff --git a/satpy/tests/multiscene_tests/test_save_animation.py b/satpy/tests/multiscene_tests/test_save_animation.py
index 7ec1a53df8..67158c2334 100644
--- a/satpy/tests/multiscene_tests/test_save_animation.py
+++ b/satpy/tests/multiscene_tests/test_save_animation.py
@@ -15,17 +15,18 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Unit tests for saving animations using Multiscene."""
# NOTE:
# The following fixtures are not defined in this file, but are used and injected by Pytest:
# - tmp_path
+import datetime as dt
import os
import shutil
import tempfile
import unittest
-from datetime import datetime
from unittest import mock
import pytest
@@ -63,12 +64,12 @@ def test_save_mp4_distributed(self):
scenes[1]["ds3"] = _create_test_dataset("ds3")
# Add a start and end time
for ds_id in ["ds1", "ds2", "ds3"]:
- scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2)
- scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12)
+ scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2)
+ scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12)
if ds_id == "ds3":
continue
- scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1)
- scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12)
+ scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1)
+ scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12)
mscn = MultiScene(scenes)
fn = os.path.join(
@@ -125,12 +126,12 @@ def test_save_mp4_no_distributed(self):
scenes[1]["ds3"] = _create_test_dataset("ds3")
# Add a start and end time
for ds_id in ["ds1", "ds2", "ds3"]:
- scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2)
- scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12)
+ scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2)
+ scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12)
if ds_id == "ds3":
continue
- scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1)
- scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12)
+ scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1)
+ scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12)
mscn = MultiScene(scenes)
fn = os.path.join(
@@ -165,12 +166,12 @@ def test_save_datasets_simple(self):
scenes[1]["ds3"] = _create_test_dataset("ds3")
# Add a start and end time
for ds_id in ["ds1", "ds2", "ds3"]:
- scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2)
- scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12)
+ scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2)
+ scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12)
if ds_id == "ds3":
continue
- scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1)
- scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12)
+ scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1)
+ scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12)
mscn = MultiScene(scenes)
client_mock = mock.MagicMock()
@@ -198,12 +199,12 @@ def test_save_datasets_distributed_delayed(self):
scenes[1]["ds3"] = _create_test_dataset("ds3")
# Add a start and end time
for ds_id in ["ds1", "ds2", "ds3"]:
- scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2)
- scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12)
+ scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2)
+ scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12)
if ds_id == "ds3":
continue
- scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1)
- scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12)
+ scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1)
+ scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12)
mscn = MultiScene(scenes)
client_mock = mock.MagicMock()
@@ -233,12 +234,12 @@ def test_save_datasets_distributed_source_target(self):
scenes[1]["ds3"] = _create_test_dataset("ds3")
# Add a start and end time
for ds_id in ["ds1", "ds2", "ds3"]:
- scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2)
- scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12)
+ scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2)
+ scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12)
if ds_id == "ds3":
continue
- scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1)
- scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12)
+ scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1)
+ scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12)
mscn = MultiScene(scenes)
client_mock = mock.MagicMock()
@@ -313,12 +314,12 @@ def test_save_mp4(smg, tmp_path):
scenes[1]["ds3"] = _create_test_dataset("ds3")
# Add a start and end time
for ds_id in ["ds1", "ds2", "ds3"]:
- scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2)
- scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12)
+ scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2)
+ scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12)
if ds_id == "ds3":
continue
- scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1)
- scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12)
+ scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1)
+ scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12)
mscn = MultiScene(scenes)
fn = str(tmp_path /
diff --git a/satpy/tests/reader_tests/_li_test_utils.py b/satpy/tests/reader_tests/_li_test_utils.py
index 32107006fc..6b4d3f7629 100644
--- a/satpy/tests/reader_tests/_li_test_utils.py
+++ b/satpy/tests/reader_tests/_li_test_utils.py
@@ -14,7 +14,7 @@
# along with satpy. If not, see .
"""Common utility modules used for LI mock-oriented unit tests."""
-from datetime import datetime
+import datetime as dt
import numpy as np
import xarray as xr
@@ -127,8 +127,8 @@ def rand_u16(num):
def l2_lef_schema(settings=None):
"""Define schema for LI L2 LEF product."""
- epoch_ts = datetime(2000, 1, 1, 0, 0, 0, 0)
- start_time = datetime.now()
+ epoch_ts = dt.datetime(2000, 1, 1, 0, 0, 0, 0)
+ start_time = dt.datetime.now()
start_ts = (start_time - epoch_ts).total_seconds()
settings = settings or {}
@@ -287,9 +287,9 @@ def l2_lfl_schema(settings=None):
settings = settings or {}
nobs = settings.get("num_obs", 1234)
- epoch = datetime(2000, 1, 1)
- stime = (datetime(2019, 1, 1) - epoch).total_seconds()
- etime = (datetime(2019, 1, 2) - epoch).total_seconds()
+ epoch = dt.datetime(2000, 1, 1)
+ stime = (dt.datetime(2019, 1, 1) - epoch).total_seconds()
+ etime = (dt.datetime(2019, 1, 2) - epoch).total_seconds()
return {
"providers": settings.get("providers", {}),
diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py
index 6dc4bf2d05..d663f7b9d9 100644
--- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py
+++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py
@@ -15,10 +15,12 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""MODIS L1b and L2 test fixtures."""
+
from __future__ import annotations
-from datetime import datetime, timedelta
+import datetime as dt
from typing import Optional
import numpy as np
@@ -216,13 +218,13 @@ def _get_l1b_geo_variable_info(filename: str,
def generate_nasa_l1b_filename(prefix):
"""Generate a filename that follows NASA MODIS L1b convention."""
- now = datetime.now()
+ now = dt.datetime.now()
return f"{prefix}_A{now:%y%j_%H%M%S}_{now:%Y%j%H%M%S}.hdf"
def generate_imapp_filename(suffix):
"""Generate a filename that follows IMAPP MODIS L1b convention."""
- now = datetime.now()
+ now = dt.datetime.now()
return f"t1.{now:%y%j.%H%M}.{suffix}.hdf"
@@ -275,8 +277,8 @@ def _add_variable_to_file(h, var_name, var_info):
def _create_core_metadata(file_shortname: str) -> str:
- beginning_date = datetime.now()
- ending_date = beginning_date + timedelta(minutes=5)
+ beginning_date = dt.datetime.now()
+ ending_date = beginning_date + dt.timedelta(minutes=5)
core_metadata_header = "GROUP = INVENTORYMETADATA\nGROUPTYPE = MASTERGROUP\n\n" \
'GROUP = RANGEDATETIME\n\nOBJECT = RANGEBEGINNINGDATE\nNUM_VAL = 1\nVALUE = "{}"\n' \
"END_OBJECT = RANGEBEGINNINGDATE\n\nOBJECT = RANGEBEGINNINGTIME\n" \
@@ -593,7 +595,7 @@ def _get_mask_byte1_variable_info() -> dict:
def generate_nasa_l2_filename(prefix: str) -> str:
"""Generate a file name that follows MODIS 35 L2 convention in a temporary directory."""
- now = datetime.now()
+ now = dt.datetime.now()
return f"{prefix}_L2.A{now:%Y%j.%H%M}.061.{now:%Y%j%H%M%S}.hdf"
@@ -614,7 +616,7 @@ def modis_l2_nasa_mod35_file(tmpdir_factory) -> list[str]:
def generate_nasa_l3_filename(prefix: str) -> str:
"""Generate a file name that follows MODIS 09 L3 convention in a temporary directory."""
- now = datetime.now()
+ now = dt.datetime.now()
return f"{prefix}.A{now:%Y%j}.061.{now:%Y%j%H%M%S}.hdf"
diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py
index 969c497410..9755190318 100644
--- a/satpy/tests/reader_tests/test_abi_l1b.py
+++ b/satpy/tests/reader_tests/test_abi_l1b.py
@@ -15,10 +15,12 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""The abi_l1b reader tests package."""
+
from __future__ import annotations
-from datetime import datetime
+import datetime as dt
from pathlib import Path
from typing import Any, Callable
from unittest import mock
@@ -372,8 +374,8 @@ def test_get_dataset(self, c01_data_arr):
"timeline_ID": None,
"suffix": "suffix",
"units": "W m-2 um-1 sr-1",
- "start_time": datetime(2017, 9, 20, 17, 30, 40, 800000),
- "end_time": datetime(2017, 9, 20, 17, 41, 17, 500000),
+ "start_time": dt.datetime(2017, 9, 20, 17, 30, 40, 800000),
+ "end_time": dt.datetime(2017, 9, 20, 17, 41, 17, 500000),
}
res = c01_data_arr
diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py
index 4b8d3a9578..98a050aa48 100644
--- a/satpy/tests/reader_tests/test_abi_l2_nc.py
+++ b/satpy/tests/reader_tests/test_abi_l2_nc.py
@@ -14,7 +14,9 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
+
"""The abi_l2_nc reader tests package."""
+
import contextlib
from typing import Optional
from unittest import mock
@@ -151,7 +153,7 @@ class TestMCMIPReading:
@mock.patch("satpy.readers.abi_base.xr")
def test_mcmip_get_dataset(self, xr_, product, exp_metadata):
"""Test getting channel from MCMIP file."""
- from datetime import datetime
+ import datetime as dt
from pyresample.geometry import AreaDefinition
@@ -183,8 +185,8 @@ def test_mcmip_get_dataset(self, xr_, product, exp_metadata):
"scene_id": None,
"sensor": "abi",
"timeline_ID": None,
- "start_time": datetime(2017, 9, 20, 17, 30, 40, 800000),
- "end_time": datetime(2017, 9, 20, 17, 41, 17, 500000),
+ "start_time": dt.datetime(2017, 9, 20, 17, 30, 40, 800000),
+ "end_time": dt.datetime(2017, 9, 20, 17, 41, 17, 500000),
"ancillary_variables": [],
}
exp_attrs.update(exp_metadata)
diff --git a/satpy/tests/reader_tests/test_acspo.py b/satpy/tests/reader_tests/test_acspo.py
index 723d1dbecd..b85232bad4 100644
--- a/satpy/tests/reader_tests/test_acspo.py
+++ b/satpy/tests/reader_tests/test_acspo.py
@@ -17,8 +17,8 @@
# satpy. If not, see .
"""Module for testing the satpy.readers.acspo module."""
+import datetime as dt
import os
-from datetime import datetime, timedelta
from unittest import mock
import numpy as np
@@ -43,7 +43,7 @@ class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler):
def get_test_content(self, filename, filename_info, filetype_info):
"""Mimic reader input file content."""
- dt = filename_info.get("start_time", datetime(2016, 1, 1, 12, 0, 0))
+ date = filename_info.get("start_time", dt.datetime(2016, 1, 1, 12, 0, 0))
sat, inst = {
"VIIRS_NPP": ("NPP", "VIIRS"),
"VIIRS_N20": ("N20", "VIIRS"),
@@ -53,8 +53,8 @@ def get_test_content(self, filename, filename_info, filetype_info):
"/attr/platform": sat,
"/attr/sensor": inst,
"/attr/spatial_resolution": "742 m at nadir",
- "/attr/time_coverage_start": dt.strftime("%Y%m%dT%H%M%SZ"),
- "/attr/time_coverage_end": (dt + timedelta(minutes=6)).strftime("%Y%m%dT%H%M%SZ"),
+ "/attr/time_coverage_start": date.strftime("%Y%m%dT%H%M%SZ"),
+ "/attr/time_coverage_end": (date + dt.timedelta(minutes=6)).strftime("%Y%m%dT%H%M%SZ"),
}
file_content["lat"] = DEFAULT_LAT_DATA
diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py
index 393afca1c8..fbb0857734 100644
--- a/satpy/tests/reader_tests/test_ahi_hsd.py
+++ b/satpy/tests/reader_tests/test_ahi_hsd.py
@@ -15,13 +15,15 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""The ahi_hsd reader tests package."""
+
from __future__ import annotations
import contextlib
+import datetime as dt
import unittest
import warnings
-from datetime import datetime
from typing import Any, Dict
from unittest import mock
@@ -340,10 +342,10 @@ def test_read_band(self, calibrate, *mocks):
np.testing.assert_allclose(value, actual_obs_params[key])
time_params_exp = {
- "nominal_start_time": datetime(2018, 10, 22, 3, 0, 0, 0),
- "nominal_end_time": datetime(2018, 10, 22, 3, 10, 0, 0),
- "observation_start_time": datetime(2018, 10, 22, 3, 0, 20, 596896),
- "observation_end_time": datetime(2018, 10, 22, 3, 10, 20, 596896),
+ "nominal_start_time": dt.datetime(2018, 10, 22, 3, 0, 0, 0),
+ "nominal_end_time": dt.datetime(2018, 10, 22, 3, 10, 0, 0),
+ "observation_start_time": dt.datetime(2018, 10, 22, 3, 0, 20, 596896),
+ "observation_end_time": dt.datetime(2018, 10, 22, 3, 10, 20, 596896),
}
actual_time_params = im.attrs["time_parameters"]
for key, value in time_params_exp.items():
@@ -416,12 +418,12 @@ def test_scene_loading(self, calibrate, *mocks):
def test_time_properties(self):
"""Test start/end/scheduled time properties."""
with _fake_hsd_handler() as fh:
- assert fh.start_time == datetime(2018, 10, 22, 3, 0)
- assert fh.end_time == datetime(2018, 10, 22, 3, 10)
- assert fh.observation_start_time == datetime(2018, 10, 22, 3, 0, 20, 596896)
- assert fh.observation_end_time == datetime(2018, 10, 22, 3, 10, 20, 596896)
- assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 0, 0, 0)
- assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 10, 0, 0)
+ assert fh.start_time == dt.datetime(2018, 10, 22, 3, 0)
+ assert fh.end_time == dt.datetime(2018, 10, 22, 3, 10)
+ assert fh.observation_start_time == dt.datetime(2018, 10, 22, 3, 0, 20, 596896)
+ assert fh.observation_end_time == dt.datetime(2018, 10, 22, 3, 10, 20, 596896)
+ assert fh.nominal_start_time == dt.datetime(2018, 10, 22, 3, 0, 0, 0)
+ assert fh.nominal_end_time == dt.datetime(2018, 10, 22, 3, 10, 0, 0)
def test_blocklen_error(self, *mocks):
"""Test erraneous blocklength."""
@@ -639,14 +641,14 @@ class TestNominalTimeCalculator:
@pytest.mark.parametrize(
("timeline", "expected"),
[
- ("0300", datetime(2020, 1, 1, 3, 0, 0)),
- ("65526", datetime(2020, 1, 1, 12, 0, 0))
+ ("0300", dt.datetime(2020, 1, 1, 3, 0, 0)),
+ ("65526", dt.datetime(2020, 1, 1, 12, 0, 0))
]
)
def test_invalid_timeline(self, timeline, expected):
"""Test handling of invalid timeline."""
calc = _NominalTimeCalculator(timeline, "FLDK")
- res = calc.get_nominal_start_time(datetime(2020, 1, 1, 12, 0, 0))
+ res = calc.get_nominal_start_time(dt.datetime(2020, 1, 1, 12, 0, 0))
assert res == expected
@pytest.mark.parametrize(
@@ -654,49 +656,49 @@ def test_invalid_timeline(self, timeline, expected):
[
(
"JP01",
- {"tstart": datetime(2018, 10, 22, 3, 0, 0),
- "tend": datetime(2018, 10, 22, 3, 2, 30)}
+ {"tstart": dt.datetime(2018, 10, 22, 3, 0, 0),
+ "tend": dt.datetime(2018, 10, 22, 3, 2, 30)}
),
(
"JP04",
- {"tstart": datetime(2018, 10, 22, 3, 7, 30, 0),
- "tend": datetime(2018, 10, 22, 3, 10, 0, 0)}
+ {"tstart": dt.datetime(2018, 10, 22, 3, 7, 30, 0),
+ "tend": dt.datetime(2018, 10, 22, 3, 10, 0, 0)}
),
(
"R301",
- {"tstart": datetime(2018, 10, 22, 3, 0, 0),
- "tend": datetime(2018, 10, 22, 3, 2, 30)}
+ {"tstart": dt.datetime(2018, 10, 22, 3, 0, 0),
+ "tend": dt.datetime(2018, 10, 22, 3, 2, 30)}
),
(
"R304",
- {"tstart": datetime(2018, 10, 22, 3, 7, 30, 0),
- "tend": datetime(2018, 10, 22, 3, 10, 0, 0)}
+ {"tstart": dt.datetime(2018, 10, 22, 3, 7, 30, 0),
+ "tend": dt.datetime(2018, 10, 22, 3, 10, 0, 0)}
),
(
"R401",
- {"tstart": datetime(2018, 10, 22, 3, 0, 0),
- "tend": datetime(2018, 10, 22, 3, 0, 30)}
+ {"tstart": dt.datetime(2018, 10, 22, 3, 0, 0),
+ "tend": dt.datetime(2018, 10, 22, 3, 0, 30)}
),
(
"R420",
- {"tstart": datetime(2018, 10, 22, 3, 9, 30, 0),
- "tend": datetime(2018, 10, 22, 3, 10, 0, 0)}
+ {"tstart": dt.datetime(2018, 10, 22, 3, 9, 30, 0),
+ "tend": dt.datetime(2018, 10, 22, 3, 10, 0, 0)}
),
(
"R501",
- {"tstart": datetime(2018, 10, 22, 3, 0, 0),
- "tend": datetime(2018, 10, 22, 3, 0, 30)}
+ {"tstart": dt.datetime(2018, 10, 22, 3, 0, 0),
+ "tend": dt.datetime(2018, 10, 22, 3, 0, 30)}
),
(
"R520",
- {"tstart": datetime(2018, 10, 22, 3, 9, 30, 0),
- "tend": datetime(2018, 10, 22, 3, 10, 0, 0)}
+ {"tstart": dt.datetime(2018, 10, 22, 3, 9, 30, 0),
+ "tend": dt.datetime(2018, 10, 22, 3, 10, 0, 0)}
),
]
)
def test_areas(self, area, expected):
"""Test nominal timestamps for multiple areas."""
- obs_start_time = datetime(2018, 10, 22, 3, 0, 20, 596896)
+ obs_start_time = dt.datetime(2018, 10, 22, 3, 0, 20, 596896)
calc = _NominalTimeCalculator("0300", area)
nom_start_time = calc.get_nominal_start_time(obs_start_time)
nom_end_time = calc.get_nominal_end_time(nom_start_time)
@@ -708,27 +710,27 @@ def test_areas(self, area, expected):
[
(
"2350",
- datetime(2022, 12, 31, 23, 50, 1),
- {"tstart": datetime(2022, 12, 31, 23, 50, 0),
- "tend": datetime(2023, 1, 1, 0, 0, 0)}
+ dt.datetime(2022, 12, 31, 23, 50, 1),
+ {"tstart": dt.datetime(2022, 12, 31, 23, 50, 0),
+ "tend": dt.datetime(2023, 1, 1, 0, 0, 0)}
),
(
"2350",
- datetime(2022, 12, 31, 23, 49, 59),
- {"tstart": datetime(2022, 12, 31, 23, 50, 0),
- "tend": datetime(2023, 1, 1, 0, 0, 0)}
+ dt.datetime(2022, 12, 31, 23, 49, 59),
+ {"tstart": dt.datetime(2022, 12, 31, 23, 50, 0),
+ "tend": dt.datetime(2023, 1, 1, 0, 0, 0)}
),
(
"0000",
- datetime(2023, 1, 1, 0, 0, 1),
- {"tstart": datetime(2023, 1, 1, 0, 0, 0),
- "tend": datetime(2023, 1, 1, 0, 10, 0)}
+ dt.datetime(2023, 1, 1, 0, 0, 1),
+ {"tstart": dt.datetime(2023, 1, 1, 0, 0, 0),
+ "tend": dt.datetime(2023, 1, 1, 0, 10, 0)}
),
(
"0000",
- datetime(2022, 12, 31, 23, 59, 59),
- {"tstart": datetime(2023, 1, 1, 0, 0, 0),
- "tend": datetime(2023, 1, 1, 0, 10, 0)}
+ dt.datetime(2022, 12, 31, 23, 59, 59),
+ {"tstart": dt.datetime(2023, 1, 1, 0, 0, 0),
+ "tend": dt.datetime(2023, 1, 1, 0, 10, 0)}
),
]
)
diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py
index 817738bb82..fcb1c34658 100644
--- a/satpy/tests/reader_tests/test_ahi_l2_nc.py
+++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py
@@ -1,6 +1,6 @@
"""Tests for the Himawari L2 netCDF reader."""
-from datetime import datetime
+import datetime as dt
import numpy as np
import pytest
@@ -15,8 +15,8 @@
lat_data = rng.uniform(-90, 90, (5500, 5500))
lon_data = rng.uniform(-180, 180, (5500, 5500))
-start_time = datetime(2023, 8, 24, 5, 40, 21)
-end_time = datetime(2023, 8, 24, 5, 49, 40)
+start_time = dt.datetime(2023, 8, 24, 5, 40, 21)
+end_time = dt.datetime(2023, 8, 24, 5, 49, 40)
dimensions = {"Columns": 5500, "Rows": 5500}
diff --git a/satpy/tests/reader_tests/test_ami_l1b.py b/satpy/tests/reader_tests/test_ami_l1b.py
index 7dd2cfcb33..6af6c1099f 100644
--- a/satpy/tests/reader_tests/test_ami_l1b.py
+++ b/satpy/tests/reader_tests/test_ami_l1b.py
@@ -173,9 +173,9 @@ def test_filename_grouping(self):
def test_basic_attributes(self):
"""Test getting basic file attributes."""
- from datetime import datetime
- assert self.reader.start_time == datetime(2019, 9, 30, 3, 0, 31, 957882)
- assert self.reader.end_time == datetime(2019, 9, 30, 3, 9, 35, 606133)
+ import datetime as dt
+ assert self.reader.start_time == dt.datetime(2019, 9, 30, 3, 0, 31, 957882)
+ assert self.reader.end_time == dt.datetime(2019, 9, 30, 3, 9, 35, 606133)
def test_get_dataset(self):
"""Test gettting radiance data."""
diff --git a/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py b/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py
index 2f1b3ad7b0..d6e6597d69 100644
--- a/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py
+++ b/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py
@@ -17,8 +17,8 @@
# satpy. If not, see .
"""Tests for the 'amsr2_l2_gaasp' reader."""
+import datetime as dt
import os
-from datetime import datetime
from unittest import mock
import dask.array as da
@@ -259,8 +259,8 @@ def _check_attrs(data_arr):
assert "add_offset" not in attrs
assert attrs["platform_name"] == "GCOM-W1"
assert attrs["sensor"] == "amsr2"
- assert attrs["start_time"] == datetime(2020, 8, 12, 5, 58, 31)
- assert attrs["end_time"] == datetime(2020, 8, 12, 6, 7, 1)
+ assert attrs["start_time"] == dt.datetime(2020, 8, 12, 5, 58, 31)
+ assert attrs["end_time"] == dt.datetime(2020, 8, 12, 6, 7, 1)
@pytest.mark.parametrize(
("filenames", "loadable_ids"),
diff --git a/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py b/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py
index 07ed218e72..dc3e371b46 100644
--- a/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py
+++ b/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py
@@ -15,12 +15,13 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Unittesting the ASCAT SCATTEROMETER SOIL MOISTURE BUFR reader."""
+import datetime as dt
import os
import sys
import unittest
-from datetime import datetime
import numpy as np
@@ -152,8 +153,8 @@ def test_scene(self):
fname = os.path.join(self.base_dir, FILENAME)
scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname])
assert "scatterometer" in scn.sensor_names
- assert datetime(2020, 12, 21, 9, 33, 0) == scn.start_time
- assert datetime(2020, 12, 21, 9, 33, 59) == scn.end_time
+ assert dt.datetime(2020, 12, 21, 9, 33, 0) == scn.start_time
+ assert dt.datetime(2020, 12, 21, 9, 33, 59) == scn.end_time
@unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows")
def test_scene_load_available_datasets(self):
diff --git a/satpy/tests/reader_tests/test_atms_l1b_nc.py b/satpy/tests/reader_tests/test_atms_l1b_nc.py
index 6b27081ed9..f1f729311a 100644
--- a/satpy/tests/reader_tests/test_atms_l1b_nc.py
+++ b/satpy/tests/reader_tests/test_atms_l1b_nc.py
@@ -12,9 +12,10 @@
#
# You should have received a copy of the GNU General Public License
# along with satpy. If not, see .
+
"""The atms_l1b_nc reader tests package."""
-from datetime import datetime
+import datetime as dt
import numpy as np
import pytest
@@ -32,7 +33,7 @@ def reader(l1b_file):
"""Return reader of ATMS level1b data."""
return AtmsL1bNCFileHandler(
filename=l1b_file,
- filename_info={"creation_time": datetime(2020, 1, 2, 3, 4, 5)},
+ filename_info={"creation_time": dt.datetime(2020, 1, 2, 3, 4, 5)},
filetype_info={"antenna_temperature": "antenna_temp"},
)
@@ -78,11 +79,11 @@ class TestAtsmsL1bNCFileHandler:
def test_start_time(self, reader):
"""Test start time."""
- assert reader.start_time == datetime(2000, 1, 2, 3, 4, 5)
+ assert reader.start_time == dt.datetime(2000, 1, 2, 3, 4, 5)
def test_end_time(self, reader):
"""Test end time."""
- assert reader.end_time == datetime(2000, 1, 2, 4, 5, 6)
+ assert reader.end_time == dt.datetime(2000, 1, 2, 4, 5, 6)
def test_sensor(self, reader):
"""Test sensor."""
@@ -100,8 +101,8 @@ def test_antenna_temperature(self, reader, atms_fake_dataset):
)
@pytest.mark.parametrize(("param", "expect"), [
- ("start_time", datetime(2000, 1, 2, 3, 4, 5)),
- ("end_time", datetime(2000, 1, 2, 4, 5, 6)),
+ ("start_time", dt.datetime(2000, 1, 2, 3, 4, 5)),
+ ("end_time", dt.datetime(2000, 1, 2, 4, 5, 6)),
("platform_name", "JPSS-1"),
("sensor", "ATMS"),
])
@@ -135,11 +136,11 @@ def test_drop_coords(self, reader):
assert coords not in data.coords
@pytest.mark.parametrize(("param", "expect"), [
- ("start_time", datetime(2000, 1, 2, 3, 4, 5)),
- ("end_time", datetime(2000, 1, 2, 4, 5, 6)),
+ ("start_time", dt.datetime(2000, 1, 2, 3, 4, 5)),
+ ("end_time", dt.datetime(2000, 1, 2, 4, 5, 6)),
("platform_name", "JPSS-1"),
("sensor", "ATMS"),
- ("creation_time", datetime(2020, 1, 2, 3, 4, 5)),
+ ("creation_time", dt.datetime(2020, 1, 2, 3, 4, 5)),
("type", "test_data"),
("name", "test"),
])
diff --git a/satpy/tests/reader_tests/test_atms_sdr_hdf5.py b/satpy/tests/reader_tests/test_atms_sdr_hdf5.py
index 8971c2d933..4fe6c120a1 100644
--- a/satpy/tests/reader_tests/test_atms_sdr_hdf5.py
+++ b/satpy/tests/reader_tests/test_atms_sdr_hdf5.py
@@ -18,8 +18,8 @@
"""Module for testing the ATMS SDR HDF5 reader."""
+import datetime as dt
import os
-from datetime import datetime
from unittest import mock
import numpy as np
@@ -288,8 +288,8 @@ def test_init_start_end_time(self):
"""Test basic init with start and end times around the start/end times of the provided file."""
r = load_reader(self.reader_configs,
filter_parameters={
- "start_time": datetime(2022, 12, 19),
- "end_time": datetime(2022, 12, 21)
+ "start_time": dt.datetime(2022, 12, 19),
+ "end_time": dt.datetime(2022, 12, 21)
})
loadables = r.select_files_from_pathnames([
"SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5",
diff --git a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py
index 4f4e8e974a..3040a46750 100644
--- a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py
+++ b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py
@@ -15,9 +15,10 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Pygac interface."""
-from datetime import date, datetime
+import datetime as dt
from unittest import TestCase, mock
import dask.array as da
@@ -190,7 +191,7 @@ def test_init_eosip(self):
fh = self._get_eosip_fh(filename, **kwargs)
assert fh.start_time < fh.end_time
assert fh.reader_class is reader_cls
- assert fh.reader_kwargs["header_date"] > date(1994, 11, 15)
+ assert fh.reader_kwargs["header_date"] > dt.date(1994, 11, 15)
def test_read_raw_data(self):
"""Test raw data reading."""
@@ -456,8 +457,8 @@ def _slice_patched(data):
data_slc, times_slc = fh.slice(data, times)
np.testing.assert_array_equal(data_slc, data[1:3])
np.testing.assert_array_equal(times_slc, times[1:3])
- assert fh.start_time == datetime(1970, 1, 1, 0, 0, 0, 2)
- assert fh.end_time == datetime(1970, 1, 1, 0, 0, 0, 3)
+ assert fh.start_time == dt.datetime(1970, 1, 1, 0, 0, 0, 2)
+ assert fh.end_time == dt.datetime(1970, 1, 1, 0, 0, 0, 3)
@mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_qual_flags")
@mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._strip_invalid_lat")
diff --git a/satpy/tests/reader_tests/test_epic_l1b_h5.py b/satpy/tests/reader_tests/test_epic_l1b_h5.py
index 472cda7f2d..18eedbad6d 100644
--- a/satpy/tests/reader_tests/test_epic_l1b_h5.py
+++ b/satpy/tests/reader_tests/test_epic_l1b_h5.py
@@ -89,11 +89,11 @@ def setup_method(self):
def test_times(self, setup_hdf5_file):
"""Test start and end times load properly."""
- from datetime import datetime
+ import datetime as dt
test_reader = self._setup_h5(setup_hdf5_file)
- assert test_reader.start_time == datetime(2015, 6, 13, 12, 0, 37)
- assert test_reader.end_time == datetime(2015, 6, 13, 12, 5, 1)
+ assert test_reader.start_time == dt.datetime(2015, 6, 13, 12, 0, 37)
+ assert test_reader.end_time == dt.datetime(2015, 6, 13, 12, 5, 1)
def test_counts_calibration(self, setup_hdf5_file):
"""Test that data is correctly calibrated."""
diff --git a/satpy/tests/reader_tests/test_eum_base.py b/satpy/tests/reader_tests/test_eum_base.py
index 55ac977b59..35b29aa79c 100644
--- a/satpy/tests/reader_tests/test_eum_base.py
+++ b/satpy/tests/reader_tests/test_eum_base.py
@@ -15,10 +15,11 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""EUMETSAT base reader tests package."""
+import datetime as dt
import unittest
-from datetime import datetime
import numpy as np
@@ -40,18 +41,18 @@ def test_fun(self):
"""Test function for TestMakeTimeCdsDictionary."""
# time_cds_short
tcds = {"Days": np.array(1), "Milliseconds": np.array(2)}
- expected = datetime(1958, 1, 2, 0, 0, 0, 2000)
+ expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2000)
assert timecds2datetime(tcds) == expected
# time_cds
tcds = {"Days": np.array(1), "Milliseconds": np.array(2), "Microseconds": np.array(3)}
- expected = datetime(1958, 1, 2, 0, 0, 0, 2003)
+ expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2003)
assert timecds2datetime(tcds) == expected
# time_cds_expanded
tcds = {"Days": np.array(1), "Milliseconds": np.array(2), "Microseconds": np.array(3),
"Nanoseconds": np.array(4)}
- expected = datetime(1958, 1, 2, 0, 0, 0, 2003)
+ expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2003)
assert timecds2datetime(tcds) == expected
@@ -62,17 +63,17 @@ def test_fun(self):
"""Test function for TestMakeTimeCdsRecarray."""
# time_cds_short
tcds = np.array([(1, 2)], dtype=np.dtype(time_cds_short))
- expected = datetime(1958, 1, 2, 0, 0, 0, 2000)
+ expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2000)
assert timecds2datetime(tcds) == expected
# time_cds
tcds = np.array([(1, 2, 3)], dtype=np.dtype(time_cds))
- expected = datetime(1958, 1, 2, 0, 0, 0, 2003)
+ expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2003)
assert timecds2datetime(tcds) == expected
# time_cds_expanded
tcds = np.array([(1, 2, 3, 4)], dtype=np.dtype(time_cds_expanded))
- expected = datetime(1958, 1, 2, 0, 0, 0, 2003)
+ expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2003)
assert timecds2datetime(tcds) == expected
@@ -97,9 +98,9 @@ def test_timestamps(self):
(21916, 42309417, 918, 443))]]], dtype=pat_dt)
expected = {
- "TrueRepeatCycleStart": datetime(2018, 1, 2, 11, 30, 9, 544305),
- "PlanForwardScanEnd": datetime(2018, 1, 2, 11, 42, 40, 340660),
- "PlannedRepeatCycleEnd": datetime(2018, 1, 2, 11, 45, 9, 417918)
+ "TrueRepeatCycleStart": dt.datetime(2018, 1, 2, 11, 30, 9, 544305),
+ "PlanForwardScanEnd": dt.datetime(2018, 1, 2, 11, 42, 40, 340660),
+ "PlannedRepeatCycleEnd": dt.datetime(2018, 1, 2, 11, 45, 9, 417918)
}
assert recarray2dict(pat) == expected
diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py
index cd347ce07e..65e21edd38 100644
--- a/satpy/tests/reader_tests/test_generic_image.py
+++ b/satpy/tests/reader_tests/test_generic_image.py
@@ -32,14 +32,14 @@ class TestGenericImage(unittest.TestCase):
def setUp(self):
"""Create temporary images to test on."""
+ import datetime as dt
import tempfile
- from datetime import datetime
from pyresample.geometry import AreaDefinition
from satpy.scene import Scene
- self.date = datetime(2018, 1, 1)
+ self.date = dt.datetime(2018, 1, 1)
# Create area definition
pcs_id = "ETRS89 / LAEA Europe"
diff --git a/satpy/tests/reader_tests/test_ghrsst_l2.py b/satpy/tests/reader_tests/test_ghrsst_l2.py
index 66c030e91d..b4cabccfa4 100644
--- a/satpy/tests/reader_tests/test_ghrsst_l2.py
+++ b/satpy/tests/reader_tests/test_ghrsst_l2.py
@@ -17,9 +17,9 @@
# satpy. If not, see .
"""Module for testing the satpy.readers.ghrsst_l2 module."""
+import datetime as dt
import os
import tarfile
-from datetime import datetime
from pathlib import Path
import numpy as np
@@ -124,7 +124,7 @@ def test_get_dataset(self, tmp_path):
def test_get_sensor(self, tmp_path):
"""Test retrieval of the sensor name from the netCDF file."""
- dt_valid = datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z
+ dt_valid = dt.datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z
filename_info = {"field_type": "NARSST", "generating_centre": "FRA_",
"satid": "NOAA20_", "valid_time": dt_valid}
@@ -136,9 +136,9 @@ def test_get_sensor(self, tmp_path):
def test_get_start_and_end_times(self, tmp_path):
"""Test retrieval of the sensor name from the netCDF file."""
- dt_valid = datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z
- good_start_time = datetime(2022, 3, 21, 11, 26, 40) # 20220321T112640Z
- good_stop_time = datetime(2022, 3, 21, 14, 57, 11) # 20220321T145711Z
+ dt_valid = dt.datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z
+ good_start_time = dt.datetime(2022, 3, 21, 11, 26, 40) # 20220321T112640Z
+ good_stop_time = dt.datetime(2022, 3, 21, 14, 57, 11) # 20220321T145711Z
filename_info = {"field_type": "NARSST", "generating_centre": "FRA_",
"satid": "NOAA20_", "valid_time": dt_valid}
diff --git a/satpy/tests/reader_tests/test_glm_l2.py b/satpy/tests/reader_tests/test_glm_l2.py
index 81636ba630..8ee53e29a2 100644
--- a/satpy/tests/reader_tests/test_glm_l2.py
+++ b/satpy/tests/reader_tests/test_glm_l2.py
@@ -128,9 +128,9 @@ def setUp(self, xr_):
def test_basic_attributes(self):
"""Test getting basic file attributes."""
- from datetime import datetime
- assert self.reader.start_time == datetime(2017, 9, 20, 17, 30, 40)
- assert self.reader.end_time == datetime(2017, 9, 20, 17, 41, 17)
+ import datetime as dt
+ assert self.reader.start_time == dt.datetime(2017, 9, 20, 17, 30, 40)
+ assert self.reader.end_time == dt.datetime(2017, 9, 20, 17, 41, 17)
def test_get_dataset(self):
"""Test the get_dataset method."""
diff --git a/satpy/tests/reader_tests/test_goci2_l2_nc.py b/satpy/tests/reader_tests/test_goci2_l2_nc.py
index 865ac3184e..e8bdae0e58 100644
--- a/satpy/tests/reader_tests/test_goci2_l2_nc.py
+++ b/satpy/tests/reader_tests/test_goci2_l2_nc.py
@@ -15,8 +15,10 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Module for testing the satpy.readers.goci2_l2_nc module."""
-from datetime import datetime
+
+import datetime as dt
import numpy as np
import pytest
@@ -30,8 +32,8 @@
# - tmp_path_factory
-start_time = datetime(2024, 2, 14, 2, 32, 27)
-end_time = datetime(2024, 2, 14, 2, 33, 31)
+start_time = dt.datetime(2024, 2, 14, 2, 32, 27)
+end_time = dt.datetime(2024, 2, 14, 2, 33, 31)
global_attrs = {
"observation_start_time": start_time.strftime("%Y%m%d_%H%M%S"),
diff --git a/satpy/tests/reader_tests/test_gpm_imerg.py b/satpy/tests/reader_tests/test_gpm_imerg.py
index a75e59863f..d038d0f0d7 100644
--- a/satpy/tests/reader_tests/test_gpm_imerg.py
+++ b/satpy/tests/reader_tests/test_gpm_imerg.py
@@ -14,12 +14,12 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
-"""Unittests for GPM IMERG reader."""
+"""Unittests for GPM IMERG reader."""
+import datetime as dt
import os
import unittest
-from datetime import datetime
from unittest import mock
import dask.array as da
@@ -127,8 +127,8 @@ def test_load_data(self):
assert reader.file_handlers
res = reader.load(["IRprecipitation"])
assert 1 == len(res)
- assert res["IRprecipitation"].start_time == datetime(2020, 1, 31, 23, 30, 0)
- assert res["IRprecipitation"].end_time == datetime(2020, 1, 31, 23, 59, 59)
+ assert res["IRprecipitation"].start_time == dt.datetime(2020, 1, 31, 23, 30, 0)
+ assert res["IRprecipitation"].end_time == dt.datetime(2020, 1, 31, 23, 59, 59)
assert res["IRprecipitation"].resolution == 0.1
assert res["IRprecipitation"].area.width == 3600
assert res["IRprecipitation"].area.height == 1800
diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py
index 12317f11f1..dd57c12fdd 100644
--- a/satpy/tests/reader_tests/test_hrit_base.py
+++ b/satpy/tests/reader_tests/test_hrit_base.py
@@ -18,10 +18,10 @@
"""The HRIT base reader tests package."""
import bz2
+import datetime as dt
import gzip
import os
import unittest
-from datetime import datetime, timedelta
from tempfile import NamedTemporaryFile, gettempdir
from unittest import mock
@@ -189,7 +189,7 @@ def setup_method(self, method):
with mock.patch.object(HRITFileHandler, "_get_hd", new=new_get_hd):
self.reader = HRITFileHandler("filename",
{"platform_shortname": "MSG3",
- "start_time": datetime(2016, 3, 3, 0, 0)},
+ "start_time": dt.datetime(2016, 3, 3, 0, 0)},
{"filetype": "info"},
[mock.MagicMock(), mock.MagicMock(),
mock.MagicMock()])
@@ -269,9 +269,9 @@ def test_read_band_gzip_stream(self, stub_gzipped_hrit_file):
def test_start_end_time(self):
"""Test reading and converting start/end time."""
- assert self.reader.start_time == datetime(2016, 3, 3, 0, 0)
+ assert self.reader.start_time == dt.datetime(2016, 3, 3, 0, 0)
assert self.reader.start_time == self.reader.observation_start_time
- assert self.reader.end_time == datetime(2016, 3, 3, 0, 0) + timedelta(minutes=15)
+ assert self.reader.end_time == dt.datetime(2016, 3, 3, 0, 0) + dt.timedelta(minutes=15)
assert self.reader.end_time == self.reader.observation_end_time
@@ -292,7 +292,7 @@ def test_read_band_filepath(self, stub_compressed_hrit_file):
with mock.patch.object(HRITFileHandler, "_get_hd", side_effect=new_get_hd, autospec=True) as get_hd:
self.reader = HRITFileHandler(filename,
{"platform_shortname": "MSG3",
- "start_time": datetime(2016, 3, 3, 0, 0)},
+ "start_time": dt.datetime(2016, 3, 3, 0, 0)},
{"filetype": "info"},
[mock.MagicMock(), mock.MagicMock(),
mock.MagicMock()])
diff --git a/satpy/tests/reader_tests/test_hsaf_grib.py b/satpy/tests/reader_tests/test_hsaf_grib.py
index da0f6dd86b..296bb921c4 100644
--- a/satpy/tests/reader_tests/test_hsaf_grib.py
+++ b/satpy/tests/reader_tests/test_hsaf_grib.py
@@ -17,9 +17,9 @@
# satpy. If not, see .
"""Module for testing the satpy.readers.grib module."""
+import datetime as dt
import sys
import unittest
-from datetime import datetime
from unittest import mock
import numpy as np
@@ -132,7 +132,7 @@ def tearDown(self):
def test_init(self, pg):
"""Test the init function, ensure that the correct dates and metadata are returned."""
pg.open.return_value = FakeGRIB()
- correct_dt = datetime(2019, 6, 3, 16, 45, 0)
+ correct_dt = dt.datetime(2019, 6, 3, 16, 45, 0)
from satpy.readers.hsaf_grib import HSAFFileHandler
fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock())
assert fh._analysis_time == correct_dt
diff --git a/satpy/tests/reader_tests/test_hsaf_h5.py b/satpy/tests/reader_tests/test_hsaf_h5.py
index 49658e6727..bdd523ad0d 100644
--- a/satpy/tests/reader_tests/test_hsaf_h5.py
+++ b/satpy/tests/reader_tests/test_hsaf_h5.py
@@ -1,6 +1,7 @@
"""Tests for the H-SAF H5 reader."""
+
+import datetime as dt
import os
-from datetime import datetime
import h5py
import numpy as np
@@ -50,7 +51,7 @@ def test_hsaf_sc_datetime(sc_h5_file):
loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file)
fname = os.path.basename(sc_h5_file)
dtstr = fname.split("_")[1]
- obs_time = datetime.strptime(dtstr, "%Y%m%d")
+ obs_time = dt.datetime.strptime(dtstr, "%Y%m%d")
assert loaded_scene["SC"].attrs["data_time"] == obs_time
diff --git a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py
index f90da00613..999fb50045 100644
--- a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py
+++ b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py
@@ -509,7 +509,7 @@ def test_reading_attrs_nsoas(self):
def test_properties(self):
"""Test platform_name."""
- from datetime import datetime
+ import datetime as dt
from satpy.readers import load_reader
filenames = [
@@ -521,5 +521,5 @@ def test_properties(self):
# Make sure we have some files
res = reader.load(["wvc_lon"])
assert res["wvc_lon"].platform_name == "HY-2B"
- assert res["wvc_lon"].start_time == datetime(2020, 3, 26, 1, 11, 7)
- assert res["wvc_lon"].end_time == datetime(2020, 3, 26, 2, 55, 40)
+ assert res["wvc_lon"].start_time == dt.datetime(2020, 3, 26, 1, 11, 7)
+ assert res["wvc_lon"].end_time == dt.datetime(2020, 3, 26, 2, 55, 40)
diff --git a/satpy/tests/reader_tests/test_ici_l1b_nc.py b/satpy/tests/reader_tests/test_ici_l1b_nc.py
index a5909b249d..ab8bad2527 100644
--- a/satpy/tests/reader_tests/test_ici_l1b_nc.py
+++ b/satpy/tests/reader_tests/test_ici_l1b_nc.py
@@ -15,13 +15,14 @@
#
# You should have received a copy of the GNU General Public License
# along with satpy. If not, see .
+
"""The ici_l1b_nc reader tests package.
This version tests the reader for ICI test data as per PFS V3A.
"""
-from datetime import datetime
+import datetime as dt
from unittest.mock import patch
import numpy as np
@@ -50,13 +51,13 @@ def reader(fake_file):
filename=fake_file,
filename_info={
"sensing_start_time": (
- datetime.fromisoformat("2000-01-01T01:00:00")
+ dt.datetime.fromisoformat("2000-01-01T01:00:00")
),
"sensing_end_time": (
- datetime.fromisoformat("2000-01-01T02:00:00")
+ dt.datetime.fromisoformat("2000-01-01T02:00:00")
),
"creation_time": (
- datetime.fromisoformat("2000-01-01T03:00:00")
+ dt.datetime.fromisoformat("2000-01-01T03:00:00")
),
},
filetype_info={
@@ -217,11 +218,11 @@ class TestIciL1bNCFileHandler:
def test_start_time(self, reader):
"""Test start time."""
- assert reader.start_time == datetime(2000, 1, 2, 3, 4, 5)
+ assert reader.start_time == dt.datetime(2000, 1, 2, 3, 4, 5)
def test_end_time(self, reader):
"""Test end time."""
- assert reader.end_time == datetime(2000, 1, 2, 4, 5, 6)
+ assert reader.end_time == dt.datetime(2000, 1, 2, 4, 5, 6)
def test_sensor(self, reader):
"""Test sensor."""
@@ -517,13 +518,13 @@ def test_get_global_attributes(self, reader):
attributes = reader._get_global_attributes()
assert attributes == {
"filename": reader.filename,
- "start_time": datetime(2000, 1, 2, 3, 4, 5),
- "end_time": datetime(2000, 1, 2, 4, 5, 6),
+ "start_time": dt.datetime(2000, 1, 2, 3, 4, 5),
+ "end_time": dt.datetime(2000, 1, 2, 4, 5, 6),
"spacecraft_name": "SGB",
"ssp_lon": None,
"sensor": "ICI",
- "filename_start_time": datetime(2000, 1, 1, 1, 0),
- "filename_end_time": datetime(2000, 1, 1, 2, 0),
+ "filename_start_time": dt.datetime(2000, 1, 1, 1, 0),
+ "filename_end_time": dt.datetime(2000, 1, 1, 2, 0),
"platform_name": "SGB",
"quality_group": {
"duration_of_product": np.array(1000., dtype=np.float32),
diff --git a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py
index 9fa7af224d..0cefac2a2f 100644
--- a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py
+++ b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py
@@ -1,6 +1,7 @@
"""Tests for the Insat3D reader."""
+
+import datetime as dt
import os
-from datetime import datetime
import dask.array as da
import h5netcdf
@@ -72,8 +73,8 @@
"ALBEDO": "%",
"TEMP": "K"}
-start_time = datetime(2009, 6, 9, 9, 0)
-end_time = datetime(2009, 6, 9, 9, 30)
+start_time = dt.datetime(2009, 6, 9, 9, 0)
+end_time = dt.datetime(2009, 6, 9, 9, 30)
subsatellite_longitude = 82
time_pattern = "%d-%b-%YT%H:%M:%S"
diff --git a/satpy/tests/reader_tests/test_li_l2_nc.py b/satpy/tests/reader_tests/test_li_l2_nc.py
index 5e9d0ff563..05eb37b4e4 100644
--- a/satpy/tests/reader_tests/test_li_l2_nc.py
+++ b/satpy/tests/reader_tests/test_li_l2_nc.py
@@ -12,9 +12,11 @@
#
# You should have received a copy of the GNU General Public License
# along with satpy. If not, see .
+
"""Unit tests on the LI L2 reader using the conventional mock constructed context."""
+
+import datetime as dt
import os
-from datetime import datetime
from unittest import mock
import numpy as np
@@ -405,7 +407,7 @@ def test_report_datetimes(self, filetype_infos):
assert dset.values.dtype == np.dtype("datetime64[ns]")
# The default epoch_time should be 1.234 seconds after epoch:
- ref_time = np.datetime64(datetime(2000, 1, 1, 0, 0, 1, 234000))
+ ref_time = np.datetime64(dt.datetime(2000, 1, 1, 0, 0, 1, 234000))
assert np.all(dset.values == ref_time)
# Check time_offset:
diff --git a/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py b/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py
index 4083f7de00..be0bc12ee1 100644
--- a/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py
+++ b/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py
@@ -18,10 +18,10 @@
# Satpy. If not, see .
"""Module for testing the satpy.readers.tropomi_l2 module."""
+import datetime as dt
import itertools
import os
import unittest
-from datetime import datetime
from unittest import mock
import numpy as np
@@ -31,7 +31,7 @@
DEFAULT_FILE_DTYPE = np.float32
DEFAULT_FILE_SHAPE = (721, 1440)
-DEFAULT_DATE = datetime(2019, 6, 19, 13, 0)
+DEFAULT_DATE = dt.datetime(2019, 6, 19, 13, 0)
DEFAULT_LAT = np.linspace(-90, 90, DEFAULT_FILE_SHAPE[0], dtype=DEFAULT_FILE_DTYPE)
DEFAULT_LON = np.linspace(-180, 180, DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE)
DEFAULT_FILE_FLOAT_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1],
diff --git a/satpy/tests/reader_tests/test_mimic_TPW2_nc.py b/satpy/tests/reader_tests/test_mimic_TPW2_nc.py
index 63214b0477..29857afbed 100644
--- a/satpy/tests/reader_tests/test_mimic_TPW2_nc.py
+++ b/satpy/tests/reader_tests/test_mimic_TPW2_nc.py
@@ -16,11 +16,12 @@
#
# You should have received a copy of the GNU General Public License along with
# Satpy. If not, see .
+
"""Module for testing the satpy.readers.tropomi_l2 module."""
+import datetime as dt
import os
import unittest
-from datetime import datetime
from unittest import mock
import numpy as np
@@ -43,8 +44,8 @@ class FakeNetCDF4FileHandlerMimic(FakeNetCDF4FileHandler):
def get_test_content(self, filename, filename_info, filetype_info):
"""Mimic reader input file content."""
from xarray import DataArray
- dt_s = filename_info.get("start_time", datetime(2019, 6, 19, 13, 0))
- dt_e = filename_info.get("end_time", datetime(2019, 6, 19, 13, 0))
+ dt_s = filename_info.get("start_time", dt.datetime(2019, 6, 19, 13, 0))
+ dt_e = filename_info.get("end_time", dt.datetime(2019, 6, 19, 13, 0))
if filetype_info["file_type"] == "mimicTPW2_comp":
file_content = {
diff --git a/satpy/tests/reader_tests/test_mirs.py b/satpy/tests/reader_tests/test_mirs.py
index b857147e47..e4e547bdf8 100644
--- a/satpy/tests/reader_tests/test_mirs.py
+++ b/satpy/tests/reader_tests/test_mirs.py
@@ -16,11 +16,13 @@
#
# You should have received a copy of the GNU General Public License along with
# Satpy. If not, see .
+
"""Module for testing the satpy.readers.mirs module."""
+
from __future__ import annotations
+import datetime as dt
import os
-from datetime import datetime
from unittest import mock
import numpy as np
@@ -45,7 +47,7 @@
N_SCANLINE = 100
DEFAULT_FILE_DTYPE = np.float32
DEFAULT_2D_SHAPE = (N_SCANLINE, N_FOV)
-DEFAULT_DATE = datetime(2019, 6, 19, 13, 0)
+DEFAULT_DATE = dt.datetime(2019, 6, 19, 13, 0)
DEFAULT_LAT = np.linspace(23.09356, 36.42844, N_SCANLINE * N_FOV,
dtype=DEFAULT_FILE_DTYPE)
DEFAULT_LON = np.linspace(127.6879, 144.5284, N_SCANLINE * N_FOV,
@@ -71,8 +73,8 @@
PLATFORM = {"M2": "metop-a", "NPP": "npp", "GPM": "gpm"}
SENSOR = {"m2": "amsu-mhs", "npp": "atms", "gpm": "GPI"}
-START_TIME = datetime(2017, 2, 6, 16, 1, 0)
-END_TIME = datetime(2017, 2, 6, 16, 7, 0)
+START_TIME = dt.datetime(2017, 2, 6, 16, 1, 0)
+END_TIME = dt.datetime(2017, 2, 6, 16, 7, 0)
def fake_coeff_from_fn(fn):
diff --git a/satpy/tests/reader_tests/test_mws_l1b_nc.py b/satpy/tests/reader_tests/test_mws_l1b_nc.py
index 2d227822a4..52a894bd00 100644
--- a/satpy/tests/reader_tests/test_mws_l1b_nc.py
+++ b/satpy/tests/reader_tests/test_mws_l1b_nc.py
@@ -19,8 +19,8 @@
"""
+import datetime as dt
import logging
-from datetime import datetime
from unittest.mock import patch
import numpy as np
@@ -50,13 +50,13 @@ def reader(fake_file):
filename=fake_file,
filename_info={
"start_time": (
- datetime.fromisoformat("2000-01-01T01:00:00")
+ dt.datetime.fromisoformat("2000-01-01T01:00:00")
),
"end_time": (
- datetime.fromisoformat("2000-01-01T02:00:00")
+ dt.datetime.fromisoformat("2000-01-01T02:00:00")
),
"creation_time": (
- datetime.fromisoformat("2000-01-01T03:00:00")
+ dt.datetime.fromisoformat("2000-01-01T03:00:00")
),
},
filetype_info={
@@ -207,11 +207,11 @@ class TestMwsL1bNCFileHandler:
def test_start_time(self, reader):
"""Test acquiring the start time."""
- assert reader.start_time == datetime(2000, 1, 2, 3, 4, 5)
+ assert reader.start_time == dt.datetime(2000, 1, 2, 3, 4, 5)
def test_end_time(self, reader):
"""Test acquiring the end time."""
- assert reader.end_time == datetime(2000, 1, 2, 4, 5, 6)
+ assert reader.end_time == dt.datetime(2000, 1, 2, 4, 5, 6)
def test_sensor(self, reader):
"""Test sensor."""
@@ -356,12 +356,12 @@ def test_get_global_attributes(self, reader):
attributes = reader._get_global_attributes()
assert attributes == {
"filename": reader.filename,
- "start_time": datetime(2000, 1, 2, 3, 4, 5),
- "end_time": datetime(2000, 1, 2, 4, 5, 6),
+ "start_time": dt.datetime(2000, 1, 2, 3, 4, 5),
+ "end_time": dt.datetime(2000, 1, 2, 4, 5, 6),
"spacecraft_name": "Metop-SG-A1",
"sensor": "MWS",
- "filename_start_time": datetime(2000, 1, 1, 1, 0),
- "filename_end_time": datetime(2000, 1, 1, 2, 0),
+ "filename_start_time": dt.datetime(2000, 1, 1, 1, 0),
+ "filename_end_time": dt.datetime(2000, 1, 1, 2, 0),
"platform_name": "Metop-SG-A1",
"quality_group": {
"duration_of_product": np.array(5944., dtype=np.float32),
diff --git a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py
index 90b9d4432f..0293a88fe3 100644
--- a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py
+++ b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py
@@ -16,10 +16,11 @@
#
# You should have received a copy of the GNU General Public License along with
# Satpy. If not, see .
+
"""Module for testing the satpy.readers.oceancolorcci_l3_nc module."""
+import datetime as dt
import os
-from datetime import datetime
import numpy as np
import pytest
@@ -243,12 +244,12 @@ def test_get_dataset_5d_allprods(self, fake_dataset, fake_file_dict):
def test_start_time(self, fake_file_dict):
"""Test start time property."""
reader = self._create_reader_for_resolutions([fake_file_dict["k490_1d"]])
- assert reader.start_time == datetime(2021, 8, 1, 0, 0, 0)
+ assert reader.start_time == dt.datetime(2021, 8, 1, 0, 0, 0)
def test_end_time(self, fake_file_dict):
"""Test end time property."""
reader = self._create_reader_for_resolutions([fake_file_dict["iop_8d"]])
- assert reader.end_time == datetime(2021, 8, 31, 23, 59, 0)
+ assert reader.end_time == dt.datetime(2021, 8, 31, 23, 59, 0)
def test_correct_dimnames(self, fake_file_dict):
"""Check that the loaded dimension names are correct."""
diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py
index 80fb581db7..106687a509 100644
--- a/satpy/tests/reader_tests/test_osisaf_l3.py
+++ b/satpy/tests/reader_tests/test_osisaf_l3.py
@@ -15,8 +15,8 @@
# satpy. If not, see .
"""Module for testing the satpy.readers.osisaf_l3 module."""
+import datetime as dt
import os
-from datetime import datetime
import numpy as np
import pytest
@@ -206,8 +206,8 @@ def setup_method(self):
super().setup_method(tester="ice")
self.filename_info = {"grid": "ease"}
self.filetype_info = {"file_type": "osi_sea_ice_conc"}
- self.good_start_time = datetime(2022, 12, 15, 0, 0, 0)
- self.good_stop_time = datetime(2022, 12, 16, 0, 0, 0)
+ self.good_start_time = dt.datetime(2022, 12, 15, 0, 0, 0)
+ self.good_stop_time = dt.datetime(2022, 12, 16, 0, 0, 0)
self.varname = "ice_conc"
self.stdname = "sea_ice_area_fraction"
self.fillv = -999
@@ -260,8 +260,8 @@ def setup_method(self):
super().setup_method(tester="flux_stere")
self.filename_info = {"grid": "polstere"}
self.filetype_info = {"file_type": "osi_radflux_stere"}
- self.good_start_time = datetime(2023, 10, 10, 0, 0, 0)
- self.good_stop_time = datetime(2023, 10, 10, 23, 59, 59)
+ self.good_start_time = dt.datetime(2023, 10, 10, 0, 0, 0)
+ self.good_stop_time = dt.datetime(2023, 10, 10, 23, 59, 59)
self.varname = "ssi"
self.stdname = "surface_downwelling_shortwave_flux_in_air"
self.fillv = -999.99
@@ -295,8 +295,8 @@ def setup_method(self):
super().setup_method(tester="flux_geo")
self.filename_info = {}
self.filetype_info = {"file_type": "osi_radflux_grid"}
- self.good_start_time = datetime(2022, 12, 28, 18, 30, 0)
- self.good_stop_time = datetime(2022, 12, 28, 19, 30, 0)
+ self.good_start_time = dt.datetime(2022, 12, 28, 18, 30, 0)
+ self.good_stop_time = dt.datetime(2022, 12, 28, 19, 30, 0)
self.varname = "ssi"
self.stdname = "surface_downwelling_shortwave_flux_in_air"
self.fillv = -32768
@@ -332,8 +332,8 @@ def setup_method(self):
super().setup_method(tester="sst")
self.filename_info = {}
self.filetype_info = {"file_type": "osi_sst"}
- self.good_start_time = datetime(2022, 12, 15, 0, 0, 0)
- self.good_stop_time = datetime(2022, 12, 16, 0, 0, 0)
+ self.good_start_time = dt.datetime(2022, 12, 15, 0, 0, 0)
+ self.good_stop_time = dt.datetime(2022, 12, 16, 0, 0, 0)
self.varname = "surface_temperature"
self.stdname = "sea_ice_surface_temperature"
self.fillv = -32768
diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py
index f94ae55cc0..fb4fd6831b 100644
--- a/satpy/tests/reader_tests/test_satpy_cf_nc.py
+++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py
@@ -15,9 +15,11 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Tests for the CF reader."""
+
+import datetime as dt
import warnings
-from datetime import datetime
import numpy as np
import pytest
@@ -66,8 +68,8 @@ def _create_test_netcdf(filename, resolution=742):
"solar_zenith_angle": solar_zenith_angle_i
}
- tstart = datetime(2019, 4, 1, 12, 0)
- tend = datetime(2019, 4, 1, 12, 15)
+ tstart = dt.datetime(2019, 4, 1, 12, 0)
+ tend = dt.datetime(2019, 4, 1, 12, 15)
common_attrs = {
"start_time": tstart,
"end_time": tend,
@@ -107,12 +109,12 @@ def area():
def common_attrs(area):
"""Get common dataset attributes."""
return {
- "start_time": datetime(2019, 4, 1, 12, 0, 0, 123456),
- "end_time": datetime(2019, 4, 1, 12, 15),
+ "start_time": dt.datetime(2019, 4, 1, 12, 0, 0, 123456),
+ "end_time": dt.datetime(2019, 4, 1, 12, 15),
"platform_name": "tirosn",
"orbit_number": 99999,
"area": area,
- "my_timestamp": datetime(2000, 1, 1)
+ "my_timestamp": dt.datetime(2000, 1, 1)
}
@@ -263,7 +265,7 @@ def cf_scene(datasets, common_attrs):
@pytest.fixture()
def nc_filename(tmp_path):
"""Create an nc filename for viirs m band."""
- now = datetime.utcnow()
+ now = dt.datetime.utcnow()
filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-mband-20201007075915-20201007080744.nc"
return str(tmp_path / filename)
@@ -271,7 +273,7 @@ def nc_filename(tmp_path):
@pytest.fixture()
def nc_filename_i(tmp_path):
"""Create an nc filename for viirs i band."""
- now = datetime.utcnow()
+ now = dt.datetime.utcnow()
filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-iband-20201007075915-20201007080744.nc"
return str(tmp_path / filename)
diff --git a/satpy/tests/reader_tests/test_scmi.py b/satpy/tests/reader_tests/test_scmi.py
index 13c74a7d5c..12fbb7dc2a 100644
--- a/satpy/tests/reader_tests/test_scmi.py
+++ b/satpy/tests/reader_tests/test_scmi.py
@@ -103,11 +103,11 @@ def setUp(self, xr_):
def test_basic_attributes(self):
"""Test getting basic file attributes."""
- from datetime import datetime
+ import datetime as dt
from satpy.tests.utils import make_dataid
- assert self.reader.start_time == datetime(2017, 7, 29, 12, 0, 0, 0)
- assert self.reader.end_time == datetime(2017, 7, 29, 12, 0, 0, 0)
+ assert self.reader.start_time == dt.datetime(2017, 7, 29, 12, 0, 0, 0)
+ assert self.reader.end_time == dt.datetime(2017, 7, 29, 12, 0, 0, 0)
assert self.reader.get_shape(make_dataid(name="C05"), {}) == (2, 5)
def test_data_load(self):
diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py
index a07bb799bc..f705796521 100644
--- a/satpy/tests/reader_tests/test_seviri_base.py
+++ b/satpy/tests/reader_tests/test_seviri_base.py
@@ -17,8 +17,8 @@
# satpy. If not, see .
"""Test the MSG common (native and hrit format) functionionalities."""
+import datetime as dt
import unittest
-from datetime import datetime, timedelta
import dask.array as da
import numpy as np
@@ -117,18 +117,18 @@ def test_pad_data_vertically_bad_shape(self):
def observation_start_time(self):
"""Get scan start timestamp for testing."""
- return datetime(2023, 3, 20, 15, 0, 10, 691000)
+ return dt.datetime(2023, 3, 20, 15, 0, 10, 691000)
def observation_end_time(self):
"""Get scan end timestamp for testing."""
- return datetime(2023, 3, 20, 15, 12, 43, 843000)
+ return dt.datetime(2023, 3, 20, 15, 12, 43, 843000)
def test_round_nom_time(self):
"""Test the rouding of start/end_time."""
- assert round_nom_time(dt=self.observation_start_time(),
- time_delta=timedelta(minutes=15)) == datetime(2023, 3, 20, 15, 0)
- assert round_nom_time(dt=self.observation_end_time(),
- time_delta=timedelta(minutes=15)) == datetime(2023, 3, 20, 15, 15)
+ assert round_nom_time(date=self.observation_start_time(),
+ time_delta=dt.timedelta(minutes=15)) == dt.datetime(2023, 3, 20, 15, 0)
+ assert round_nom_time(date=self.observation_end_time(),
+ time_delta=dt.timedelta(minutes=15)) == dt.datetime(2023, 3, 20, 15, 15)
@staticmethod
def test_pad_data_horizontally():
@@ -177,13 +177,13 @@ def test_get_padding_area_int():
ORBIT_POLYNOMIALS = {
"StartTime": np.array([
[
- datetime(2006, 1, 1, 6), datetime(2006, 1, 1, 12),
- datetime(2006, 1, 1, 18), datetime(1958, 1, 1, 0)]
+ dt.datetime(2006, 1, 1, 6), dt.datetime(2006, 1, 1, 12),
+ dt.datetime(2006, 1, 1, 18), dt.datetime(1958, 1, 1, 0)]
]),
"EndTime": np.array([
[
- datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18),
- datetime(2006, 1, 2, 0), datetime(1958, 1, 1, 0)
+ dt.datetime(2006, 1, 1, 12), dt.datetime(2006, 1, 1, 18),
+ dt.datetime(2006, 1, 2, 0), dt.datetime(1958, 1, 1, 0)
]
]),
"X": [np.zeros(8),
@@ -212,18 +212,18 @@ def test_get_padding_area_int():
# 01-03: Overlap (10:00 - 13:00)
"StartTime": np.array([
[
- datetime(2005, 12, 31, 10), datetime(2005, 12, 31, 12),
- datetime(2006, 1, 1, 10), datetime(2006, 1, 1, 13),
- datetime(2006, 1, 2, 0), datetime(2006, 1, 2, 18),
- datetime(2006, 1, 3, 6), datetime(2006, 1, 3, 10),
+ dt.datetime(2005, 12, 31, 10), dt.datetime(2005, 12, 31, 12),
+ dt.datetime(2006, 1, 1, 10), dt.datetime(2006, 1, 1, 13),
+ dt.datetime(2006, 1, 2, 0), dt.datetime(2006, 1, 2, 18),
+ dt.datetime(2006, 1, 3, 6), dt.datetime(2006, 1, 3, 10),
]
]),
"EndTime": np.array([
[
- datetime(2005, 12, 31, 12), datetime(2005, 12, 31, 18),
- datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18),
- datetime(2006, 1, 2, 4), datetime(2006, 1, 2, 22),
- datetime(2006, 1, 3, 13), datetime(2006, 1, 3, 18),
+ dt.datetime(2005, 12, 31, 12), dt.datetime(2005, 12, 31, 18),
+ dt.datetime(2006, 1, 1, 12), dt.datetime(2006, 1, 1, 18),
+ dt.datetime(2006, 1, 2, 4), dt.datetime(2006, 1, 2, 22),
+ dt.datetime(2006, 1, 3, 13), dt.datetime(2006, 1, 3, 18),
]
]),
"X": [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0],
@@ -233,12 +233,12 @@ def test_get_padding_area_int():
ORBIT_POLYNOMIALS_INVALID = {
"StartTime": np.array([
[
- datetime(1958, 1, 1), datetime(1958, 1, 1)
+ dt.datetime(1958, 1, 1), dt.datetime(1958, 1, 1)
]
]),
"EndTime": np.array([
[
- datetime(1958, 1, 1), datetime(1958, 1, 1)
+ dt.datetime(1958, 1, 1), dt.datetime(1958, 1, 1)
]
]),
"X": [1, 2],
@@ -254,8 +254,8 @@ class TestSatellitePosition:
def orbit_polynomial(self):
"""Get an orbit polynomial for testing."""
return OrbitPolynomial(
- start_time=datetime(2006, 1, 1, 12),
- end_time=datetime(2006, 1, 1, 18),
+ start_time=dt.datetime(2006, 1, 1, 12),
+ end_time=dt.datetime(2006, 1, 1, 18),
coefs=(
np.array([8.41607082e+04, 2.94319260e+00, 9.86748617e-01,
-2.70135453e-01, -3.84364650e-02, 8.48718433e-03,
@@ -272,7 +272,7 @@ def orbit_polynomial(self):
@pytest.fixture()
def time(self):
"""Get scan timestamp for testing."""
- return datetime(2006, 1, 1, 12, 15, 9, 304888)
+ return dt.datetime(2006, 1, 1, 12, 15, 9, 304888)
def test_eval_polynomial(self, orbit_polynomial, time):
"""Test getting the position in cartesian coordinates."""
@@ -305,7 +305,7 @@ class TestOrbitPolynomialFinder:
# Contiguous validity intervals (that's the norm)
(
ORBIT_POLYNOMIALS_SYNTH,
- datetime(2005, 12, 31, 12, 15),
+ dt.datetime(2005, 12, 31, 12, 15),
OrbitPolynomial(
coefs=(2.0, 2.1, 2.2),
start_time=np.datetime64("2005-12-31 12:00"),
@@ -316,7 +316,7 @@ class TestOrbitPolynomialFinder:
# not too far away
(
ORBIT_POLYNOMIALS_SYNTH,
- datetime(2006, 1, 1, 12, 15),
+ dt.datetime(2006, 1, 1, 12, 15),
OrbitPolynomial(
coefs=(3.0, 3.1, 3.2),
start_time=np.datetime64("2006-01-01 10:00"),
@@ -326,7 +326,7 @@ class TestOrbitPolynomialFinder:
# Overlapping intervals
(
ORBIT_POLYNOMIALS_SYNTH,
- datetime(2006, 1, 3, 12, 15),
+ dt.datetime(2006, 1, 3, 12, 15),
OrbitPolynomial(
coefs=(8.0, 8.1, 8.2),
start_time=np.datetime64("2006-01-03 10:00"),
@@ -351,9 +351,9 @@ def test_get_orbit_polynomial(self, orbit_polynomials, time,
[
# No interval enclosing the given timestamp and closest interval
# too far away
- (ORBIT_POLYNOMIALS_SYNTH, datetime(2006, 1, 2, 12, 15)),
+ (ORBIT_POLYNOMIALS_SYNTH, dt.datetime(2006, 1, 2, 12, 15)),
# No valid polynomials at all
- (ORBIT_POLYNOMIALS_INVALID, datetime(2006, 1, 1, 12, 15))
+ (ORBIT_POLYNOMIALS_INVALID, dt.datetime(2006, 1, 1, 12, 15))
]
)
def test_get_orbit_polynomial_exceptions(self, orbit_polynomials, time):
@@ -378,14 +378,14 @@ def test_get_meirink_slope_epoch(self, platform_id, channel_name):
assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS["2023"][platform_id][channel_name][0]/1000.
@pytest.mark.parametrize(("platform_id", "time", "expected"), [
- (321, datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]),
- (321, datetime(2010, 12, 31, 0, 0), [0.0258479563, 0.0322386887, 0.022895110500000003]),
- (322, datetime(2010, 1, 18, 0, 0), [0.021964051999999998, 0.027548445, 0.021576766]),
- (322, datetime(2015, 6, 1, 0, 0), [0.022465028, 0.027908105, 0.021674373999999996]),
- (323, datetime(2005, 1, 18, 0, 0), [0.0209088464, 0.0265355228, 0.0230132616]),
- (323, datetime(2010, 12, 31, 0, 0), [0.022181355200000002, 0.0280103379, 0.0229511138]),
- (324, datetime(2010, 1, 18, 0, 0), [0.0218362, 0.027580748, 0.022285370999999998]),
- (324, datetime(2015, 6, 1, 0, 0), [0.0225418, 0.028530172, 0.022248718999999997]),
+ (321, dt.datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]),
+ (321, dt.datetime(2010, 12, 31, 0, 0), [0.0258479563, 0.0322386887, 0.022895110500000003]),
+ (322, dt.datetime(2010, 1, 18, 0, 0), [0.021964051999999998, 0.027548445, 0.021576766]),
+ (322, dt.datetime(2015, 6, 1, 0, 0), [0.022465028, 0.027908105, 0.021674373999999996]),
+ (323, dt.datetime(2005, 1, 18, 0, 0), [0.0209088464, 0.0265355228, 0.0230132616]),
+ (323, dt.datetime(2010, 12, 31, 0, 0), [0.022181355200000002, 0.0280103379, 0.0229511138]),
+ (324, dt.datetime(2010, 1, 18, 0, 0), [0.0218362, 0.027580748, 0.022285370999999998]),
+ (324, dt.datetime(2015, 6, 1, 0, 0), [0.0225418, 0.028530172, 0.022248718999999997]),
])
def test_get_meirink_slope_2020(self, platform_id, time, expected):
"""Test the value of the slope of the Meirink calibration."""
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py
index e6c2cdcf16..8eaf2b83da 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py
@@ -17,8 +17,8 @@
# satpy. If not, see .
"""Unittesting the native msg reader."""
+import datetime as dt
import unittest
-from datetime import datetime
import numpy as np
import pytest
@@ -110,7 +110,7 @@ def setUp(self):
"""Set up the SEVIRI Calibration algorithm for testing."""
self.algo = SEVIRICalibrationAlgorithm(
platform_id=PLATFORM_ID,
- scan_time=datetime(2020, 8, 15, 13, 0, 40)
+ scan_time=dt.datetime(2020, 8, 15, 13, 0, 40)
)
def test_convert_to_radiance(self):
@@ -212,7 +212,7 @@ class TestFileHandlerCalibrationBase:
gains_gsics = [0, 0, 0, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 0]
offsets_gsics = [0, 0, 0, -0.4, -0.5, -0.6, -0.7, -0.8, -0.9, -1.0, -1.1, 0]
radiance_types = 2 * np.ones(12)
- scan_time = datetime(2020, 1, 1)
+ scan_time = dt.datetime(2020, 1, 1)
external_coefs = {
"VIS006": {"gain": 10, "offset": -10},
"IR_108": {"gain": 20, "offset": -20},
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py
index 3fe00edc80..1d0313621c 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py
@@ -15,10 +15,11 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""The HRIT msg reader tests package."""
+import datetime as dt
import unittest
-from datetime import datetime
from unittest import mock
import numpy as np
@@ -47,7 +48,7 @@ class TestHRITMSGFileHandlerHRV(TestHRITMSGBase):
def setUp(self):
"""Set up the hrit file handler for testing HRV."""
- self.observation_start_time = datetime(2006, 1, 1, 12, 15, 9, 304888)
+ self.observation_start_time = dt.datetime(2006, 1, 1, 12, 15, 9, 304888)
self.nlines = 464
self.reader = setup.get_fake_file_handler(
observation_start_time=self.observation_start_time,
@@ -139,7 +140,7 @@ class TestHRITMSGFileHandler(TestHRITMSGBase):
def setUp(self):
"""Set up the hrit file handler for testing."""
- self.observation_start_time = datetime(2006, 1, 1, 12, 15, 9, 304888)
+ self.observation_start_time = dt.datetime(2006, 1, 1, 12, 15, 9, 304888)
self.nlines = 464
self.ncols = 3712
self.projection_longitude = 9.5
@@ -214,13 +215,13 @@ def test_get_dataset(self, calibrate, parent_get_dataset):
setup.get_attrs_exp(self.projection_longitude)
)
# testing start/end time
- assert datetime(2006, 1, 1, 12, 15, 9, 304888) == self.reader.observation_start_time
- assert datetime(2006, 1, 1, 12, 15) == self.reader.start_time
+ assert dt.datetime(2006, 1, 1, 12, 15, 9, 304888) == self.reader.observation_start_time
+ assert dt.datetime(2006, 1, 1, 12, 15) == self.reader.start_time
assert self.reader.start_time == self.reader.nominal_start_time
- assert datetime(2006, 1, 1, 12, 27, 39) == self.reader.observation_end_time
+ assert dt.datetime(2006, 1, 1, 12, 27, 39) == self.reader.observation_end_time
assert self.reader.end_time == self.reader.nominal_end_time
- assert datetime(2006, 1, 1, 12, 30) == self.reader.end_time
+ assert dt.datetime(2006, 1, 1, 12, 30) == self.reader.end_time
# test repeat cycle duration
assert 15 == self.reader._repeat_cycle_duration
# Change the reducescan scenario to test the repeat cycle duration handling
@@ -292,7 +293,7 @@ class TestHRITMSGPrologueFileHandler(unittest.TestCase):
def setUp(self, *mocks):
"""Set up the test case."""
fh = setup.get_fake_file_handler(
- observation_start_time=datetime(2016, 3, 3, 0, 0),
+ observation_start_time=dt.datetime(2016, 3, 3, 0, 0),
nlines=464,
ncols=3712,
)
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py
index d668fe5240..21c42c0281 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py
@@ -15,9 +15,10 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Setup for SEVIRI HRIT reader tests."""
-from datetime import datetime
+import datetime as dt
from unittest import mock
import numpy as np
@@ -126,8 +127,8 @@ def get_fake_prologue(projection_longitude, orbit_polynomials):
},
"ImageAcquisition": {
"PlannedAcquisitionTime": {
- "TrueRepeatCycleStart": datetime(2006, 1, 1, 12, 15, 9, 304888),
- "PlannedRepeatCycleEnd": datetime(2006, 1, 1, 12, 30, 0, 0)
+ "TrueRepeatCycleStart": dt.datetime(2006, 1, 1, 12, 15, 9, 304888),
+ "PlannedRepeatCycleEnd": dt.datetime(2006, 1, 1, 12, 30, 0, 0)
}
}
}
@@ -149,8 +150,8 @@ def get_fake_epilogue():
},
"ActualScanningSummary": {
"ReducedScan": 0,
- "ForwardScanStart": datetime(2006, 1, 1, 12, 15, 9, 304888),
- "ForwardScanEnd": datetime(2006, 1, 1, 12, 27, 39, 0)
+ "ForwardScanStart": dt.datetime(2006, 1, 1, 12, 15, 9, 304888),
+ "ForwardScanEnd": dt.datetime(2006, 1, 1, 12, 27, 39, 0)
}
}
}
@@ -198,7 +199,7 @@ def get_fake_dataset_info():
def get_acq_time_cds(start_time, nlines):
"""Get fake scanline acquisition times."""
- days_since_1958 = (start_time - datetime(1958, 1, 1)).days
+ days_since_1958 = (start_time - dt.datetime(1958, 1, 1)).days
tline = np.zeros(
nlines,
dtype=[("days", ">u2"), ("milliseconds", ">u4")]
@@ -238,12 +239,12 @@ def get_attrs_exp(projection_longitude=0.0):
"satellite_actual_latitude": -0.5711243456528018,
"satellite_actual_altitude": 35783296.150123544},
"georef_offset_corrected": True,
- "nominal_start_time": datetime(2006, 1, 1, 12, 15),
- "nominal_end_time": datetime(2006, 1, 1, 12, 30),
+ "nominal_start_time": dt.datetime(2006, 1, 1, 12, 15),
+ "nominal_end_time": dt.datetime(2006, 1, 1, 12, 30),
"time_parameters": {
- "nominal_start_time": datetime(2006, 1, 1, 12, 15),
- "nominal_end_time": datetime(2006, 1, 1, 12, 30),
- "observation_start_time": datetime(2006, 1, 1, 12, 15, 9, 304888),
- "observation_end_time": datetime(2006, 1, 1, 12, 27, 39, 0)
+ "nominal_start_time": dt.datetime(2006, 1, 1, 12, 15),
+ "nominal_end_time": dt.datetime(2006, 1, 1, 12, 30),
+ "observation_start_time": dt.datetime(2006, 1, 1, 12, 15, 9, 304888),
+ "observation_end_time": dt.datetime(2006, 1, 1, 12, 27, 39, 0)
}
}
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_icare.py b/satpy/tests/reader_tests/test_seviri_l1b_icare.py
index 7c32001168..cb8a1fb6af 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_icare.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_icare.py
@@ -124,7 +124,7 @@ def test_init(self):
def test_load_dataset_vis(self):
"""Test loading all datasets from a full swath file."""
- from datetime import datetime
+ import datetime as dt
r = load_reader(self.reader_configs)
loadables = r.select_files_from_pathnames([
"GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf"
@@ -133,7 +133,7 @@ def test_load_dataset_vis(self):
datasets = r.load(["VIS008"])
assert len(datasets) == 1
for v in datasets.values():
-            dt = datetime(2004, 12, 29, 12, 27, 44)
-            assert v.attrs["end_time"] == dt
+            date = dt.datetime(2004, 12, 29, 12, 27, 44)
+            assert v.attrs["end_time"] == date
assert v.attrs["calibration"] == "reflectance"
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index 6382517b55..8f4e46e2fb 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -19,10 +19,10 @@
from __future__ import annotations
+import datetime as dt
import os
import unittest
import warnings
-from datetime import datetime
from unittest import mock
import dask.array as da
@@ -889,8 +889,8 @@ def file_handler(self):
"15TRAILER": {
"ImageProductionStats": {
"ActualScanningSummary": {
- "ForwardScanStart": datetime(2006, 1, 1, 12, 15, 9, 304888),
- "ForwardScanEnd": datetime(2006, 1, 1, 12, 27, 9, 304888),
+ "ForwardScanStart": dt.datetime(2006, 1, 1, 12, 15, 9, 304888),
+ "ForwardScanEnd": dt.datetime(2006, 1, 1, 12, 27, 9, 304888),
"ReducedScan": 0
}
}
@@ -941,8 +941,8 @@ def _fake_header():
},
"ImageAcquisition": {
"PlannedAcquisitionTime": {
- "TrueRepeatCycleStart": datetime(2006, 1, 1, 12, 15, 0, 0),
- "PlannedRepeatCycleEnd": datetime(2006, 1, 1, 12, 30, 0, 0),
+ "TrueRepeatCycleStart": dt.datetime(2006, 1, 1, 12, 15, 0, 0),
+ "PlannedRepeatCycleEnd": dt.datetime(2006, 1, 1, 12, 30, 0, 0),
}
}
},
@@ -993,19 +993,19 @@ def test_get_dataset(self, file_handler):
expected = self._exp_data_array()
xr.testing.assert_equal(xarr, expected)
assert "raw_metadata" not in xarr.attrs
- assert file_handler.start_time == datetime(2006, 1, 1, 12, 15, 0)
- assert file_handler.end_time == datetime(2006, 1, 1, 12, 30, 0)
+ assert file_handler.start_time == dt.datetime(2006, 1, 1, 12, 15, 0)
+ assert file_handler.end_time == dt.datetime(2006, 1, 1, 12, 30, 0)
assert_attrs_equal(xarr.attrs, expected.attrs, tolerance=1e-4)
def test_time(self, file_handler):
"""Test start/end nominal/observation time handling."""
- assert datetime(2006, 1, 1, 12, 15, 9, 304888) == file_handler.observation_start_time
- assert datetime(2006, 1, 1, 12, 15,) == file_handler.start_time
+ assert dt.datetime(2006, 1, 1, 12, 15, 9, 304888) == file_handler.observation_start_time
+ assert dt.datetime(2006, 1, 1, 12, 15,) == file_handler.start_time
assert file_handler.start_time == file_handler.nominal_start_time
- assert datetime(2006, 1, 1, 12, 27, 9, 304888) == file_handler.observation_end_time
+ assert dt.datetime(2006, 1, 1, 12, 27, 9, 304888) == file_handler.observation_end_time
assert file_handler.end_time == file_handler.nominal_end_time
- assert datetime(2006, 1, 1, 12, 30,) == file_handler.end_time
+ assert dt.datetime(2006, 1, 1, 12, 30,) == file_handler.end_time
def test_repeat_cycle_duration(self, file_handler):
"""Test repeat cycle handling for FD or ReduscedScan."""
@@ -1035,10 +1035,10 @@ def _exp_data_array():
"projection_altitude": 35785831.0
},
"time_parameters": {
- "nominal_start_time": datetime(2006, 1, 1, 12, 15, 0),
- "nominal_end_time": datetime(2006, 1, 1, 12, 30, 0),
- "observation_start_time": datetime(2006, 1, 1, 12, 15, 9, 304888),
- "observation_end_time": datetime(2006, 1, 1, 12, 27, 9, 304888),
+ "nominal_start_time": dt.datetime(2006, 1, 1, 12, 15, 0),
+ "nominal_end_time": dt.datetime(2006, 1, 1, 12, 30, 0),
+ "observation_start_time": dt.datetime(2006, 1, 1, 12, 15, 9, 304888),
+ "observation_end_time": dt.datetime(2006, 1, 1, 12, 27, 9, 304888),
},
"georef_offset_corrected": True,
"platform_name": "MSG-3",
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_nc.py b/satpy/tests/reader_tests/test_seviri_l1b_nc.py
index cd5e2c713f..d77933b9a0 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_nc.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_nc.py
@@ -17,7 +17,7 @@
# satpy. If not, see .
"""The HRIT msg reader tests package."""
-from datetime import datetime
+import datetime as dt
from unittest import mock
import numpy as np
@@ -34,7 +34,7 @@
def to_cds_time(time):
"""Convert datetime to (days, msecs) since 1958-01-01."""
- if isinstance(time, datetime):
+ if isinstance(time, dt.datetime):
time = np.datetime64(time)
t0 = np.datetime64("1958-01-01 00:00")
delta = time - t0
@@ -62,13 +62,13 @@ def _get_fake_dataset(self, counts, h5netcdf):
line_validity = np.repeat([3, 3], 11).reshape(2, 11)
line_geom_radio_quality = np.repeat([4, 4], 11).reshape(2, 11)
orbit_poly_start_day, orbit_poly_start_msec = to_cds_time(
- np.array([datetime(2019, 12, 31, 18),
- datetime(2019, 12, 31, 22)],
+ np.array([dt.datetime(2019, 12, 31, 18),
+ dt.datetime(2019, 12, 31, 22)],
dtype="datetime64")
)
orbit_poly_end_day, orbit_poly_end_msec = to_cds_time(
- np.array([datetime(2019, 12, 31, 22),
- datetime(2020, 1, 1, 2)],
+ np.array([dt.datetime(2019, 12, 31, 22),
+ dt.datetime(2020, 1, 1, 2)],
dtype="datetime64")
)
counts = counts.rename({
@@ -325,10 +325,10 @@ def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_
"projection_altitude": 35785831.0
},
"time_parameters": {
- "nominal_start_time": datetime(2020, 1, 1, 0, 0),
- "nominal_end_time": datetime(2020, 1, 1, 0, 0),
- "observation_start_time": datetime(2020, 1, 1, 0, 0),
- "observation_end_time": datetime(2020, 1, 1, 0, 0),
+ "nominal_start_time": dt.datetime(2020, 1, 1, 0, 0),
+ "nominal_end_time": dt.datetime(2020, 1, 1, 0, 0),
+ "observation_start_time": dt.datetime(2020, 1, 1, 0, 0),
+ "observation_end_time": dt.datetime(2020, 1, 1, 0, 0),
},
"georef_offset_corrected": True,
"platform_name": "Meteosat-11",
@@ -352,13 +352,13 @@ def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_
def test_time(self, file_handler):
"""Test start/end nominal/observation time handling."""
- assert datetime(2020, 1, 1, 0, 0) == file_handler.observation_start_time
- assert datetime(2020, 1, 1, 0, 0) == file_handler.start_time
+ assert dt.datetime(2020, 1, 1, 0, 0) == file_handler.observation_start_time
+ assert dt.datetime(2020, 1, 1, 0, 0) == file_handler.start_time
assert file_handler.start_time == file_handler.nominal_start_time
- assert datetime(2020, 1, 1, 0, 0) == file_handler.observation_end_time
+ assert dt.datetime(2020, 1, 1, 0, 0) == file_handler.observation_end_time
assert file_handler.end_time == file_handler.nominal_end_time
- assert datetime(2020, 1, 1, 0, 0) == file_handler.end_time
+ assert dt.datetime(2020, 1, 1, 0, 0) == file_handler.end_time
def test_repeat_cycle_duration(self, file_handler):
"""Test repeat cycle handling for FD or ReduscedScan."""
diff --git a/satpy/tests/reader_tests/test_seviri_l2_bufr.py b/satpy/tests/reader_tests/test_seviri_l2_bufr.py
index ec3fdf7b56..9a6e6e6f83 100644
--- a/satpy/tests/reader_tests/test_seviri_l2_bufr.py
+++ b/satpy/tests/reader_tests/test_seviri_l2_bufr.py
@@ -17,9 +17,9 @@
# satpy. If not, see .
"""Unittesting the SEVIRI L2 BUFR reader."""
+import datetime as dt
import sys
import unittest
-from datetime import datetime
from unittest import mock
import dask.array as da
@@ -37,7 +37,7 @@
"spacecraft": "MSG2",
"server": "TESTSERVER"}
MPEF_PRODUCT_HEADER = {
- "NominalTime": datetime(2019, 11, 6, 18, 0),
+ "NominalTime": dt.datetime(2019, 11, 6, 18, 0),
"SpacecraftName": "09",
"RectificationLongitude": "E0455"
}
diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py
index 7f5fffa70c..db09e04edb 100644
--- a/satpy/tests/reader_tests/test_sgli_l1b.py
+++ b/satpy/tests/reader_tests/test_sgli_l1b.py
@@ -1,6 +1,7 @@
"""Tests for the SGLI L1B backend."""
+
+import datetime as dt
import sys
-from datetime import datetime, timedelta
import dask
import h5py
@@ -9,8 +10,8 @@
from satpy.readers.sgli_l1b import HDF5SGLI
-START_TIME = datetime.now()
-END_TIME = START_TIME + timedelta(minutes=5)
+START_TIME = dt.datetime.now()
+END_TIME = START_TIME + dt.timedelta(minutes=5)
FULL_KM_ARRAY = np.arange(1955 * 1250, dtype=np.uint16).reshape((1955, 1250))
MASK = 16383
LON_LAT_ARRAY = np.arange(197 * 126, dtype=np.float32).reshape((197, 126))
@@ -168,14 +169,14 @@ def test_start_time(sgli_vn_file):
"""Test that the start time is extracted."""
handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {})
microseconds = START_TIME.microsecond % 1000
- assert handler.start_time == START_TIME - timedelta(microseconds=microseconds)
+ assert handler.start_time == START_TIME - dt.timedelta(microseconds=microseconds)
def test_end_time(sgli_vn_file):
"""Test that the end time is extracted."""
handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {})
microseconds = END_TIME.microsecond % 1000
- assert handler.end_time == END_TIME - timedelta(microseconds=microseconds)
+ assert handler.end_time == END_TIME - dt.timedelta(microseconds=microseconds)
def test_get_dataset_counts(sgli_vn_file):
"""Test that counts can be extracted from a file."""
diff --git a/satpy/tests/reader_tests/test_slstr_l1b.py b/satpy/tests/reader_tests/test_slstr_l1b.py
index b6784d4e2b..becc1455b2 100644
--- a/satpy/tests/reader_tests/test_slstr_l1b.py
+++ b/satpy/tests/reader_tests/test_slstr_l1b.py
@@ -15,10 +15,12 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Module for testing the satpy.readers.nc_slstr module."""
+
+import datetime as dt
import unittest
import unittest.mock as mock
-from datetime import datetime
import numpy as np
import pytest
@@ -136,10 +138,10 @@ def test_instantiate(self, bvs_, xr_):
bvs_.return_value = self.FakeSpl
xr_.open_dataset.return_value = self.fake_dataset
- good_start = datetime.strptime(self.start_time,
- "%Y-%m-%dT%H:%M:%S.%fZ")
- good_end = datetime.strptime(self.end_time,
- "%Y-%m-%dT%H:%M:%S.%fZ")
+ good_start = dt.datetime.strptime(self.start_time,
+ "%Y-%m-%dT%H:%M:%S.%fZ")
+ good_end = dt.datetime.strptime(self.end_time,
+ "%Y-%m-%dT%H:%M:%S.%fZ")
ds_id = make_dataid(name="foo", calibration="radiance",
stripe="a", view="nadir")
diff --git a/satpy/tests/reader_tests/test_smos_l2_wind.py b/satpy/tests/reader_tests/test_smos_l2_wind.py
index 519030447b..409feb62ad 100644
--- a/satpy/tests/reader_tests/test_smos_l2_wind.py
+++ b/satpy/tests/reader_tests/test_smos_l2_wind.py
@@ -18,9 +18,9 @@
# Satpy. If not, see .
"""Module for testing the satpy.readers.smos_l2_wind module."""
+import datetime as dt
import os
import unittest
-from datetime import datetime
from unittest import mock
import numpy as np
@@ -35,8 +35,8 @@ class FakeNetCDF4FileHandlerSMOSL2WIND(FakeNetCDF4FileHandler):
def get_test_content(self, filename, filename_info, filetype_info):
"""Mimic reader input file content."""
from xarray import DataArray
- dt_s = filename_info.get("start_time", datetime(2020, 4, 22, 12, 0, 0))
- dt_e = filename_info.get("end_time", datetime(2020, 4, 22, 12, 0, 0))
+ dt_s = filename_info.get("start_time", dt.datetime(2020, 4, 22, 12, 0, 0))
+ dt_e = filename_info.get("end_time", dt.datetime(2020, 4, 22, 12, 0, 0))
if filetype_info["file_type"] == "smos_l2_wind":
file_content = {
diff --git a/satpy/tests/reader_tests/test_tropomi_l2.py b/satpy/tests/reader_tests/test_tropomi_l2.py
index 7305bf365c..4bdf3f67d2 100644
--- a/satpy/tests/reader_tests/test_tropomi_l2.py
+++ b/satpy/tests/reader_tests/test_tropomi_l2.py
@@ -16,11 +16,12 @@
#
# You should have received a copy of the GNU General Public License along with
# Satpy. If not, see .
+
"""Module for testing the satpy.readers.tropomi_l2 module."""
+import datetime as dt
import os
import unittest
-from datetime import datetime, timedelta
from unittest import mock
import numpy as np
@@ -41,13 +42,13 @@ class FakeNetCDF4FileHandlerTL2(FakeNetCDF4FileHandler):
def get_test_content(self, filename, filename_info, filetype_info):
"""Mimic reader input file content."""
- dt_s = filename_info.get("start_time", datetime(2016, 1, 1, 12, 0, 0))
- dt_e = filename_info.get("end_time", datetime(2016, 1, 1, 12, 0, 0))
+ dt_s = filename_info.get("start_time", dt.datetime(2016, 1, 1, 12, 0, 0))
+ dt_e = filename_info.get("end_time", dt.datetime(2016, 1, 1, 12, 0, 0))
if filetype_info["file_type"] == "tropomi_l2":
file_content = {
- "/attr/time_coverage_start": (dt_s+timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"),
- "/attr/time_coverage_end": (dt_e-timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"),
+ "/attr/time_coverage_start": (dt_s+dt.timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"),
+ "/attr/time_coverage_end": (dt_e-dt.timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"),
"/attr/platform_shortname": "S5P",
"/attr/sensor": "TROPOMI",
}
@@ -141,8 +142,8 @@ def test_load_no2(self):
for d in ds.values():
assert d.attrs["platform_shortname"] == "S5P"
assert d.attrs["sensor"] == "tropomi"
- assert d.attrs["time_coverage_start"] == datetime(2018, 7, 9, 17, 25, 34)
- assert d.attrs["time_coverage_end"] == datetime(2018, 7, 9, 18, 23, 4)
+ assert d.attrs["time_coverage_start"] == dt.datetime(2018, 7, 9, 17, 25, 34)
+ assert d.attrs["time_coverage_end"] == dt.datetime(2018, 7, 9, 18, 23, 4)
assert "area" in d.attrs
assert d.attrs["area"] is not None
assert "y" in d.dims
diff --git a/satpy/tests/reader_tests/test_utils.py b/satpy/tests/reader_tests/test_utils.py
index 67bdb41374..ba43688b76 100644
--- a/satpy/tests/reader_tests/test_utils.py
+++ b/satpy/tests/reader_tests/test_utils.py
@@ -15,11 +15,12 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Testing of helper functions."""
+import datetime as dt
import os
import unittest
-from datetime import datetime
from unittest import mock
import dask.array as da
@@ -430,7 +431,7 @@ class TestSunEarthDistanceCorrection:
def setup_method(self):
"""Create input / output arrays for the tests."""
- self.test_date = datetime(2020, 8, 15, 13, 0, 40)
+ self.test_date = dt.datetime(2020, 8, 15, 13, 0, 40)
raw_refl = xr.DataArray(da.from_array([10., 20., 40., 1., 98., 50.]),
attrs={"start_time": self.test_date,
@@ -462,7 +463,7 @@ def test_get_utc_time(self):
# Now check correct time is returned with utc_date passed
tmp_array = self.raw_refl.copy()
- new_test_date = datetime(2019, 2, 1, 15, 2, 12)
+ new_test_date = dt.datetime(2019, 2, 1, 15, 2, 12)
utc_time = hf.get_array_date(tmp_array, new_test_date)
assert utc_time == new_test_date
diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py
index d764891760..49cf7a4885 100644
--- a/satpy/tests/reader_tests/test_viirs_edr.py
+++ b/satpy/tests/reader_tests/test_viirs_edr.py
@@ -21,8 +21,8 @@
"""
from __future__ import annotations
+import datetime as dt
import shutil
-from datetime import datetime, timedelta
from pathlib import Path
from typing import Iterable
@@ -40,8 +40,8 @@
I_ROWS = 32 # one scan
M_COLS = 3200
M_ROWS = 16 # one scan
-START_TIME = datetime(2023, 5, 30, 17, 55, 41, 0)
-END_TIME = datetime(2023, 5, 30, 17, 57, 5, 0)
+START_TIME = dt.datetime(2023, 5, 30, 17, 55, 41, 0)
+END_TIME = dt.datetime(2023, 5, 30, 17, 57, 5, 0)
QF1_FLAG_MEANINGS = """
\tBits are listed from the MSB (bit 7) to the LSB (bit 0):
\tBit Description
@@ -78,7 +78,7 @@ def surface_reflectance_file(tmp_path_factory: TempPathFactory) -> Path:
@pytest.fixture(scope="module")
def surface_reflectance_file2(tmp_path_factory: TempPathFactory) -> Path:
"""Generate fake surface reflectance EDR file."""
- return _create_surface_reflectance_file(tmp_path_factory, START_TIME + timedelta(minutes=5),
+ return _create_surface_reflectance_file(tmp_path_factory, START_TIME + dt.timedelta(minutes=5),
include_veg_indices=False)
@@ -97,7 +97,7 @@ def surface_reflectance_with_veg_indices_file(tmp_path_factory: TempPathFactory)
@pytest.fixture(scope="module")
def surface_reflectance_with_veg_indices_file2(tmp_path_factory: TempPathFactory) -> Path:
"""Generate fake surface reflectance EDR file with vegetation indexes included."""
- return _create_surface_reflectance_file(tmp_path_factory, START_TIME + timedelta(minutes=5),
+ return _create_surface_reflectance_file(tmp_path_factory, START_TIME + dt.timedelta(minutes=5),
include_veg_indices=True)
@@ -110,7 +110,7 @@ def multiple_surface_reflectance_files_with_veg_indices(surface_reflectance_with
def _create_surface_reflectance_file(
tmp_path_factory: TempPathFactory,
- start_time: datetime,
+ start_time: dt.datetime,
include_veg_indices: bool = False,
) -> Path:
fn = f"SurfRefl_v1r2_npp_s{start_time:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc"
diff --git a/satpy/tests/reader_tests/test_viirs_l1b.py b/satpy/tests/reader_tests/test_viirs_l1b.py
index e60f83cfd0..b2a5c4b476 100644
--- a/satpy/tests/reader_tests/test_viirs_l1b.py
+++ b/satpy/tests/reader_tests/test_viirs_l1b.py
@@ -17,8 +17,8 @@
# satpy. If not, see .
"""Module for testing the satpy.readers.viirs_l1b module."""
+import datetime as dt
import os
-from datetime import datetime, timedelta
from unittest import mock
import numpy as np
@@ -49,7 +49,7 @@ class FakeNetCDF4FileHandlerDay(FakeNetCDF4FileHandler):
def get_test_content(self, filename, filename_info, filetype_info):
"""Mimic reader input file content."""
- dt = filename_info.get("start_time", datetime(2016, 1, 1, 12, 0, 0))
+ date = filename_info.get("start_time", dt.datetime(2016, 1, 1, 12, 0, 0))
file_type = filename[:5].lower()
num_lines = DEFAULT_FILE_SHAPE[0]
num_pixels = DEFAULT_FILE_SHAPE[1]
@@ -60,8 +60,8 @@ def get_test_content(self, filename, filename_info, filetype_info):
"/dimension/number_of_lines": num_lines,
"/dimension/number_of_pixels": num_pixels,
"/dimension/number_of_LUT_values": num_luts,
- "/attr/time_coverage_start": dt.strftime("%Y-%m-%dT%H:%M:%S.000Z"),
- "/attr/time_coverage_end": (dt + timedelta(minutes=6)).strftime("%Y-%m-%dT%H:%M:%S.000Z"),
+ "/attr/time_coverage_start": date.strftime("%Y-%m-%dT%H:%M:%S.000Z"),
+ "/attr/time_coverage_end": (date + dt.timedelta(minutes=6)).strftime("%Y-%m-%dT%H:%M:%S.000Z"),
"/attr/orbit_number": 26384,
"/attr/instrument": "VIIRS",
"/attr/platform": "Suomi-NPP",
diff --git a/satpy/tests/reader_tests/test_viirs_l2.py b/satpy/tests/reader_tests/test_viirs_l2.py
index 79884f3d4f..01801535ed 100644
--- a/satpy/tests/reader_tests/test_viirs_l2.py
+++ b/satpy/tests/reader_tests/test_viirs_l2.py
@@ -1,6 +1,7 @@
"""Module for testing the satpy.readers.viirs_l2 module."""
+
+import datetime as dt
import os
-from datetime import datetime, timedelta
from unittest import mock
import numpy as np
@@ -27,7 +28,7 @@ class FakeNetCDF4FileHandlerVIIRSL2(FakeNetCDF4FileHandler):
def get_test_content(self, filename, filename_info, filetype_info):
"""Mimic reader input file content."""
- dt = filename_info.get("start_time", datetime(2023, 12, 30, 22, 30, 0))
+ date = filename_info.get("start_time", dt.datetime(2023, 12, 30, 22, 30, 0))
file_type = filename[:6]
num_lines = DEFAULT_FILE_SHAPE[0]
num_pixels = DEFAULT_FILE_SHAPE[1]
@@ -36,8 +37,8 @@ def get_test_content(self, filename, filename_info, filetype_info):
"/dimension/number_of_scans": num_scans,
"/dimension/number_of_lines": num_lines,
"/dimension/number_of_pixels": num_pixels,
- "/attr/time_coverage_start": dt.strftime("%Y-%m-%dT%H:%M:%S.000Z"),
- "/attr/time_coverage_end": (dt + timedelta(minutes=6)).strftime(
+ "/attr/time_coverage_start": date.strftime("%Y-%m-%dT%H:%M:%S.000Z"),
+ "/attr/time_coverage_end": (date + dt.timedelta(minutes=6)).strftime(
"%Y-%m-%dT%H:%M:%S.000Z"
),
"/attr/orbit_number": 26384,
diff --git a/satpy/tests/reader_tests/test_viirs_sdr.py b/satpy/tests/reader_tests/test_viirs_sdr.py
index 952224daaf..2758ceb81c 100644
--- a/satpy/tests/reader_tests/test_viirs_sdr.py
+++ b/satpy/tests/reader_tests/test_viirs_sdr.py
@@ -354,12 +354,12 @@ def test_init_start_time_is_nodate(self):
def test_init_start_time_beyond(self):
"""Test basic init with start_time after the provided files."""
- from datetime import datetime
+ import datetime as dt
from satpy.readers import load_reader
r = load_reader(self.reader_configs,
filter_parameters={
- "start_time": datetime(2012, 2, 26)
+ "start_time": dt.datetime(2012, 2, 26)
})
fhs = r.create_filehandlers([
"SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5",
@@ -368,12 +368,12 @@ def test_init_start_time_beyond(self):
def test_init_end_time_beyond(self):
"""Test basic init with end_time before the provided files."""
- from datetime import datetime
+ import datetime as dt
from satpy.readers import load_reader
r = load_reader(self.reader_configs,
filter_parameters={
- "end_time": datetime(2012, 2, 24)
+ "end_time": dt.datetime(2012, 2, 24)
})
fhs = r.create_filehandlers([
"SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5",
@@ -382,14 +382,14 @@ def test_init_end_time_beyond(self):
def test_init_start_end_time(self):
"""Test basic init with end_time before the provided files."""
- from datetime import datetime
+ import datetime as dt
from satpy.readers import load_reader
r = load_reader(self.reader_configs,
filter_parameters={
- "start_time": datetime(2012, 2, 24),
- "end_time": datetime(2012, 2, 26)
+ "start_time": dt.datetime(2012, 2, 24),
+ "end_time": dt.datetime(2012, 2, 26)
})
loadables = r.select_files_from_pathnames([
"SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5",
diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py
index ba9b83d707..b03052ea30 100644
--- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py
+++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License
# along with satpy. If not, see .
+
"""The viirs_vgac_l1b_nc reader tests package.
 This version tests the readers for VIIRS VGAC data, preliminary version.
@@ -22,7 +23,7 @@
"""
-from datetime import datetime
+import datetime as dt
import numpy as np
import pytest
@@ -33,7 +34,7 @@
@pytest.fixture()
def nc_filename(tmp_path):
"""Create an nc test data file and return its filename."""
- now = datetime.utcnow()
+ now = dt.datetime.utcnow()
filename = f"VGAC_VJ10XMOD_A{now:%Y%j_%H%M}_n004946_K005.nc"
filename_str = str(tmp_path / filename)
# Create test data
@@ -107,10 +108,10 @@ def test_read_vgac(self, nc_filename):
assert (diff_e > np.timedelta64(-5, "us"))
assert (scn_["M05"][0, 0] == 100)
assert (scn_["M15"][0, 0] == 400)
- assert scn_.start_time == datetime(year=2023, month=3, day=28,
- hour=9, minute=8, second=7)
- assert scn_.end_time == datetime(year=2023, month=3, day=28,
- hour=10, minute=11, second=12)
+ assert scn_.start_time == dt.datetime(year=2023, month=3, day=28,
+ hour=9, minute=8, second=7)
+ assert scn_.end_time == dt.datetime(year=2023, month=3, day=28,
+ hour=10, minute=11, second=12)
def test_dt64_to_datetime(self):
"""Test datetime conversion branch."""
@@ -118,8 +119,8 @@ def test_dt64_to_datetime(self):
fh = VGACFileHandler(filename="",
filename_info={"start_time": "2023-03-28T09:08:07"},
filetype_info="")
- in_dt = datetime(year=2023, month=3, day=28,
- hour=9, minute=8, second=7)
+ in_dt = dt.datetime(year=2023, month=3, day=28,
+ hour=9, minute=8, second=7)
out_dt = fh.dt64_to_datetime(in_dt)
assert out_dt == in_dt
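
The fixture in this file still builds its timestamp with datetime.utcnow(), which is
deprecated as of Python 3.12. Should a timezone-aware timestamp be acceptable for the fake
filename, a possible alternative is sketched below (an illustration only, not what the hunk
above does; note that now(timezone.utc) returns an aware object, unlike utcnow()):

    import datetime as dt

    # timezone-aware UTC timestamp; format specifiers behave the same as with utcnow()
    now = dt.datetime.now(dt.timezone.utc)
    filename = f"VGAC_VJ10XMOD_A{now:%Y%j_%H%M}_n004946_K005.nc"
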
diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py
index 9b0dd9098e..4490903880 100644
--- a/satpy/tests/scene_tests/test_conversions.py
+++ b/satpy/tests/scene_tests/test_conversions.py
@@ -13,8 +13,10 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Unit tests for Scene conversion functionality."""
-from datetime import datetime
+
+import datetime as dt
import pytest
import xarray as xr
@@ -61,7 +63,7 @@ def test_geoviews_basic_with_area(self):
{"proj": "geos", "lon_0": -95.5, "h": 35786023.0},
2, 2, [-200, -200, 200, 200])
scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"),
- attrs={"start_time": datetime(2018, 1, 1),
+ attrs={"start_time": dt.datetime(2018, 1, 1),
"area": area})
gv_obj = scn.to_geoviews()
# we assume that if we got something back, geoviews can use it
@@ -75,7 +77,7 @@ def test_geoviews_basic_with_swath(self):
lats = xr.DataArray(da.zeros((2, 2)))
area = SwathDefinition(lons, lats)
scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"),
- attrs={"start_time": datetime(2018, 1, 1),
+ attrs={"start_time": dt.datetime(2018, 1, 1),
"area": area})
gv_obj = scn.to_geoviews()
# we assume that if we got something back, geoviews can use it
@@ -89,7 +91,7 @@ def test_hvplot_basic_with_area(self):
{"proj": "geos", "lon_0": -95.5, "h": 35786023.0},
2, 2, [-200, -200, 200, 200])
scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"),
- attrs={"start_time": datetime(2018, 1, 1),
+ attrs={"start_time": dt.datetime(2018, 1, 1),
"area": area, "units": "m"})
hv_obj = scn.to_hvplot()
# we assume that if we got something back, hvplot can use it
@@ -103,13 +105,13 @@ def test_hvplot_rgb_with_area(self):
{"proj": "geos", "lon_0": -95.5, "h": 35786023.0},
2, 2, [-200, -200, 200, 200])
scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"),
- attrs={"start_time": datetime(2018, 1, 1),
+ attrs={"start_time": dt.datetime(2018, 1, 1),
"area": area, "units": "m"})
scn["ds2"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"),
- attrs={"start_time": datetime(2018, 1, 1),
+ attrs={"start_time": dt.datetime(2018, 1, 1),
"area": area, "units": "m"})
scn["ds3"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"),
- attrs={"start_time": datetime(2018, 1, 1),
+ attrs={"start_time": dt.datetime(2018, 1, 1),
"area": area, "units": "m"})
hv_obj = scn.to_hvplot()
# we assume that if we got something back, hvplot can use it
@@ -123,7 +125,7 @@ def test_hvplot_basic_with_swath(self):
latitude = xr.DataArray(da.zeros((2, 2)))
area = SwathDefinition(longitude, latitude)
scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"),
- attrs={"start_time": datetime(2018, 1, 1),
+ attrs={"start_time": dt.datetime(2018, 1, 1),
"area": area, "units": "m"})
hv_obj = scn.to_hvplot()
# we assume that if we got something back, hvplot can use it
@@ -150,7 +152,7 @@ def single_area_scn(self):
2, 2, [-200, -200, 200, 200])
data_array = xr.DataArray(da.zeros((2, 2), chunks=-1),
dims=("y", "x"),
- attrs={"start_time": datetime(2018, 1, 1), "area": area})
+ attrs={"start_time": dt.datetime(2018, 1, 1), "area": area})
scn = Scene()
scn["var1"] = data_array
return scn
@@ -169,10 +171,10 @@ def multi_area_scn(self):
data_array1 = xr.DataArray(da.zeros((2, 2), chunks=-1),
dims=("y", "x"),
- attrs={"start_time": datetime(2018, 1, 1), "area": area1})
+ attrs={"start_time": dt.datetime(2018, 1, 1), "area": area1})
data_array2 = xr.DataArray(da.zeros((4, 4), chunks=-1),
dims=("y", "x"),
- attrs={"start_time": datetime(2018, 1, 1), "area": area2})
+ attrs={"start_time": dt.datetime(2018, 1, 1), "area": area2})
scn = Scene()
scn["var1"] = data_array1
scn["var2"] = data_array2
diff --git a/satpy/tests/scene_tests/test_saving.py b/satpy/tests/scene_tests/test_saving.py
index 32c6ff61c2..b67f41cc2e 100644
--- a/satpy/tests/scene_tests/test_saving.py
+++ b/satpy/tests/scene_tests/test_saving.py
@@ -13,9 +13,11 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Unit tests for saving-related functionality in scene.py."""
+
+import datetime as dt
import os
-from datetime import datetime
from unittest import mock
import pytest
@@ -39,7 +41,7 @@ def test_save_datasets_default(self, tmp_path):
da.zeros((100, 200), chunks=50),
dims=("y", "x"),
attrs={"name": "test",
- "start_time": datetime(2018, 1, 1, 0, 0, 0)}
+ "start_time": dt.datetime(2018, 1, 1, 0, 0, 0)}
)
scn = Scene()
scn["test"] = ds1
@@ -52,7 +54,7 @@ def test_save_datasets_by_ext(self, tmp_path):
da.zeros((100, 200), chunks=50),
dims=("y", "x"),
attrs={"name": "test",
- "start_time": datetime(2018, 1, 1, 0, 0, 0)}
+ "start_time": dt.datetime(2018, 1, 1, 0, 0, 0)}
)
scn = Scene()
scn["test"] = ds1
@@ -70,7 +72,7 @@ def test_save_datasets_bad_writer(self, tmp_path):
da.zeros((100, 200), chunks=50),
dims=("y", "x"),
attrs={"name": "test",
- "start_time": datetime.utcnow()}
+ "start_time": dt.datetime.utcnow()}
)
scn = Scene()
scn["test"] = ds1
@@ -98,7 +100,7 @@ def test_save_dataset_default(self, tmp_path):
da.zeros((100, 200), chunks=50),
dims=("y", "x"),
attrs={"name": "test",
- "start_time": datetime(2018, 1, 1, 0, 0, 0)}
+ "start_time": dt.datetime(2018, 1, 1, 0, 0, 0)}
)
scn = Scene()
scn["test"] = ds1
diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py
index c075755d17..b8af090121 100644
--- a/satpy/tests/test_composites.py
+++ b/satpy/tests/test_composites.py
@@ -15,11 +15,12 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Tests for compositors in composites/__init__.py."""
+import datetime as dt
import os
import unittest
-from datetime import datetime
from unittest import mock
import dask
@@ -175,7 +176,7 @@ def setup_method(self):
{"proj": "merc"}, 2, 2,
(-2000, -2000, 2000, 2000))
attrs = {"area": area,
- "start_time": datetime(2018, 1, 1, 18),
+ "start_time": dt.datetime(2018, 1, 1, 18),
"modifiers": tuple(),
"resolution": 1000,
"calibration": "reflectance",
@@ -347,7 +348,7 @@ def setUp(self):
{"proj": "merc"}, 2, 2,
(-2000, -2000, 2000, 2000))
attrs = {"area": area,
- "start_time": datetime(2018, 1, 1, 18),
+ "start_time": dt.datetime(2018, 1, 1, 18),
"modifiers": tuple(),
"resolution": 1000,
"name": "test_vis"}
@@ -430,7 +431,7 @@ class TestDayNightCompositor(unittest.TestCase):
def setUp(self):
"""Create test data."""
bands = ["R", "G", "B"]
- start_time = datetime(2018, 1, 1, 18, 0, 0)
+ start_time = dt.datetime(2018, 1, 1, 18, 0, 0)
# RGB
a = np.zeros((3, 2, 2), dtype=np.float32)
diff --git a/satpy/tests/test_dataset.py b/satpy/tests/test_dataset.py
index 82b3a6c1cd..6ca3b25d72 100644
--- a/satpy/tests/test_dataset.py
+++ b/satpy/tests/test_dataset.py
@@ -13,10 +13,11 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Test objects and functions in the dataset module."""
+import datetime as dt
import unittest
-from datetime import datetime
import numpy as np
import pytest
@@ -103,38 +104,38 @@ def setUp(self):
"""Set up the test case."""
# The times need to be in ascending order (oldest first)
self.start_time_dts = (
- {"start_time": datetime(2018, 2, 1, 11, 58, 0)},
- {"start_time": datetime(2018, 2, 1, 11, 59, 0)},
- {"start_time": datetime(2018, 2, 1, 12, 0, 0)},
- {"start_time": datetime(2018, 2, 1, 12, 1, 0)},
- {"start_time": datetime(2018, 2, 1, 12, 2, 0)},
+ {"start_time": dt.datetime(2018, 2, 1, 11, 58, 0)},
+ {"start_time": dt.datetime(2018, 2, 1, 11, 59, 0)},
+ {"start_time": dt.datetime(2018, 2, 1, 12, 0, 0)},
+ {"start_time": dt.datetime(2018, 2, 1, 12, 1, 0)},
+ {"start_time": dt.datetime(2018, 2, 1, 12, 2, 0)},
)
self.end_time_dts = (
- {"end_time": datetime(2018, 2, 1, 11, 58, 0)},
- {"end_time": datetime(2018, 2, 1, 11, 59, 0)},
- {"end_time": datetime(2018, 2, 1, 12, 0, 0)},
- {"end_time": datetime(2018, 2, 1, 12, 1, 0)},
- {"end_time": datetime(2018, 2, 1, 12, 2, 0)},
+ {"end_time": dt.datetime(2018, 2, 1, 11, 58, 0)},
+ {"end_time": dt.datetime(2018, 2, 1, 11, 59, 0)},
+ {"end_time": dt.datetime(2018, 2, 1, 12, 0, 0)},
+ {"end_time": dt.datetime(2018, 2, 1, 12, 1, 0)},
+ {"end_time": dt.datetime(2018, 2, 1, 12, 2, 0)},
)
self.other_time_dts = (
- {"other_time": datetime(2018, 2, 1, 11, 58, 0)},
- {"other_time": datetime(2018, 2, 1, 11, 59, 0)},
- {"other_time": datetime(2018, 2, 1, 12, 0, 0)},
- {"other_time": datetime(2018, 2, 1, 12, 1, 0)},
- {"other_time": datetime(2018, 2, 1, 12, 2, 0)},
+ {"other_time": dt.datetime(2018, 2, 1, 11, 58, 0)},
+ {"other_time": dt.datetime(2018, 2, 1, 11, 59, 0)},
+ {"other_time": dt.datetime(2018, 2, 1, 12, 0, 0)},
+ {"other_time": dt.datetime(2018, 2, 1, 12, 1, 0)},
+ {"other_time": dt.datetime(2018, 2, 1, 12, 2, 0)},
)
self.start_time_dts_with_none = (
{"start_time": None},
- {"start_time": datetime(2018, 2, 1, 11, 59, 0)},
- {"start_time": datetime(2018, 2, 1, 12, 0, 0)},
- {"start_time": datetime(2018, 2, 1, 12, 1, 0)},
- {"start_time": datetime(2018, 2, 1, 12, 2, 0)},
+ {"start_time": dt.datetime(2018, 2, 1, 11, 59, 0)},
+ {"start_time": dt.datetime(2018, 2, 1, 12, 0, 0)},
+ {"start_time": dt.datetime(2018, 2, 1, 12, 1, 0)},
+ {"start_time": dt.datetime(2018, 2, 1, 12, 2, 0)},
)
self.end_time_dts_with_none = (
- {"end_time": datetime(2018, 2, 1, 11, 58, 0)},
- {"end_time": datetime(2018, 2, 1, 11, 59, 0)},
- {"end_time": datetime(2018, 2, 1, 12, 0, 0)},
- {"end_time": datetime(2018, 2, 1, 12, 1, 0)},
+ {"end_time": dt.datetime(2018, 2, 1, 11, 58, 0)},
+ {"end_time": dt.datetime(2018, 2, 1, 11, 59, 0)},
+ {"end_time": dt.datetime(2018, 2, 1, 12, 0, 0)},
+ {"end_time": dt.datetime(2018, 2, 1, 12, 1, 0)},
{"end_time": None},
)
@@ -142,11 +143,11 @@ def test_average_datetimes(self):
"""Test the average_datetimes helper function."""
from satpy.dataset.metadata import average_datetimes
dts = (
- datetime(2018, 2, 1, 11, 58, 0),
- datetime(2018, 2, 1, 11, 59, 0),
- datetime(2018, 2, 1, 12, 0, 0),
- datetime(2018, 2, 1, 12, 1, 0),
- datetime(2018, 2, 1, 12, 2, 0),
+ dt.datetime(2018, 2, 1, 11, 58, 0),
+ dt.datetime(2018, 2, 1, 11, 59, 0),
+ dt.datetime(2018, 2, 1, 12, 0, 0),
+ dt.datetime(2018, 2, 1, 12, 1, 0),
+ dt.datetime(2018, 2, 1, 12, 2, 0),
)
ret = average_datetimes(dts)
assert dts[2] == ret
@@ -373,10 +374,10 @@ def test_combine_dicts_close():
"c": [1, 2, 3],
"d": {
"e": np.str_("bar"),
- "f": datetime(2020, 1, 1, 12, 15, 30),
+ "f": dt.datetime(2020, 1, 1, 12, 15, 30),
"g": np.array([1, 2, 3]),
},
- "h": np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)])
+ "h": np.array([dt.datetime(2020, 1, 1), dt.datetime(2020, 1, 1)])
}
}
attrs_close = {
@@ -386,10 +387,10 @@ def test_combine_dicts_close():
"c": np.array([1, 2, 3]) + 1E-12,
"d": {
"e": np.str_("bar"),
- "f": datetime(2020, 1, 1, 12, 15, 30),
+ "f": dt.datetime(2020, 1, 1, 12, 15, 30),
"g": np.array([1, 2, 3]) + 1E-12
},
- "h": np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)])
+ "h": np.array([dt.datetime(2020, 1, 1), dt.datetime(2020, 1, 1)])
}
}
test_metadata = [attrs, attrs_close]
diff --git a/satpy/tests/test_file_handlers.py b/satpy/tests/test_file_handlers.py
index 925da3e561..7e1424414e 100644
--- a/satpy/tests/test_file_handlers.py
+++ b/satpy/tests/test_file_handlers.py
@@ -15,10 +15,11 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""test file handler baseclass."""
+import datetime as dt
import unittest
-from datetime import datetime, timedelta
from unittest import mock
import numpy as np
@@ -49,8 +50,8 @@ def setUp(self):
"""Set up the test."""
self.fh = BaseFileHandler(
"filename", {"filename_info": "bla"}, "filetype_info")
- self.early_time = datetime(2024, 2, 12, 11, 00)
- self.late_time = datetime(2024, 2, 12, 12, 00)
+ self.early_time = dt.datetime(2024, 2, 12, 11, 00)
+ self.late_time = dt.datetime(2024, 2, 12, 12, 00)
def test_combine_times(self):
"""Combine times."""
@@ -161,13 +162,13 @@ def test_combine_orbital_parameters(self):
def test_combine_time_parameters(self):
"""Combine times in 'time_parameters."""
time_params1 = {
- "nominal_start_time": datetime(2020, 1, 1, 12, 0, 0),
- "nominal_end_time": datetime(2020, 1, 1, 12, 2, 30),
- "observation_start_time": datetime(2020, 1, 1, 12, 0, 2, 23821),
- "observation_end_time": datetime(2020, 1, 1, 12, 2, 23, 12348),
+ "nominal_start_time": dt.datetime(2020, 1, 1, 12, 0, 0),
+ "nominal_end_time": dt.datetime(2020, 1, 1, 12, 2, 30),
+ "observation_start_time": dt.datetime(2020, 1, 1, 12, 0, 2, 23821),
+ "observation_end_time": dt.datetime(2020, 1, 1, 12, 2, 23, 12348),
}
time_params2 = {}
- time_shift = timedelta(seconds=1.5)
+ time_shift = dt.timedelta(seconds=1.5)
for key, value in time_params1.items():
time_params2[key] = value + time_shift
res = self.fh.combine_info([
@@ -175,10 +176,10 @@ def test_combine_time_parameters(self):
{"time_parameters": time_params2}
])
res_time_params = res["time_parameters"]
- assert res_time_params["nominal_start_time"] == datetime(2020, 1, 1, 12, 0, 0)
- assert res_time_params["nominal_end_time"] == datetime(2020, 1, 1, 12, 2, 31, 500000)
- assert res_time_params["observation_start_time"] == datetime(2020, 1, 1, 12, 0, 2, 23821)
- assert res_time_params["observation_end_time"] == datetime(2020, 1, 1, 12, 2, 24, 512348)
+ assert res_time_params["nominal_start_time"] == dt.datetime(2020, 1, 1, 12, 0, 0)
+ assert res_time_params["nominal_end_time"] == dt.datetime(2020, 1, 1, 12, 2, 31, 500000)
+ assert res_time_params["observation_start_time"] == dt.datetime(2020, 1, 1, 12, 0, 2, 23821)
+ assert res_time_params["observation_end_time"] == dt.datetime(2020, 1, 1, 12, 2, 24, 512348)
def test_file_is_kept_intact(self):
"""Test that the file object passed (string, path, or other) is kept intact."""
diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py
index 0c8eb51b3f..3bc7ca91c8 100644
--- a/satpy/tests/test_modifiers.py
+++ b/satpy/tests/test_modifiers.py
@@ -15,9 +15,11 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Tests for modifiers in modifiers/__init__.py."""
+
+import datetime as dt
import unittest
-from datetime import datetime
from unittest import mock
import dask.array as da
@@ -57,7 +59,7 @@ def _sunz_stacked_area_def():
def _shared_sunz_attrs(area_def):
attrs = {"area": area_def,
- "start_time": datetime(2018, 1, 1, 18),
+ "start_time": dt.datetime(2018, 1, 1, 18),
"modifiers": tuple(),
"name": "test_vis"}
return attrs
@@ -591,7 +593,7 @@ def test_call(self):
lats[1, 1] = np.inf
lats = da.from_array(lats, chunks=5)
area = SwathDefinition(lons, lats)
- stime = datetime(2020, 1, 1, 12, 0, 0)
+ stime = dt.datetime(2020, 1, 1, 12, 0, 0)
orb_params = {
"satellite_actual_altitude": 12345678,
"nadir_longitude": 0.0,
diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py
index db3d1ccb1d..0f2244348d 100644
--- a/satpy/tests/test_readers.py
+++ b/satpy/tests/test_readers.py
@@ -15,10 +15,12 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Test classes and functions in the readers/__init__.py module."""
import builtins
import contextlib
+import datetime as dt
import os
import sys
import unittest
@@ -399,43 +401,37 @@ def test_missing_requirements(self, *mocks):
def test_all_filtered(self):
"""Test behaviour if no file matches the filter parameters."""
- import datetime
-
from satpy.readers import load_readers
filenames = {
"viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"],
}
- filter_params = {"start_time": datetime.datetime(1970, 1, 1),
- "end_time": datetime.datetime(1970, 1, 2),
+ filter_params = {"start_time": dt.datetime(1970, 1, 1),
+ "end_time": dt.datetime(1970, 1, 2),
"area": None}
with pytest.raises(ValueError, match="No dataset could be loaded.*"):
load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params})
def test_all_filtered_multiple(self):
"""Test behaviour if no file matches the filter parameters."""
- import datetime
-
from satpy.readers import load_readers
filenames = {
"viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"],
"abi_l1b": ["OR_ABI-L1b-RadF-M3C01_G16_s20120561730408_e20120561741175_c20172631741218.nc"],
}
- filter_params = {"start_time": datetime.datetime(1970, 1, 1),
- "end_time": datetime.datetime(1970, 1, 2)}
+ filter_params = {"start_time": dt.datetime(1970, 1, 1),
+ "end_time": dt.datetime(1970, 1, 2)}
with pytest.raises(ValueError, match="No dataset could be loaded."):
load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params})
def test_almost_all_filtered(self):
"""Test behaviour if only one reader has datasets."""
- import datetime
-
from satpy.readers import load_readers
filenames = {
"viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"],
"abi_l1b": ["OR_ABI-L1b-RadF-M3C01_G16_s20172631730408_e20172631741175_c20172631741218.nc"],
}
- filter_params = {"start_time": datetime.datetime(2012, 2, 25),
- "end_time": datetime.datetime(2012, 2, 26)}
+ filter_params = {"start_time": dt.datetime(2012, 2, 25),
+ "end_time": dt.datetime(2012, 2, 26)}
# viirs has data that matches the request, abi doesn't
readers = load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params})
assert "viirs_sdr" in readers
@@ -480,11 +476,9 @@ def test_reader_other_name(self, monkeypatch, tmp_path):
def test_reader_name_matched_start_end_time(self, viirs_file):
"""Test with start and end time matching the filename."""
- from datetime import datetime
-
ri = find_files_and_readers(reader="viirs_sdr",
- start_time=datetime(2012, 2, 25, 18, 0, 0),
- end_time=datetime(2012, 2, 25, 19, 0, 0),
+ start_time=dt.datetime(2012, 2, 25, 18, 0, 0),
+ end_time=dt.datetime(2012, 2, 25, 19, 0, 0),
)
assert list(ri.keys()) == ["viirs_sdr"]
assert ri["viirs_sdr"] == [viirs_file]
@@ -494,9 +488,7 @@ def test_reader_name_matched_start_time(self, viirs_file):
Start time in the middle of the file time should still match the file.
"""
- from datetime import datetime
-
- ri = find_files_and_readers(reader="viirs_sdr", start_time=datetime(2012, 2, 25, 18, 1, 30))
+ ri = find_files_and_readers(reader="viirs_sdr", start_time=dt.datetime(2012, 2, 25, 18, 1, 30))
assert list(ri.keys()) == ["viirs_sdr"]
assert ri["viirs_sdr"] == [viirs_file]
@@ -506,20 +498,16 @@ def test_reader_name_matched_end_time(self, viirs_file):
End time in the middle of the file time should still match the file.
"""
- from datetime import datetime
-
- ri = find_files_and_readers(reader="viirs_sdr", end_time=datetime(2012, 2, 25, 18, 1, 30))
+ ri = find_files_and_readers(reader="viirs_sdr", end_time=dt.datetime(2012, 2, 25, 18, 1, 30))
assert list(ri.keys()) == ["viirs_sdr"]
assert ri["viirs_sdr"] == [viirs_file]
def test_reader_name_unmatched_start_end_time(self, viirs_file):
"""Test with start and end time matching the filename."""
- from datetime import datetime
-
with pytest.raises(ValueError, match="No supported files found"):
find_files_and_readers(reader="viirs_sdr",
- start_time=datetime(2012, 2, 26, 18, 0, 0),
- end_time=datetime(2012, 2, 26, 19, 0, 0))
+ start_time=dt.datetime(2012, 2, 26, 18, 0, 0),
+ end_time=dt.datetime(2012, 2, 26, 19, 0, 0))
def test_no_parameters(self, viirs_file):
"""Test with no limiting parameters."""
diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py
index bc68d767c1..701347cdbe 100644
--- a/satpy/tests/test_writers.py
+++ b/satpy/tests/test_writers.py
@@ -18,7 +18,7 @@
from __future__ import annotations
-import datetime
+import datetime as dt
import os
import shutil
import unittest
@@ -546,7 +546,6 @@ class TestComputeWriterResults(unittest.TestCase):
def setUp(self):
"""Create temporary directory to save files to and a mock scene."""
import tempfile
- from datetime import datetime
from pyresample.geometry import AreaDefinition
@@ -560,7 +559,7 @@ def setUp(self):
da.zeros((100, 200), chunks=50),
dims=("y", "x"),
attrs={"name": "test",
- "start_time": datetime(2018, 1, 1, 0, 0, 0),
+ "start_time": dt.datetime(2018, 1, 1, 0, 0, 0),
"area": adef}
)
self.scn = Scene()
@@ -655,7 +654,6 @@ class TestBaseWriter:
def setup_method(self):
"""Set up tests."""
import tempfile
- from datetime import datetime
from pyresample.geometry import AreaDefinition
@@ -670,7 +668,7 @@ def setup_method(self):
dims=("y", "x"),
attrs={
"name": "test",
- "start_time": datetime(2018, 1, 1, 0, 0, 0),
+ "start_time": dt.datetime(2018, 1, 1, 0, 0, 0),
"sensor": "fake_sensor",
"area": adef,
}
@@ -881,7 +879,7 @@ def test_group_results_by_output_file(tmp_path):
"kraken_depth": dat},
daskify=True,
area=fake_area,
- common_attrs={"start_time": datetime.datetime(2022, 11, 16, 13, 27)})
+ common_attrs={"start_time": dt.datetime(2022, 11, 16, 13, 27)})
# NB: even if compute=False, ``save_datasets`` creates (empty) files
(sources, targets) = fake_scene.save_datasets(
filename=os.fspath(tmp_path / "test-{name}.tif"),
diff --git a/satpy/tests/test_yaml_reader.py b/satpy/tests/test_yaml_reader.py
index 0b0293e453..699f6619b6 100644
--- a/satpy/tests/test_yaml_reader.py
+++ b/satpy/tests/test_yaml_reader.py
@@ -15,12 +15,13 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
+
"""Testing the yaml_reader module."""
+import datetime as dt
import os
import random
import unittest
-from datetime import datetime
from tempfile import mkdtemp
from unittest.mock import MagicMock, call, patch
@@ -182,8 +183,8 @@ def __init__(self, filename, filename_info, filetype_info):
"""Initialize the dummy reader."""
super(DummyReader, self).__init__(
filename, filename_info, filetype_info)
- self._start_time = datetime(2000, 1, 1, 12, 1)
- self._end_time = datetime(2000, 1, 1, 12, 2)
+ self._start_time = dt.datetime(2000, 1, 1, 12, 1)
+ self._end_time = dt.datetime(2000, 1, 1, 12, 2)
self.metadata = {}
@property
@@ -227,8 +228,8 @@ def setUp(self):
self.config = res_dict
self.reader = yr.FileYAMLReader(self.config,
filter_parameters={
- "start_time": datetime(2000, 1, 1),
- "end_time": datetime(2000, 1, 2)})
+ "start_time": dt.datetime(2000, 1, 1),
+ "end_time": dt.datetime(2000, 1, 2)})
def test_select_from_pathnames(self):
"""Check select_files_from_pathnames."""
@@ -280,8 +281,8 @@ def setUp(self):
self.config = MHS_YAML_READER_DICT
self.reader = yr.FileYAMLReader(MHS_YAML_READER_DICT,
filter_parameters={
- "start_time": datetime(2000, 1, 1),
- "end_time": datetime(2000, 1, 2),
+ "start_time": dt.datetime(2000, 1, 1),
+ "end_time": dt.datetime(2000, 1, 2),
})
def test_custom_type_with_dict_contents_gets_parsed_correctly(self):
@@ -321,8 +322,8 @@ def setUp(self):
self.config = res_dict
self.reader = yr.FileYAMLReader(res_dict,
filter_parameters={
- "start_time": datetime(2000, 1, 1),
- "end_time": datetime(2000, 1, 2),
+ "start_time": dt.datetime(2000, 1, 1),
+ "end_time": dt.datetime(2000, 1, 2),
})
def test_deprecated_passing_config_files(self):
@@ -362,17 +363,18 @@ def test_available_dataset_names(self):
def test_filter_fh_by_time(self):
"""Check filtering filehandlers by time."""
- fh0 = FakeFH(datetime(1999, 12, 30), datetime(1999, 12, 31))
- fh1 = FakeFH(datetime(1999, 12, 31, 10, 0),
- datetime(2000, 1, 1, 12, 30))
- fh2 = FakeFH(datetime(2000, 1, 1, 10, 0),
- datetime(2000, 1, 1, 12, 30))
- fh3 = FakeFH(datetime(2000, 1, 1, 12, 30),
- datetime(2000, 1, 2, 12, 30))
- fh4 = FakeFH(datetime(2000, 1, 2, 12, 30),
- datetime(2000, 1, 3, 12, 30))
- fh5 = FakeFH(datetime(1999, 12, 31, 10, 0),
- datetime(2000, 1, 3, 12, 30))
+ fh0 = FakeFH(dt.datetime(1999, 12, 30),
+ dt.datetime(1999, 12, 31))
+ fh1 = FakeFH(dt.datetime(1999, 12, 31, 10, 0),
+ dt.datetime(2000, 1, 1, 12, 30))
+ fh2 = FakeFH(dt.datetime(2000, 1, 1, 10, 0),
+ dt.datetime(2000, 1, 1, 12, 30))
+ fh3 = FakeFH(dt.datetime(2000, 1, 1, 12, 30),
+ dt.datetime(2000, 1, 2, 12, 30))
+ fh4 = FakeFH(dt.datetime(2000, 1, 2, 12, 30),
+ dt.datetime(2000, 1, 3, 12, 30))
+ fh5 = FakeFH(dt.datetime(1999, 12, 31, 10, 0),
+ dt.datetime(2000, 1, 3, 12, 30))
for idx, fh in enumerate([fh0, fh1, fh2, fh3, fh4, fh5]):
res = self.reader.time_matches(fh.start_time, fh.end_time)
@@ -388,8 +390,8 @@ def test_filter_fh_by_time(self):
@patch("satpy.readers.yaml_reader.Boundary")
def test_file_covers_area(self, bnd, adb, gad):
"""Test that area coverage is checked properly."""
- file_handler = FakeFH(datetime(1999, 12, 31, 10, 0),
- datetime(2000, 1, 3, 12, 30))
+ file_handler = FakeFH(dt.datetime(1999, 12, 31, 10, 0),
+ dt.datetime(2000, 1, 3, 12, 30))
self.reader.filter_parameters["area"] = True
bnd.return_value.contour_poly.intersection.return_value = True
@@ -417,18 +419,18 @@ def test_start_end_time(self):
with pytest.raises(RuntimeError):
self.reader.end_time
- fh0 = FakeFH(datetime(1999, 12, 30, 0, 0),
- datetime(1999, 12, 31, 0, 0))
- fh1 = FakeFH(datetime(1999, 12, 31, 10, 0),
- datetime(2000, 1, 1, 12, 30))
- fh2 = FakeFH(datetime(2000, 1, 1, 10, 0),
- datetime(2000, 1, 1, 12, 30))
- fh3 = FakeFH(datetime(2000, 1, 1, 12, 30),
- datetime(2000, 1, 2, 12, 30))
- fh4 = FakeFH(datetime(2000, 1, 2, 12, 30),
- datetime(2000, 1, 3, 12, 30))
- fh5 = FakeFH(datetime(1999, 12, 31, 10, 0),
- datetime(2000, 1, 3, 12, 30))
+ fh0 = FakeFH(dt.datetime(1999, 12, 30, 0, 0),
+ dt.datetime(1999, 12, 31, 0, 0))
+ fh1 = FakeFH(dt.datetime(1999, 12, 31, 10, 0),
+ dt.datetime(2000, 1, 1, 12, 30))
+ fh2 = FakeFH(dt.datetime(2000, 1, 1, 10, 0),
+ dt.datetime(2000, 1, 1, 12, 30))
+ fh3 = FakeFH(dt.datetime(2000, 1, 1, 12, 30),
+ dt.datetime(2000, 1, 2, 12, 30))
+ fh4 = FakeFH(dt.datetime(2000, 1, 2, 12, 30),
+ dt.datetime(2000, 1, 3, 12, 30))
+ fh5 = FakeFH(dt.datetime(1999, 12, 31, 10, 0),
+ dt.datetime(2000, 1, 3, 12, 30))
self.reader.file_handlers = {
"0": [fh1, fh2, fh3, fh4, fh5],
@@ -436,8 +438,8 @@ def test_start_end_time(self):
"2": [fh2, fh3],
}
- assert self.reader.start_time == datetime(1999, 12, 30, 0, 0)
- assert self.reader.end_time == datetime(2000, 1, 3, 12, 30)
+ assert self.reader.start_time == dt.datetime(1999, 12, 30, 0, 0)
+ assert self.reader.end_time == dt.datetime(2000, 1, 3, 12, 30)
def test_select_from_pathnames(self):
"""Check select_files_from_pathnames."""
@@ -572,8 +574,8 @@ def setUp(self):
self.config = res_dict
self.reader = yr.FileYAMLReader(res_dict,
filter_parameters={
- "start_time": datetime(2000, 1, 1),
- "end_time": datetime(2000, 1, 2),
+ "start_time": dt.datetime(2000, 1, 1),
+ "end_time": dt.datetime(2000, 1, 2),
})
fake_fh = FakeFH(None, None)
self.lons = xr.DataArray(np.ones((2, 2)) * 2,
diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py
index a6ebf8753e..b2e3576d0f 100644
--- a/satpy/tests/utils.py
+++ b/satpy/tests/utils.py
@@ -16,8 +16,8 @@
# along with this program. If not, see .
"""Utilities for various satpy tests."""
+import datetime as dt
from contextlib import contextmanager
-from datetime import datetime
from typing import Any
from unittest import mock
@@ -34,8 +34,8 @@
from satpy.modifiers import ModifierBase
from satpy.readers.file_handlers import BaseFileHandler
-FAKE_FILEHANDLER_START = datetime(2020, 1, 1, 0, 0, 0)
-FAKE_FILEHANDLER_END = datetime(2020, 1, 1, 1, 0, 0)
+FAKE_FILEHANDLER_START = dt.datetime(2020, 1, 1, 0, 0, 0)
+FAKE_FILEHANDLER_END = dt.datetime(2020, 1, 1, 1, 0, 0)
def make_dataid(**items):
diff --git a/satpy/tests/writer_tests/test_awips_tiled.py b/satpy/tests/writer_tests/test_awips_tiled.py
index dbc1bc82d7..364d0c6b8e 100644
--- a/satpy/tests/writer_tests/test_awips_tiled.py
+++ b/satpy/tests/writer_tests/test_awips_tiled.py
@@ -17,10 +17,10 @@
# satpy. If not, see .
"""Tests for the AWIPS Tiled writer."""
+import datetime as dt
import logging
import os
import shutil
-from datetime import datetime, timedelta
from glob import glob
import dask
@@ -32,8 +32,8 @@
from satpy.resample import update_resampled_coords
-START_TIME = datetime(2018, 1, 1, 12, 0, 0)
-END_TIME = START_TIME + timedelta(minutes=20)
+START_TIME = dt.datetime(2018, 1, 1, 12, 0, 0)
+END_TIME = START_TIME + dt.timedelta(minutes=20)
# NOTE:
# The following fixtures are not defined in this file, but are used and injected by Pytest:
@@ -378,7 +378,7 @@ def test_lettered_tiles_sector_ref(self, tmp_path):
unmasked_ds = xr.open_dataset(fn, mask_and_scale=False)
masked_ds = xr.open_dataset(fn, mask_and_scale=True)
check_required_properties(unmasked_ds, masked_ds)
- expected_start = (START_TIME + timedelta(minutes=20)).strftime("%Y-%m-%dT%H:%M:%S")
+ expected_start = (START_TIME + dt.timedelta(minutes=20)).strftime("%Y-%m-%dT%H:%M:%S")
assert masked_ds.attrs["start_date_time"] == expected_start
def test_lettered_tiles_no_fit(self, tmp_path):
diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py
index 6d1d15527b..18a3682fc7 100644
--- a/satpy/tests/writer_tests/test_cf.py
+++ b/satpy/tests/writer_tests/test_cf.py
@@ -17,10 +17,10 @@
# satpy. If not, see .
"""Tests for the CF writer."""
+import datetime as dt
import os
import tempfile
import warnings
-from datetime import datetime
import numpy as np
import pytest
@@ -74,8 +74,8 @@ def test_init(self):
def test_save_array(self):
"""Test saving an array to netcdf/cf."""
scn = Scene()
- start_time = datetime(2018, 5, 30, 10, 0)
- end_time = datetime(2018, 5, 30, 10, 15)
+ start_time = dt.datetime(2018, 5, 30, 10, 0)
+ end_time = dt.datetime(2018, 5, 30, 10, 15)
scn["test-array"] = xr.DataArray([1, 2, 3],
attrs=dict(start_time=start_time,
end_time=end_time,
@@ -90,8 +90,8 @@ def test_save_array(self):
def test_save_array_coords(self):
"""Test saving array with coordinates."""
scn = Scene()
- start_time = datetime(2018, 5, 30, 10, 0)
- end_time = datetime(2018, 5, 30, 10, 15)
+ start_time = dt.datetime(2018, 5, 30, 10, 0)
+ end_time = dt.datetime(2018, 5, 30, 10, 15)
coords = {
"x": np.arange(3),
"y": np.arange(1),
@@ -162,8 +162,8 @@ def test_ancillary_variables(self):
"""Test ancillary_variables cited each other."""
from satpy.tests.utils import make_dataid
scn = Scene()
- start_time = datetime(2018, 5, 30, 10, 0)
- end_time = datetime(2018, 5, 30, 10, 15)
+ start_time = dt.datetime(2018, 5, 30, 10, 0)
+ end_time = dt.datetime(2018, 5, 30, 10, 15)
da = xr.DataArray([1, 2, 3],
attrs=dict(start_time=start_time,
end_time=end_time,
@@ -180,8 +180,8 @@ def test_ancillary_variables(self):
def test_groups(self):
"""Test creating a file with groups."""
- tstart = datetime(2019, 4, 1, 12, 0)
- tend = datetime(2019, 4, 1, 12, 15)
+ tstart = dt.datetime(2019, 4, 1, 12, 0)
+ tend = dt.datetime(2019, 4, 1, 12, 15)
data_visir = [[1, 2], [3, 4]]
y_visir = [1, 2]
@@ -238,8 +238,8 @@ def test_groups(self):
def test_single_time_value(self):
"""Test setting a single time value."""
scn = Scene()
- start_time = datetime(2018, 5, 30, 10, 0)
- end_time = datetime(2018, 5, 30, 10, 15)
+ start_time = dt.datetime(2018, 5, 30, 10, 0)
+ end_time = dt.datetime(2018, 5, 30, 10, 15)
test_array = np.array([[1, 2], [3, 4]])
scn["test-array"] = xr.DataArray(test_array,
dims=["x", "y"],
@@ -272,8 +272,8 @@ def test_time_coordinate_on_a_swath(self):
def test_bounds(self):
"""Test setting time bounds."""
scn = Scene()
- start_time = datetime(2018, 5, 30, 10, 0)
- end_time = datetime(2018, 5, 30, 10, 15)
+ start_time = dt.datetime(2018, 5, 30, 10, 0)
+ end_time = dt.datetime(2018, 5, 30, 10, 15)
test_array = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1)
scn["test-array"] = xr.DataArray(test_array,
dims=["x", "y", "time"],
@@ -307,10 +307,10 @@ def test_bounds(self):
def test_bounds_minimum(self):
"""Test minimum bounds."""
scn = Scene()
- start_timeA = datetime(2018, 5, 30, 10, 0) # expected to be used
- end_timeA = datetime(2018, 5, 30, 10, 20)
- start_timeB = datetime(2018, 5, 30, 10, 3)
- end_timeB = datetime(2018, 5, 30, 10, 15) # expected to be used
+ start_timeA = dt.datetime(2018, 5, 30, 10, 0) # expected to be used
+ end_timeA = dt.datetime(2018, 5, 30, 10, 20)
+ start_timeB = dt.datetime(2018, 5, 30, 10, 3)
+ end_timeB = dt.datetime(2018, 5, 30, 10, 15) # expected to be used
test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1)
test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1)
scn["test-arrayA"] = xr.DataArray(test_arrayA,
@@ -333,8 +333,8 @@ def test_bounds_minimum(self):
def test_bounds_missing_time_info(self):
"""Test time bounds generation in case of missing time."""
scn = Scene()
- start_timeA = datetime(2018, 5, 30, 10, 0)
- end_timeA = datetime(2018, 5, 30, 10, 15)
+ start_timeA = dt.datetime(2018, 5, 30, 10, 0)
+ end_timeA = dt.datetime(2018, 5, 30, 10, 15)
test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1)
test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1)
scn["test-arrayA"] = xr.DataArray(test_arrayA,
@@ -355,8 +355,8 @@ def test_bounds_missing_time_info(self):
def test_unlimited_dims_kwarg(self):
"""Test specification of unlimited dimensions."""
scn = Scene()
- start_time = datetime(2018, 5, 30, 10, 0)
- end_time = datetime(2018, 5, 30, 10, 15)
+ start_time = dt.datetime(2018, 5, 30, 10, 0)
+ end_time = dt.datetime(2018, 5, 30, 10, 15)
test_array = np.array([[1, 2], [3, 4]])
scn["test-array"] = xr.DataArray(test_array,
dims=["x", "y"],
@@ -372,8 +372,8 @@ def test_unlimited_dims_kwarg(self):
def test_header_attrs(self):
"""Check global attributes are set."""
scn = Scene()
- start_time = datetime(2018, 5, 30, 10, 0)
- end_time = datetime(2018, 5, 30, 10, 15)
+ start_time = dt.datetime(2018, 5, 30, 10, 0)
+ end_time = dt.datetime(2018, 5, 30, 10, 15)
scn["test-array"] = xr.DataArray([1, 2, 3],
attrs=dict(start_time=start_time,
end_time=end_time))
@@ -423,8 +423,8 @@ def test_load_module_with_old_pyproj(self):
def test_global_attr_default_history_and_Conventions(self):
"""Test saving global attributes history and Conventions."""
scn = Scene()
- start_time = datetime(2018, 5, 30, 10, 0)
- end_time = datetime(2018, 5, 30, 10, 15)
+ start_time = dt.datetime(2018, 5, 30, 10, 0)
+ end_time = dt.datetime(2018, 5, 30, 10, 15)
scn["test-array"] = xr.DataArray([[1, 2, 3]],
dims=("y", "x"),
attrs=dict(start_time=start_time,
@@ -439,8 +439,8 @@ def test_global_attr_default_history_and_Conventions(self):
def test_global_attr_history_and_Conventions(self):
"""Test saving global attributes history and Conventions."""
scn = Scene()
- start_time = datetime(2018, 5, 30, 10, 0)
- end_time = datetime(2018, 5, 30, 10, 15)
+ start_time = dt.datetime(2018, 5, 30, 10, 0)
+ end_time = dt.datetime(2018, 5, 30, 10, 15)
scn["test-array"] = xr.DataArray([[1, 2, 3]],
dims=("y", "x"),
attrs=dict(start_time=start_time,
@@ -465,8 +465,8 @@ def scene(self):
"""Create a fake scene."""
scn = Scene()
attrs = {
- "start_time": datetime(2018, 5, 30, 10, 0),
- "end_time": datetime(2018, 5, 30, 10, 15)
+ "start_time": dt.datetime(2018, 5, 30, 10, 0),
+ "end_time": dt.datetime(2018, 5, 30, 10, 15)
}
scn["test-array"] = xr.DataArray([1., 2, 3], attrs=attrs)
return scn
diff --git a/satpy/tests/writer_tests/test_geotiff.py b/satpy/tests/writer_tests/test_geotiff.py
index 8925857637..d0e879c4b2 100644
--- a/satpy/tests/writer_tests/test_geotiff.py
+++ b/satpy/tests/writer_tests/test_geotiff.py
@@ -17,7 +17,7 @@
 # satpy. If not, see <http://www.gnu.org/licenses/>.
"""Tests for the geotiff writer."""
-from datetime import datetime
+import datetime as dt
from unittest import mock
import dask.array as da
@@ -42,7 +42,7 @@ def _get_test_datasets_2d():
da.zeros((100, 200), chunks=50),
dims=("y", "x"),
attrs={"name": "test",
- "start_time": datetime.utcnow(),
+ "start_time": dt.datetime.utcnow(),
"units": "K",
"area": adef}
)
@@ -72,7 +72,7 @@ def _get_test_datasets_3d():
dims=("bands", "y", "x"),
coords={"bands": ["R", "G", "B"]},
attrs={"name": "test",
- "start_time": datetime.utcnow(),
+ "start_time": dt.datetime.utcnow(),
"area": adef}
)
return [ds1]
diff --git a/satpy/tests/writer_tests/test_mitiff.py b/satpy/tests/writer_tests/test_mitiff.py
index 2dafdd5896..1642510583 100644
--- a/satpy/tests/writer_tests/test_mitiff.py
+++ b/satpy/tests/writer_tests/test_mitiff.py
@@ -20,6 +20,8 @@
Based on the test for geotiff writer
"""
+
+import datetime as dt
import logging
import os
import unittest
@@ -48,8 +50,6 @@ def tearDown(self):
def _get_test_datasets(self):
"""Create a datasets list."""
- from datetime import datetime
-
import dask.array as da
import xarray as xr
from pyproj import CRS
@@ -68,7 +68,7 @@ def _get_test_datasets(self):
da.zeros((100, 200), chunks=50),
dims=("y", "x"),
attrs={"name": "1",
- "start_time": datetime.utcnow(),
+ "start_time": dt.datetime.utcnow(),
"platform_name": "TEST_PLATFORM_NAME",
"sensor": "TEST_SENSOR_NAME",
"area": area_def,
@@ -91,7 +91,7 @@ def _get_test_datasets(self):
da.zeros((100, 200), chunks=50),
dims=("y", "x"),
attrs={"name": "4",
- "start_time": datetime.utcnow(),
+ "start_time": dt.datetime.utcnow(),
"platform_name": "TEST_PLATFORM_NAME",
"sensor": "TEST_SENSOR_NAME",
"area": area_def,
@@ -114,8 +114,6 @@ def _get_test_datasets(self):
def _get_test_datasets_sensor_set(self):
"""Create a datasets list."""
- from datetime import datetime
-
import dask.array as da
import xarray as xr
from pyproj import CRS
@@ -134,7 +132,7 @@ def _get_test_datasets_sensor_set(self):
da.zeros((100, 200), chunks=50),
dims=("y", "x"),
attrs={"name": "1",
- "start_time": datetime.utcnow(),
+ "start_time": dt.datetime.utcnow(),
"platform_name": "TEST_PLATFORM_NAME",
"sensor": {"TEST_SENSOR_NAME"},
"area": area_def,
@@ -157,7 +155,7 @@ def _get_test_datasets_sensor_set(self):
da.zeros((100, 200), chunks=50),
dims=("y", "x"),
attrs={"name": "4",
- "start_time": datetime.utcnow(),
+ "start_time": dt.datetime.utcnow(),
"platform_name": "TEST_PLATFORM_NAME",
"sensor": {"TEST_SENSOR_NAME"},
"area": area_def,
@@ -180,8 +178,6 @@ def _get_test_datasets_sensor_set(self):
def _get_test_dataset(self, bands=3):
"""Create a single test dataset."""
- from datetime import datetime
-
import dask.array as da
import xarray as xr
from pyproj import CRS
@@ -201,7 +197,7 @@ def _get_test_dataset(self, bands=3):
da.zeros((bands, 100, 200), chunks=50),
dims=("bands", "y", "x"),
attrs={"name": "test",
- "start_time": datetime.utcnow(),
+ "start_time": dt.datetime.utcnow(),
"platform_name": "TEST_PLATFORM_NAME",
"sensor": "TEST_SENSOR_NAME",
"area": area_def,
@@ -211,8 +207,6 @@ def _get_test_dataset(self, bands=3):
def _get_test_one_dataset(self):
"""Create a single test dataset."""
- from datetime import datetime
-
import dask.array as da
import xarray as xr
from pyproj import CRS
@@ -232,7 +226,7 @@ def _get_test_one_dataset(self):
da.zeros((100, 200), chunks=50),
dims=("y", "x"),
attrs={"name": "test",
- "start_time": datetime.utcnow(),
+ "start_time": dt.datetime.utcnow(),
"platform_name": "TEST_PLATFORM_NAME",
"sensor": "avhrr",
"area": area_def,
@@ -242,8 +236,6 @@ def _get_test_one_dataset(self):
def _get_test_one_dataset_sensor_set(self):
"""Create a single test dataset."""
- from datetime import datetime
-
import dask.array as da
import xarray as xr
from pyproj import CRS
@@ -263,7 +255,7 @@ def _get_test_one_dataset_sensor_set(self):
da.zeros((100, 200), chunks=50),
dims=("y", "x"),
attrs={"name": "test",
- "start_time": datetime.utcnow(),
+ "start_time": dt.datetime.utcnow(),
"platform_name": "TEST_PLATFORM_NAME",
"sensor": {"avhrr"},
"area": area_def,
@@ -273,8 +265,6 @@ def _get_test_one_dataset_sensor_set(self):
def _get_test_dataset_with_bad_values(self, bands=3):
"""Create a single test dataset."""
- from datetime import datetime
-
import xarray as xr
from pyproj import CRS
from pyresample.geometry import AreaDefinition
@@ -298,7 +288,7 @@ def _get_test_dataset_with_bad_values(self, bands=3):
ds1 = xr.DataArray(rgb_data,
dims=("bands", "y", "x"),
attrs={"name": "test",
- "start_time": datetime.utcnow(),
+ "start_time": dt.datetime.utcnow(),
"platform_name": "TEST_PLATFORM_NAME",
"sensor": "TEST_SENSOR_NAME",
"area": area_def,
@@ -307,8 +297,6 @@ def _get_test_dataset_with_bad_values(self, bands=3):
def _get_test_dataset_calibration(self, bands=6):
"""Create a single test dataset."""
- from datetime import datetime
-
import dask.array as da
import xarray as xr
from pyproj import CRS
@@ -362,7 +350,7 @@ def _get_test_dataset_calibration(self, bands=6):
bands.append(p.attrs["name"])
data["bands"] = list(bands)
new_attrs = {"name": "datasets",
- "start_time": datetime.utcnow(),
+ "start_time": dt.datetime.utcnow(),
"platform_name": "TEST_PLATFORM_NAME",
"sensor": "test-sensor",
"area": area_def,
@@ -411,8 +399,6 @@ def _get_test_dataset_calibration(self, bands=6):
def _get_test_dataset_calibration_one_dataset(self, bands=1):
"""Create a single test dataset."""
- from datetime import datetime
-
import dask.array as da
import xarray as xr
from pyproj import CRS
@@ -441,7 +427,7 @@ def _get_test_dataset_calibration_one_dataset(self, bands=1):
for p in scene:
calibration.append(p.attrs["calibration"])
new_attrs = {"name": "datasets",
- "start_time": datetime.utcnow(),
+ "start_time": dt.datetime.utcnow(),
"platform_name": "TEST_PLATFORM_NAME",
"sensor": "test-sensor",
"area": area_def,
@@ -465,8 +451,6 @@ def _get_test_dataset_calibration_one_dataset(self, bands=1):
def _get_test_dataset_three_bands_two_prereq(self, bands=3):
"""Create a single test dataset."""
- from datetime import datetime
-
import dask.array as da
import xarray as xr
from pyproj import CRS
@@ -488,7 +472,7 @@ def _get_test_dataset_three_bands_two_prereq(self, bands=3):
coords=[["R", "G", "B"], list(range(100)), list(range(200))],
dims=("bands", "y", "x"),
attrs={"name": "test",
- "start_time": datetime.utcnow(),
+ "start_time": dt.datetime.utcnow(),
"platform_name": "TEST_PLATFORM_NAME",
"sensor": "TEST_SENSOR_NAME",
"area": area_def,
@@ -499,8 +483,6 @@ def _get_test_dataset_three_bands_two_prereq(self, bands=3):
def _get_test_dataset_three_bands_prereq(self, bands=3):
"""Create a single test dataset."""
- from datetime import datetime
-
import dask.array as da
import xarray as xr
from pyproj import CRS
@@ -522,7 +504,7 @@ def _get_test_dataset_three_bands_prereq(self, bands=3):
coords=[["R", "G", "B"], list(range(100)), list(range(200))],
dims=("bands", "y", "x"),
attrs={"name": "test",
- "start_time": datetime.utcnow(),
+ "start_time": dt.datetime.utcnow(),
"platform_name": "TEST_PLATFORM_NAME",
"sensor": "TEST_SENSOR_NAME",
"area": area_def,
diff --git a/satpy/tests/writer_tests/test_simple_image.py b/satpy/tests/writer_tests/test_simple_image.py
index 01d89a22ad..6a4eba95e3 100644
--- a/satpy/tests/writer_tests/test_simple_image.py
+++ b/satpy/tests/writer_tests/test_simple_image.py
@@ -38,7 +38,7 @@ def tearDown(self):
@staticmethod
def _get_test_datasets():
"""Create DataArray for testing."""
- from datetime import datetime
+ import datetime as dt
import dask.array as da
import xarray as xr
@@ -46,7 +46,7 @@ def _get_test_datasets():
da.zeros((100, 200), chunks=50),
dims=("y", "x"),
attrs={"name": "test",
- "start_time": datetime.utcnow()}
+ "start_time": dt.datetime.utcnow()}
)
return [ds1]
diff --git a/satpy/writers/awips_tiled.py b/satpy/writers/awips_tiled.py
index 8fe9a8d2cc..1652a3786e 100644
--- a/satpy/writers/awips_tiled.py
+++ b/satpy/writers/awips_tiled.py
@@ -213,13 +213,14 @@
lettered tile locations.
"""
+
+import datetime as dt
import logging
import os
import string
import sys
import warnings
from collections import namedtuple
-from datetime import datetime, timedelta
import dask
import dask.array as da
@@ -1101,7 +1102,7 @@ def apply_misc_metadata(self, new_ds, sector_id=None, creator=None, creation_tim
if creator is None:
creator = "Satpy Version {} - AWIPS Tiled Writer".format(__version__)
if creation_time is None:
- creation_time = datetime.utcnow()
+ creation_time = dt.datetime.utcnow()
self._add_sector_id_global(new_ds, sector_id)
new_ds.attrs["Conventions"] = "CF-1.7"
@@ -1493,8 +1494,8 @@ def _save_nonempty_mfdatasets(self, datasets_to_save, output_filenames, **kwargs
def _adjust_metadata_times(self, ds_info):
debug_shift_time = int(os.environ.get("DEBUG_TIME_SHIFT", 0))
if debug_shift_time:
- ds_info["start_time"] += timedelta(minutes=debug_shift_time)
- ds_info["end_time"] += timedelta(minutes=debug_shift_time)
+ ds_info["start_time"] += dt.timedelta(minutes=debug_shift_time)
+ ds_info["end_time"] += dt.timedelta(minutes=debug_shift_time)
def _get_tile_data_info(self, data_arrs, creation_time, source_name):
# use the first data array as a "representative" for the group
@@ -1597,7 +1598,7 @@ def save_datasets(self, datasets, sector_id=None, # noqa: D417
area_data_arrs = self._group_by_area(datasets)
datasets_to_save = []
output_filenames = []
- creation_time = datetime.utcnow()
+ creation_time = dt.datetime.utcnow()
area_tile_data_gen = self._iter_area_tile_info_and_datasets(
area_data_arrs, template, lettered_grid, sector_id, num_subtiles,
tile_size, tile_count, use_sector_reference)
@@ -1775,7 +1776,7 @@ def create_debug_lettered_tiles(**writer_kwargs):
sector_info = writer.awips_sectors[sector_id]
area_def, arr = _create_debug_array(sector_info, save_kwargs["num_subtiles"])
- now = datetime.utcnow()
+ now = dt.datetime.utcnow()
product = xr.DataArray(da.from_array(arr, chunks="auto"), attrs=dict(
name="debug_{}".format(sector_id),
platform_name="DEBUG",