Commit 574c3ae

Merge remote-tracking branch 'upstream/main' into main

joleenf committed Feb 19, 2024
2 parents 649e218 + e74729e
Showing 33 changed files with 1,342 additions and 897 deletions.
10 changes: 5 additions & 5 deletions .github/workflows/ci.yaml
@@ -18,10 +18,10 @@ jobs:
       fail-fast: true
       matrix:
         os: ["windows-latest", "ubuntu-latest", "macos-latest"]
-        python-version: ["3.9", "3.10", "3.11"]
+        python-version: ["3.9", "3.11", "3.12"]
         experimental: [false]
         include:
-          - python-version: "3.11"
+          - python-version: "3.12"
             os: "ubuntu-latest"
             experimental: true

@@ -51,7 +51,7 @@ jobs:
           CONDA_PREFIX=$(python -c "import sys; print(sys.prefix)")
           echo "CONDA_PREFIX=$CONDA_PREFIX" >> $GITHUB_ENV
-      - uses: actions/cache@v3
+      - uses: actions/cache@v4
         with:
           path: ${{ env.CONDA_PREFIX }}
           key: ${{ matrix.os }}-${{matrix.python-version}}-conda-${{ hashFiles('continuous_integration/environment.yaml') }}-${{ env.DATE }}-${{matrix.experimental}}-${{ env.CACHE_NUMBER }}

@@ -110,7 +110,7 @@ jobs:
           pytest --cov=satpy satpy/tests --cov-report=xml --cov-report=
       - name: Upload unittest coverage to Codecov
-        uses: codecov/codecov-action@v3
+        uses: codecov/codecov-action@v4
         with:
           flags: unittests
           file: ./coverage.xml

@@ -131,7 +131,7 @@
           coverage xml
       - name: Upload behaviour test coverage to Codecov
-        uses: codecov/codecov-action@v3
+        uses: codecov/codecov-action@v4
         with:
           flags: behaviourtests
           file: ./coverage.xml
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -3,7 +3,7 @@ fail_fast: false
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: 'v0.1.9'
+    rev: 'v0.2.0'
     hooks:
       - id: ruff
   - repo: https://github.com/pre-commit/pre-commit-hooks
@@ -14,7 +14,7 @@ repos:
       - id: check-yaml
         args: [--unsafe]
   - repo: https://github.com/PyCQA/bandit
-    rev: '1.7.6'  # Update me!
+    rev: '1.7.7'  # Update me!
     hooks:
       - id: bandit
         args: [--ini, .bandit]
20 changes: 10 additions & 10 deletions asv.conf.json
@@ -46,7 +46,7 @@
     // determined by looking for tools on the PATH environment
     // variable.
     //"environment_type": "virtualenv",
-    "environment_type": "conda",
+    "environment_type": "mamba",

     // timeout in seconds for installing any dependencies in environment
     // defaults to 10 min
@@ -58,7 +58,7 @@
     // The Pythons you'd like to test against. If not provided, defaults
     // to the current version of Python used to run `asv`.
     // "pythons": ["2.7", "3.6"],
-    "pythons": ["3.9", "3.10"],
+    "pythons": ["3.11", "3.12"],

     // The list of conda channel names to be searched for benchmark
     // dependency packages in the specified order
@@ -80,14 +80,14 @@
     // "pip+emcee": [""],  // emcee is only available for install with pip.
     // },
     "matrix": {
-        "pyresample": ["1.22.3"],
-        "trollimage": ["1.17.0"],
-        "pyorbital": ["1.7.1"],
-        "pyspectral": ["0.10.6"],
-        "rasterio": ["1.2.10"],
-        "dask": ["2021.12.0"],
-        "xarray": ["0.20.2"],
-        "numpy": ["1.22.0"],
+        "pyresample": ["1.27.1"],
+        "trollimage": ["1.22.2"],
+        "pyorbital": ["1.8.1"],
+        "pyspectral": ["0.13.0"],
+        "rasterio": ["1.3.9"],
+        "dask": ["2024.1.1"],
+        "xarray": ["2024.1.1"],
+        "numpy": ["1.26.0"],
         "s3fs": [],
         "h5py": [],
         "netCDF4": [],
4 changes: 2 additions & 2 deletions continuous_integration/environment.yaml
@@ -43,7 +43,7 @@ dependencies:
   - python-eccodes
   # 2.19.1 seems to cause library linking issues
   - eccodes>=2.20
-  - pytest
+  - pytest<8.0.0
   - pytest-cov
   - pytest-lazy-fixture
   - fsspec
@@ -59,6 +59,6 @@ dependencies:
   - bokeh
   - pip:
     - trollsift
-    - trollimage>=1.20
+    - trollimage>=1.23
     - pyspectral
     - pyorbital
10 changes: 6 additions & 4 deletions pyproject.toml
@@ -14,19 +14,21 @@ known_first_party = "satpy"
 line_length = 120

 [tool.ruff]
+line-length = 120
+
+[tool.ruff.lint]
 # See https://docs.astral.sh/ruff/rules/
 # In the future, add "B", "S", "N"
 select = ["A", "D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"]
-line-length = 120

-[tool.ruff.per-file-ignores]
+[tool.ruff.lint.per-file-ignores]
 "satpy/tests/*" = ["S101"]  # assert allowed in tests
 "utils/coord2area_def.py" = ["T201"]  # allow print
 "fetch_avhrr_calcoeffs.py" = ["T201"]  # allow print

-[tool.ruff.pydocstyle]
+[tool.ruff.lint.pydocstyle]
 convention = "google"

-[tool.ruff.mccabe]
+[tool.ruff.lint.mccabe]
 # Unlike Flake8, default to a complexity level of 10.
 max-complexity = 10
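This restructuring tracks ruff 0.2.0, which moved lint settings from the top-level [tool.ruff] table into [tool.ruff.lint]. A quick way to sanity-check the migrated layout is to parse the file directly; a minimal sketch, assuming Python 3.11+ (for the standard-library tomllib) run from the repository root:

import tomllib  # standard library as of Python 3.11

# Read the migrated configuration and confirm the new table layout.
with open("pyproject.toml", "rb") as f:
    config = tomllib.load(f)

ruff_cfg = config["tool"]["ruff"]
print(ruff_cfg["line-length"])         # 120 (stays at the top level)
print(ruff_cfg["lint"]["select"])      # ['A', 'D', 'E', 'W', 'F', ...]
print(ruff_cfg["lint"]["pydocstyle"])  # {'convention': 'google'}
print(ruff_cfg["lint"]["mccabe"])      # {'max-complexity': 10}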
7 changes: 0 additions & 7 deletions satpy/composites/__init__.py
@@ -1665,13 +1665,6 @@ def __call__(self, *args, **kwargs):
         img.attrs["mode"] = "".join(img.bands.data)
         img.attrs.pop("modifiers", None)
         img.attrs.pop("calibration", None)
-        # Add start time if not present in the filename
-        if "start_time" not in img.attrs or not img.attrs["start_time"]:
-            import datetime as dt
-            img.attrs["start_time"] = dt.datetime.utcnow()
-        if "end_time" not in img.attrs or not img.attrs["end_time"]:
-            import datetime as dt
-            img.attrs["end_time"] = dt.datetime.utcnow()

         return img
80 changes: 66 additions & 14 deletions satpy/dataset/metadata.py
@@ -17,6 +17,7 @@
 # satpy. If not, see <http://www.gnu.org/licenses/>.
 """Utilities for merging metadata from various sources."""

+import warnings
 from collections.abc import Collection
 from datetime import datetime
 from functools import partial, reduce
@@ -27,33 +28,50 @@
 from satpy.writers.utils import flatten_dict


-def combine_metadata(*metadata_objects, average_times=True):
+def combine_metadata(*metadata_objects, average_times=None):
     """Combine the metadata of two or more Datasets.

     If the values corresponding to any keys are not equal or do not
     exist in all provided dictionaries then they are not included in
-    the returned dictionary. By default any keys with the word 'time'
-    in them and consisting of datetime objects will be averaged. This
-    is to handle cases where data were observed at almost the same time
-    but not exactly. In the interest of time, lazy arrays are compared by
-    object identity rather than by their contents.
+    the returned dictionary.
+
+    All values of keys containing the substring 'start_time' are set to the
+    earliest value, and values of keys containing 'end_time' to the latest.
+    All other keys containing the word 'time' are averaged. Before these
+    adjustments, `None` values resulting from data that have no times
+    associated with them are removed. These rules also apply to values in
+    the 'time_parameters' dictionary.
+
+    .. versionchanged:: 0.47
+
+       Before Satpy 0.47, all times, including `start_time` and `end_time`,
+       were averaged.
+
+    In the interest of processing time, lazy arrays are compared by object
+    identity rather than by their contents.

     Args:
         *metadata_objects: MetadataObject or dict objects to combine
-        average_times (bool): Average any keys with 'time' in the name
+
+    Kwargs:
+        average_times (bool): Removed option to average all time attributes.

     Returns:
         dict: the combined metadata

     """
-    info_dicts = _get_valid_dicts(metadata_objects)
+    if average_times is not None:
+        warnings.warn(
+            "The 'average_times' option has been removed; start and end times "
+            "are now combined with min/max instead.",
+            UserWarning
+        )

+    info_dicts = _get_valid_dicts(metadata_objects)
     if len(info_dicts) == 1:
         return info_dicts[0].copy()

     shared_keys = _shared_keys(info_dicts)

-    return _combine_shared_info(shared_keys, info_dicts, average_times)
+    return _combine_shared_info(shared_keys, info_dicts)
@@ -75,17 +93,51 @@ def _shared_keys(info_dicts):
     return reduce(set.intersection, key_sets)


-def _combine_shared_info(shared_keys, info_dicts, average_times):
+def _combine_shared_info(shared_keys, info_dicts):
     shared_info = {}
     for key in shared_keys:
         values = [info[key] for info in info_dicts]
-        if "time" in key and isinstance(values[0], datetime) and average_times:
-            shared_info[key] = average_datetimes(values)
-        elif _are_values_combinable(values):
-            shared_info[key] = values[0]
+        _combine_values(key, values, shared_info)
     return shared_info


+def _combine_values(key, values, shared_info):
+    if "time" in key:
+        times = _combine_times(key, values)
+        if times is not None:
+            shared_info[key] = times
+    elif _are_values_combinable(values):
+        shared_info[key] = values[0]
+
+
+def _combine_times(key, values):
+    if key == "time_parameters":
+        return _combine_time_parameters(values)
+    filtered_values = _filter_time_values(values)
+    if not filtered_values:
+        return None
+    if "end_time" in key:
+        return max(filtered_values)
+    elif "start_time" in key:
+        return min(filtered_values)
+    return average_datetimes(filtered_values)
+
+
+def _combine_time_parameters(values):
+    # Assume the first item has all the keys.
+    keys = values[0].keys()
+    res = {}
+    for key in keys:
+        sub_values = [itm[key] for itm in values]
+        res[key] = _combine_times(key, sub_values)
+    return res
+
+
+def _filter_time_values(values):
+    """Remove values that are not datetime objects."""
+    return [v for v in values if isinstance(v, datetime)]
+
+
 def average_datetimes(datetime_list):
     """Average a series of datetime objects.
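The net effect of the new time handling is easiest to see in a small usage sketch (hypothetical metadata values; assumes a Satpy version that includes this change):

from datetime import datetime

from satpy.dataset.metadata import combine_metadata

# Two granules observed a few minutes apart (hypothetical values).
meta_a = {"start_time": datetime(2024, 2, 19, 12, 0),
          "end_time": datetime(2024, 2, 19, 12, 5),
          "observation_time": datetime(2024, 2, 19, 12, 2),
          "platform_name": "Meteosat-11"}
meta_b = {"start_time": datetime(2024, 2, 19, 12, 5),
          "end_time": datetime(2024, 2, 19, 12, 10),
          "observation_time": datetime(2024, 2, 19, 12, 8),
          "platform_name": "Meteosat-11"}

combined = combine_metadata(meta_a, meta_b)
print(combined["start_time"])        # 2024-02-19 12:00:00 -> earliest wins
print(combined["end_time"])          # 2024-02-19 12:10:00 -> latest wins
print(combined["observation_time"])  # 2024-02-19 12:05:00 -> other times averaged
print(combined["platform_name"])     # 'Meteosat-11' -> equal values kept as-is

Nested 'time_parameters' dictionaries receive the same per-key treatment through _combine_time_parameters(), and None time values are dropped before the min/max/average is taken.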
1 change: 1 addition & 0 deletions satpy/etc/composites/seviri.yaml
@@ -421,6 +421,7 @@ composites:
       - name: HRV
         modifiers: [sunz_corrected]
       - IR_108
+
   hrv_fog:
     compositor: !!python/name:satpy.composites.GenericCompositor
     standard_name: hrv_fog
25 changes: 24 additions & 1 deletion satpy/etc/enhancements/generic.yaml
@@ -271,6 +271,29 @@ enhancements:
       stretch: linear
       cutoffs: [0.005, 0.005]

+  four_level_cloud_mask:
+    standard_name: cloud_mask
+    reader: clavrx
+    operations:
+      - name: palettize
+        method: !!python/name:satpy.enhancements.palettize
+        kwargs:
+          palettes:
+            - {'values': [-127,  # Fill Value
+                          0,     # Clear
+                          1,     # Probably Clear
+                          2,     # Probably Cloudy
+                          3,     # Cloudy
+                          ],
+               'colors': [[0, 0, 0],        # black, -127 = Fill Value
+                          [94, 79, 162],    # blue, 0 = Clear
+                          [73, 228, 242],   # cyan, 1 = Probably Clear
+                          [158, 1, 66],     # red, 2 = Probably Cloudy
+                          [255, 255, 255],  # white, 3 = Cloudy
+                          ],
+               'color_scale': 255,
+               }
+
   sar-ice:
     standard_name: sar-ice
     operations:
@@ -1104,7 +1127,7 @@ enhancements:
     operations:
       - name: stretch
         method: !!python/name:satpy.enhancements.stretch
-        kwargs: {stretch: linear, cutoffs: [0.02, 0.02]}
+        kwargs: {stretch: linear, cutoffs: [[0.02, 0.02], [0.02, 0.02], [0.02, 0.02], [0.02, 0.02]]}
       - name: inverse
         method: !!python/name:satpy.enhancements.invert
         args:
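The new four_level_cloud_mask entry is essentially a value-to-color lookup table for the CLAVR-x cloud mask. The sketch below reproduces that mapping with plain NumPy, independent of satpy's actual palettize machinery, to make the palette's behavior concrete (the sample mask array is hypothetical, and every pixel is assumed to hold one of the listed palette values):

import numpy as np

# Palette from the YAML above; color_scale 255 means the RGB triples are 0-255.
values = np.array([-127, 0, 1, 2, 3])
colors = np.array([[0, 0, 0],         # fill value -> black
                   [94, 79, 162],     # clear -> blue
                   [73, 228, 242],    # probably clear -> cyan
                   [158, 1, 66],      # probably cloudy -> red
                   [255, 255, 255]])  # cloudy -> white

# Hypothetical cloud-mask pixels to colorize.
mask = np.array([[0, 1, 3],
                 [2, -127, 0]])

# Map each pixel value to its palette index, then to an RGB triple scaled to 0-1.
idx = np.searchsorted(values, mask)
rgb = colors[idx] / 255.0
print(rgb.shape)  # (2, 3, 3): rows, columns, RGB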
(Diffs for the remaining 24 changed files are not shown here.)