Skip to content

Commit

Permalink
Merge branch 'main' into feature-nwc-geo-hrw
Browse files — browse the repository at this point in the history
  • Loading branch information
lahtinep committed Mar 3, 2025
2 parents b2c4dc9 + a950fcc commit 9a9c0a7
Show file tree
Hide file tree
Showing 13 changed files with 265 additions and 55 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ jobs:
uses: codecov/codecov-action@v5
with:
flags: unittests
file: ./coverage.xml
files: ./coverage.xml
env_vars: OS,PYTHON_VERSION,UNSTABLE

- name: Coveralls Parallel
Expand All @@ -139,7 +139,7 @@ jobs:
uses: codecov/codecov-action@v5
with:
flags: behaviourtests
file: ./coverage.xml
files: ./coverage.xml
env_vars: OS,PYTHON_VERSION,UNSTABLE

coveralls:
Expand Down
1 change: 1 addition & 0 deletions continuous_integration/environment.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ dependencies:
- pytest
- pytest-cov
- fsspec
- universal_pathlib
- botocore>=1.33
- s3fs
- python-geotiepoints
Expand Down
1 change: 0 additions & 1 deletion doc/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@ help:

clean:
-rm -rf $(BUILDDIR)/*
-rm -rf source/api/*.rst

html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
Expand Down
3 changes: 1 addition & 2 deletions doc/rtd_environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,9 +25,8 @@ dependencies:
- rioxarray
- setuptools
- setuptools_scm
- sphinx
- sphinx>=8.2.0
- sphinx_rtd_theme
- sphinxcontrib-apidoc
- trollsift
- xarray
- zarr
Expand Down
34 changes: 22 additions & 12 deletions doc/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,11 +82,14 @@ def __getattr__(cls, name):

# -- General configuration -----------------------------------------------------

# sphinx.ext.apidoc was added to Sphinx in 8.2.0 (previously the separate
# sphinxcontrib-apidoc package), so require at least that version.
needs_sphinx = "8.2.0"

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ["sphinx.ext.autodoc", "sphinx.ext.intersphinx", "sphinx.ext.todo", "sphinx.ext.coverage",
              "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.autosummary", "sphinx.ext.autosectionlabel",
              "doi_role", "sphinx.ext.viewcode", "sphinx.ext.apidoc",
              "sphinx.ext.mathjax"]

# Autosectionlabel
Expand All @@ -95,18 +98,25 @@ def __getattr__(cls, name):
autosectionlabel_maxdepth = 3

# API docs, generated by sphinx.ext.apidoc (see needs_sphinx above).
apidoc_modules = [
    {
        "path": "../../satpy",
        "destination": "api/",
        "exclude_patterns": [
            "../../satpy/readers/caliop_l2_cloud.py",
            "../../satpy/readers/ghrsst_l3c_sst.py",
            "../../satpy/readers/scatsat1_l2b.py",
            # Prefer to not document test modules. Most users will look at
            # source code if needed and we want to avoid documentation builds
            # suffering from import-time test data creation. We want to keep
            # things contributors might be interested in like satpy.tests.utils.
            "../../satpy/tests/test_*.py",
            "../../satpy/tests/**/test_*.py",
        ],
    },
]
# One generated page per module instead of one page per package.
apidoc_separate_modules = True
# Document private (underscore-prefixed) members as well.
apidoc_include_private = True

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
Expand Down Expand Up @@ -297,7 +307,7 @@ def __getattr__(cls, name):
"scipy": ("https://scipy.github.io/devdocs", None),
"trollimage": ("https://trollimage.readthedocs.io/en/stable", None),
"trollsift": ("https://trollsift.readthedocs.io/en/stable", None),
"xarray": ("https://xarray.pydata.org/en/stable", None),
"xarray": ("https://docs.xarray.dev/en/stable", None),
"rasterio": ("https://rasterio.readthedocs.io/en/latest", None),
"donfig": ("https://donfig.readthedocs.io/en/latest", None),
"pooch": ("https://www.fatiando.org/pooch/latest/", None),
Expand Down
7 changes: 7 additions & 0 deletions satpy/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Pytest configuration and setup functions."""
import pytest


def pytest_configure(config):
Expand All @@ -28,3 +29,9 @@ def pytest_unconfigure(config):
"""Undo previous configurations."""
from satpy import aux_download
aux_download.RUNNING_TESTS = False


@pytest.fixture(scope="session")
def session_tmp_path(tmp_path_factory):
    """Create one temporary data directory shared by the whole test session."""
    shared_dir = tmp_path_factory.mktemp("data")
    return shared_dir
43 changes: 28 additions & 15 deletions satpy/readers/hrit_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,6 @@
from pyresample import geometry

import satpy.readers.utils as utils
from satpy.readers import FSFile
from satpy.readers.eum_base import time_cds_short
from satpy.readers.file_handlers import BaseFileHandler
from satpy.readers.seviri_base import dec10216
Expand Down Expand Up @@ -88,14 +87,18 @@
}


def decompress_file(infile) -> bytes:
    """Decompress an XRIT data file and return the decompressed buffer.

    The whole file is read into memory and decompression is delegated to
    :func:`decompress_buffer`.
    """
    with open(infile, mode="rb") as fh:
        return decompress_buffer(fh.read())


def decompress_buffer(buffer) -> bytes:
    """Decompress an in-memory XRIT buffer.

    The import is kept local so pyPublicDecompWT stays an optional
    dependency, needed only when compressed files are actually read.
    """
    from pyPublicDecompWT import xRITDecompress

    xrit = xRITDecompress()
    xrit.decompress(buffer)
    return xrit.data()

Expand All @@ -117,28 +120,31 @@ class HRITFileHandler(BaseFileHandler):

def __init__(self, filename, filename_info, filetype_info, hdr_info):
"""Initialize the reader."""
super(HRITFileHandler, self).__init__(filename, filename_info,
filetype_info)
super().__init__(filename, filename_info, filetype_info)

self.mda = {}
self.hdr_info = hdr_info
self._get_hd(self.hdr_info)
self._start_time = filename_info["start_time"]
self._end_time = self._start_time + dt.timedelta(minutes=15)

def _get_hd(self, hdr_info):
def _get_hd(self, hdr_info, verbose=False):
"""Open the file, read and get the basic file header info and set the mda dictionary."""
hdr_map, variable_length_headers, text_headers = hdr_info

with utils.generic_open(self.filename, mode="rb") as fp:
total_header_length = 16
while fp.tell() < total_header_length:
hdr_id = get_header_id(fp)
if verbose:
print("hdr_id") # noqa: T201
print(f'np.void({hdr_id}, dtype=[("hdr_id", "u1"), ("record_length", ">u2")]),') # noqa: T201
the_type = hdr_map[hdr_id["hdr_id"]]
if the_type in variable_length_headers:
field_length = int((hdr_id["record_length"] - 3) /
the_type.itemsize)
current_hdr = get_header_content(fp, the_type, field_length)
if verbose:
print(f"np.zeros(({field_length}, ), dtype={the_type}),") # noqa: T201
key = variable_length_headers[the_type]
if key in self.mda:
if not isinstance(self.mda[key], list):
Expand All @@ -152,9 +158,13 @@ def _get_hd(self, hdr_info):
char = list(the_type.fields.values())[0][0].char
new_type = np.dtype(char + str(field_length))
current_hdr = get_header_content(fp, new_type)[0]
if verbose:
print(f'np.array({current_hdr}, dtype="{new_type}"),') # noqa: T201
self.mda[text_headers[the_type]] = current_hdr
else:
current_hdr = get_header_content(fp, the_type)[0]
if verbose:
print(f"np.void({current_hdr}, dtype={the_type}),") # noqa: T201
self.mda.update(
dict(zip(current_hdr.dtype.names, current_hdr)))

Expand Down Expand Up @@ -318,7 +328,7 @@ def _read_data_from_file(self):
return self._read_data_from_disk()

def _is_file_like(self):
    """Return True when ``self.filename`` is not a plain string path.

    Anything other than a ``str`` (e.g. an FSFile or other file-like
    object) is treated as an already-open, in-memory source.
    """
    return not isinstance(self.filename, str)

def _read_data_from_disk(self):
# For reading the image data, unzip_context is faster than generic_open
Expand All @@ -327,7 +337,7 @@ def _read_data_from_disk(self):

if self.compressed:
return np.frombuffer(
decompress(fn),
decompress_file(fn),
offset=self.offset,
dtype=dtype,
count=np.prod(shape)
Expand All @@ -344,12 +354,15 @@ def _read_file_like(self):
# filename is likely to be a file-like object, already in memory
dtype, shape = self._get_input_info()
with utils.generic_open(self.filename, mode="rb") as fp:
decompressed_buffer = fp.read()
if self.compressed:
decompressed_buffer = decompress_buffer(decompressed_buffer)
no_elements = np.prod(shape)
fp.seek(self.offset)
return np.frombuffer(
fp.read(np.dtype(dtype).itemsize * no_elements),
decompressed_buffer,
dtype=np.dtype(dtype),
count=no_elements.item()
count=no_elements.item(),
offset=self.offset
).reshape(shape)

def _get_input_info(self):
Expand Down
16 changes: 14 additions & 2 deletions satpy/readers/netcdf_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -211,8 +211,20 @@ def _collect_global_attrs(self, obj):
self.file_content[fc_key] = global_attrs[key] = value
self.file_content["/attrs"] = global_attrs

def _get_object_attrs(self, obj):
return obj.__dict__
@staticmethod
def _get_object_attrs(obj):
"""Get object attributes using __dict__ but retrieve recoverable attributes on failure."""
try:
return obj.__dict__
except KeyError:
# Maybe unrecognised datatype.
atts = {}
for attname in obj.ncattrs():
try:
atts[attname] = obj.getncattr(attname)
except KeyError:
LOG.warning(f"Warning: Cannot load object ({obj.name}) attribute ({attname}).")
return atts

def _collect_attrs(self, name, obj):
"""Collect all the attributes for the provided file object."""
Expand Down
12 changes: 6 additions & 6 deletions satpy/readers/seviri_l1b_hrit.py
Original file line number Diff line number Diff line change
Expand Up @@ -312,7 +312,7 @@ class HRITMSGPrologueEpilogueBase(HRITFileHandler):

def __init__(self, filename, filename_info, filetype_info, hdr_info):
"""Initialize the file handler for prologue and epilogue files."""
super(HRITMSGPrologueEpilogueBase, self).__init__(filename, filename_info, filetype_info, hdr_info)
super().__init__(filename, filename_info, filetype_info, hdr_info)
self._reduced = None

def _reduce(self, mda, max_size):
Expand All @@ -333,11 +333,11 @@ def __init__(self, filename, filename_info, filetype_info, calib_mode="nominal",
ext_calib_coefs=None, include_raw_metadata=False,
mda_max_array_size=None, fill_hrv=None, mask_bad_quality_scan_lines=None):
"""Initialize the reader."""
super(HRITMSGPrologueFileHandler, self).__init__(filename, filename_info,
filetype_info,
(msg_hdr_map,
msg_variable_length_headers,
msg_text_headers))
super().__init__(filename, filename_info,
filetype_info,
(msg_hdr_map,
msg_variable_length_headers,
msg_text_headers))
self.prologue = {}
self.read_prologue()

Expand Down
8 changes: 4 additions & 4 deletions satpy/readers/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -355,10 +355,10 @@ def generic_open(filename, *args, **kwargs):
fp = filename.open(*args, **kwargs)
except AttributeError:
fp = open(filename, *args, **kwargs)

yield fp

fp.close()
try:
yield fp
finally:
fp.close()


def fromfile(filename, dtype, count=1, offset=0):
Expand Down
2 changes: 1 addition & 1 deletion satpy/tests/reader_tests/test_hrit_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -241,7 +241,7 @@ def test_read_band_filepath(self, stub_compressed_hrit_file):
"""Test reading a single band from a filepath."""
filename = stub_compressed_hrit_file

with mock.patch("satpy.readers.hrit_base.decompress", side_effect=fake_decompress) as mock_decompress:
with mock.patch("satpy.readers.hrit_base.decompress_buffer", side_effect=fake_decompress) as mock_decompress:
with mock.patch.object(HRITFileHandler, "_get_hd", side_effect=new_get_hd, autospec=True) as get_hd:
self.reader = HRITFileHandler(filename,
{"platform_shortname": "MSG3",
Expand Down
Loading

0 comments on commit 9a9c0a7

Please sign in to comment.