From 7269a4f1a48778be934502388567a84e24bc62b3 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Wed, 21 Jun 2023 16:51:03 +0200 Subject: [PATCH 01/89] Bugfix getting TLEs from local directory of files Signed-off-by: Adam.Dybbroe --- pyorbital/tests/test_tlefile.py | 18 +++++++++++++++++- pyorbital/tlefile.py | 4 ++-- 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/pyorbital/tests/test_tlefile.py b/pyorbital/tests/test_tlefile.py index d0d53e99..6bed3874 100644 --- a/pyorbital/tests/test_tlefile.py +++ b/pyorbital/tests/test_tlefile.py @@ -273,10 +273,13 @@ def test_get_uris_and_open_func_using_tles_env(caplog, fake_local_tles_dir): Test providing no tle file but using the TLES env to find local tle files. """ + from collections.abc import Sequence + with caplog.at_level(logging.DEBUG): uris, _ = _get_uris_and_open_func() - assert uris[0] == str(fake_local_tles_dir) + assert isinstance(uris, Sequence) + assert uris[0] == str(fake_local_tles_dir / 'tle-202211180830.txt') log_message = "Reading TLE from {msg}".format(msg=str(fake_local_tles_dir)) assert log_message in caplog.text @@ -338,6 +341,19 @@ def test_from_file(self): finally: remove(filename) + # def test_from_local_files(self): + # """Test reading and parsing TLEs getting the latest TLE file from a local directory.""" + # from tempfile import mkstemp + # from os import write, close, remove + # filehandle, filename = mkstemp() + # try: + # write(filehandle, "\n".join([line0, line1, line2]).encode('utf-8')) + # close(filehandle) + # tle = Tle("NOAA-20", filename) + # self.check_example(tle) + # finally: + # remove(filename) + def test_from_file_with_hyphenated_platform_name(self): """Test reading and parsing from a file with a slightly different name.""" from tempfile import mkstemp diff --git a/pyorbital/tlefile.py b/pyorbital/tlefile.py index d44d7e1b..38aa6313 100644 --- a/pyorbital/tlefile.py +++ b/pyorbital/tlefile.py @@ -323,8 +323,8 @@ def _open(filename): elif local_tle_path: # TODO: get the TLE file closest in time to the actual satellite # overpass, NOT the latest! 
- uris = (max(glob.glob(local_tle_path), - key=os.path.getctime), ) + uris = (max(glob.glob(os.path.join(local_tle_path, '*')), + key=os.path.getctime), ) LOGGER.debug("Reading TLE from %s", uris[0]) open_func = _open else: From 09726e461ab577ff9d469dc717b1ef8d9ab51f23 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Wed, 21 Jun 2023 16:52:00 +0200 Subject: [PATCH 02/89] Clean up unused and commented out work-in-progress code Signed-off-by: Adam.Dybbroe --- pyorbital/tests/test_tlefile.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/pyorbital/tests/test_tlefile.py b/pyorbital/tests/test_tlefile.py index 6bed3874..f33a7652 100644 --- a/pyorbital/tests/test_tlefile.py +++ b/pyorbital/tests/test_tlefile.py @@ -341,19 +341,6 @@ def test_from_file(self): finally: remove(filename) - # def test_from_local_files(self): - # """Test reading and parsing TLEs getting the latest TLE file from a local directory.""" - # from tempfile import mkstemp - # from os import write, close, remove - # filehandle, filename = mkstemp() - # try: - # write(filehandle, "\n".join([line0, line1, line2]).encode('utf-8')) - # close(filehandle) - # tle = Tle("NOAA-20", filename) - # self.check_example(tle) - # finally: - # remove(filename) - def test_from_file_with_hyphenated_platform_name(self): """Test reading and parsing from a file with a slightly different name.""" from tempfile import mkstemp From 4355fd25ce18add9e674785de6ddf9d62ef3d8d2 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Wed, 21 Jun 2023 17:19:07 +0200 Subject: [PATCH 03/89] Skip Pythono 3.8 and add 3.11 for the CI Signed-off-by: Adam.Dybbroe --- .github/workflows/ci.yaml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c2f63752..9c20fa06 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -8,8 +8,13 @@ jobs: strategy: fail-fast: true matrix: - os: ["ubuntu-latest", "macos-latest", "windows-latest"] - python-version: ["3.8", "3.9", "3.10"] + os: ["windows-latest", "ubuntu-latest", "macos-latest"] + python-version: ["3.9", "3.10", "3.11"] + experimental: [false] + include: + - python-version: "3.11" + os: "ubuntu-latest" + experimental: true env: PYTHON_VERSION: ${{ matrix.python-version }} From a47950dc504d1d110a52ab17b7aeef0e72020c55 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Wed, 5 Jul 2023 15:30:35 +0200 Subject: [PATCH 04/89] Fix test make sure fake tle files have unique creation times Signed-off-by: Adam.Dybbroe --- pyorbital/tests/test_tlefile.py | 3 +++ pyorbital/tlefile.py | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/pyorbital/tests/test_tlefile.py b/pyorbital/tests/test_tlefile.py index f33a7652..f5a2e7b7 100644 --- a/pyorbital/tests/test_tlefile.py +++ b/pyorbital/tests/test_tlefile.py @@ -99,10 +99,13 @@ def fake_local_tles_dir(tmp_path, monkeypatch): """Make a list of fake tle files in a directory.""" file_path = tmp_path / 'tle-202211180230.txt' file_path.touch() + time.sleep(1) file_path = tmp_path / 'tle-202211180430.txt' file_path.touch() + time.sleep(1) file_path = tmp_path / 'tle-202211180630.txt' file_path.touch() + time.sleep(1) file_path = tmp_path / 'tle-202211180830.txt' file_path.touch() diff --git a/pyorbital/tlefile.py b/pyorbital/tlefile.py index 38aa6313..2b52b02b 100644 --- a/pyorbital/tlefile.py +++ b/pyorbital/tlefile.py @@ -323,8 +323,8 @@ def _open(filename): elif local_tle_path: # TODO: get the TLE file closest in time to the actual satellite # overpass, 
NOT the latest! - uris = (max(glob.glob(os.path.join(local_tle_path, '*')), - key=os.path.getctime), ) + list_of_tle_files = glob.glob(os.path.join(local_tle_path, '*')) + uris = (max(list_of_tle_files, key=os.path.getctime), ) LOGGER.debug("Reading TLE from %s", uris[0]) open_func = _open else: From aa0e9e23fb724ad36210486dd888f5eafa9fae64 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Wed, 5 Jul 2023 16:00:22 +0200 Subject: [PATCH 05/89] Fix Flake8 issues Signed-off-by: Adam.Dybbroe --- pyorbital/orbital.py | 8 ++++---- pyorbital/tests/test_aiaa.py | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/pyorbital/orbital.py b/pyorbital/orbital.py index 9b9d0f73..be9e4aef 100644 --- a/pyorbital/orbital.py +++ b/pyorbital/orbital.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2011, 2012, 2013, 2014, 2015. +# Copyright (c) 2011 - 2023 Pytroll Community # Author(s): @@ -631,11 +631,11 @@ def __init__(self, orbit_elements): self.xn_0 = orbit_elements.mean_motion # A30 = -XJ3 * AE**3 - if not(0 < self.eo < ECC_LIMIT_HIGH): + if not (0 < self.eo < ECC_LIMIT_HIGH): raise OrbitalError('Eccentricity out of range: %e' % self.eo) - elif not((0.0035 * 2 * np.pi / XMNPDA) < self.xn_0 < (18 * 2 * np.pi / XMNPDA)): + elif not ((0.0035 * 2 * np.pi / XMNPDA) < self.xn_0 < (18 * 2 * np.pi / XMNPDA)): raise OrbitalError('Mean motion out of range: %e' % self.xn_0) - elif not(0 < self.xincl < np.pi): + elif not (0 < self.xincl < np.pi): raise OrbitalError('Inclination out of range: %e' % self.xincl) if self.eo < 0: diff --git a/pyorbital/tests/test_aiaa.py b/pyorbital/tests/test_aiaa.py index 9c7e7614..6362ab4e 100644 --- a/pyorbital/tests/test_aiaa.py +++ b/pyorbital/tests/test_aiaa.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2011 - 2022 Pytroll Community +# Copyright (c) 2011 - 2023 Pytroll Community # Author(s): @@ -55,10 +55,10 @@ def get_results(satnumber, delay): path = os.path.dirname(os.path.abspath(__file__)) with open(os.path.join(path, "aiaa_results")) as f_2: line = f_2.readline() - while(line): + while line: if line.endswith(" xx\n") and int(line[:-3]) == satnumber: line = f_2.readline() - while(not line.startswith("%.8f" % delay)): + while (not line.startswith("%.8f" % delay)): line = f_2.readline() sline = line.split() if delay == 0: @@ -94,7 +94,7 @@ def test_aiaa(self): path = os.path.dirname(os.path.abspath(__file__)) with open(os.path.join(path, "SGP4-VER.TLE")) as f__: test_line = f__.readline() - while(test_line): + while test_line: if test_line.startswith("#"): test_name = test_line if test_line.startswith("1 "): From 5ada2ab7200ad5ca7a28d783efeea69947747de8 Mon Sep 17 00:00:00 2001 From: Adam Dybbroe Date: Thu, 6 Jul 2023 15:33:30 +0200 Subject: [PATCH 06/89] Fix copyright header Co-authored-by: David Hoese --- pyorbital/orbital.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyorbital/orbital.py b/pyorbital/orbital.py index be9e4aef..7f3c745d 100644 --- a/pyorbital/orbital.py +++ b/pyorbital/orbital.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2011 - 2023 Pytroll Community +# Copyright (c) 2011-2023 Pyorbital developers # Author(s): From 2903c5acdef05c4ad006eaa6da5692d054809850 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 6 Jul 2023 15:47:16 +0200 Subject: [PATCH 07/89] Require python 3.9 as minimum Signed-off-by: Adam.Dybbroe --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py 
b/setup.py index 578d2f7d..d6fd3509 100644 --- a/setup.py +++ b/setup.py @@ -56,6 +56,6 @@ package_data={'pyorbital': [os.path.join('etc', 'platforms.txt')]}, scripts=['bin/fetch_tles.py', ], install_requires=['numpy>=1.19.0', 'scipy', 'requests'], - python_requires='>=3.8', + python_requires='>=3.9', zip_safe=False, ) From 32e5813b48d2bcaffb397bfa5422f9d61ec60fe5 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 6 Jul 2023 15:48:32 +0200 Subject: [PATCH 08/89] Try add experimental to the Github CI checks taking from the setup used for Satpy Signed-off-by: Adam.Dybbroe --- .github/workflows/ci.yaml | 76 +++++++++++++++++++++++++++++++++++---- 1 file changed, 70 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9c20fa06..e07d5204 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -5,6 +5,7 @@ on: [push, pull_request] jobs: test: runs-on: ${{ matrix.os }} + continue-on-error: ${{ matrix.experimental }} strategy: fail-fast: true matrix: @@ -19,6 +20,7 @@ jobs: env: PYTHON_VERSION: ${{ matrix.python-version }} OS: ${{ matrix.os }} + UNSTABLE: ${{ matrix.experimental }} ACTIONS_ALLOW_UNSECURE_COMMANDS: true steps: @@ -28,22 +30,60 @@ jobs: - name: Setup Conda Environment uses: conda-incubator/setup-miniconda@v2 with: - miniconda-version: "latest" + miniforge-variant: Mambaforge + miniforge-version: latest + use-mamba: true python-version: ${{ matrix.python-version }} - mamba-version: "*" - channels: conda-forge,defaults - environment-file: continuous_integration/environment.yaml activate-environment: test-environment + - name: Set cache environment variables + shell: bash -l {0} + run: | + echo "DATE=$(date +'%Y%m%d')" >> $GITHUB_ENV + CONDA_PREFIX=$(python -c "import sys; print(sys.prefix)") + echo "CONDA_PREFIX=$CONDA_PREFIX" >> $GITHUB_ENV + + - uses: actions/cache@v3 + with: + path: ${{ env.CONDA_PREFIX }} + key: ${{ matrix.os }}-${{matrix.python-version}}-conda-${{ hashFiles('continuous_integration/environment.yaml') }}-${{ env.DATE }}-${{matrix.experimental}}-${{ env.CACHE_NUMBER }} + id: cache + + - name: Update environment + run: mamba env update -n test-environment -f continuous_integration/environment.yaml + if: steps.cache.outputs.cache-hit != 'true' + + - name: Install unstable dependencies + if: matrix.experimental == true + shell: bash -l {0} + # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels + # may break the conda-forge libraries trying to use newer glibc versions + run: | + python -m pip install \ + --index-url https://pypi.anaconda.org/scipy-wheels-nightly/simple/ \ + --trusted-host pypi.anaconda.org \ + --no-deps --pre --upgrade \ + matplotlib \ + numpy \ + pandas \ + scipy; \ + python -m pip install \ + --no-deps --upgrade \ + git+https://github.com/dask/dask \ + git+https://github.com/pydata/xarray \ + LD_PRELOAD=$(python -c "import sys; print(sys.prefix)")/lib/libstdc++.so + echo "LD_PRELOAD=${LD_PRELOAD}" >> $GITHUB_ENV + - name: Install Pyorbital shell: bash -l {0} run: | - pip install --no-deps -e . + python -m pip install --no-deps -e . 
- name: Run unit tests shell: bash -l {0} run: | - pytest --cov=pyorbital pyorbital/tests --cov-report=xml + export LD_PRELOAD=${{ env.LD_PRELOAD }}; + pytest --cov=pyorbital pyorbital/tests --cov-report=xml --cov-report= - name: Upload unittest coverage to Codecov uses: codecov/codecov-action@v3 @@ -51,3 +91,27 @@ jobs: flags: unittests file: ./coverage.xml env_vars: OS,PYTHON_VERSION,UNSTABLE + + - name: Coveralls Parallel + uses: AndreMiras/coveralls-python-action@develop + with: + flag-name: run-${{ matrix.test_number }} + parallel: true + if: runner.os == 'Linux' + + - name: Run behaviour tests + shell: bash -l {0} + run: | + export LD_PRELOAD=${{ env.LD_PRELOAD }}; + coverage run --source=pyorbital --tags=-download + coverage xml + + + coveralls: + needs: [test] + runs-on: ubuntu-latest + steps: + - name: Coveralls Finished + uses: AndreMiras/coveralls-python-action@develop + with: + parallel-finished: true From fbcd541bd57437d8e195d52684235f8e1924c112 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 6 Jul 2023 09:03:26 -0500 Subject: [PATCH 09/89] Remove unnecessary behavior tests in CI --- .github/workflows/ci.yaml | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e07d5204..e7dd117c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -2,6 +2,9 @@ name: CI on: [push, pull_request] +env: + CACHE_NUMBER: 1 + jobs: test: runs-on: ${{ matrix.os }} @@ -99,14 +102,6 @@ jobs: parallel: true if: runner.os == 'Linux' - - name: Run behaviour tests - shell: bash -l {0} - run: | - export LD_PRELOAD=${{ env.LD_PRELOAD }}; - coverage run --source=pyorbital --tags=-download - coverage xml - - coveralls: needs: [test] runs-on: ubuntu-latest From 7a1dd1c0689c78fe0fa7efa2aed1403eaf841646 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 6 Jul 2023 09:14:30 -0500 Subject: [PATCH 10/89] Update scientific python wheels URL in .github/workflows/ci.yaml --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e7dd117c..f38d02f6 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -63,7 +63,7 @@ jobs: # may break the conda-forge libraries trying to use newer glibc versions run: | python -m pip install \ - --index-url https://pypi.anaconda.org/scipy-wheels-nightly/simple/ \ + --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \ --trusted-host pypi.anaconda.org \ --no-deps --pre --upgrade \ matplotlib \ From dd5baba1dd7c7791d38640445b00c8b3c5cf8a9e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 6 Jul 2023 09:19:24 -0500 Subject: [PATCH 11/89] Update .github/workflows/ci.yaml --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index f38d02f6..c4c0cfe5 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -73,7 +73,7 @@ jobs: python -m pip install \ --no-deps --upgrade \ git+https://github.com/dask/dask \ - git+https://github.com/pydata/xarray \ + git+https://github.com/pydata/xarray; LD_PRELOAD=$(python -c "import sys; print(sys.prefix)")/lib/libstdc++.so echo "LD_PRELOAD=${LD_PRELOAD}" >> $GITHUB_ENV From 6bf186044ef4ed7508b4675d9b265be1ac736b57 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 6 Jul 2023 18:02:45 +0200 Subject: [PATCH 12/89] Make fixture session wide, for performance considerations Signed-off-by: 
Adam.Dybbroe --- pyorbital/tests/test_tlefile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyorbital/tests/test_tlefile.py b/pyorbital/tests/test_tlefile.py index f5a2e7b7..0d2cd683 100644 --- a/pyorbital/tests/test_tlefile.py +++ b/pyorbital/tests/test_tlefile.py @@ -94,7 +94,7 @@ def fake_platforms_file(tmp_path): yield file_path -@pytest.fixture +@pytest.fixture(scope="session") def fake_local_tles_dir(tmp_path, monkeypatch): """Make a list of fake tle files in a directory.""" file_path = tmp_path / 'tle-202211180230.txt' From 806ae129ae25236f024b3e68ff306845f1077ee1 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 6 Jul 2023 19:31:09 +0200 Subject: [PATCH 13/89] Bugfix test using session scoped fixture Signed-off-by: Adam.Dybbroe --- pyorbital/tests/test_tlefile.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/pyorbital/tests/test_tlefile.py b/pyorbital/tests/test_tlefile.py index 0d2cd683..89698c8a 100644 --- a/pyorbital/tests/test_tlefile.py +++ b/pyorbital/tests/test_tlefile.py @@ -95,23 +95,22 @@ def fake_platforms_file(tmp_path): @pytest.fixture(scope="session") -def fake_local_tles_dir(tmp_path, monkeypatch): +def fake_local_tles_dir(tmp_path_factory): """Make a list of fake tle files in a directory.""" - file_path = tmp_path / 'tle-202211180230.txt' + tle_dir = tmp_path_factory.mktemp('tle_files') + file_path = tle_dir / 'tle-202211180230.txt' file_path.touch() time.sleep(1) - file_path = tmp_path / 'tle-202211180430.txt' + file_path = tle_dir / 'tle-202211180430.txt' file_path.touch() time.sleep(1) - file_path = tmp_path / 'tle-202211180630.txt' + file_path = tle_dir / 'tle-202211180630.txt' file_path.touch() time.sleep(1) - file_path = tmp_path / 'tle-202211180830.txt' + file_path = tle_dir / 'tle-202211180830.txt' file_path.touch() - monkeypatch.setenv('TLES', str(file_path.parent)) - - yield file_path.parent + yield tle_dir @pytest.fixture @@ -271,13 +270,14 @@ def test_get_local_tle_path(mock_env_tles): assert res == '/path/to/local/tles' -def test_get_uris_and_open_func_using_tles_env(caplog, fake_local_tles_dir): +def test_get_uris_and_open_func_using_tles_env(caplog, fake_local_tles_dir, monkeypatch): """Test getting the uris and associated open-function for reading tles. Test providing no tle file but using the TLES env to find local tle files. """ from collections.abc import Sequence + monkeypatch.setenv('TLES', str(fake_local_tles_dir)) with caplog.at_level(logging.DEBUG): uris, _ = _get_uris_and_open_func() From aa3a2169cb695068fd57cb257a5070736cc50098 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 12 Jul 2023 08:37:37 -0500 Subject: [PATCH 14/89] Update release notes for 1.8.0 --- CHANGELOG.md | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index aa9dcc07..d114f48d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,31 @@ +## Version 1.8.0 (2023/07/12) + +### Issues Closed + +* [Issue 112](https://github.com/pytroll/pyorbital/issues/112) - Is the TLES environment variable described? ([PR 113](https://github.com/pytroll/pyorbital/pull/113) by [@adybbroe](https://github.com/adybbroe)) + +In this release 1 issue was closed. 
+ +### Pull Requests Merged + +#### Bugs fixed + +* [PR 129](https://github.com/pytroll/pyorbital/pull/129) - Fix bug getting local tlefiles +* [PR 128](https://github.com/pytroll/pyorbital/pull/128) - Fix typo in VIIRS geoloc definition +* [PR 121](https://github.com/pytroll/pyorbital/pull/121) - fixed geoloc_example and added variable descriptions + +#### Features added + +* [PR 120](https://github.com/pytroll/pyorbital/pull/120) - Update versioneer to stop using deprecated distutils module. +* [PR 113](https://github.com/pytroll/pyorbital/pull/113) - Make use of env variables free from satpy ([112](https://github.com/pytroll/pyorbital/issues/112)) + +#### Documentation changes + +* [PR 113](https://github.com/pytroll/pyorbital/pull/113) - Make use of env variables free from satpy ([112](https://github.com/pytroll/pyorbital/issues/112)) + +In this release 6 pull requests were closed. + + ## Version 1.7.3 (2022/07/11) ### Pull Requests Merged From a1a2a3d0f6f9ccf7824081bb26084c2f38be7d82 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Jul 2023 22:19:37 +0000 Subject: [PATCH 15/89] Bump pypa/gh-action-pypi-publish from 1.8.7 to 1.8.8 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.7 to 1.8.8. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.7...v1.8.8) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index c03e9590..ac07a2aa 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -19,7 +19,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.8.7 + uses: pypa/gh-action-pypi-publish@v1.8.8 with: user: __token__ password: ${{ secrets.pypi_password }} \ No newline at end of file From d21b7c2b2732a061944b905f3a646e869ae76956 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 17 Jul 2023 17:39:28 -0500 Subject: [PATCH 16/89] Fix coveralls coverage results --- setup.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.cfg b/setup.cfg index e0b60a22..5ba5071e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -17,6 +17,7 @@ versionfile_build = tag_prefix = v [coverage:run] +relative_files = True omit = pyorbital/version.py versioneer.py From 8521d3614c4ca3b16dc816bf98ca41bcdb8c6cbc Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 7 Aug 2023 08:40:07 -0500 Subject: [PATCH 17/89] Add .readthedocs.yaml --- .readthedocs.yaml | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 .readthedocs.yaml diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 00000000..0553d874 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,19 @@ +version: 2 + +build: + os: "ubuntu-20.04" + tools: + python: "mambaforge-4.10" + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: doc/source/conf.py + fail_on_warning: true + +# conda: +# environment: docs/environment.yml + +python: + install: + - method: pip + path: . 
From 4ece5db62d1db9c7592b03bde95849f0616e07c1 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 7 Aug 2023 09:01:53 -0500 Subject: [PATCH 18/89] Add doc environment.yaml --- doc/environment.yaml | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 doc/environment.yaml diff --git a/doc/environment.yaml b/doc/environment.yaml new file mode 100644 index 00000000..560fd065 --- /dev/null +++ b/doc/environment.yaml @@ -0,0 +1,12 @@ +name: readthedocs +channels: + - conda-forge +dependencies: + - python=3.11 + - pip + - numpy + - scipy + - requests + - pytest + - pyproj + - sphinx_rtd_theme From 3fa660ebf564ba77a7bdee5ff779e3c2c703b79d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 7 Aug 2023 09:02:17 -0500 Subject: [PATCH 19/89] Use conda environment in RTD --- .readthedocs.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 0553d874..ce40b0fb 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -10,8 +10,8 @@ sphinx: configuration: doc/source/conf.py fail_on_warning: true -# conda: -# environment: docs/environment.yml +conda: + environment: doc/environment.yaml python: install: From 3a59f01bde0911b8be51c76ee5a6f52b89371bd0 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 7 Aug 2023 09:41:46 -0500 Subject: [PATCH 20/89] Fix various documentation issues --- doc/source/conf.py | 4 ++-- doc/source/index.rst | 15 +++++---------- 2 files changed, 7 insertions(+), 12 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 4ac50638..2734b585 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -29,7 +29,7 @@ # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.coverage'] +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.coverage', 'sphinx.ext.napoleon'] # Add any paths that contain templates here, relative to this directory. templates_path = ['.templates'] @@ -124,7 +124,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['.static'] +html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. diff --git a/doc/source/index.rst b/doc/source/index.rst index 6d73fbf8..6ae7be78 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -1,8 +1,3 @@ -.. pyorbital documentation master file, created by - sphinx-quickstart on Mon Oct 3 08:48:29 2011. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - Pyorbital ========= @@ -92,10 +87,10 @@ specific TLE file is provided or if the :envvar:`TLES` environment variable is n TLE download and database -~~~~~~~~~~~~~~~~~~~~~~~~~ +^^^^^^^^^^^^^^^^^^^^^^^^^ The historical TLE files can be requested from -`celestrak `_. +`celestrak's request page `_. There is also a script, ``fetch_tles.py``, that can be used to collect TLE data from several locations. The currently supported locations @@ -197,21 +192,21 @@ API --- Orbital computations -~~~~~~~~~~~~~~~~~~~~ +^^^^^^^^^^^^^^^^^^^^ .. automodule:: pyorbital.orbital :members: :undoc-members: TLE handling -~~~~~~~~~~~~ +^^^^^^^^^^^^ .. 
automodule:: pyorbital.tlefile :members: :undoc-members: Astronomical computations -~~~~~~~~~~~~~~~~~~~~~~~~~ +^^^^^^^^^^^^^^^^^^^^^^^^^ .. automodule:: pyorbital.astronomy :members: From 7b58f8624cd254143eb53d07761fd96ac601d65c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 7 Aug 2023 09:55:12 -0500 Subject: [PATCH 21/89] Add _static sphinx directory --- doc/source/_static/.gitkeep | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 doc/source/_static/.gitkeep diff --git a/doc/source/_static/.gitkeep b/doc/source/_static/.gitkeep new file mode 100644 index 00000000..e69de29b From 2996a046d753b34fdd1b3e3a68597891fd7d6653 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 14 Aug 2023 22:52:06 +0000 Subject: [PATCH 22/89] Bump pypa/gh-action-pypi-publish from 1.8.8 to 1.8.10 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.8 to 1.8.10. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.8...v1.8.10) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index ac07a2aa..fbae1058 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -19,7 +19,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.8.8 + uses: pypa/gh-action-pypi-publish@v1.8.10 with: user: __token__ password: ${{ secrets.pypi_password }} \ No newline at end of file From b9d6c65e03c6d1c84f1704e1f03530895f97af49 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Sep 2023 22:42:02 +0000 Subject: [PATCH 23/89] Bump actions/checkout from 3 to 4 Bumps [actions/checkout](https://github.com/actions/checkout) from 3 to 4. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/ci.yaml | 2 +- .github/workflows/deploy-sdist.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c4c0cfe5..5ca26508 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -28,7 +28,7 @@ jobs: steps: - name: Checkout source - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Conda Environment uses: conda-incubator/setup-miniconda@v2 diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index fbae1058..7553b59b 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -11,7 +11,7 @@ jobs: steps: - name: Checkout source - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Create sdist shell: bash -l {0} From db7b3a2eddeb46fdaa007b941554aaf37fc7fc3d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Nov 2023 22:05:32 +0000 Subject: [PATCH 24/89] Bump conda-incubator/setup-miniconda from 2 to 3 Bumps [conda-incubator/setup-miniconda](https://github.com/conda-incubator/setup-miniconda) from 2 to 3. - [Release notes](https://github.com/conda-incubator/setup-miniconda/releases) - [Changelog](https://github.com/conda-incubator/setup-miniconda/blob/main/CHANGELOG.md) - [Commits](https://github.com/conda-incubator/setup-miniconda/compare/v2...v3) --- updated-dependencies: - dependency-name: conda-incubator/setup-miniconda dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 5ca26508..17dd017f 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -31,7 +31,7 @@ jobs: uses: actions/checkout@v4 - name: Setup Conda Environment - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: miniforge-variant: Mambaforge miniforge-version: latest From 5f18a4fd5462c11fcab67ee7f3f22d17d9aab246 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Dec 2023 22:26:37 +0000 Subject: [PATCH 25/89] Bump pypa/gh-action-pypi-publish from 1.8.10 to 1.8.11 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.10 to 1.8.11. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.10...v1.8.11) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 7553b59b..65f390b2 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -19,7 +19,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.8.10 + uses: pypa/gh-action-pypi-publish@v1.8.11 with: user: __token__ password: ${{ secrets.pypi_password }} \ No newline at end of file From 3d18b1d9b2234dae00de55999b71be7eefca4f9a Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 14 Dec 2023 13:40:03 +0100 Subject: [PATCH 26/89] Try pretify the RTD pages Signed-off-by: Adam.Dybbroe --- doc/Makefile | 67 +++++--------------------- doc/environment.yaml | 13 +++++ doc/source/_static/main.js | 6 +++ doc/source/_static/theme_overrides.css | 13 +++++ 4 files changed, 45 insertions(+), 54 deletions(-) create mode 100644 doc/source/_static/main.js create mode 100644 doc/source/_static/theme_overrides.css diff --git a/doc/Makefile b/doc/Makefile index a8a1618d..e1f5e38d 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -12,26 +12,20 @@ PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest +.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest help: @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* @@ -46,11 +40,6 @@ dirhtml: @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 
- pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @@ -76,42 +65,12 @@ qthelp: @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pyorbital.qhc" -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/pyorbital" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pyorbital" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - make -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ + "run these through (pdf)latex." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes diff --git a/doc/environment.yaml b/doc/environment.yaml index 560fd065..f5ce8316 100644 --- a/doc/environment.yaml +++ b/doc/environment.yaml @@ -4,9 +4,22 @@ channels: dependencies: - python=3.11 - pip + - dask + - dask-image + - defusedxml + - numba - numpy - scipy - requests - pytest - pyproj + - setuptools + - setuptools_scm + - sphinx - sphinx_rtd_theme + - sphinxcontrib-apidoc + - trollsift + - xarray + - pip: + - graphviz + - .. 
# relative path to the pyorbital project diff --git a/doc/source/_static/main.js b/doc/source/_static/main.js new file mode 100644 index 00000000..188a335e --- /dev/null +++ b/doc/source/_static/main.js @@ -0,0 +1,6 @@ +$(document).ready( function () { + $('table.datatable').DataTable( { + "paging": false, + "dom": 'lfitp' +} ); +} ); diff --git a/doc/source/_static/theme_overrides.css b/doc/source/_static/theme_overrides.css new file mode 100644 index 00000000..63ee6cc7 --- /dev/null +++ b/doc/source/_static/theme_overrides.css @@ -0,0 +1,13 @@ +/* override table width restrictions */ +@media screen and (min-width: 767px) { + + .wy-table-responsive table td { + /* !important prevents the common CSS stylesheets from overriding + this as on RTD they are loaded after this stylesheet */ + white-space: normal !important; + } + + .wy-table-responsive { + overflow: visible !important; + } +} From 0c5b5727b0ffbd24cdee1b02db3c469e1b01a0f6 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Fri, 15 Dec 2023 08:12:11 +0100 Subject: [PATCH 27/89] Change Sphinx theme and add some style additions for tables (taken from Satpy) Signed-off-by: Adam.Dybbroe --- doc/source/conf.py | 96 ++++++++++++++++++++++++++-------------------- setup.py | 1 + 2 files changed, 55 insertions(+), 42 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 2734b585..589a3004 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -10,6 +10,7 @@ # # All configuration values have a default; values that are commented out # serve to show the default. +"""Configurations for sphinx based documentation.""" import sys import os @@ -25,7 +26,7 @@ # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. @@ -38,14 +39,14 @@ source_suffix = '.rst' # The encoding of source files. -#source_encoding = 'utf-8-sig' +# #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'pyorbital' -copyright = u'2012-2015, 2018, The Pytroll crew' +copyright = u'2012-2015, 2018, 2023, The Pytroll crew' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -58,114 +59,125 @@ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. -#language = None +# #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# #today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None +# #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). 
-#add_module_names = True +# #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'default' +# The theme to use for HTML and HTML Help pages. Major themes that come with +# Sphinx are currently 'default' and 'sphinxdoc'. +html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} +# #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +# #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] + +html_css_files = [ + "theme_overrides.css", # override wide tables in RTD theme + "https://cdn.datatables.net/1.10.23/css/jquery.dataTables.min.css", +] + +html_js_files = [ + "https://cdn.datatables.net/1.10.23/js/jquery.dataTables.min.js", + "main.js", +] + # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# #html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# #html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# #html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# #html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# #html_show_sphinx = True # If true, "(C) Copyright ..." 
is shown in the HTML footer. Default is True. -#html_show_copyright = True +# #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'pyorbitaldoc' @@ -174,10 +186,10 @@ # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). -#latex_paper_size = 'letter' +# #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). -#latex_font_size = '10pt' +# #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). @@ -188,26 +200,26 @@ # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# #latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# #latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# #latex_show_urls = False # Additional stuff for the LaTeX preamble. -#latex_preamble = '' +# #latex_preamble = '' # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# #latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- diff --git a/setup.py b/setup.py index d6fd3509..24ccab04 100644 --- a/setup.py +++ b/setup.py @@ -57,5 +57,6 @@ scripts=['bin/fetch_tles.py', ], install_requires=['numpy>=1.19.0', 'scipy', 'requests'], python_requires='>=3.9', + extras_require=["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"], zip_safe=False, ) From b823613687c033fa7032db49f1ad09250bb5c247 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Fri, 15 Dec 2023 08:36:26 +0100 Subject: [PATCH 28/89] Fix copyright statement Signed-off-by: Adam.Dybbroe --- doc/source/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 589a3004..ab8067a5 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -46,7 +46,7 @@ # General information about the project. 
project = u'pyorbital' -copyright = u'2012-2015, 2018, 2023, The Pytroll crew' +copyright = u'2012-2023, The Pytroll crew' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the From 4ccdde8993e065738c1fa595974604ce7a565c08 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Fri, 15 Dec 2023 10:17:47 +0100 Subject: [PATCH 29/89] Bugfix for generating doc pages Signed-off-by: Adam.Dybbroe --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 24ccab04..43b8fbe8 100644 --- a/setup.py +++ b/setup.py @@ -57,6 +57,6 @@ scripts=['bin/fetch_tles.py', ], install_requires=['numpy>=1.19.0', 'scipy', 'requests'], python_requires='>=3.9', - extras_require=["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"], + extras_require={"doc": ["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"]}, zip_safe=False, ) From 7d40562e58b99831f2638f08298838ce99c2956d Mon Sep 17 00:00:00 2001 From: Nicolas Dagoneau Date: Sun, 31 Dec 2023 09:46:24 +0100 Subject: [PATCH 30/89] Update celestrak urls --- pyorbital/tlefile.py | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/pyorbital/tlefile.py b/pyorbital/tlefile.py index 2b52b02b..9b814129 100644 --- a/pyorbital/tlefile.py +++ b/pyorbital/tlefile.py @@ -37,16 +37,18 @@ from xml.etree import ElementTree as ET from itertools import zip_longest - -TLE_URLS = ('https://celestrak.org/NORAD/elements/active.txt', - 'https://celestrak.org/NORAD/elements/weather.txt', - 'https://celestrak.org/NORAD/elements/resource.txt', - 'https://celestrak.org/NORAD/elements/cubesat.txt', - 'https://celestrak.org/NORAD/elements/stations.txt', - 'https://celestrak.org/NORAD/elements/sarsat.txt', - 'https://celestrak.org/NORAD/elements/noaa.txt', - 'https://celestrak.org/NORAD/elements/amateur.txt', - 'https://celestrak.org/NORAD/elements/engineering.txt') +TLE_GROUPS = ('active', + 'weather', + 'resource', + 'cubesat', + 'stations', + 'sarsat', + 'noaa', + 'amateur', + 'engineering') + +TLE_URLS = [f'https://celestrak.org/NORAD/elements/gp.php?GROUP={group}&FORMAT=tle' + for group in TLE_GROUPS] LOGGER = logging.getLogger(__name__) From e76264b5d972a620d1abf7ef3e1627eb628b1120 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 5 Jan 2024 08:23:50 -0600 Subject: [PATCH 31/89] Update changelog for 1.8.1 --- CHANGELOG.md | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d114f48d..9d70595d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,23 @@ +## Version 1.8.1 (2024/01/05) + +### Pull Requests Merged + +#### Bugs fixed + +* [PR 138](https://github.com/pytroll/pyorbital/pull/138) - Update celestrak urls ([139](https://github.com/pytroll/pyorbital/issues/139)) + +#### Features added + +* [PR 137](https://github.com/pytroll/pyorbital/pull/137) - Prettify the RTD pages + +#### Documentation changes + +* [PR 137](https://github.com/pytroll/pyorbital/pull/137) - Prettify the RTD pages +* [PR 132](https://github.com/pytroll/pyorbital/pull/132) - Add .readthedocs.yaml + +In this release 4 pull requests were closed. 
+ + ## Version 1.8.0 (2023/07/12) ### Issues Closed From 15e6bb65c507bf2763156417f3cac3b99c0a4d10 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Wed, 10 Jan 2024 10:10:04 +0100 Subject: [PATCH 32/89] Add Meteosat-12 to platforms.txt --- pyorbital/etc/platforms.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/pyorbital/etc/platforms.txt b/pyorbital/etc/platforms.txt index 6da7e8d2..cb5fb375 100644 --- a/pyorbital/etc/platforms.txt +++ b/pyorbital/etc/platforms.txt @@ -48,6 +48,7 @@ Meteosat-8 27509 Meteosat-9 28912 Meteosat-10 38552 Meteosat-11 40732 +Meteosat-12 54743 Metop-A 29499 Metop-B 38771 Metop-C 43689 From 774d581ff37465556db723d740ee14ba0dac5552 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Jan 2024 22:28:46 +0000 Subject: [PATCH 33/89] Bump actions/cache from 3 to 4 Bumps [actions/cache](https://github.com/actions/cache) from 3 to 4. - [Release notes](https://github.com/actions/cache/releases) - [Changelog](https://github.com/actions/cache/blob/main/RELEASES.md) - [Commits](https://github.com/actions/cache/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/cache dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 17dd017f..84c0649d 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -46,7 +46,7 @@ jobs: CONDA_PREFIX=$(python -c "import sys; print(sys.prefix)") echo "CONDA_PREFIX=$CONDA_PREFIX" >> $GITHUB_ENV - - uses: actions/cache@v3 + - uses: actions/cache@v4 with: path: ${{ env.CONDA_PREFIX }} key: ${{ matrix.os }}-${{matrix.python-version}}-conda-${{ hashFiles('continuous_integration/environment.yaml') }}-${{ env.DATE }}-${{matrix.experimental}}-${{ env.CACHE_NUMBER }} From 093311372886a8e2b5d040a1df287483ae724e66 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 31 Jan 2024 10:09:29 +0200 Subject: [PATCH 34/89] Fix a bug in using TLES env variable --- pyorbital/tests/test_tlefile.py | 10 +++++----- pyorbital/tlefile.py | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pyorbital/tests/test_tlefile.py b/pyorbital/tests/test_tlefile.py index 89698c8a..b7eccc36 100644 --- a/pyorbital/tests/test_tlefile.py +++ b/pyorbital/tests/test_tlefile.py @@ -132,9 +132,9 @@ def mock_env_tles_missing(monkeypatch): @pytest.fixture -def mock_env_tles(monkeypatch): +def mock_env_tles(monkeypatch, fake_local_tles_dir): """Mock environment variable TLES.""" - monkeypatch.setenv('TLES', '/path/to/local/tles') + monkeypatch.setenv('TLES', os.path.join(fake_local_tles_dir, '*')) def test_get_config_path_no_env_defined(caplog, mock_env_ppp_config_dir_missing): @@ -264,10 +264,10 @@ def test_get_local_tle_path_tle_env_missing(mock_env_tles_missing): assert res is None -def test_get_local_tle_path(mock_env_tles): +def test_get_local_tle_path(mock_env_tles, fake_local_tles_dir): """Test getting the path to local TLE files.""" res = _get_local_tle_path_from_env() - assert res == '/path/to/local/tles' + assert res == os.path.join(fake_local_tles_dir, "*") def test_get_uris_and_open_func_using_tles_env(caplog, fake_local_tles_dir, monkeypatch): @@ -277,7 +277,7 @@ def test_get_uris_and_open_func_using_tles_env(caplog, fake_local_tles_dir, monk """ from collections.abc import Sequence - monkeypatch.setenv('TLES', str(fake_local_tles_dir)) + 
monkeypatch.setenv('TLES', str(os.path.join(fake_local_tles_dir, "*"))) with caplog.at_level(logging.DEBUG): uris, _ = _get_uris_and_open_func() diff --git a/pyorbital/tlefile.py b/pyorbital/tlefile.py index 9b814129..d2f04100 100644 --- a/pyorbital/tlefile.py +++ b/pyorbital/tlefile.py @@ -325,7 +325,7 @@ def _open(filename): elif local_tle_path: # TODO: get the TLE file closest in time to the actual satellite # overpass, NOT the latest! - list_of_tle_files = glob.glob(os.path.join(local_tle_path, '*')) + list_of_tle_files = glob.glob(local_tle_path) uris = (max(list_of_tle_files, key=os.path.getctime), ) LOGGER.debug("Reading TLE from %s", uris[0]) open_func = _open From 053bf6eb8921e608bb8fe16fcf7be17bf64eca6a Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 31 Jan 2024 11:10:39 +0200 Subject: [PATCH 35/89] Update documentation on TLES env variable --- doc/source/index.rst | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/doc/source/index.rst b/doc/source/index.rst index 6ae7be78..ec85ce52 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -79,9 +79,14 @@ Pyorbital has a module for parsing NORAD TLE-files >>> tle.inclination 99.043499999999995 -If no path is provided pyorbital first tries to read any local TLE files in the -directory given by the environment variable :envvar:`TLES`. If this variable is not -set Pyorbital will try get the earth observation TLE files over the internet +If no path is provided pyorbital first tries to read any local TLE files defined by the +environment variable :envvar:`TLES` giving a glob pattern that can be used to retrieve all relevant files: + +.. code:: + + TLES=/path/to/tle_files/*/tle*txt + +If this variable is not set Pyorbital will try get the earth observation TLE files over the internet from `celestrak`_. Note this downloading only happens if no specific TLE file is provided or if the :envvar:`TLES` environment variable is not set. From 97df4c178ddcb00580b3ed9170597c0e3e990382 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 31 Jan 2024 11:13:08 +0200 Subject: [PATCH 36/89] Update also envvar TLES section --- doc/source/index.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/source/index.rst b/doc/source/index.rst index ec85ce52..45aee1ed 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -184,9 +184,9 @@ The astronomy module enables computation of certain parameters of interest for s for instance. Also, it may not be sustainable in a production environment. However, it is possible to let Pyorbital look for the necessary and more - optimal TLE data locally, by specifying the directory where such local TLE - files are located. If the TLES environment variable is set to point at an - existing local directory Pyorbital will first search for the needed TLEs + optimal TLE data locally, by specifying locations where such local TLE + files are located. If the TLES environment variable is set to a glob pattern to + local locations, Pyorbital will first search for the needed TLEs there. This can both be useful in an operational setup where access to the internet is restricted, and when processing old/historic satellite data. 
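
The two documentation patches above describe how the TLES environment variable is meant to be used after this change: it now holds a glob pattern matching local TLE files rather than a bare directory path. A minimal usage sketch of that behaviour, assuming a hypothetical local archive under /data/tles/ with files named like tle-202211180830.txt and a platform name ("NOAA-20") that actually appears in them:

    import os
    from pyorbital import tlefile

    # Hypothetical glob pattern pointing at local TLE files (the path is an
    # assumption for illustration only).
    os.environ["TLES"] = "/data/tles/tle-*.txt"

    # With TLES set and no TLE file passed explicitly, pyorbital globs the
    # pattern and reads the newest matching file (selected by creation time).
    tle = tlefile.read("NOAA-20")
    print(tle.inclination)

If TLES is unset and no file is given, pyorbital instead falls back to downloading current TLEs from celestrak, as described earlier in the same document.
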
From 18340caf3a5ac94afad70b1a0016c3a8b294124c Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 5 Feb 2024 15:54:10 +0100 Subject: [PATCH 37/89] Update changelog for v1.8.2 --- CHANGELOG.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9d70595d..b964104c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,25 @@ +## Version 1.8.2 (2024/02/05) + +### Issues Closed + +* [Issue 140](https://github.com/pytroll/pyorbital/issues/140) - pyorbital cannot read TLE for MTG-I1 / Meteosat-12 ([PR 141](https://github.com/pytroll/pyorbital/pull/141) by [@gerritholl](https://github.com/gerritholl)) +* [Issue 139](https://github.com/pytroll/pyorbital/issues/139) - `Orbital` cannot get TLEs from the internet + +In this release 2 issues were closed. + +### Pull Requests Merged + +#### Bugs fixed + +* [PR 143](https://github.com/pytroll/pyorbital/pull/143) - Fix a bug in using TLES env variable + +#### Features added + +* [PR 141](https://github.com/pytroll/pyorbital/pull/141) - Add Meteosat-12 to platforms.txt ([140](https://github.com/pytroll/pyorbital/issues/140)) + +In this release 2 pull requests were closed. + + ## Version 1.8.1 (2024/01/05) ### Pull Requests Merged From a2b5cd32173a8ab6936ce02b3a2db0e23ca83dfd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Feb 2024 22:38:16 +0000 Subject: [PATCH 38/89] Bump codecov/codecov-action from 3 to 4 Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 3 to 4. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/v3...v4) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 84c0649d..44ab3031 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -89,7 +89,7 @@ jobs: pytest --cov=pyorbital pyorbital/tests --cov-report=xml --cov-report= - name: Upload unittest coverage to Codecov - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 with: flags: unittests file: ./coverage.xml From c9a139e9b9e5868f52274ebeb61f17bea9233f50 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 16 Feb 2024 21:13:34 +0200 Subject: [PATCH 39/89] Update CI to use Python 3.10 - 3.12 --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 44ab3031..1e2aa52d 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -13,7 +13,7 @@ jobs: fail-fast: true matrix: os: ["windows-latest", "ubuntu-latest", "macos-latest"] - python-version: ["3.9", "3.10", "3.11"] + python-version: ["3.10", "3.11", "3.12"] experimental: [false] include: - python-version: "3.11" From d004160a2ecb505d5bf4a5ff08d46425730a66b5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Mar 2024 22:52:29 +0000 Subject: [PATCH 40/89] Bump pypa/gh-action-pypi-publish from 1.8.11 to 1.8.12 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.11 to 1.8.12. 
- [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.11...v1.8.12) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 65f390b2..f63cc431 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -19,7 +19,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.8.11 + uses: pypa/gh-action-pypi-publish@v1.8.12 with: user: __token__ password: ${{ secrets.pypi_password }} \ No newline at end of file From 7f7ae3ca254acf4e8a18719660f4ea69539fa54d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Mar 2024 22:46:06 +0000 Subject: [PATCH 41/89] Bump pypa/gh-action-pypi-publish from 1.8.12 to 1.8.14 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.12 to 1.8.14. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.12...v1.8.14) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index f63cc431..a95a5932 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -19,7 +19,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.8.12 + uses: pypa/gh-action-pypi-publish@v1.8.14 with: user: __token__ password: ${{ secrets.pypi_password }} \ No newline at end of file From 65f97deb7ab40fca535f9ee1a818d54becfabed9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Jun 2024 22:46:00 +0000 Subject: [PATCH 42/89] Bump pypa/gh-action-pypi-publish from 1.8.14 to 1.9.0 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.14 to 1.9.0. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.14...v1.9.0) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index a95a5932..50804325 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -19,7 +19,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.8.14 + uses: pypa/gh-action-pypi-publish@v1.9.0 with: user: __token__ password: ${{ secrets.pypi_password }} \ No newline at end of file From 4a3ea44bd2f7e9848aa19332b8828b2e7f0484e7 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 21 Jun 2024 10:18:25 -0500 Subject: [PATCH 43/89] Convert astronomy tests to pytest --- pyorbital/tests/test_astronomy.py | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/pyorbital/tests/test_astronomy.py b/pyorbital/tests/test_astronomy.py index 25846805..0d53028b 100644 --- a/pyorbital/tests/test_astronomy.py +++ b/pyorbital/tests/test_astronomy.py @@ -20,25 +20,23 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -import unittest - from datetime import datetime -import pyorbital.astronomy as astr +import pytest + +import pyorbital.astronomy as astr -class TestAstronomy(unittest.TestCase): - def setUp(self): - pass +class TestAstronomy: def test_jdays(self): """Test julian day functions.""" t = datetime(2000, 1, 1, 12, 0) - self.assertEqual(astr.jdays(t), 2451545.0) - self.assertEqual(astr.jdays2000(t), 0) + assert astr.jdays(t) == 2451545.0 + assert astr.jdays2000(t) == 0 t = datetime(2009, 10, 8, 14, 30) - self.assertEqual(astr.jdays(t), 2455113.1041666665) - self.assertEqual(astr.jdays2000(t), 3568.1041666666665) + assert astr.jdays(t) == 2455113.1041666665 + assert astr.jdays2000(t) == 3568.1041666666665 def test_sunangles(self): """Test the sun-angle calculations.""" @@ -46,13 +44,13 @@ def test_sunangles(self): time_slot = datetime(2011, 9, 23, 12, 0) sun_theta = astr.sun_zenith_angle(time_slot, lon, lat) - self.assertAlmostEqual(sun_theta, 60.371433482557833, places=8) + assert sun_theta == pytest.approx(60.371433482557833, abs=1e-8) sun_theta = astr.sun_zenith_angle(time_slot, 0., 0.) 
- self.assertAlmostEqual(sun_theta, 1.8751916863323426, places=8) + assert sun_theta == pytest.approx(1.8751916863323426, abs=1e-8) def test_sun_earth_distance_correction(self): """Test the sun-earth distance correction.""" utc_time = datetime(2022, 6, 15, 12, 0, 0) corr = astr.sun_earth_distance_correction(utc_time) corr_exp = 1.0156952156742332 - self.assertAlmostEqual(corr, corr_exp, places=8) + assert corr == pytest.approx(corr_exp, abs=1e-8) From 1f931079c04c8e537d50fe17e4d5d1befdeb01e5 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 21 Jun 2024 10:47:21 -0500 Subject: [PATCH 44/89] Add more parametrization and cases for astronomy tests --- pyorbital/tests/test_astronomy.py | 47 ++++++++++++++++++++++--------- 1 file changed, 34 insertions(+), 13 deletions(-) diff --git a/pyorbital/tests/test_astronomy.py b/pyorbital/tests/test_astronomy.py index 0d53028b..17e88475 100644 --- a/pyorbital/tests/test_astronomy.py +++ b/pyorbital/tests/test_astronomy.py @@ -22,6 +22,7 @@ from datetime import datetime +import numpy as np import pytest import pyorbital.astronomy as astr @@ -29,24 +30,44 @@ class TestAstronomy: - def test_jdays(self): + @pytest.mark.parametrize( + ("dt", "exp_jdays", "exp_j2000"), + [ + (datetime(2000, 1, 1, 12, 0), 2451545.0, 0), + (datetime(2009, 10, 8, 14, 30), 2455113.1041666665, 3568.1041666666665), + ] + ) + def test_jdays(self, dt, exp_jdays, exp_j2000): """Test julian day functions.""" - t = datetime(2000, 1, 1, 12, 0) - assert astr.jdays(t) == 2451545.0 - assert astr.jdays2000(t) == 0 - t = datetime(2009, 10, 8, 14, 30) - assert astr.jdays(t) == 2455113.1041666665 - assert astr.jdays2000(t) == 3568.1041666666665 - - def test_sunangles(self): + assert astr.jdays(dt) == exp_jdays + assert astr.jdays2000(dt) == exp_j2000 + + @pytest.mark.parametrize( + ("lon", "lat", "exp_theta"), + [ + # Norrkoping + (16.1833, 58.6167, 60.371433482557833), + (0.0, 0.0, 1.8751916863323426), + ] + ) + @pytest.mark.parametrize("dtype", [None, np.float32, np.float64]) + def test_sunangles(self, lon, lat, exp_theta, dtype): """Test the sun-angle calculations.""" - lat, lon = 58.6167, 16.1833 # Norrkoping time_slot = datetime(2011, 9, 23, 12, 0) + abs_tolerance = 1e-8 + if dtype is not None: + lon = np.array([lon], dtype=dtype) + lat = np.array([lat], dtype=dtype) + if np.dtype(dtype).itemsize < 8: + abs_tolerance = 1e-4 sun_theta = astr.sun_zenith_angle(time_slot, lon, lat) - assert sun_theta == pytest.approx(60.371433482557833, abs=1e-8) - sun_theta = astr.sun_zenith_angle(time_slot, 0., 0.) 
- assert sun_theta == pytest.approx(1.8751916863323426, abs=1e-8) + if dtype is None: + assert sun_theta == pytest.approx(exp_theta, abs=abs_tolerance) + assert isinstance(sun_theta, float) + else: + assert sun_theta.dtype == dtype + np.testing.assert_allclose(sun_theta, exp_theta, atol=abs_tolerance) def test_sun_earth_distance_correction(self): """Test the sun-earth distance correction.""" From cefaa1af3c5d4fec3bf6cb1410ec804ce91024bd Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 25 Jun 2024 20:30:20 -0500 Subject: [PATCH 45/89] Remove old testing suite and mock references --- continuous_integration/environment.yaml | 1 - pyorbital/tests/__init__.py | 22 ---------------------- pyorbital/tests/test_orbital.py | 5 +---- 3 files changed, 1 insertion(+), 27 deletions(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index aea32b87..8ddf082c 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -19,7 +19,6 @@ dependencies: - coverage - codecov - behave - - mock - zarr - geoviews - pytest diff --git a/pyorbital/tests/__init__.py b/pyorbital/tests/__init__.py index c41997e2..0c555632 100644 --- a/pyorbital/tests/__init__.py +++ b/pyorbital/tests/__init__.py @@ -20,25 +20,3 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . """The tests package.""" - -from pyorbital.tests import (test_aiaa, test_tlefile, test_orbital, - test_astronomy, test_geoloc) -import unittest - - -def suite(): - """The global test suite.""" - mysuite = unittest.TestSuite() - # Test the documentation strings - # mysuite.addTests(doctest.DocTestSuite(image)) - # Use the unittests also - mysuite.addTests(test_aiaa.suite()) - mysuite.addTests(test_tlefile.suite()) - mysuite.addTests(test_orbital.suite()) - mysuite.addTests(test_astronomy.suite()) - mysuite.addTests(test_geoloc.suite()) - return mysuite - - -if __name__ == '__main__': - unittest.TextTestRunner(verbosity=2).run(suite()) diff --git a/pyorbital/tests/test_orbital.py b/pyorbital/tests/test_orbital.py index 2b507f2c..e3475944 100644 --- a/pyorbital/tests/test_orbital.py +++ b/pyorbital/tests/test_orbital.py @@ -24,10 +24,7 @@ """ import unittest -try: - from unittest import mock -except ImportError: - import mock +from unittest import mock from datetime import datetime, timedelta import numpy as np From c65b560bee6465411e5d559959b23cc1c1e94d73 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 25 Jun 2024 20:30:36 -0500 Subject: [PATCH 46/89] Add Python 3.12 to CI --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 44ab3031..ceffa091 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -13,10 +13,10 @@ jobs: fail-fast: true matrix: os: ["windows-latest", "ubuntu-latest", "macos-latest"] - python-version: ["3.9", "3.10", "3.11"] + python-version: ["3.9", "3.11", "3.12"] experimental: [false] include: - - python-version: "3.11" + - python-version: "3.12" os: "ubuntu-latest" experimental: true From 3247b15ed51b5e1107ae3c87656f91dcdf532a1f Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 25 Jun 2024 20:50:19 -0500 Subject: [PATCH 47/89] Update astronomy to preserve dtype with numpy 2 --- pyorbital/astronomy.py | 90 ++++++++++++++++++++++++------- pyorbital/tests/test_astronomy.py | 47 ++++++++++++++-- 2 files changed, 113 insertions(+), 24 deletions(-) diff --git 
a/pyorbital/astronomy.py b/pyorbital/astronomy.py index 3c212d49..881e97d2 100644 --- a/pyorbital/astronomy.py +++ b/pyorbital/astronomy.py @@ -1,28 +1,42 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- - +# # Copyright (c) 2011, 2013 - +# # Author(s): - +# # Martin Raspaud - +# # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. - +# # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. - +# # You should have received a copy of the GNU General Public License # along with this program. If not, see . +"""Angle and time-based astronomy functions. -"""Astronomy module. Parts taken from http://www.geoastro.de/elevaz/basics/index.htm + +Note on argument types +---------------------- + +Many of these functions accept Python datetime objects, +numpy datetime64 objects, or anything that can be turned +into a numpy array of datetime64 objects. These objects are inherently +64-bit so if other arguments (ex. longitude and latitude arrays) are +32-bit floats internal operations will be automatically promoted to +64-bit floating point numbers. Where possible these are then converted +back to 32-bit before being returned. In general scalar inputs will also +produce scalar outputs. + """ +import datetime import numpy as np @@ -42,12 +56,14 @@ def jdays2000(utc_time): def jdays(utc_time): """Get the julian day of *utc_time*. """ - return jdays2000(utc_time) + 2451545 + return jdays2000(utc_time) + 2451545.0 def _days(dt): """Get the days (floating point) from *d_t*. """ + if hasattr(dt, "shape"): + dt = np.asanyarray(dt, dtype=np.timedelta64) return dt / np.timedelta64(1, 'D') @@ -117,6 +133,7 @@ def _local_hour_angle(utc_time, longitude, right_ascension): def get_alt_az(utc_time, lon, lat): """Return sun altitude and azimuth from *utc_time*, *lon*, and *lat*. + lon,lat in degrees The returned angles are given in radians. """ @@ -125,10 +142,13 @@ def get_alt_az(utc_time, lon, lat): ra_, dec = sun_ra_dec(utc_time) h__ = _local_hour_angle(utc_time, lon, ra_) - return (np.arcsin(np.sin(lat) * np.sin(dec) + - np.cos(lat) * np.cos(dec) * np.cos(h__)), - np.arctan2(-np.sin(h__), (np.cos(lat) * np.tan(dec) - - np.sin(lat) * np.cos(h__)))) + alt_az = (np.arcsin(np.sin(lat) * np.sin(dec) + + np.cos(lat) * np.cos(dec) * np.cos(h__)), + np.arctan2(-np.sin(h__), (np.cos(lat) * np.tan(dec) - + np.sin(lat) * np.cos(h__)))) + if not isinstance(lon, float): + alt_az = (alt_az[0].astype(lon.dtype), alt_az[1].astype(lon.dtype)) + return alt_az def cos_zen(utc_time, lon, lat): @@ -141,7 +161,10 @@ def cos_zen(utc_time, lon, lat): r_a, dec = sun_ra_dec(utc_time) h__ = _local_hour_angle(utc_time, lon, r_a) - return (np.sin(lat) * np.sin(dec) + np.cos(lat) * np.cos(dec) * np.cos(h__)) + csza = (np.sin(lat) * np.sin(dec) + np.cos(lat) * np.cos(dec) * np.cos(h__)) + if not isinstance(lon, float): + csza = csza.astype(lon.dtype) + return csza def sun_zenith_angle(utc_time, lon, lat): @@ -149,13 +172,15 @@ def sun_zenith_angle(utc_time, lon, lat): lon,lat in degrees. 
The angle returned is given in degrees """ - return np.rad2deg(np.arccos(cos_zen(utc_time, lon, lat))) + sza = np.rad2deg(np.arccos(cos_zen(utc_time, lon, lat))) + if not isinstance(lon, float): + sza = sza.astype(lon.dtype) + return sza def sun_earth_distance_correction(utc_time): """Calculate the sun earth distance correction, relative to 1 AU. """ - # Computation according to # https://web.archive.org/web/20150117190838/http://curious.astro.cornell.edu/question.php?number=582 # with @@ -175,11 +200,10 @@ def sun_earth_distance_correction(utc_time): # "=" 1 - 0.0167 * np.cos(theta) corr = 1 - 0.0167 * np.cos(2 * np.pi * (jdays2000(utc_time) - 3) / 365.25636) - return corr -def observer_position(time, lon, lat, alt): +def observer_position(utc_time, lon, lat, alt): """Calculate observer ECI position. http://celestrak.com/columns/v02n03/ @@ -188,7 +212,7 @@ def observer_position(time, lon, lat, alt): lon = np.deg2rad(lon) lat = np.deg2rad(lat) - theta = (gmst(time) + lon) % (2 * np.pi) + theta = (gmst(utc_time) + lon) % (2 * np.pi) c = 1 / np.sqrt(1 + F * (F - 2) * np.sin(lat)**2) sq = c * (1 - F)**2 @@ -199,6 +223,32 @@ def observer_position(time, lon, lat, alt): vx = -MFACTOR * y # kilometers/second vy = MFACTOR * x - vz = 0 - + vz = _float_to_sibling_result(0.0, vx) + + if not isinstance(lon, float): + x = x.astype(lon.dtype, copy=False) + y = y.astype(lon.dtype, copy=False) + z = z.astype(lon.dtype, copy=False) + vx = vx.astype(lon.dtype, copy=False) + vy = vy.astype(lon.dtype, copy=False) + vz = vz.astype(lon.dtype, copy=False) # type: ignore[union-attr] return (x, y, z), (vx, vy, vz) + + +def _float_to_sibling_result(result_to_convert, template_result): + """Convert a scalar to the same type as another return type. + + This is mostly used to make a static value consistent with the types of + other returned values. + + """ + if isinstance(template_result, float): + return result_to_convert + # get any array like object that might be wrapped by our template (ex. 
xarray DataArray) + array_like = template_result if hasattr(template_result, "__array_function__") else template_result.data + array_convert = np.asarray(result_to_convert, like=array_like) + if not hasattr(template_result, "__array_function__"): + # the template result has some wrapper class (likely xarray DataArray) + # recreate the wrapper object + array_convert = template_result.__class__(array_convert) + return array_convert diff --git a/pyorbital/tests/test_astronomy.py b/pyorbital/tests/test_astronomy.py index 17e88475..6eba33a8 100644 --- a/pyorbital/tests/test_astronomy.py +++ b/pyorbital/tests/test_astronomy.py @@ -22,11 +22,33 @@ from datetime import datetime +import dask.array as da import numpy as np +import numpy.typing as npt import pytest import pyorbital.astronomy as astr +try: + from xarray import DataArray +except ImportError: + DataArray = None + + +def _create_dask_array(input_list: list, dtype: npt.DTypeLike) -> da.Array: + np_arr = np.array(input_list, dtype=dtype) + return da.from_array(np_arr) + + +def _create_xarray_numpy(input_list: list, dtype: npt.DTypeLike) -> DataArray: + np_arr = np.array(input_list, dtype=dtype) + return DataArray(np_arr) + + +def _create_xarray_dask(input_list: list, dtype: npt.DTypeLike) -> DataArray: + dask_arr = _create_dask_array(input_list, dtype) + return DataArray(dask_arr) + class TestAstronomy: @@ -50,14 +72,30 @@ def test_jdays(self, dt, exp_jdays, exp_j2000): (0.0, 0.0, 1.8751916863323426), ] ) - @pytest.mark.parametrize("dtype", [None, np.float32, np.float64]) - def test_sunangles(self, lon, lat, exp_theta, dtype): + @pytest.mark.parametrize( + ("dtype", "array_construct"), + [ + (None, None), + (np.float32, np.array), + (np.float64, np.array), + (np.float32, _create_dask_array), + (np.float64, _create_dask_array), + (np.float32, _create_xarray_numpy), + (np.float64, _create_xarray_numpy), + (np.float32, _create_xarray_dask), + (np.float64, _create_xarray_dask), + ] + ) + def test_sunangles(self, lon, lat, exp_theta, dtype, array_construct): """Test the sun-angle calculations.""" + if array_construct is None and dtype is not None: + pytest.skip(reason="Xarray dependency unavailable") + time_slot = datetime(2011, 9, 23, 12, 0) abs_tolerance = 1e-8 if dtype is not None: - lon = np.array([lon], dtype=dtype) - lat = np.array([lat], dtype=dtype) + lon = array_construct([lon], dtype=dtype) + lat = array_construct([lat], dtype=dtype) if np.dtype(dtype).itemsize < 8: abs_tolerance = 1e-4 @@ -68,6 +106,7 @@ def test_sunangles(self, lon, lat, exp_theta, dtype): else: assert sun_theta.dtype == dtype np.testing.assert_allclose(sun_theta, exp_theta, atol=abs_tolerance) + assert isinstance(sun_theta, type(lon)) def test_sun_earth_distance_correction(self): """Test the sun-earth distance correction.""" From 5b4f33698773d89a2224c39c22ced639b34b039d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 25 Jun 2024 21:13:20 -0500 Subject: [PATCH 48/89] Update release notes for v1.8.3 --- CHANGELOG.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b964104c..7f013ddb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,20 @@ +## Version 1.8.3 (2024/06/25) + +### Issues Closed + +* [Issue 151](https://github.com/pytroll/pyorbital/issues/151) - Issue Calculating Accurate View Zenith Angles on Terra Satellite Overpasses + +In this release 1 issue was closed. 
+ +### Pull Requests Merged + +#### Bugs fixed + +* [PR 156](https://github.com/pytroll/pyorbital/pull/156) - Fix dtype preservation in astronomy functions + +In this release 1 pull request was closed. + + ## Version 1.8.2 (2024/02/05) ### Issues Closed From 85424473854ec9ee1c0bb5dec2cb0b0f4e781d14 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 3 Jul 2024 18:00:41 +0200 Subject: [PATCH 49/89] Update `get_next_passes` docstring --- pyorbital/orbital.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyorbital/orbital.py b/pyorbital/orbital.py index 7f3c745d..acc58c54 100644 --- a/pyorbital/orbital.py +++ b/pyorbital/orbital.py @@ -339,7 +339,7 @@ def get_next_passes(self, utc_time, length, lon, lat, alt, tol=0.001, horizon=0) :length: Number of hours to find passes (int) :lon: Longitude of observer position on ground (float) :lat: Latitude of observer position on ground (float) - :alt: Altitude above sea-level (geoid) of observer position on ground (float) + :alt: Altitude above sea-level (geoid) in km of observer position on ground (float) :tol: precision of the result in seconds :horizon: the elevation of horizon to compute risetime and falltime. From 84e57b0e2d3594f75bdc2981aa14ea0456a669f2 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Wed, 17 Jul 2024 17:01:48 +0200 Subject: [PATCH 50/89] Fix so a normal datetime.satetime object can be passed as well to the get last ascending node function Signed-off-by: Adam.Dybbroe --- pyorbital/orbital.py | 55 ++++++++++++++----------- pyorbital/tests/test_orbital.py | 72 +++++++++++++++++++++++---------- 2 files changed, 81 insertions(+), 46 deletions(-) diff --git a/pyorbital/orbital.py b/pyorbital/orbital.py index acc58c54..72cafbf0 100644 --- a/pyorbital/orbital.py +++ b/pyorbital/orbital.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2011-2023 Pyorbital developers +# Copyright (c) 2011-2024 Pyorbital developers # Author(s): @@ -86,11 +86,14 @@ class OrbitalError(Exception): + """Custom exception for the Orbital class.""" + pass def get_observer_look(sat_lon, sat_lat, sat_alt, utc_time, lon, lat, alt): """Calculate observers look angle to a satellite. + http://celestrak.com/columns/v02n02/ :utc_time: Observation time (datetime object) @@ -142,7 +145,6 @@ def get_observer_look(sat_lon, sat_lat, sat_alt, utc_time, lon, lat, alt): class Orbital(object): - """Class for orbital computations. 
The *satellite* parameter is the name of the satellite to work on and is @@ -151,6 +153,7 @@ class Orbital(object): """ def __init__(self, satellite, tle_file=None, line1=None, line2=None): + """Initialize the class.""" satellite = satellite.upper() self.satellite_name = satellite self.tle = tlefile.read(satellite, tle_file=tle_file, @@ -159,16 +162,14 @@ def __init__(self, satellite, tle_file=None, line1=None, line2=None): self._sgdp4 = _SGDP4(self.orbit_elements) def __str__(self): + """Print the Orbital object state.""" return self.satellite_name + " " + str(self.tle) def get_last_an_time(self, utc_time): - """Calculate time of last ascending node relative to the - specified time - """ - + """Calculate time of last ascending node relative to the specified time.""" # Propagate backwards to ascending node dt = np.timedelta64(10, 'm') - t_old = utc_time + t_old = np.datetime64(utc_time) t_new = t_old - dt pos0, vel0 = self.get_position(t_old, normalize=False) pos1, vel1 = self.get_position(t_new, normalize=False) @@ -236,14 +237,17 @@ def get_lonlatalt(self, utc_time): return np.rad2deg(lon), np.rad2deg(lat), alt def find_aos(self, utc_time, lon, lat): + """Find AOS.""" pass def find_aol(self, utc_time, lon, lat): + """Find AOL.""" pass def get_observer_look(self, utc_time, lon, lat, alt): """Calculate observers look angle to a satellite. - http://celestrak.com/columns/v02n02/ + + See http://celestrak.com/columns/v02n02/ utc_time: Observation time (datetime object) lon: Longitude of observer position on ground in degrees east @@ -251,8 +255,8 @@ def get_observer_look(self, utc_time, lon, lat, alt): alt: Altitude above sea-level (geoid) of observer position on ground in km Return: (Azimuth, Elevation) - """ + """ utc_time = dt2np(utc_time) (pos_x, pos_y, pos_z), (vel_x, vel_y, vel_z) = self.get_position( utc_time, normalize=False) @@ -330,8 +334,7 @@ def get_orbit_number(self, utc_time, tbus_style=False, as_float=False): return orbit def get_next_passes(self, utc_time, length, lon, lat, alt, tol=0.001, horizon=0): - """Calculate passes for the next hours for a given start time and a - given observer. + """Calculate passes for the next hours for a given start time and a given observer. Original by Martin. @@ -344,8 +347,8 @@ def get_next_passes(self, utc_time, length, lon, lat, alt, tol=0.001, horizon=0) :horizon: the elevation of horizon to compute risetime and falltime. :return: [(rise-time, fall-time, max-elevation-time), ...] - """ + """ def elevation(minutes): """Compute the elevation.""" return self.get_observer_look(utc_time + @@ -358,7 +361,7 @@ def elevation_inv(minutes): return -elevation(minutes) def get_root(fun, start, end, tol=0.01): - """Root finding scheme""" + """Root finding scheme.""" x_0 = end x_1 = start fx_0 = fun(end) @@ -432,15 +435,18 @@ def get_max_parab(fun, start, end, tol=0.01): return res def _get_time_at_horizon(self, utc_time, obslon, obslat, **kwargs): - """Get the time closest in time to *utc_time* when the - satellite is at the horizon relative to the position of an observer on - ground (altitude = 0) + """Determine when the satellite is at the horizon relative to an observer on ground. + + Get the time closest in time to *utc_time* when the satellite is at the + horizon relative to the position of an observer on ground (altitude = + 0). Note: This is considered deprecated and it's functionality is currently replaced by 'get_next_passes'. 
+ """ warnings.warn("_get_time_at_horizon is replaced with get_next_passes", - DeprecationWarning) + DeprecationWarning, stacklevel=2) if "precision" in kwargs: precision = kwargs['precision'] else: @@ -518,7 +524,7 @@ def get_equatorial_crossing_time(self, tstart, tend, node='ascending', local_tim return None elif n_end - n_start > 1: warnings.warn('Multiple revolutions between start and end time. Computing crossing ' - 'time for the last revolution in that interval.') + 'time for the last revolution in that interval.', stacklevel=2) # Let n'(t) = n(t) - offset. Determine offset so that n'(tstart) < 0 and n'(tend) > 0 and # n'(tcross) = 0. @@ -555,11 +561,10 @@ def _nprime(time_f): class OrbitElements(object): - - """Class holding the orbital elements. - """ + """Class holding the orbital elements.""" def __init__(self, tle): + """Initialize the class.""" self.epoch = tle.epoch self.excentricity = tle.excentricity self.inclination = np.deg2rad(tle.inclination) @@ -609,9 +614,7 @@ def __init__(self, tle): class _SGDP4(object): - - """Class for the SGDP4 computations. - """ + """Class for the SGDP4 computations.""" def __init__(self, orbit_elements): self.mode = None @@ -918,6 +921,10 @@ def propagate(self, utc_time): def kep2xyz(kep): + """Keppler to cartesian coordinates conversion. + + (Not sure what 'kep' acttually refers to, just guessing! FIXME!) + """ sinT = np.sin(kep['theta']) cosT = np.cos(kep['theta']) sinI = np.sin(kep['eqinc']) diff --git a/pyorbital/tests/test_orbital.py b/pyorbital/tests/test_orbital.py index e3475944..8e89eebd 100644 --- a/pyorbital/tests/test_orbital.py +++ b/pyorbital/tests/test_orbital.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2012-2014, 2022 Pytroll Community +# Copyright (c) 2012-2024 Pytroll Community # Author(s): @@ -20,24 +20,23 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -"""Test the geoloc orbital. 
-""" +"""Test the geoloc orbital.""" +import pytest import unittest from unittest import mock from datetime import datetime, timedelta - import numpy as np - from pyorbital import orbital eps_deg = 10e-3 class Test(unittest.TestCase): + """Basic test class for unittesting the pyorbital.orbital class.""" def test_get_orbit_number(self): - """Testing getting the orbitnumber from the tle""" + """Testing getting the orbitnumber from the TLEs.""" sat = orbital.Orbital("NPP", line1="1 37849U 11061A 12017.90990040 " "-.00000112 00000-0 -32693-4 0 772", @@ -48,6 +47,7 @@ def test_get_orbit_number(self): self.assertEqual(orbnum, 1163) def test_sublonlat(self): + """Test getting the sub-satellite position.""" sat = orbital.Orbital("ISS (ZARYA)", line1="1 25544U 98067A 03097.78853147 " ".00021906 00000-0 28403-3 0 8652", @@ -66,6 +66,7 @@ def test_sublonlat(self): 'Calculation of altitude failed') def test_observer_look(self): + """Test getting the observer look angles.""" sat = orbital.Orbital("ISS (ZARYA)", line1="1 25544U 98067A 03097.78853147 " ".00021906 00000-0 28403-3 0 8652", @@ -81,6 +82,7 @@ def test_observer_look(self): 'Calculation of elevation failed') def test_orbit_num_an(self): + """Test getting orbit number - ascending node.""" sat = orbital.Orbital("METOP-A", line1="1 29499U 06044A 11254.96536486 " ".00000092 00000-0 62081-4 0 5221", @@ -90,6 +92,7 @@ def test_orbit_num_an(self): self.assertEqual(sat.get_orbit_number(d), 25437) def test_orbit_num_non_an(self): + """Test getting orbit number - not ascending node.""" sat = orbital.Orbital("METOP-A", line1="1 29499U 06044A 13060.48822809 " ".00000017 00000-0 27793-4 0 9819", @@ -99,6 +102,7 @@ def test_orbit_num_non_an(self): self.assertEqual(sat.get_orbit_number(sat.tle.epoch + dt), 33028) def test_orbit_num_equator(self): + """Test getting orbit numbers when being around equator.""" sat = orbital.Orbital("SUOMI NPP", line1="1 37849U 11061A 13061.24611272 " ".00000048 00000-0 43679-4 0 4334", @@ -131,7 +135,7 @@ def test_get_next_passes_apogee(self): timedelta(seconds=0.01)) def test_get_next_passes_tricky(self): - """ Check issue #34 for reference """ + """Check issue #34 for reference.""" line1 = "1 43125U 18004Q 18251.42128650 " \ "+.00001666 +00000-0 +73564-4 0 9991" @@ -153,7 +157,7 @@ def test_get_next_passes_tricky(self): self.assertTrue(len(res) == 15) def test_get_next_passes_issue_22(self): - """Check that max""" + """Check that max.""" line1 = '1 28654U 05018A 21083.16603416 .00000102 00000-0 79268-4 0 9999' line2 = '2 28654 99.0035 147.6583 0014816 159.4931 200.6838 14.12591533816498' @@ -166,6 +170,7 @@ def test_get_next_passes_issue_22(self): @mock.patch('pyorbital.orbital.Orbital.get_lonlatalt') def test_utc2local(self, get_lonlatalt): + """Test converting UTC to local time.""" get_lonlatalt.return_value = -45, None, None sat = orbital.Orbital("METOP-A", line1="1 29499U 06044A 13060.48822809 " @@ -178,6 +183,7 @@ def test_utc2local(self, get_lonlatalt): @mock.patch('pyorbital.orbital.Orbital.utc2local') @mock.patch('pyorbital.orbital.Orbital.get_orbit_number') def test_get_equatorial_crossing_time(self, get_orbit_number, utc2local): + """Test get the equatorial crossing time.""" def get_orbit_number_patched(utc_time, **kwargs): utc_time = np.datetime64(utc_time) diff = (utc_time - np.datetime64('2009-07-01 12:38:12')) / np.timedelta64(7200, 's') @@ -212,9 +218,10 @@ def get_orbit_number_patched(utc_time, **kwargs): class TestGetObserverLook(unittest.TestCase): - """Test the get_observer_look function""" + """Test the 
get_observer_look function.""" def setUp(self): + """Set up the test environment.""" self.t = datetime(2018, 1, 1, 0, 0, 0) self.sat_lon = np.array([[-89.5, -89.4, -89.5, -89.4], [-89.3, -89.2, -89.3, -89.2]]) @@ -232,7 +239,7 @@ def setUp(self): [83.17507167, 90, 66.559906, 81.010018]]) def test_basic_numpy(self): - """Test with numpy array inputs""" + """Test with numpy array inputs.""" from pyorbital import orbital azi, elev = orbital.get_observer_look(self.sat_lon, self.sat_lat, self.sat_alt, self.t, @@ -241,7 +248,7 @@ def test_basic_numpy(self): np.testing.assert_allclose(elev, self.exp_elev) def test_basic_dask(self): - """Test with dask array inputs""" + """Test with dask array inputs.""" from pyorbital import orbital import dask.array as da sat_lon = da.from_array(self.sat_lon, chunks=2) @@ -257,7 +264,7 @@ def test_basic_dask(self): np.testing.assert_allclose(elev.compute(), self.exp_elev) def test_xarray_with_numpy(self): - """Test with xarray DataArray with numpy array as inputs""" + """Test with xarray DataArray with numpy array as inputs.""" from pyorbital import orbital import xarray as xr @@ -276,7 +283,7 @@ def _xarr_conv(input): np.testing.assert_allclose(elev.data, self.exp_elev) def test_xarray_with_dask(self): - """Test with xarray DataArray with dask array as inputs""" + """Test with xarray DataArray with dask array as inputs.""" from pyorbital import orbital import dask.array as da import xarray as xr @@ -306,13 +313,14 @@ class TestGetObserverLookNadir(unittest.TestCase): """Test the get_observer_look function when satellite is at nadir.""" def setUp(self): - """Setup for test observer at nadir. + """Set up for test observer at nadir. + Note that rounding error differs between array types. With 1000 elements a test gives: - 1 error for basic numpy - 41 errors for basic dask - 63 errors for xarray with dask - 2 error for xarray with numpy + 1 error for basic numpy + 41 errors for basic dask + 63 errors for xarray with dask + 2 error for xarray with numpy """ rng = np.random.RandomState(125) self.t = datetime(2018, 1, 1, 0, 0, 0) @@ -325,7 +333,7 @@ def setUp(self): self.exp_elev = np.zeros((100)) + 90 def test_basic_numpy(self): - """Test with numpy array inputs""" + """Test with numpy array inputs.""" from pyorbital import orbital azi, elev = orbital.get_observer_look(self.sat_lon, self.sat_lat, self.sat_alt, self.t, @@ -335,7 +343,7 @@ def test_basic_numpy(self): np.testing.assert_allclose(elev, self.exp_elev) def test_basic_dask(self): - """Test with dask array inputs""" + """Test with dask array inputs.""" from pyorbital import orbital import dask.array as da sat_lon = da.from_array(self.sat_lon, chunks=2) @@ -352,7 +360,7 @@ def test_basic_dask(self): np.testing.assert_allclose(elev.compute(), self.exp_elev) def test_xarray_with_numpy(self): - """Test with xarray DataArray with numpy array as inputs""" + """Test with xarray DataArray with numpy array as inputs.""" from pyorbital import orbital import xarray as xr @@ -372,7 +380,7 @@ def _xarr_conv(input): np.testing.assert_allclose(elev.data, self.exp_elev) def test_xarray_with_dask(self): - """Test with xarray DataArray with dask array as inputs""" + """Test with xarray DataArray with dask array as inputs.""" from pyorbital import orbital import dask.array as da import xarray as xr @@ -407,3 +415,23 @@ def test_63(self): line2="2 37849 98.7092 229.3263 0000715 98.5313 290.6262 14.19554485413345") orb.get_next_passes(parser.parse("2019-10-21 16:00:00"), 12, 123.29736, -13.93763, 0) 
warnings.filterwarnings('default') + + +@pytest.mark.parametrize('dtime, expected', + [(datetime(2024, 6, 25, 11, 0, 18), + np.datetime64('2024-06-25T10:44:18.234375')), + (datetime(2024, 6, 25, 11, 5, 0), + np.datetime64('2024-06-25T10:44:18.234375')), + (np.datetime64('2024-06-25T11:10:00.000000'), + np.datetime64('2024-06-25T10:44:18.234375')), + ] + ) +def test_get_last_an_time_scalar_input(dtime, expected): + """Test getting the time of the last ascending node - input time is a scalar.""" + from pyorbital.orbital import Orbital + orb = Orbital("NOAA-20", + line1='1 43013U 17073A 24176.73674251 .00000000 00000+0 11066-3 0 00014', + line2='2 43013 98.7060 114.5340 0001454 139.3958 190.7541 14.19599847341971') + + result = orb.get_last_an_time(dtime) + assert abs(expected - result) < np.timedelta64(1, 's') From 26ce23db453f151afa0d246b5d0ead5937a90752 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Fri, 19 Jul 2024 09:05:24 +0200 Subject: [PATCH 51/89] Handle timezone aware datetime objects Signed-off-by: Adam.Dybbroe --- pyorbital/orbital.py | 18 +++++++++++++++++- pyorbital/tests/test_orbital.py | 33 +++++++++++++++++++++++++-------- 2 files changed, 42 insertions(+), 9 deletions(-) diff --git a/pyorbital/orbital.py b/pyorbital/orbital.py index 72cafbf0..e6fd99c6 100644 --- a/pyorbital/orbital.py +++ b/pyorbital/orbital.py @@ -27,6 +27,7 @@ import logging import warnings from datetime import datetime, timedelta +import pytz import numpy as np from scipy import optimize @@ -169,7 +170,7 @@ def get_last_an_time(self, utc_time): """Calculate time of last ascending node relative to the specified time.""" # Propagate backwards to ascending node dt = np.timedelta64(10, 'm') - t_old = np.datetime64(utc_time) + t_old = np.datetime64(_get_tz_unaware_utctime(utc_time)) t_new = t_old - dt pos0, vel0 = self.get_position(t_old, normalize=False) pos1, vel1 = self.get_position(t_new, normalize=False) @@ -920,6 +921,21 @@ def propagate(self, utc_time): return kep +def _get_tz_unaware_utctime(utc_time): + """Return timzone unaware datetime object. + + The input *utc_time* is either a timezone unaware object assumed to be in + UTC, or a timezone aware datetime object in UTC. + """ + if not hasattr(utc_time, 'tzinfo') or utc_time.tzinfo is None: + return utc_time + + if utc_time.tzinfo != pytz.utc: + raise AttributeError("UTC time expected! Parsing a timezone aware datetime object requires it to be UTC!") + + return utc_time.replace(tzinfo=None) + + def kep2xyz(kep): """Keppler to cartesian coordinates conversion. 
diff --git a/pyorbital/tests/test_orbital.py b/pyorbital/tests/test_orbital.py index 8e89eebd..8b005b00 100644 --- a/pyorbital/tests/test_orbital.py +++ b/pyorbital/tests/test_orbital.py @@ -26,6 +26,7 @@ import unittest from unittest import mock from datetime import datetime, timedelta +import pytz import numpy as np from pyorbital import orbital @@ -417,21 +418,37 @@ def test_63(self): warnings.filterwarnings('default') -@pytest.mark.parametrize('dtime, expected', - [(datetime(2024, 6, 25, 11, 0, 18), - np.datetime64('2024-06-25T10:44:18.234375')), - (datetime(2024, 6, 25, 11, 5, 0), - np.datetime64('2024-06-25T10:44:18.234375')), - (np.datetime64('2024-06-25T11:10:00.000000'), - np.datetime64('2024-06-25T10:44:18.234375')), +@pytest.mark.parametrize('dtime', + [datetime(2024, 6, 25, 11, 0, 18), + datetime(2024, 6, 25, 11, 5, 0, 0, pytz.UTC), + np.datetime64('2024-06-25T11:10:00.000000') ] ) -def test_get_last_an_time_scalar_input(dtime, expected): +def test_get_last_an_time_scalar_input(dtime): """Test getting the time of the last ascending node - input time is a scalar.""" from pyorbital.orbital import Orbital orb = Orbital("NOAA-20", line1='1 43013U 17073A 24176.73674251 .00000000 00000+0 11066-3 0 00014', line2='2 43013 98.7060 114.5340 0001454 139.3958 190.7541 14.19599847341971') + expected = np.datetime64('2024-06-25T10:44:18.234375') result = orb.get_last_an_time(dtime) assert abs(expected - result) < np.timedelta64(1, 's') + + +@pytest.mark.parametrize('dtime', + [datetime(2024, 6, 25, 11, 5, 0, 0, pytz.timezone('Europe/Stockholm')), + ] + ) +def test_get_last_an_time_wrong_input(dtime): + """Test getting the time of the last ascending node - wrong input.""" + from pyorbital.orbital import Orbital + orb = Orbital("NOAA-20", + line1='1 43013U 17073A 24176.73674251 .00000000 00000+0 11066-3 0 00014', + line2='2 43013 98.7060 114.5340 0001454 139.3958 190.7541 14.19599847341971') + + with pytest.raises(AttributeError) as exec_info: + _ = orb.get_last_an_time(dtime) + + expected = "UTC time expected! Parsing a timezone aware datetime object requires it to be UTC!" + assert str(exec_info.value) == expected From f68b9c43958df9ec2657eea7fe6e8c9db70c4726 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Fri, 19 Jul 2024 16:13:28 +0200 Subject: [PATCH 52/89] Improve readability Signed-off-by: Adam.Dybbroe --- pyorbital/orbital.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/pyorbital/orbital.py b/pyorbital/orbital.py index e6fd99c6..f08c638d 100644 --- a/pyorbital/orbital.py +++ b/pyorbital/orbital.py @@ -927,13 +927,12 @@ def _get_tz_unaware_utctime(utc_time): The input *utc_time* is either a timezone unaware object assumed to be in UTC, or a timezone aware datetime object in UTC. """ - if not hasattr(utc_time, 'tzinfo') or utc_time.tzinfo is None: - return utc_time + if isinstance(utc_time, datetime): + if utc_time.tzinfo and utc_time.tzinfo != pytz.utc: + raise AttributeError("UTC time expected! Parsing a timezone aware datetime object requires it to be UTC!") + return utc_time.replace(tzinfo=None) - if utc_time.tzinfo != pytz.utc: - raise AttributeError("UTC time expected! 
Parsing a timezone aware datetime object requires it to be UTC!") - - return utc_time.replace(tzinfo=None) + return utc_time def kep2xyz(kep): From 397cdc656ff4915c20bbcfd5986082fc9499808a Mon Sep 17 00:00:00 2001 From: Adam Dybbroe Date: Mon, 22 Jul 2024 13:38:03 +0200 Subject: [PATCH 53/89] Update pyorbital/orbital.py Co-authored-by: David Hoese --- pyorbital/orbital.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyorbital/orbital.py b/pyorbital/orbital.py index f08c638d..dc2b89e4 100644 --- a/pyorbital/orbital.py +++ b/pyorbital/orbital.py @@ -938,7 +938,7 @@ def _get_tz_unaware_utctime(utc_time): def kep2xyz(kep): """Keppler to cartesian coordinates conversion. - (Not sure what 'kep' acttually refers to, just guessing! FIXME!) + (Not sure what 'kep' actually refers to, just guessing! FIXME!) """ sinT = np.sin(kep['theta']) cosT = np.cos(kep['theta']) From 14db2a94215e2a42c64dd47b5e5635a980c3c551 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Mon, 22 Jul 2024 13:42:49 +0200 Subject: [PATCH 54/89] Fix using adequate exception error Signed-off-by: Adam.Dybbroe --- pyorbital/orbital.py | 2 +- pyorbital/tests/test_orbital.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyorbital/orbital.py b/pyorbital/orbital.py index dc2b89e4..07f2e40c 100644 --- a/pyorbital/orbital.py +++ b/pyorbital/orbital.py @@ -929,7 +929,7 @@ def _get_tz_unaware_utctime(utc_time): """ if isinstance(utc_time, datetime): if utc_time.tzinfo and utc_time.tzinfo != pytz.utc: - raise AttributeError("UTC time expected! Parsing a timezone aware datetime object requires it to be UTC!") + raise ValueError("UTC time expected! Parsing a timezone aware datetime object requires it to be UTC!") return utc_time.replace(tzinfo=None) return utc_time diff --git a/pyorbital/tests/test_orbital.py b/pyorbital/tests/test_orbital.py index 8b005b00..74e9dacb 100644 --- a/pyorbital/tests/test_orbital.py +++ b/pyorbital/tests/test_orbital.py @@ -447,7 +447,7 @@ def test_get_last_an_time_wrong_input(dtime): line1='1 43013U 17073A 24176.73674251 .00000000 00000+0 11066-3 0 00014', line2='2 43013 98.7060 114.5340 0001454 139.3958 190.7541 14.19599847341971') - with pytest.raises(AttributeError) as exec_info: + with pytest.raises(ValueError) as exec_info: _ = orb.get_last_an_time(dtime) expected = "UTC time expected! Parsing a timezone aware datetime object requires it to be UTC!" 
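A short usage sketch (not part of the patch series) of the timezone handling introduced in the patches above, calling the private helper directly for illustration and assuming a pyorbital build that includes these commits:

    from datetime import datetime

    import pytz

    from pyorbital.orbital import _get_tz_unaware_utctime

    # Naive datetimes are assumed to be in UTC and come back unchanged.
    print(_get_tz_unaware_utctime(datetime(2024, 6, 25, 11, 0, 18)))

    # UTC-aware datetimes are accepted; the tzinfo is stripped off.
    print(_get_tz_unaware_utctime(datetime(2024, 6, 25, 11, 5, 0, 0, pytz.utc)))

    # Any other timezone is rejected (ValueError after the fix above,
    # AttributeError in the first version of the helper).
    try:
        _get_tz_unaware_utctime(
            datetime(2024, 6, 25, 11, 5, 0, 0, pytz.timezone("Europe/Stockholm")))
    except ValueError as err:
        print(err)
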
From 431ff9fd0acd479ea6e6c5fb728a0a59b20e9462 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 23 Aug 2024 10:38:44 +0200 Subject: [PATCH 55/89] Add new platforms and clarify help message for `check_platform` --- pyorbital/check_platform.py | 2 +- pyorbital/etc/platforms.txt | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/pyorbital/check_platform.py b/pyorbital/check_platform.py index 2a14e1d8..441a5e59 100755 --- a/pyorbital/check_platform.py +++ b/pyorbital/check_platform.py @@ -29,7 +29,7 @@ parser = argparse.ArgumentParser( description='Check if a satellite is supported.') parser.add_argument("-s", "--satellite", - help=("Name of the Satellite - following WMO Oscar naming."), + help=("Name of the Satellite [in upper case] - following WMO Oscar naming."), default=None, required=True, type=str) diff --git a/pyorbital/etc/platforms.txt b/pyorbital/etc/platforms.txt index cb5fb375..0a9d7676 100644 --- a/pyorbital/etc/platforms.txt +++ b/pyorbital/etc/platforms.txt @@ -28,10 +28,16 @@ FY-3A 32958 FY-3B 37214 FY-3C 39260 FY-3D 43010 +FY-3E 49008 +FY-3F 57490 +FY-3G 56232 GOES-13 29155 GOES-14 35491 GOES-15 36411 GOES-16 41866 +GOES-17 43226 +GOES-18 51850 +GOES-19 60133 Himawari-6 28622 Himawari-7 28937 Himawari-8 40267 @@ -70,6 +76,9 @@ NOAA-20 43013 NOAA-21 54234 RadarSat-2 32382 Sentinel-1A 39634 +Sentinel-1B 41456 +Sentinel-2A 40697 +Sentinel-2B 42063 Sentinel-3A 41335 Sentinel-3B 43437 Sentinel-5P 42969 From 03580df1433c1af870256763186bab3082713d6e Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 23 Aug 2024 10:47:01 +0200 Subject: [PATCH 56/89] Replace `master` with `main` in PR template. --- .github/PULL_REQUEST_TEMPLATE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index a8db991b..6deee859 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,4 +1,4 @@ - + From d011acc96444c03023fae55cd338f83ee20b187b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 22:35:55 +0000 Subject: [PATCH 57/89] Bump pypa/gh-action-pypi-publish from 1.9.0 to 1.10.0 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.9.0 to 1.10.0. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.9.0...v1.10.0) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 50804325..e69a33d8 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -19,7 +19,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.9.0 + uses: pypa/gh-action-pypi-publish@v1.10.0 with: user: __token__ password: ${{ secrets.pypi_password }} \ No newline at end of file From d18464fd75d1203ffa5603c7b19f95cf2e76d074 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Sep 2024 22:58:20 +0000 Subject: [PATCH 58/89] Bump pypa/gh-action-pypi-publish from 1.10.0 to 1.10.1 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.10.0 to 1.10.1. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.10.0...v1.10.1) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index e69a33d8..da9db486 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -19,7 +19,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.10.0 + uses: pypa/gh-action-pypi-publish@v1.10.1 with: user: __token__ password: ${{ secrets.pypi_password }} \ No newline at end of file From 6e18491c62800800bb4e9d4f469b27c4ec630427 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 22:49:16 +0000 Subject: [PATCH 59/89] Bump pypa/gh-action-pypi-publish from 1.10.1 to 1.10.2 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.10.1 to 1.10.2. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.10.1...v1.10.2) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index da9db486..c53785b1 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -19,7 +19,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.10.1 + uses: pypa/gh-action-pypi-publish@v1.10.2 with: user: __token__ password: ${{ secrets.pypi_password }} \ No newline at end of file From e87147188b111841f5c2cec0c3a6264067a604d6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 22:19:27 +0000 Subject: [PATCH 60/89] Bump pypa/gh-action-pypi-publish from 1.10.2 to 1.10.3 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.10.2 to 1.10.3. 
- [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.10.2...v1.10.3) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index c53785b1..7f86c843 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -19,7 +19,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.10.2 + uses: pypa/gh-action-pypi-publish@v1.10.3 with: user: __token__ password: ${{ secrets.pypi_password }} \ No newline at end of file From 238911bcc454d67541a60ff89ce760a87a54ea86 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 22:48:45 +0000 Subject: [PATCH 61/89] Bump pypa/gh-action-pypi-publish from 1.10.3 to 1.11.0 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.10.3 to 1.11.0. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.10.3...v1.11.0) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 7f86c843..700901e8 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -19,7 +19,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.10.3 + uses: pypa/gh-action-pypi-publish@v1.11.0 with: user: __token__ password: ${{ secrets.pypi_password }} \ No newline at end of file From 2c3ea0c44cd110747b49ab45d37631a253386aa5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 22:20:44 +0000 Subject: [PATCH 62/89] Bump pypa/gh-action-pypi-publish from 1.11.0 to 1.12.2 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.11.0 to 1.12.2. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.11.0...v1.12.2) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 700901e8..23fa0ff8 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -19,7 +19,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.11.0 + uses: pypa/gh-action-pypi-publish@v1.12.2 with: user: __token__ password: ${{ secrets.pypi_password }} \ No newline at end of file From 04e53e93529622f13e3ccb9d96714e48a4dca757 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Nov 2024 22:46:15 +0000 Subject: [PATCH 63/89] Bump codecov/codecov-action from 4 to 5 Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 4 to 5. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/v4...v5) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ceffa091..ab237919 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -89,7 +89,7 @@ jobs: pytest --cov=pyorbital pyorbital/tests --cov-report=xml --cov-report= - name: Upload unittest coverage to Codecov - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@v5 with: flags: unittests file: ./coverage.xml From eb37c5a627d5e78ea24be3d3468082ad2bca12ff Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 20 Nov 2024 18:33:50 +0200 Subject: [PATCH 64/89] Switch to plain miniconda, enforce conda-forge channel --- .github/workflows/ci.yaml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e58b0fd9..76e03e83 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -33,11 +33,12 @@ jobs: - name: Setup Conda Environment uses: conda-incubator/setup-miniconda@v3 with: - miniforge-variant: Mambaforge miniforge-version: latest - use-mamba: true python-version: ${{ matrix.python-version }} activate-environment: test-environment + channels: conda-forge + conda-remove-defaults: true + channel-priority: strict - name: Set cache environment variables shell: bash -l {0} From 2595b10762c058a8bc630dd0a070f3bbdee91cbb Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Wed, 20 Nov 2024 21:03:55 +0100 Subject: [PATCH 65/89] Switch to use pyproject.toml instead of setup.py, and skip versioneer Signed-off-by: Adam.Dybbroe --- .pre-commit-config.yaml | 40 +- pyorbital/__init__.py | 14 +- {bin => pyorbital}/fetch_tles.py | 25 +- pyorbital/version.py | 658 ------------------------------- pyproject.toml | 82 ++++ setup.cfg | 23 -- setup.py | 62 --- 7 files changed, 134 insertions(+), 770 deletions(-) rename {bin => pyorbital}/fetch_tles.py (71%) delete mode 100644 pyorbital/version.py create mode 100644 pyproject.toml delete mode 100644 setup.cfg delete mode 100644 setup.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 68fd7028..fe36218d 100644 --- a/.pre-commit-config.yaml +++ 
b/.pre-commit-config.yaml @@ -1,8 +1,40 @@ exclude: '^$' fail_fast: false repos: -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.2.3 + - repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. + rev: 'v0.7.2' hooks: - - id: flake8 - additional_dependencies: [flake8-docstrings, flake8-debugger, flake8-bugbear] + - id: ruff + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v5.0.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + args: [--unsafe] + - repo: https://github.com/PyCQA/bandit + rev: '1.7.10' # Update me! + hooks: + - id: bandit + args: [--ini, .bandit] + - repo: https://github.com/pre-commit/mirrors-mypy + rev: 'v1.13.0' # Use the sha / tag you want to point at + hooks: + - id: mypy + additional_dependencies: + - types-docutils + - types-setuptools + - types-PyYAML + - types-requests + args: ["--python-version", "3.10", "--ignore-missing-imports"] + - repo: https://github.com/pycqa/isort + rev: 5.13.2 + hooks: + - id: isort + language_version: python3 +ci: + # To trigger manually, comment on a pull request with "pre-commit.ci autofix" + autofix_prs: false + autoupdate_schedule: "monthly" + skip: [bandit] diff --git a/pyorbital/__init__.py b/pyorbital/__init__.py index c72b692c..d8e4719c 100644 --- a/pyorbital/__init__.py +++ b/pyorbital/__init__.py @@ -1,10 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2017 - -# Author(s): - -# Martin Raspaud +# Copyright (c) 2017-2024 Pytroll Community # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -19,14 +15,14 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . +"""Package file.""" + import numpy as np -from .version import get_versions -__version__ = get_versions()['version'] -del get_versions def dt2np(utc_time): + """Convert datetime to numpy datetime64 object.""" try: return np.datetime64(utc_time) except ValueError: - return utc_time.astype('datetime64[ns]') + return utc_time.astype("datetime64[ns]") diff --git a/bin/fetch_tles.py b/pyorbital/fetch_tles.py similarity index 71% rename from bin/fetch_tles.py rename to pyorbital/fetch_tles.py index e6461673..fd8805ae 100755 --- a/bin/fetch_tles.py +++ b/pyorbital/fetch_tles.py @@ -2,35 +2,36 @@ """Script to download and store satellite TLE data.""" -import sys import logging import logging.config +import sys import yaml + from pyorbital.tlefile import Downloader, SQLiteTLE def read_config(config_fname): """Read and parse config file.""" - with open(config_fname, 'r') as fid: + with open(config_fname, "r") as fid: config = yaml.load(fid, Loader=yaml.SafeLoader) return config -def main(): +def run(): """Run TLE downloader.""" config = read_config(sys.argv[1]) - if 'logging' in config: - logging.config.dictConfig(config['logging']) + if "logging" in config: + logging.config.dictConfig(config["logging"]) else: logging.basicConfig(level=logging.INFO) downloader = Downloader(config) - db = SQLiteTLE(config['database']['path'], config['platforms'], - config['text_writer']) + db = SQLiteTLE(config["database"]["path"], config["platforms"], + config["text_writer"]) logging.info("Start downloading TLEs") - for dl_ in config['downloaders']: + for dl_ in config["downloaders"]: fetcher = getattr(downloader, dl_) tles = fetcher() if isinstance(tles, dict): @@ -38,16 +39,12 @@ def main(): for tle in tles[source]: db.update_db(tle, source) else: - source = 'file' + source = 
"file" if "spacetrack" in dl_: - source = 'spacetrack' + source = "spacetrack" for tle in tles: db.update_db(tle, source) db.write_tle_txt() db.close() logging.info("TLE downloading finished") - - -if __name__ == "__main__": - main() diff --git a/pyorbital/version.py b/pyorbital/version.py deleted file mode 100644 index 152e2d52..00000000 --- a/pyorbital/version.py +++ /dev/null @@ -1,658 +0,0 @@ - -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. -# Generated by versioneer-0.28 -# https://github.com/python-versioneer/python-versioneer - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys -from typing import Callable, Dict -import functools - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). - git_refnames = "$Format:%d$" - git_full = "$Format:%H$" - git_date = "$Format:%ci$" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "pep440" - cfg.tag_prefix = "v" - cfg.parentdir_prefix = "None" - cfg.versionfile_source = "pyorbital/version.py" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None), **popen_kwargs) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = 
process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, process.returncode - return stdout, process.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %s but none started with prefix %s" % - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. 
The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %s" % r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. - # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=not verbose) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, [ - "describe", "--tags", "--dirty", "--always", "--long", - "--match", f"{tag_prefix}[[:digit:]]*" - ], cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. 
- branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? - pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) - pieces["distance"] = len(out.split()) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 
0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) - else: - rendered += ".post0.dev%d" % (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. 
- for _ in cfg.versionfile_source.split('/'): - root = os.path.dirname(root) - except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..1a7ff556 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,82 @@ +[project] +name = "pyorbital" +dynamic = ["version"] +description = "Scheduling satellite passes in Python" +authors = [ + { name = "The Pytroll Team", email = "pytroll@googlegroups.com" } +] +dependencies = ["numpy>=1.19.0", + "scipy", + "requests"] +readme = "README.md" +requires-python = ">=3.10" +license = {file = "LICENSE.txt"} +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Topic :: Scientific/Engineering", + "Topic :: Scientific/Engineering :: Astronomy" +] + +[project.scripts] +fetch_tles = "pyorbital.fetch_tles:run" + +[project.urls] +"Documentation" = "https://pyorbital.readthedocs.io/en/latest/" + +[project.optional-dependencies] +doc = ["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"] + +[build-system] +requires = ["hatchling", "hatch-vcs"] +build-backend = "hatchling.build" + +[tool.rye] +managed = true +dev-dependencies = [] + +[tool.hatch.metadata] +allow-direct-references = true + +[tool.hatch.build.targets.wheel] +packages = ["pyorbital"] + +[tool.hatch.version] +source = "vcs" + +[tool.hatch.build.hooks.vcs] +version-file = "pyorbital/version.py" + +[tool.isort] +sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"] +profile = "black" +skip_gitignore = true +default_section = "THIRDPARTY" +known_first_party = "pyorbital" +line_length = 120 + +[tool.ruff] +line-length = 120 + +[tool.ruff.lint] +# See https://docs.astral.sh/ruff/rules/ +# In the future, add "B", "S", "N" +select = ["A", "D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20", "NPY"] + +[tool.ruff.lint.per-file-ignores] +"trollsched/tests/*" = ["S101"] # assert allowed in tests +"trollsched/version.py" = ["D100", "Q000"] # automatically generated by hatch-vcs + +[tool.ruff.lint.pydocstyle] +convention = "google" + +[tool.ruff.lint.mccabe] +# Unlike Flake8, default to a complexity level of 10. 
+max-complexity = 10 + +[tool.coverage.run] +relative_files = true +omit = ["pyorbital/version.py"] diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 5ba5071e..00000000 --- a/setup.cfg +++ /dev/null @@ -1,23 +0,0 @@ -[bdist_rpm] -requires=numpy -release=1 -doc_files = doc/Makefile doc/source/*.rst - -[bdist_wheel] -universal=1 - -[flake8] -max-line-length = 120 - -[versioneer] -VCS = git -style = pep440 -versionfile_source = pyorbital/version.py -versionfile_build = -tag_prefix = v - -[coverage:run] -relative_files = True -omit = - pyorbital/version.py - versioneer.py diff --git a/setup.py b/setup.py deleted file mode 100644 index 43b8fbe8..00000000 --- a/setup.py +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2011-2023 Pytroll Community -# -# Author(s): -# -# Martin Raspaud -# Panu Lahtinen -# Adam Dybbroe -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -"""Setup for pyorbital.""" - -import os -from setuptools import setup, find_packages -import versioneer - -try: - with open('./README.md', 'r') as fd: - long_description = fd.read() -except IOError: - long_description = '' - - -setup(name='pyorbital', - version=versioneer.get_version(), - cmdclass=versioneer.get_cmdclass(), - description='Orbital parameters and astronomical computations in Python', - author='The Pytroll Team', - author_email='pytroll@googlegroups.com', - classifiers=["Development Status :: 5 - Production/Stable", - "Intended Audience :: Science/Research", - "License :: OSI Approved :: GNU General Public License v3 " + - "or later (GPLv3+)", - "Operating System :: OS Independent", - "Programming Language :: Python", - "Topic :: Scientific/Engineering", - "Topic :: Scientific/Engineering :: Astronomy"], - url="https://github.com/pytroll/pyorbital", - long_description=long_description, - long_description_content_type='text/markdown', - packages=find_packages(), - package_data={'pyorbital': [os.path.join('etc', 'platforms.txt')]}, - scripts=['bin/fetch_tles.py', ], - install_requires=['numpy>=1.19.0', 'scipy', 'requests'], - python_requires='>=3.9', - extras_require={"doc": ["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"]}, - zip_safe=False, - ) From e349c037be0f35018745d3945ce89f22bf74e59d Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 21 Nov 2024 13:10:29 +0100 Subject: [PATCH 66/89] Fix styles using ruff and settings in pyproject.toml Signed-off-by: Adam.Dybbroe --- pyorbital/__init__.py | 6 + pyorbital/check_platform.py | 5 +- pyproject.toml | 15 +- versioneer.py | 2205 ----------------------------------- 4 files changed, 21 insertions(+), 2210 deletions(-) delete mode 100644 versioneer.py diff --git a/pyorbital/__init__.py b/pyorbital/__init__.py index d8e4719c..594d6eec 100644 --- a/pyorbital/__init__.py +++ b/pyorbital/__init__.py @@ -19,6 +19,8 @@ import numpy as np +from pyorbital import version + def dt2np(utc_time): """Convert datetime to numpy datetime64 object.""" @@ 
-26,3 +28,7 @@ def dt2np(utc_time): return np.datetime64(utc_time) except ValueError: return utc_time.astype("datetime64[ns]") + +def get_version(): + """Return the Pyorbital version tag.""" + return version.__version__ diff --git a/pyorbital/check_platform.py b/pyorbital/check_platform.py index 441a5e59..b292ed71 100755 --- a/pyorbital/check_platform.py +++ b/pyorbital/check_platform.py @@ -22,12 +22,13 @@ import argparse import logging -from pyorbital.tlefile import check_is_platform_supported + from pyorbital.logger import logging_on +from pyorbital.tlefile import check_is_platform_supported if __name__ == "__main__": parser = argparse.ArgumentParser( - description='Check if a satellite is supported.') + description="Check if a satellite is supported.") parser.add_argument("-s", "--satellite", help=("Name of the Satellite [in upper case] - following WMO Oscar naming."), default=None, diff --git a/pyproject.toml b/pyproject.toml index 1a7ff556..5e229f7c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,11 @@ authors = [ ] dependencies = ["numpy>=1.19.0", "scipy", - "requests"] + "requests", + "pytz", + "dateutil", + "defusedxml", + ] readme = "README.md" requires-python = ">=3.10" license = {file = "LICENSE.txt"} @@ -67,8 +71,13 @@ line-length = 120 select = ["A", "D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20", "NPY"] [tool.ruff.lint.per-file-ignores] -"trollsched/tests/*" = ["S101"] # assert allowed in tests -"trollsched/version.py" = ["D100", "Q000"] # automatically generated by hatch-vcs +"pyorbital/tests/*" = ["S101"] # assert allowed in tests +"pyorbital/version.py" = ["D100", "Q000"] # automatically generated by hatch-vcs +"pyorbital/tlefile.py" = ["T201", "T203"] # allow print and pprint +"pyorbital/geoloc.py" = ["T201"] # allow print +"pyorbital/geoloc_example.py" = ["T201"] # allow print +"pyorbital/orbital.py" = ["T201"] # allow print + [tool.ruff.lint.pydocstyle] convention = "google" diff --git a/versioneer.py b/versioneer.py deleted file mode 100644 index 18e34c2f..00000000 --- a/versioneer.py +++ /dev/null @@ -1,2205 +0,0 @@ - -# Version: 0.28 - -"""The Versioneer - like a rocketeer, but for versions. - -The Versioneer -============== - -* like a rocketeer, but for versions! -* https://github.com/python-versioneer/python-versioneer -* Brian Warner -* License: Public Domain (Unlicense) -* Compatible with: Python 3.7, 3.8, 3.9, 3.10 and pypy3 -* [![Latest Version][pypi-image]][pypi-url] -* [![Build Status][travis-image]][travis-url] - -This is a tool for managing a recorded version number in setuptools-based -python projects. The goal is to remove the tedious and error-prone "update -the embedded version string" step from your release process. Making a new -release should be as easy as recording a new tag in your version-control -system, and maybe making new tarballs. - - -## Quick Install - -Versioneer provides two installation modes. The "classic" vendored mode installs -a copy of versioneer into your repository. The experimental build-time dependency mode -is intended to allow you to skip this step and simplify the process of upgrading. 
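
As a minimal usage sketch of the packaging change above (not part of any hunk in this series): assuming the package has been built or installed, so that the hatch-vcs hook configured under [tool.hatch.build.hooks.vcs] in the new pyproject.toml has generated pyorbital/version.py, the get_version() helper added to pyorbital/__init__.py can be exercised as below. The printed string is only an example and depends on the checked-out tag.

    import pyorbital

    # get_version() simply returns version.__version__ from the file that
    # hatch-vcs writes at build time (pyorbital/version.py).
    print(pyorbital.get_version())  # e.g. "1.9.0", or a dev string for an untagged checkout
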
- -### Vendored mode - -* `pip install versioneer` to somewhere in your $PATH - * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is - available, so you can also use `conda install -c conda-forge versioneer` -* add a `[tool.versioneer]` section to your `pyproject.toml` or a - `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) - * Note that you will need to add `tomli; python_version < "3.11"` to your - build-time dependencies if you use `pyproject.toml` -* run `versioneer install --vendor` in your source tree, commit the results -* verify version information with `python setup.py version` - -### Build-time dependency mode - -* `pip install versioneer` to somewhere in your $PATH - * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is - available, so you can also use `conda install -c conda-forge versioneer` -* add a `[tool.versioneer]` section to your `pyproject.toml` or a - `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) -* add `versioneer` (with `[toml]` extra, if configuring in `pyproject.toml`) - to the `requires` key of the `build-system` table in `pyproject.toml`: - ```toml - [build-system] - requires = ["setuptools", "versioneer[toml]"] - build-backend = "setuptools.build_meta" - ``` -* run `versioneer install --no-vendor` in your source tree, commit the results -* verify version information with `python setup.py version` - -## Version Identifiers - -Source trees come from a variety of places: - -* a version-control system checkout (mostly used by developers) -* a nightly tarball, produced by build automation -* a snapshot tarball, produced by a web-based VCS browser, like github's - "tarball from tag" feature -* a release tarball, produced by "setup.py sdist", distributed through PyPI - -Within each source tree, the version identifier (either a string or a number, -this tool is format-agnostic) can come from a variety of places: - -* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows - about recent "tags" and an absolute revision-id -* the name of the directory into which the tarball was unpacked -* an expanded VCS keyword ($Id$, etc) -* a `_version.py` created by some earlier build step - -For released software, the version identifier is closely related to a VCS -tag. Some projects use tag names that include more than just the version -string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool -needs to strip the tag prefix to extract the version identifier. For -unreleased software (between tags), the version identifier should provide -enough information to help developers recreate the same tree, while also -giving them an idea of roughly how old the tree is (after version 1.2, before -version 1.3). Many VCS systems can report a description that captures this, -for example `git describe --tags --dirty --always` reports things like -"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the -0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has -uncommitted changes). - -The version identifier is used for multiple purposes: - -* to allow the module to self-identify its version: `myproject.__version__` -* to choose a name and prefix for a 'setup.py sdist' tarball - -## Theory of Operation - -Versioneer works by adding a special `_version.py` file into your source -tree, where your `__init__.py` can import it. This `_version.py` knows how to -dynamically ask the VCS tool for version information at import time. 
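
In the same hedged spirit, a rough invocation sketch for the TLE fetcher that was moved from bin/fetch_tles.py into the package earlier in this series: the console script declared under [project.scripts] in the new pyproject.toml (fetch_tles = "pyorbital.fetch_tles:run") calls run(), which reads a YAML configuration named on the command line. The config file name below is a hypothetical placeholder, and the exact layout of the platforms, text_writer and downloaders sections depends on Downloader and SQLiteTLE, which are not shown here.

    import sys

    from pyorbital.fetch_tles import run

    # run() takes the config path from sys.argv[1]; the YAML must provide the
    # top-level keys the script reads: "downloaders", "database" (with "path"),
    # "platforms", "text_writer" and, optionally, "logging".
    sys.argv = ["fetch_tles", "tles.yaml"]  # "tles.yaml" is a hypothetical path
    run()
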
- -`_version.py` also contains `$Revision$` markers, and the installation -process marks `_version.py` to have this marker rewritten with a tag name -during the `git archive` command. As a result, generated tarballs will -contain enough information to get the proper version. - -To allow `setup.py` to compute a version too, a `versioneer.py` is added to -the top level of your source tree, next to `setup.py` and the `setup.cfg` -that configures it. This overrides several distutils/setuptools commands to -compute the version when invoked, and changes `setup.py build` and `setup.py -sdist` to replace `_version.py` with a small static file that contains just -the generated version data. - -## Installation - -See [INSTALL.md](./INSTALL.md) for detailed installation instructions. - -## Version-String Flavors - -Code which uses Versioneer can learn about its version string at runtime by -importing `_version` from your main `__init__.py` file and running the -`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can -import the top-level `versioneer.py` and run `get_versions()`. - -Both functions return a dictionary with different flavors of version -information: - -* `['version']`: A condensed version string, rendered using the selected - style. This is the most commonly used value for the project's version - string. The default "pep440" style yields strings like `0.11`, - `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section - below for alternative styles. - -* `['full-revisionid']`: detailed revision identifier. For Git, this is the - full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". - -* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the - commit date in ISO 8601 format. This will be None if the date is not - available. - -* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that - this is only accurate if run in a VCS checkout, otherwise it is likely to - be False or None - -* `['error']`: if the version string could not be computed, this will be set - to a string describing the problem, otherwise it will be None. It may be - useful to throw an exception in setup.py if this is set, to avoid e.g. - creating tarballs with a version string of "unknown". - -Some variants are more useful than others. Including `full-revisionid` in a -bug report should allow developers to reconstruct the exact code being tested -(or indicate the presence of local changes that should be shared with the -developers). `version` is suitable for display in an "about" box or a CLI -`--version` output: it can be easily compared against release notes and lists -of bugs fixed in various releases. - -The installer adds the following text to your `__init__.py` to place a basic -version in `YOURPROJECT.__version__`: - - from ._version import get_versions - __version__ = get_versions()['version'] - del get_versions - -## Styles - -The setup.cfg `style=` configuration controls how the VCS information is -rendered into a version string. - -The default style, "pep440", produces a PEP440-compliant string, equal to the -un-prefixed tag name for actual releases, and containing an additional "local -version" section with more detail for in-between builds. For Git, this is -TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags ---dirty --always`. 
For example "0.11+2.g1076c97.dirty" indicates that the -tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and -that this commit is two revisions ("+2") beyond the "0.11" tag. For released -software (exactly equal to a known tag), the identifier will only contain the -stripped tag, e.g. "0.11". - -Other styles are available. See [details.md](details.md) in the Versioneer -source tree for descriptions. - -## Debugging - -Versioneer tries to avoid fatal errors: if something goes wrong, it will tend -to return a version of "0+unknown". To investigate the problem, run `setup.py -version`, which will run the version-lookup code in a verbose mode, and will -display the full contents of `get_versions()` (including the `error` string, -which may help identify what went wrong). - -## Known Limitations - -Some situations are known to cause problems for Versioneer. This details the -most significant ones. More can be found on Github -[issues page](https://github.com/python-versioneer/python-versioneer/issues). - -### Subprojects - -Versioneer has limited support for source trees in which `setup.py` is not in -the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are -two common reasons why `setup.py` might not be in the root: - -* Source trees which contain multiple subprojects, such as - [Buildbot](https://github.com/buildbot/buildbot), which contains both - "master" and "slave" subprojects, each with their own `setup.py`, - `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI - distributions (and upload multiple independently-installable tarballs). -* Source trees whose main purpose is to contain a C library, but which also - provide bindings to Python (and perhaps other languages) in subdirectories. - -Versioneer will look for `.git` in parent directories, and most operations -should get the right version string. However `pip` and `setuptools` have bugs -and implementation details which frequently cause `pip install .` from a -subproject directory to fail to find a correct version string (so it usually -defaults to `0+unknown`). - -`pip install --editable .` should work correctly. `setup.py install` might -work too. - -Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in -some later version. - -[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking -this issue. The discussion in -[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the -issue from the Versioneer side in more detail. -[pip PR#3176](https://github.com/pypa/pip/pull/3176) and -[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve -pip to let Versioneer work correctly. - -Versioneer-0.16 and earlier only looked for a `.git` directory next to the -`setup.cfg`, so subprojects were completely unsupported with those releases. - -### Editable installs with setuptools <= 18.5 - -`setup.py develop` and `pip install --editable .` allow you to install a -project into a virtualenv once, then continue editing the source code (and -test) without re-installing after every change. - -"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a -convenient way to specify executable scripts that should be installed along -with the python package. - -These both work as expected when using modern setuptools. 
When using -setuptools-18.5 or earlier, however, certain operations will cause -`pkg_resources.DistributionNotFound` errors when running the entrypoint -script, which must be resolved by re-installing the package. This happens -when the install happens with one version, then the egg_info data is -regenerated while a different version is checked out. Many setup.py commands -cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into -a different virtualenv), so this can be surprising. - -[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes -this one, but upgrading to a newer version of setuptools should probably -resolve it. - - -## Updating Versioneer - -To upgrade your project to a new release of Versioneer, do the following: - -* install the new Versioneer (`pip install -U versioneer` or equivalent) -* edit `setup.cfg` and `pyproject.toml`, if necessary, - to include any new configuration settings indicated by the release notes. - See [UPGRADING](./UPGRADING.md) for details. -* re-run `versioneer install --[no-]vendor` in your source tree, to replace - `SRC/_version.py` -* commit any changed files - -## Future Directions - -This tool is designed to make it easily extended to other version-control -systems: all VCS-specific components are in separate directories like -src/git/ . The top-level `versioneer.py` script is assembled from these -components by running make-versioneer.py . In the future, make-versioneer.py -will take a VCS name as an argument, and will construct a version of -`versioneer.py` that is specific to the given VCS. It might also take the -configuration arguments that are currently provided manually during -installation by editing setup.py . Alternatively, it might go the other -direction and include code from all supported VCS systems, reducing the -number of intermediate scripts. - -## Similar projects - -* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time - dependency -* [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of - versioneer -* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools - plugin - -## License - -To make Versioneer easier to embed, all its code is dedicated to the public -domain. The `_version.py` that it creates is also in the public domain. -Specifically, both are released under the "Unlicense", as described in -https://unlicense.org/. 
- -[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg -[pypi-url]: https://pypi.python.org/pypi/versioneer/ -[travis-image]: -https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg -[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer - -""" -# pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring -# pylint:disable=missing-class-docstring,too-many-branches,too-many-statements -# pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error -# pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with -# pylint:disable=attribute-defined-outside-init,too-many-arguments - -import configparser -import errno -import json -import os -import re -import subprocess -import sys -from pathlib import Path -from typing import Callable, Dict -import functools - -have_tomllib = True -if sys.version_info >= (3, 11): - import tomllib -else: - try: - import tomli as tomllib - except ImportError: - have_tomllib = False - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_root(): - """Get the project root directory. - - We require that all commands are run from the project root, i.e. the - directory that contains setup.py, setup.cfg, and versioneer.py . - """ - root = os.path.realpath(os.path.abspath(os.getcwd())) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - # allow 'python path/to/setup.py COMMAND' - root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - err = ("Versioneer was unable to run the project root directory. " - "Versioneer requires setup.py to be executed from " - "its immediate directory (like 'python setup.py COMMAND'), " - "or in a way that lets it use sys.argv[0] to find the root " - "(like 'python path/to/setup.py COMMAND').") - raise VersioneerBadRootError(err) - try: - # Certain runtime workflows (setup.py install/develop in a setuptools - # tree) execute all dependencies in a single python process, so - # "versioneer" may be imported multiple times, and python's shared - # module-import table will cache the first one. So we can't use - # os.path.dirname(__file__), as that will find whichever - # versioneer.py was first imported, even in later projects. - my_path = os.path.realpath(os.path.abspath(__file__)) - me_dir = os.path.normcase(os.path.splitext(my_path)[0]) - vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) - if me_dir != vsr_dir and "VERSIONEER_PEP518" not in globals(): - print("Warning: build in %s is using versioneer.py from %s" - % (os.path.dirname(my_path), versioneer_py)) - except NameError: - pass - return root - - -def get_config_from_root(root): - """Read the project setup.cfg file to determine Versioneer config.""" - # This might raise OSError (if setup.cfg is missing), or - # configparser.NoSectionError (if it lacks a [versioneer] section), or - # configparser.NoOptionError (if it lacks "VCS="). See the docstring at - # the top of versioneer.py for instructions on writing your setup.cfg . 
- root = Path(root) - pyproject_toml = root / "pyproject.toml" - setup_cfg = root / "setup.cfg" - section = None - if pyproject_toml.exists() and have_tomllib: - try: - with open(pyproject_toml, 'rb') as fobj: - pp = tomllib.load(fobj) - section = pp['tool']['versioneer'] - except (tomllib.TOMLDecodeError, KeyError): - pass - if not section: - parser = configparser.ConfigParser() - with open(setup_cfg) as cfg_file: - parser.read_file(cfg_file) - parser.get("versioneer", "VCS") # raise error if missing - - section = parser["versioneer"] - - cfg = VersioneerConfig() - cfg.VCS = section['VCS'] - cfg.style = section.get("style", "") - cfg.versionfile_source = section.get("versionfile_source") - cfg.versionfile_build = section.get("versionfile_build") - cfg.tag_prefix = section.get("tag_prefix") - if cfg.tag_prefix in ("''", '""', None): - cfg.tag_prefix = "" - cfg.parentdir_prefix = section.get("parentdir_prefix") - cfg.verbose = section.get("verbose") - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -# these dictionaries contain VCS-specific tools -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - HANDLERS.setdefault(vcs, {})[method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None), **popen_kwargs) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, process.returncode - return stdout, process.returncode - - -LONG_VERSION_PY['git'] = r''' -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. 
-# Generated by versioneer-0.28 -# https://github.com/python-versioneer/python-versioneer - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys -from typing import Callable, Dict -import functools - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). - git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" - git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" - git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "%(STYLE)s" - cfg.tag_prefix = "%(TAG_PREFIX)s" - cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" - cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None), **popen_kwargs) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %%s" %% dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %%s" %% (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %%s (error)" %% dispcmd) - print("stdout was %%s" %% stdout) - return None, process.returncode - return stdout, process.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. 
We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %%s but none started with prefix %%s" %% - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %%d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%%s', no digits" %% ",".join(refs - tags)) - if verbose: - print("likely tags: %%s" %% ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. 
"2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %%s" %% r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. - # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=not verbose) - if rc != 0: - if verbose: - print("Directory %%s not under git control" %% root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, [ - "describe", "--tags", "--dirty", "--always", "--long", - "--match", f"{tag_prefix}[[:digit:]]*" - ], cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. - branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. 
- branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? - pieces["error"] = ("unable to parse git-describe output: '%%s'" - %% describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%%s' doesn't start with prefix '%%s'" - print(fmt %% (full_tag, tag_prefix)) - pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" - %% (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) - pieces["distance"] = len(out.split()) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 
0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%%d.dev%%d" %% (post_version + 1, pieces["distance"]) - else: - rendered += ".post0.dev%%d" %% (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%%d" %% pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%%s'" %% style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. 
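        # Illustration (using "pyorbital/version.py" as a hypothetical
        # versionfile_source): two path components mean two os.path.dirname()
        # calls below, walking from .../pyorbital/version.py up to the
        # project root.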
- for _ in cfg.versionfile_source.split('/'): - root = os.path.dirname(root) - except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} -''' - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". 
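        # Illustration (made-up refs): with an old git the expanded keyword
        # may give refs like {"HEAD", "master", "release", "v1.2.3"}; the
        # digit heuristic below keeps only "v1.2.3" as a plausible version tag.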
- tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %s" % r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. - # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=not verbose) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, [ - "describe", "--tags", "--dirty", "--always", "--long", - "--match", f"{tag_prefix}[[:digit:]]*" - ], cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. 
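        # (A detached HEAD, and hence branch_name == "HEAD", is typical for CI
        # systems that check out a specific commit rather than a branch.)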
- branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? - pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) - pieces["distance"] = len(out.split()) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def do_vcs_install(versionfile_source, ipy): - """Git-specific installation logic for Versioneer. - - For Git, this means creating/changing .gitattributes to mark _version.py - for export-subst keyword substitution. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - files = [versionfile_source] - if ipy: - files.append(ipy) - if "VERSIONEER_PEP518" not in globals(): - try: - my_path = __file__ - if my_path.endswith((".pyc", ".pyo")): - my_path = os.path.splitext(my_path)[0] + ".py" - versioneer_file = os.path.relpath(my_path) - except NameError: - versioneer_file = "versioneer.py" - files.append(versioneer_file) - present = False - try: - with open(".gitattributes", "r") as fobj: - for line in fobj: - if line.strip().startswith(versionfile_source): - if "export-subst" in line.strip().split()[1:]: - present = True - break - except OSError: - pass - if not present: - with open(".gitattributes", "a+") as fobj: - fobj.write(f"{versionfile_source} export-subst\n") - files.append(".gitattributes") - run_command(GITS, ["add", "--"] + files) - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. 
- - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %s but none started with prefix %s" % - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -SHORT_VERSION_PY = """ -# This file was generated by 'versioneer.py' (0.28) from -# revision-control system data, or from the parent directory name of an -# unpacked source archive. Distribution tarballs contain a pre-generated copy -# of this file. - -import json - -version_json = ''' -%s -''' # END VERSION_JSON - - -def get_versions(): - return json.loads(version_json) -""" - - -def versions_from_file(filename): - """Try to determine the version from _version.py if present.""" - try: - with open(filename) as f: - contents = f.read() - except OSError: - raise NotThisMethod("unable to read _version.py") - mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) - if not mo: - mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) - if not mo: - raise NotThisMethod("no version_json in _version.py") - return json.loads(mo.group(1)) - - -def write_to_version_file(filename, versions): - """Write the given version number to the given _version.py file.""" - os.unlink(filename) - contents = json.dumps(versions, sort_keys=True, - indent=1, separators=(",", ": ")) - with open(filename, "w") as f: - f.write(SHORT_VERSION_PY % contents) - - print("set %s to '%s'" % (filename, versions["version"])) - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 
0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) - else: - rendered += ".post0.dev%d" % (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -class VersioneerBadRootError(Exception): - """The project root directory is unknown or missing key files.""" - - -def get_versions(verbose=False): - """Get the project version from whatever source is available. - - Returns dict with two keys: 'version' and 'full'. - """ - if "versioneer" in sys.modules: - # see the discussion in cmdclass.py:get_cmdclass() - del sys.modules["versioneer"] - - root = get_root() - cfg = get_config_from_root(root) - - assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" - handlers = HANDLERS.get(cfg.VCS) - assert handlers, "unrecognized VCS '%s'" % cfg.VCS - verbose = verbose or cfg.verbose - assert cfg.versionfile_source is not None, \ - "please set versioneer.versionfile_source" - assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" - - versionfile_abs = os.path.join(root, cfg.versionfile_source) - - # extract version from first of: _version.py, VCS command (e.g. 'git - # describe'), parentdir. 
This is meant to work for developers using a - # source checkout, for users of a tarball created by 'setup.py sdist', - # and for users of a tarball/zipball created by 'git archive' or github's - # download-from-tag feature or the equivalent in other VCSes. - - get_keywords_f = handlers.get("get_keywords") - from_keywords_f = handlers.get("keywords") - if get_keywords_f and from_keywords_f: - try: - keywords = get_keywords_f(versionfile_abs) - ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) - if verbose: - print("got version from expanded keyword %s" % ver) - return ver - except NotThisMethod: - pass - - try: - ver = versions_from_file(versionfile_abs) - if verbose: - print("got version from file %s %s" % (versionfile_abs, ver)) - return ver - except NotThisMethod: - pass - - from_vcs_f = handlers.get("pieces_from_vcs") - if from_vcs_f: - try: - pieces = from_vcs_f(cfg.tag_prefix, root, verbose) - ver = render(pieces, cfg.style) - if verbose: - print("got version from VCS %s" % ver) - return ver - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - if verbose: - print("got version from parentdir %s" % ver) - return ver - except NotThisMethod: - pass - - if verbose: - print("unable to compute version") - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, "error": "unable to compute version", - "date": None} - - -def get_version(): - """Get the short version string for this project.""" - return get_versions()["version"] - - -def get_cmdclass(cmdclass=None): - """Get the custom setuptools subclasses used by Versioneer. - - If the package uses a different cmdclass (e.g. one from numpy), it - should be provide as an argument. - """ - if "versioneer" in sys.modules: - del sys.modules["versioneer"] - # this fixes the "python setup.py develop" case (also 'install' and - # 'easy_install .'), in which subdependencies of the main project are - # built (using setup.py bdist_egg) in the same python process. Assume - # a main project A and a dependency B, which use different versions - # of Versioneer. A's setup.py imports A's Versioneer, leaving it in - # sys.modules by the time B's setup.py is executed, causing B to run - # with the wrong versioneer. Setuptools wraps the sub-dep builds in a - # sandbox that restores sys.modules to it's pre-build state, so the - # parent is protected against the child's "import versioneer". By - # removing ourselves from sys.modules here, before the child build - # happens, we protect the child from the parent's versioneer too. - # Also see https://github.com/python-versioneer/python-versioneer/issues/52 - - cmds = {} if cmdclass is None else cmdclass.copy() - - # we add "version" to setuptools - from setuptools import Command - - class cmd_version(Command): - description = "report generated version string" - user_options = [] - boolean_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - def run(self): - vers = get_versions(verbose=True) - print("Version: %s" % vers["version"]) - print(" full-revisionid: %s" % vers.get("full-revisionid")) - print(" dirty: %s" % vers.get("dirty")) - print(" date: %s" % vers.get("date")) - if vers["error"]: - print(" error: %s" % vers["error"]) - cmds["version"] = cmd_version - - # we override "build_py" in setuptools - # - # most invocation pathways end up running build_py: - # distutils/build -> build_py - # distutils/install -> distutils/build ->.. 
- # setuptools/bdist_wheel -> distutils/install ->.. - # setuptools/bdist_egg -> distutils/install_lib -> build_py - # setuptools/install -> bdist_egg ->.. - # setuptools/develop -> ? - # pip install: - # copies source tree to a tempdir before running egg_info/etc - # if .git isn't copied too, 'git describe' will fail - # then does setup.py bdist_wheel, or sometimes setup.py install - # setup.py egg_info -> ? - - # pip install -e . and setuptool/editable_wheel will invoke build_py - # but the build_py command is not expected to copy any files. - - # we override different "build_py" commands for both environments - if 'build_py' in cmds: - _build_py = cmds['build_py'] - else: - from setuptools.command.build_py import build_py as _build_py - - class cmd_build_py(_build_py): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_py.run(self) - if getattr(self, "editable_mode", False): - # During editable installs `.py` and data files are - # not copied to build_lib - return - # now locate _version.py in the new build/ directory and replace - # it with an updated value - if cfg.versionfile_build: - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - cmds["build_py"] = cmd_build_py - - if 'build_ext' in cmds: - _build_ext = cmds['build_ext'] - else: - from setuptools.command.build_ext import build_ext as _build_ext - - class cmd_build_ext(_build_ext): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_ext.run(self) - if self.inplace: - # build_ext --inplace will only build extensions in - # build/lib<..> dir with no _version.py to write to. - # As in place builds will already have a _version.py - # in the module dir, we do not need to write one. - return - # now locate _version.py in the new build/ directory and replace - # it with an updated value - if not cfg.versionfile_build: - return - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) - if not os.path.exists(target_versionfile): - print(f"Warning: {target_versionfile} does not exist, skipping " - "version update. This can happen if you are running build_ext " - "without first running build_py.") - return - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - cmds["build_ext"] = cmd_build_ext - - if "cx_Freeze" in sys.modules: # cx_freeze enabled? - from cx_Freeze.dist import build_exe as _build_exe - # nczeczulin reports that py2exe won't like the pep440-style string - # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. - # setup(console=[{ - # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION - # "product_version": versioneer.get_version(), - # ... 
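        # e.g. a made-up version "1.8.0+3.gabc1234" would give FILEVERSION
        # "1.8.0" and PRODUCTVERSION "1.8.0+3.gabc1234" with the split above.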
- - class cmd_build_exe(_build_exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _build_exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - cmds["build_exe"] = cmd_build_exe - del cmds["build_py"] - - if 'py2exe' in sys.modules: # py2exe enabled? - try: - from py2exe.setuptools_buildexe import py2exe as _py2exe - except ImportError: - from py2exe.distutils_buildexe import py2exe as _py2exe - - class cmd_py2exe(_py2exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _py2exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - cmds["py2exe"] = cmd_py2exe - - # sdist farms its file list building out to egg_info - if 'egg_info' in cmds: - _egg_info = cmds['egg_info'] - else: - from setuptools.command.egg_info import egg_info as _egg_info - - class cmd_egg_info(_egg_info): - def find_sources(self): - # egg_info.find_sources builds the manifest list and writes it - # in one shot - super().find_sources() - - # Modify the filelist and normalize it - root = get_root() - cfg = get_config_from_root(root) - self.filelist.append('versioneer.py') - if cfg.versionfile_source: - # There are rare cases where versionfile_source might not be - # included by default, so we must be explicit - self.filelist.append(cfg.versionfile_source) - self.filelist.sort() - self.filelist.remove_duplicates() - - # The write method is hidden in the manifest_maker instance that - # generated the filelist and was thrown away - # We will instead replicate their final normalization (to unicode, - # and POSIX-style paths) - from setuptools import unicode_utils - normalized = [unicode_utils.filesys_decode(f).replace(os.sep, '/') - for f in self.filelist.files] - - manifest_filename = os.path.join(self.egg_info, 'SOURCES.txt') - with open(manifest_filename, 'w') as fobj: - fobj.write('\n'.join(normalized)) - - cmds['egg_info'] = cmd_egg_info - - # we override different "sdist" commands for both environments - if 'sdist' in cmds: - _sdist = cmds['sdist'] - else: - from setuptools.command.sdist import sdist as _sdist - - class cmd_sdist(_sdist): - def run(self): - versions = get_versions() - self._versioneer_generated_versions = versions - # unless we update this, the command will keep using the old - # version - self.distribution.metadata.version = versions["version"] - return _sdist.run(self) - - def make_release_tree(self, base_dir, files): - root = get_root() - cfg = get_config_from_root(root) - _sdist.make_release_tree(self, base_dir, files) - # now locate _version.py in the new base_dir directory - # (remembering that it may be a hardlink) and replace it with an - # updated value - target_versionfile = 
os.path.join(base_dir, cfg.versionfile_source) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, - self._versioneer_generated_versions) - cmds["sdist"] = cmd_sdist - - return cmds - - -CONFIG_ERROR = """ -setup.cfg is missing the necessary Versioneer configuration. You need -a section like: - - [versioneer] - VCS = git - style = pep440 - versionfile_source = src/myproject/_version.py - versionfile_build = myproject/_version.py - tag_prefix = - parentdir_prefix = myproject- - -You will also need to edit your setup.py to use the results: - - import versioneer - setup(version=versioneer.get_version(), - cmdclass=versioneer.get_cmdclass(), ...) - -Please read the docstring in ./versioneer.py for configuration instructions, -edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. -""" - -SAMPLE_CONFIG = """ -# See the docstring in versioneer.py for instructions. Note that you must -# re-run 'versioneer.py setup' after changing this section, and commit the -# resulting files. - -[versioneer] -#VCS = git -#style = pep440 -#versionfile_source = -#versionfile_build = -#tag_prefix = -#parentdir_prefix = - -""" - -OLD_SNIPPET = """ -from ._version import get_versions -__version__ = get_versions()['version'] -del get_versions -""" - -INIT_PY_SNIPPET = """ -from . import {0} -__version__ = {0}.get_versions()['version'] -""" - - -def do_setup(): - """Do main VCS-independent setup function for installing Versioneer.""" - root = get_root() - try: - cfg = get_config_from_root(root) - except (OSError, configparser.NoSectionError, - configparser.NoOptionError) as e: - if isinstance(e, (OSError, configparser.NoSectionError)): - print("Adding sample versioneer config to setup.cfg", - file=sys.stderr) - with open(os.path.join(root, "setup.cfg"), "a") as f: - f.write(SAMPLE_CONFIG) - print(CONFIG_ERROR, file=sys.stderr) - return 1 - - print(" creating %s" % cfg.versionfile_source) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - - ipy = os.path.join(os.path.dirname(cfg.versionfile_source), - "__init__.py") - if os.path.exists(ipy): - try: - with open(ipy, "r") as f: - old = f.read() - except OSError: - old = "" - module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0] - snippet = INIT_PY_SNIPPET.format(module) - if OLD_SNIPPET in old: - print(" replacing boilerplate in %s" % ipy) - with open(ipy, "w") as f: - f.write(old.replace(OLD_SNIPPET, snippet)) - elif snippet not in old: - print(" appending to %s" % ipy) - with open(ipy, "a") as f: - f.write(snippet) - else: - print(" %s unmodified" % ipy) - else: - print(" %s doesn't exist, ok" % ipy) - ipy = None - - # Make VCS-specific changes. For git, this means creating/changing - # .gitattributes to mark _version.py for export-subst keyword - # substitution. 
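    # Illustration: with a hypothetical versionfile_source of
    # "pyorbital/version.py", do_vcs_install() below ensures .gitattributes
    # contains the line
    #     pyorbital/version.py export-subst
    # so "git archive" tarballs ship the file with its $Format$ keywords
    # already expanded.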
- do_vcs_install(cfg.versionfile_source, ipy) - return 0 - - -def scan_setup_py(): - """Validate the contents of setup.py against Versioneer's expectations.""" - found = set() - setters = False - errors = 0 - with open("setup.py", "r") as f: - for line in f.readlines(): - if "import versioneer" in line: - found.add("import") - if "versioneer.get_cmdclass()" in line: - found.add("cmdclass") - if "versioneer.get_version()" in line: - found.add("get_version") - if "versioneer.VCS" in line: - setters = True - if "versioneer.versionfile_source" in line: - setters = True - if len(found) != 3: - print("") - print("Your setup.py appears to be missing some important items") - print("(but I might be wrong). Please make sure it has something") - print("roughly like the following:") - print("") - print(" import versioneer") - print(" setup( version=versioneer.get_version(),") - print(" cmdclass=versioneer.get_cmdclass(), ...)") - print("") - errors += 1 - if setters: - print("You should remove lines like 'versioneer.VCS = ' and") - print("'versioneer.versionfile_source = ' . This configuration") - print("now lives in setup.cfg, and should be removed from setup.py") - print("") - errors += 1 - return errors - - -def setup_command(): - """Set up Versioneer and exit with appropriate error code.""" - errors = do_setup() - errors += scan_setup_py() - sys.exit(1 if errors else 0) - - -if __name__ == "__main__": - cmd = sys.argv[1] - if cmd == "setup": - setup_command() From 532c5aec55a5c262348e09aed8c97cc4a6064b5a Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 21 Nov 2024 14:52:12 +0100 Subject: [PATCH 67/89] Fix style and try please ruff - fix tests etc Signed-off-by: Adam.Dybbroe --- doc/source/conf.py | 39 +- pyorbital/astronomy.py | 50 +-- pyorbital/geoloc.py | 26 +- pyorbital/geoloc_example.py | 21 +- pyorbital/geoloc_instrument_definitions.py | 21 +- pyorbital/logger.py | 8 +- pyorbital/orbital.py | 90 ++--- pyorbital/tests/test_aiaa.py | 37 +- pyorbital/tests/test_geoloc.py | 7 +- pyorbital/tests/test_orbital.py | 145 ++++--- pyorbital/tests/test_tlefile.py | 422 ++++++++++----------- pyorbital/tlefile.py | 140 +++---- 12 files changed, 507 insertions(+), 499 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index ab8067a5..f3bba78e 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -12,16 +12,17 @@ # serve to show the default. """Configurations for sphinx based documentation.""" -import sys +import datetime as dt import os +import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath('../../')) -sys.path.insert(0, os.path.abspath('../../pyorbital')) -from pyorbital import __version__ # noqa +sys.path.insert(0, os.path.abspath("../../")) +sys.path.insert(0, os.path.abspath("../../pyorbital")) +from pyorbital.version import __version__ # noqa # -- General configuration ----------------------------------------------------- @@ -30,30 +31,32 @@ # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.coverage', 'sphinx.ext.napoleon'] +extensions = ["sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx.ext.coverage", "sphinx.ext.napoleon"] # Add any paths that contain templates here, relative to this directory. -templates_path = ['.templates'] +templates_path = [".templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. # #source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'pyorbital' -copyright = u'2012-2023, The Pytroll crew' +project = u"pyorbital" +copyright = u"2012, 2024-{}, The PyTroll Team".format(dt.datetime.utcnow().strftime("%Y")) # noqa: A001 + + # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -version = __version__.split('+')[0] +version = __version__.split("+")[0] # The full version, including alpha/beta/rc tags. release = __version__ @@ -69,7 +72,7 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = [] +# exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all documents. # #default_role = None @@ -86,7 +89,7 @@ # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # #modindex_common_prefix = [] @@ -180,7 +183,7 @@ # #html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = 'pyorbitaldoc' +htmlhelp_basename = "pyorbitaldoc" # -- Options for LaTeX output -------------------------------------------------- @@ -194,8 +197,8 @@ # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'pyorbital.tex', u'pyorbital Documentation', - u'The Pytroll crew', 'manual'), + ("index", "pyorbital.tex", u"pyorbital Documentation", + u"The Pytroll crew", "manual"), ] # The name of an image file (relative to this directory) to place at the top of @@ -227,6 +230,6 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - ('index', 'pyorbital', u'pyorbital Documentation', - [u'The Pytroll crew'], 1) + ("index", "pyorbital", u"pyorbital Documentation", + [u"The Pytroll crew"], 1) ] diff --git a/pyorbital/astronomy.py b/pyorbital/astronomy.py index 881e97d2..27935538 100644 --- a/pyorbital/astronomy.py +++ b/pyorbital/astronomy.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- # -# Copyright (c) 2011, 2013 +# Copyright (c) 2011, 2013, 2024 # # Author(s): # @@ -36,7 +36,6 @@ produce scalar outputs. """ -import datetime import numpy as np @@ -48,23 +47,20 @@ def jdays2000(utc_time): - """Get the days since year 2000. - """ - return _days(dt2np(utc_time) - np.datetime64('2000-01-01T12:00')) + """Get the days since year 2000.""" + return _days(dt2np(utc_time) - np.datetime64("2000-01-01T12:00")) def jdays(utc_time): - """Get the julian day of *utc_time*. - """ + """Get the julian day of *utc_time*.""" return jdays2000(utc_time) + 2451545.0 def _days(dt): - """Get the days (floating point) from *d_t*. 
- """ + """Get the days (floating point) from *d_t*.""" if hasattr(dt, "shape"): dt = np.asanyarray(dt, dtype=np.timedelta64) - return dt / np.timedelta64(1, 'D') + return dt / np.timedelta64(1, "D") def gmst(utc_time): @@ -81,14 +77,16 @@ def gmst(utc_time): def _lmst(utc_time, longitude): """Local mean sidereal time, computed from *utc_time* and *longitude*. - In radians. + + utc_time: The UTC time as a datetime.datetime object. + Logitude: The longitude in radians. + Returns: local mean sideral time in radians. """ return gmst(utc_time) + longitude def sun_ecliptic_longitude(utc_time): - """Ecliptic longitude of the sun at *utc_time*. - """ + """Ecliptic longitude of the sun at *utc_time*.""" jdate = jdays2000(utc_time) / 36525.0 # mean anomaly, rad m_a = np.deg2rad(357.52910 + @@ -105,8 +103,7 @@ def sun_ecliptic_longitude(utc_time): def sun_ra_dec(utc_time): - """Right ascension and declination of the sun at *utc_time*. - """ + """Right ascension and declination of the sun at *utc_time*.""" jdate = jdays2000(utc_time) / 36525.0 eps = np.deg2rad(23.0 + 26.0 / 60.0 + 21.448 / 3600.0 - (46.8150 * jdate + 0.00059 * jdate * jdate - @@ -124,9 +121,12 @@ def sun_ra_dec(utc_time): def _local_hour_angle(utc_time, longitude, right_ascension): - """Hour angle at *utc_time* for the given *longitude* and - *right_ascension* - longitude in radians + """Derive the hour angle at *utc_time* for the given *longitude* and *right_ascension*. + + utc_time: datetime.datetime instance of the UTC time + longitude: Longitude in radians. + right_ascension: The right ascension in radians. + Returns: Hour angle in radians. """ return _lmst(utc_time, longitude) - right_ascension @@ -152,9 +152,12 @@ def get_alt_az(utc_time, lon, lat): def cos_zen(utc_time, lon, lat): - """Cosine of the sun-zenith angle for *lon*, *lat* at *utc_time*. + """Derive the cosine of the sun-zenith angle for *lon*, *lat* at *utc_time*. + utc_time: datetime.datetime instance of the UTC time - lon and lat in degrees. + lon: Longitude in degrees + lat: Latitude in degrees. + Returns: Cosine of the sun zenith angle. """ lon = np.deg2rad(lon) lat = np.deg2rad(lat) @@ -169,8 +172,9 @@ def cos_zen(utc_time, lon, lat): def sun_zenith_angle(utc_time, lon, lat): """Sun-zenith angle for *lon*, *lat* at *utc_time*. + lon,lat in degrees. - The angle returned is given in degrees + The sun zenith angle returned is in degrees. """ sza = np.rad2deg(np.arccos(cos_zen(utc_time, lon, lat))) if not isinstance(lon, float): @@ -179,8 +183,7 @@ def sun_zenith_angle(utc_time, lon, lat): def sun_earth_distance_correction(utc_time): - """Calculate the sun earth distance correction, relative to 1 AU. - """ + """Calculate the sun earth distance correction, relative to 1 AU.""" # Computation according to # https://web.archive.org/web/20150117190838/http://curious.astro.cornell.edu/question.php?number=582 # with @@ -208,7 +211,6 @@ def observer_position(utc_time, lon, lat, alt): http://celestrak.com/columns/v02n03/ """ - lon = np.deg2rad(lon) lat = np.deg2rad(lat) diff --git a/pyorbital/geoloc.py b/pyorbital/geoloc.py index 00f10a52..60a324c8 100644 --- a/pyorbital/geoloc.py +++ b/pyorbital/geoloc.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2011 - 2019 Pytroll Community +# Copyright (c) 2011 - 2019, 2024 Pytroll Community # Author(s): @@ -21,8 +21,7 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -"""Module to compute geolocalization of a satellite scene. 
-""" +"""Module to compute geolocalization of a satellite scene.""" # TODO: # - Attitude correction @@ -31,13 +30,13 @@ # - test !!! from __future__ import print_function + import numpy as np # DIRTY STUFF. Needed the get_lonlatalt function to work on pos directly if # we want to print out lonlats in the end. from pyorbital import astronomy -from pyorbital.orbital import XKMPER, F -from pyorbital.orbital import Orbital +from pyorbital.orbital import XKMPER, F, Orbital A = 6378.137 # WGS84 Equatorial radius (km) B = 6356.75231414 # km, GRS80 @@ -45,6 +44,7 @@ def geodetic_lat(point, a=A, b=B): + """Get the Geodetic latitude of a point.""" x, y, z = point r = np.sqrt(x * x + y * y) geoc_lat = np.arctan2(z, r) @@ -84,8 +84,9 @@ class ScanGeometry(object): """ def __init__(self, fovs, times, attitude=(0, 0, 0)): + """Initialize the class.""" self.fovs = np.array(fovs) - self._times = np.array(times) * np.timedelta64(1000000000, 'ns') + self._times = np.array(times) * np.timedelta64(1000000000, "ns") self.attitude = attitude def vectors(self, pos, vel, roll=0.0, pitch=0.0, yaw=0.0): @@ -120,6 +121,7 @@ def vectors(self, pos, vel, roll=0.0, pitch=0.0, yaw=0.0): return qrotate(xy_rotated, nadir, yaw) def times(self, start_of_scan): + """Return an array with the times of each scan line.""" # tds = [timedelta(seconds=i) for i in self._times] # tds = self._times.astype('timedelta64[us]') try: @@ -129,12 +131,15 @@ def times(self, start_of_scan): class Quaternion(object): + """Some class, that I don't know what is doing...""" def __init__(self, scalar, vector): + """Initialize the class.""" self.__x, self.__y, self.__z = vector.reshape((3, -1)) self.__w = scalar.ravel() def rotation_matrix(self): + """Get the rotation matrix.""" x, y, z, w = self.__x, self.__y, self.__z, self.__w zero = np.zeros_like(x) return np.array( @@ -240,27 +245,28 @@ def compute_pixels(orb, sgeom, times, rpy=(0.0, 0.0, 0.0)): def norm(v): + """Return the norm of the vector *v*.""" return np.sqrt(np.dot(v, v.conj())) def mnorm(m, axis=None): - """norm of a matrix of vectors stacked along the *axis* dimension.""" + """Norm of a matrix of vectors stacked along the *axis* dimension.""" if axis is None: axis = np.ndim(m) - 1 return np.sqrt((m**2).sum(axis)) def vnorm(m): - """norms of a matrix of column vectors.""" + """Norms of a matrix of column vectors.""" return np.sqrt((m**2).sum(0)) def hnorm(m): - """norms of a matrix of row vectors.""" + """Norms of a matrix of row vectors.""" return np.sqrt((m**2).sum(1)) -if __name__ == '__main__': +if __name__ == "__main__": # NOAA 18 (from the 2011-10-12, 16:55 utc) # 1 28654U 05018A 11284.35271227 .00000478 00000-0 28778-3 0 9246 # 2 28654 99.0096 235.8581 0014859 135.4286 224.8087 14.11526826329313 diff --git a/pyorbital/geoloc_example.py b/pyorbital/geoloc_example.py index c3c12965..1b8b5671 100644 --- a/pyorbital/geoloc_example.py +++ b/pyorbital/geoloc_example.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2013 Martin Raspaud +# Copyright (c) 2013, 2024 Martin Raspaud # Author(s): @@ -20,14 +20,15 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -"""Simple usage for geoloc. 
-""" +"""Simple usage for geoloc.""" -import numpy as np from datetime import datetime -from pyorbital.geoloc import ScanGeometry, compute_pixels, get_lonlatalt -from mpl_toolkits.basemap import Basemap + import matplotlib.pyplot as plt +import numpy as np +from mpl_toolkits.basemap import Basemap + +from pyorbital.geoloc import ScanGeometry, compute_pixels, get_lonlatalt # Couple of example Two Line Elements tle1 = "1 33591U 09005A 12345.45213434 .00000391 00000-0 24004-3 0 6113" @@ -72,14 +73,14 @@ print(pos_time) # Plot the result -m = Basemap(projection='stere', llcrnrlat=24, urcrnrlat=70, llcrnrlon=-25, urcrnrlon=120, - lat_ts=58, lat_0=58, lon_0=14, resolution='l') +m = Basemap(projection="stere", llcrnrlat=24, urcrnrlat=70, llcrnrlon=-25, urcrnrlon=120, + lat_ts=58, lat_0=58, lon_0=14, resolution="l") # convert and plot the predicted pixels in red x, y = m(pos_time[0], pos_time[1]) -p1 = m.plot(x, y, marker='+', color='red', markerfacecolor='red', markeredgecolor='red', markersize=1, markevery=1, +p1 = m.plot(x, y, marker="+", color="red", markerfacecolor="red", markeredgecolor="red", markersize=1, markevery=1, zorder=4, linewidth=0.0) -m.fillcontinents(color='0.85', lake_color=None, zorder=3) +m.fillcontinents(color="0.85", lake_color=None, zorder=3) m.drawparallels(np.arange(-90., 90., 5.), labels=[1, 0, 1, 0], fontsize=10, dashes=[1, 0], color=[0.8, 0.8, 0.8], zorder=1) m.drawmeridians(np.arange(-180., 180., 5.), labels=[0, 1, 0, 1], fontsize=10, dashes=[1, 0], diff --git a/pyorbital/geoloc_instrument_definitions.py b/pyorbital/geoloc_instrument_definitions.py index 465ebc63..28cfb2a8 100644 --- a/pyorbital/geoloc_instrument_definitions.py +++ b/pyorbital/geoloc_instrument_definitions.py @@ -41,7 +41,6 @@ from pyorbital.geoloc import ScanGeometry - ################################################################ # # AVHRR @@ -78,7 +77,7 @@ def avhrr(scans_nb, scan_points, def avhrr_gac(scan_times, scan_points, scan_angle=55.37, frequency=0.5): - """Definition of the avhrr instrument, gac version + """Definition of the avhrr instrument, gac version. Source: NOAA KLM User's Guide, Appendix J http://www.ncdc.noaa.gov/oa/pod-guide/ncdc/docs/klm/html/j/app-j.htm @@ -154,9 +153,8 @@ def viirs(scans_nb, scan_indices=slice(0, None), 99 emtpy (excluded) scans """ - entire_width = np.arange(chn_pixels) - scan_points = entire_width[scan_indices].astype('int') + scan_points = entire_width[scan_indices].astype("int") scan_pixels = len(scan_points) # Initial angle 55.84 deg replaced with 56.28 deg found in @@ -210,7 +208,7 @@ def viirs_edge_geom(scans_nb): ################################################################ def amsua(scans_nb, scan_points=None): - """ Describe AMSU-A instrument geometry + """Describe AMSU-A instrument geometry. Parameters: scans_nb | int - number of scan lines @@ -222,7 +220,6 @@ def amsua(scans_nb, scan_points=None): pyorbital.geoloc.ScanGeometry object """ - scan_len = 30 # 30 samples per scan scan_rate = 8 # single scan, seconds scan_angle = -48.3 # swath, degrees @@ -255,7 +252,7 @@ def amsua(scans_nb, scan_points=None): ################################################################ def mhs(scans_nb, scan_points=None): - """ Describe MHS instrument geometry + """Describe MHS instrument geometry. See: @@ -274,7 +271,6 @@ def mhs(scans_nb, scan_points=None): pyorbital.geoloc.ScanGeometry object """ - scan_len = 90 # 90 samples per scan scan_rate = 8 / 3. 
# single scan, seconds scan_angle = -49.444 # swath, degrees @@ -332,7 +328,6 @@ def hirs4(scans_nb, scan_points=None): pyorbital.geoloc.ScanGeometry object """ - scan_len = 56 # 56 samples per scan scan_rate = 6.4 # single scan, seconds scan_angle = -49.5 # swath, degrees @@ -363,7 +358,7 @@ def hirs4(scans_nb, scan_points=None): ################################################################ def atms(scans_nb, scan_points=None): - """ Describe ATMS instrument geometry + """Describe ATMS instrument geometry See: - https://dtcenter.org/com-GSI/users/docs/presentations/2013_workshop/ @@ -382,7 +377,6 @@ def atms(scans_nb, scan_points=None): pyorbital.geoloc.ScanGeometry object """ - scan_len = 96 # 96 samples per scan scan_rate = 8 / 3. # single scan, seconds scan_angle = -52.7 # swath, degrees @@ -413,7 +407,7 @@ def atms(scans_nb, scan_points=None): ################################################################ def mwhs2(scans_nb, scan_points=None): - """Describe MWHS-2 instrument geometry + """Describe MWHS-2 instrument geometry. The scanning period is 2.667 s. Main beams of the antenna scan over the ob- serving swath (±53.35◦ from nadir) in the cross-track direction at a @@ -434,7 +428,6 @@ def mwhs2(scans_nb, scan_points=None): pyorbital.geoloc.ScanGeometry object """ - scan_len = 98 # 98 samples per scan scan_rate = 8 / 3. # single scan, seconds scan_angle = -53.35 # swath, degrees @@ -485,7 +478,6 @@ def olci(scans_nb, scan_points=None): Source: Sentinel-3 OLCI Coverage https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-olci/coverage """ - if scan_points is None: scan_len = 4000 # samples per scan scan_points = np.arange(4000) @@ -519,7 +511,6 @@ def ascat(scan_nb, scan_points=None): sub-satellite track. """ - if scan_points is None: scan_len = 42 # samples per scan scan_points = np.arange(42) diff --git a/pyorbital/logger.py b/pyorbital/logger.py index b4506643..c3070a7b 100644 --- a/pyorbital/logger.py +++ b/pyorbital/logger.py @@ -38,12 +38,12 @@ def logging_on(level=logging.WARNING): console = logging.StreamHandler() console.setFormatter(logging.Formatter("[%(levelname)s: %(asctime)s :" " %(name)s] %(message)s", - '%Y-%m-%d %H:%M:%S')) + "%Y-%m-%d %H:%M:%S")) console.setLevel(level) - logging.getLogger('').addHandler(console) + logging.getLogger("").addHandler(console) _is_logging_on = True - log = logging.getLogger('') + log = logging.getLogger("") log.setLevel(level) for h in log.handlers: h.setLevel(level) @@ -58,7 +58,7 @@ def emit(self, record): def logging_off(): """Turn logging off.""" - logging.getLogger('').handlers = [NullHandler()] + logging.getLogger("").handlers = [NullHandler()] def get_logger(name): diff --git a/pyorbital/orbital.py b/pyorbital/orbital.py index 07f2e40c..bba8bd98 100644 --- a/pyorbital/orbital.py +++ b/pyorbital/orbital.py @@ -27,9 +27,9 @@ import logging import warnings from datetime import datetime, timedelta -import pytz import numpy as np +import pytz from scipy import optimize from pyorbital import astronomy, dt2np, tlefile @@ -169,7 +169,7 @@ def __str__(self): def get_last_an_time(self, utc_time): """Calculate time of last ascending node relative to the specified time.""" # Propagate backwards to ascending node - dt = np.timedelta64(10, 'm') + dt = np.timedelta64(10, "m") t_old = np.datetime64(_get_tz_unaware_utctime(utc_time)) t_new = t_old - dt pos0, vel0 = self.get_position(t_old, normalize=False) @@ -298,6 +298,7 @@ def get_orbit_number(self, utc_time, tbus_style=False, as_float=False): """Calculate orbit number at specified 
time. Args: + utc_time: UTC time as a datetime.datetime object. tbus_style: If True, use TBUS-style orbit numbering (TLE orbit number + 1) as_float: Return a continuous orbit number as float. """ @@ -318,7 +319,7 @@ def get_orbit_number(self, utc_time, tbus_style=False, as_float=False): self.orbit_elements.an_period = self.orbit_elements.an_time - \ self.get_last_an_time(self.orbit_elements.an_time - - np.timedelta64(10, 'm')) + - np.timedelta64(10, "m")) dt = astronomy._days(utc_time - self.orbit_elements.an_time) orbit_period = astronomy._days(self.orbit_elements.an_period) @@ -385,7 +386,7 @@ def get_max_parab(fun, start, end, tol=0.01): f_c = fun(c) x = b - with np.errstate(invalid='raise'): + with np.errstate(invalid="raise"): while True: try: x = x - 0.5 * (((b - a) ** 2 * (f_b - f_c) @@ -398,7 +399,7 @@ def get_max_parab(fun, start, end, tol=0.01): f_x = fun(x) # sometimes the estimation diverges... return best guess if f_x > f_b: - logger.info('Parabolic interpolation did not converge, returning best guess so far.') + logger.info("Parabolic interpolation did not converge, returning best guess so far.") return b a, b, c = (a + x) / 2.0, x, (x + c) / 2.0 @@ -449,7 +450,7 @@ def _get_time_at_horizon(self, utc_time, obslon, obslat, **kwargs): warnings.warn("_get_time_at_horizon is replaced with get_next_passes", DeprecationWarning, stacklevel=2) if "precision" in kwargs: - precision = kwargs['precision'] + precision = kwargs["precision"] else: precision = timedelta(seconds=0.001) if "max_iterations" in kwargs: @@ -497,7 +498,7 @@ def utc2local(self, utc_time): lon, _, _ = self.get_lonlatalt(utc_time) return utc_time + timedelta(hours=lon * 24 / 360.0) - def get_equatorial_crossing_time(self, tstart, tend, node='ascending', local_time=False, + def get_equatorial_crossing_time(self, tstart, tend, node="ascending", local_time=False, rtol=1E-9): """Estimate the equatorial crossing time of an orbit. @@ -524,19 +525,19 @@ def get_equatorial_crossing_time(self, tstart, tend, node='ascending', local_tim # Orbit doesn't cross the equator in the given time interval return None elif n_end - n_start > 1: - warnings.warn('Multiple revolutions between start and end time. Computing crossing ' - 'time for the last revolution in that interval.', stacklevel=2) + warnings.warn("Multiple revolutions between start and end time. Computing crossing " + "time for the last revolution in that interval.", stacklevel=2) # Let n'(t) = n(t) - offset. Determine offset so that n'(tstart) < 0 and n'(tend) > 0 and # n'(tcross) = 0. offset = int(n_end) - if node == 'descending': + if node == "descending": offset = offset + 0.5 # Use bisection algorithm to find the root of n'(t), which is the crossing time. The # algorithm requires continuous time coordinates, so convert timestamps to microseconds # since 1970. 
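# Illustrative sketch, not from the patch itself: the comment above describes the
# crossing-time search as root-finding on a shifted, continuous orbit number
# n'(t) = n(t) - offset, with time expressed as microseconds since 1970 so a
# standard scalar root solver can be used.  The toy n(t) below (one revolution
# per 6080 s) is an assumption for demonstration only; the real code wraps the
# class's own orbit-number computation (compare _nprime just below).
import numpy as np
from scipy import optimize


def _toy_orbit_number(time_us):
    """Toy continuous orbit number as a function of microseconds since 1970."""
    return time_us / (6080.0 * 1e6)


def _sketch_crossing_time(tstart, tend, offset, rtol=1E-9):
    """Find t with n(t) == offset by root-finding on microseconds since 1970."""
    t0 = np.datetime64(tstart, "us").astype(np.int64)
    t1 = np.datetime64(tend, "us").astype(np.int64)
    # The solver needs the offset bracketed between n(t0) and n(t1).
    root = optimize.brentq(lambda t: _toy_orbit_number(t) - offset, t0, t1, rtol=rtol)
    return np.datetime64(int(root), "us")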
- time_unit = 'us' # same precision as datetime + time_unit = "us" # same precision as datetime def _nprime(time_f): """Continuous orbit number as a function of time.""" @@ -618,6 +619,7 @@ class _SGDP4(object): """Class for the SGDP4 computations.""" def __init__(self, orbit_elements): + """Initialize class.""" self.mode = None # perigee = orbit_elements.perigee @@ -636,11 +638,11 @@ def __init__(self, orbit_elements): # A30 = -XJ3 * AE**3 if not (0 < self.eo < ECC_LIMIT_HIGH): - raise OrbitalError('Eccentricity out of range: %e' % self.eo) + raise OrbitalError("Eccentricity out of range: %e" % self.eo) elif not ((0.0035 * 2 * np.pi / XMNPDA) < self.xn_0 < (18 * 2 * np.pi / XMNPDA)): - raise OrbitalError('Mean motion out of range: %e' % self.xn_0) + raise OrbitalError("Mean motion out of range: %e" % self.xn_0) elif not (0 < self.xincl < np.pi): - raise OrbitalError('Inclination out of range: %e' % self.xincl) + raise OrbitalError("Inclination out of range: %e" % self.xincl) if self.eo < 0: self.mode = self.SGDP4_ZERO_ECC @@ -776,7 +778,7 @@ def __init__(self, orbit_elements): 15.0 * c1sq * (2.0 * self.d2 + c1sq))) elif self.mode == SGDP4_DEEP_NORM: - raise NotImplementedError('Deep space calculations not supported') + raise NotImplementedError("Deep space calculations not supported") def propagate(self, utc_time): kep = {} @@ -786,7 +788,7 @@ def propagate(self, utc_time): # print utc_time.shape # print self.t_0 utc_time = dt2np(utc_time) - ts = (utc_time - self.t_0) / np.timedelta64(1, 'm') + ts = (utc_time - self.t_0) / np.timedelta64(1, "m") em = self.eo xinc = self.xincl @@ -796,7 +798,7 @@ def propagate(self, utc_time): omega = self.omegao + self.omgdot * ts if self.mode == SGDP4_ZERO_ECC: - raise NotImplementedError('Mode SGDP4_ZERO_ECC not implemented') + raise NotImplementedError("Mode SGDP4_ZERO_ECC not implemented") elif self.mode == SGDP4_NEAR_SIMP: raise NotImplementedError('Mode "Near-space, simplified equations"' ' not implemented') @@ -819,12 +821,12 @@ def propagate(self, utc_time): xl = xmp + omega + xnode + self.xnodp * templ else: - raise NotImplementedError('Deep space calculations not supported') + raise NotImplementedError("Deep space calculations not supported") if np.any(a < 1): - raise Exception('Satellite crashed at time %s', utc_time) + raise Exception("Satellite crashed at time %s", utc_time) elif np.any(e < ECC_LIMIT_LOW): - raise ValueError('Satellite modified eccentricity too low: %s < %e' + raise ValueError("Satellite modified eccentricity too low: %s < %e" % (str(e[e < ECC_LIMIT_LOW]), ECC_LIMIT_LOW)) e = np.where(e < ECC_EPS, ECC_EPS, e) @@ -844,14 +846,14 @@ def propagate(self, utc_time): elsq = axn**2 + ayn**2 if np.any(elsq >= 1): - raise Exception('e**2 >= 1 at %s', utc_time) + raise Exception("e**2 >= 1 at %s", utc_time) - kep['ecc'] = np.sqrt(elsq) + kep["ecc"] = np.sqrt(elsq) epw = np.fmod(xlt - xnode, 2 * np.pi) # needs a copy in case of an array capu = np.array(epw) - maxnr = kep['ecc'] + maxnr = kep["ecc"] for i in range(10): sinEPW = np.sin(epw) cosEPW = np.cos(epw) @@ -899,7 +901,7 @@ def propagate(self, utc_time): xinck = xinc + 1.5 * temp2 * self.cosIO * self.sinIO * cos2u if np.any(rk < 1): - raise Exception('Satellite crashed at time %s', utc_time) + raise Exception("Satellite crashed at time %s", utc_time) temp0 = np.sqrt(a) temp2 = XKE / (a * temp0) @@ -909,14 +911,14 @@ def propagate(self, utc_time): (self.x1mth2 * cos2u + 1.5 * self.x3thm1)) * (XKMPER / AE * XMNPDA / 86400.0)) - kep['radius'] = rk * XKMPER / AE - kep['theta'] = uk - 
kep['eqinc'] = xinck - kep['ascn'] = xnodek - kep['argp'] = omega - kep['smjaxs'] = a * XKMPER / AE - kep['rdotk'] = rdotk - kep['rfdotk'] = rfdotk + kep["radius"] = rk * XKMPER / AE + kep["theta"] = uk + kep["eqinc"] = xinck + kep["ascn"] = xnodek + kep["argp"] = omega + kep["smjaxs"] = a * XKMPER / AE + kep["rdotk"] = rdotk + kep["rfdotk"] = rfdotk return kep @@ -940,12 +942,12 @@ def kep2xyz(kep): (Not sure what 'kep' actually refers to, just guessing! FIXME!) """ - sinT = np.sin(kep['theta']) - cosT = np.cos(kep['theta']) - sinI = np.sin(kep['eqinc']) - cosI = np.cos(kep['eqinc']) - sinS = np.sin(kep['ascn']) - cosS = np.cos(kep['ascn']) + sinT = np.sin(kep["theta"]) + cosT = np.cos(kep["theta"]) + sinI = np.sin(kep["eqinc"]) + cosI = np.cos(kep["eqinc"]) + sinS = np.sin(kep["ascn"]) + cosS = np.cos(kep["ascn"]) xmx = -sinS * cosI xmy = cosS * cosI @@ -954,17 +956,17 @@ def kep2xyz(kep): uy = xmy * sinT + sinS * cosT uz = sinI * sinT - x = kep['radius'] * ux - y = kep['radius'] * uy - z = kep['radius'] * uz + x = kep["radius"] * ux + y = kep["radius"] * uy + z = kep["radius"] * uz vx = xmx * cosT - cosS * sinT vy = xmy * cosT - sinS * sinT vz = sinI * cosT - v_x = kep['rdotk'] * ux + kep['rfdotk'] * vx - v_y = kep['rdotk'] * uy + kep['rfdotk'] * vy - v_z = kep['rdotk'] * uz + kep['rfdotk'] * vz + v_x = kep["rdotk"] * ux + kep["rfdotk"] * vx + v_y = kep["rdotk"] * uy + kep["rfdotk"] * vy + v_z = kep["rdotk"] * uz + kep["rfdotk"] * vz return np.array((x, y, z)), np.array((v_x, v_y, v_z)) diff --git a/pyorbital/tests/test_aiaa.py b/pyorbital/tests/test_aiaa.py index 6362ab4e..290c7187 100644 --- a/pyorbital/tests/test_aiaa.py +++ b/pyorbital/tests/test_aiaa.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2011 - 2023 Pytroll Community +# Copyright (c) 2011 - 2024 Pytroll Community # Author(s): @@ -20,8 +20,8 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -"""Test cases from the AIAA article. -""" +"""Test cases from the AIAA article.""" + # TODO: right formal unit tests. from __future__ import print_function, with_statement @@ -38,10 +38,10 @@ class LineOrbital(Orbital): - """Read TLE lines instead of file. - """ + """Read TLE lines instead of file.""" def __init__(self, satellite, line1, line2): + """Initialize the class.""" satellite = satellite.upper() self.satellite_name = satellite self.tle = tlefile.read(satellite, line1=line1, line2=line2) @@ -50,8 +50,7 @@ def __init__(self, satellite, line1, line2): def get_results(satnumber, delay): - """Get expected results from result file. - """ + """Get expected results from result file.""" path = os.path.dirname(os.path.abspath(__file__)) with open(os.path.join(path, "aiaa_results")) as f_2: line = f_2.readline() @@ -87,10 +86,9 @@ class AIAAIntegrationTest(unittest.TestCase): @unittest.skipIf( not os.path.exists(os.path.join(_DATAPATH, "SGP4-VER.TLE")), - 'SGP4-VER.TLE not available') + "SGP4-VER.TLE not available") def test_aiaa(self): - """Do the tests against AIAA test cases. 
- """ + """Do the tests against AIAA test cases.""" path = os.path.dirname(os.path.abspath(__file__)) with open(os.path.join(path, "SGP4-VER.TLE")) as f__: test_line = f__.readline() @@ -116,12 +114,11 @@ def test_aiaa(self): test_line = f__.readline() continue except ChecksumError: - self.assertTrue(test_line.split()[1] in [ - "33333", "33334", "33335"]) + assert test_line.split()[1] in ["33333", "33334", "33335"] for delay in times: try: test_time = delay.astype( - 'timedelta64[m]') + o.tle.epoch + "timedelta64[m]") + o.tle.epoch pos, vel = o.get_position(test_time, False) res = get_results( int(o.tle.satnumber), float(delay)) @@ -136,14 +133,14 @@ def test_aiaa(self): delta_pos = 5e-6 # km = 5 mm delta_vel = 5e-9 # km/s = 5 um/s delta_time = 1e-3 # 1 millisecond - self.assertTrue(abs(res[0] - pos[0]) < delta_pos) - self.assertTrue(abs(res[1] - pos[1]) < delta_pos) - self.assertTrue(abs(res[2] - pos[2]) < delta_pos) - self.assertTrue(abs(res[3] - vel[0]) < delta_vel) - self.assertTrue(abs(res[4] - vel[1]) < delta_vel) - self.assertTrue(abs(res[5] - vel[2]) < delta_vel) + assert abs(res[0] - pos[0]) < delta_pos + assert abs(res[1] - pos[1]) < delta_pos + assert abs(res[2] - pos[2]) < delta_pos + assert abs(res[3] - vel[0]) < delta_vel + assert abs(res[4] - vel[1]) < delta_vel + assert abs(res[5] - vel[2]) < delta_vel if res[6] is not None: dt = astronomy._days(res[6] - test_time) * 24 * 60 - self.assertTrue(abs(dt) < delta_time) + assert abs(dt) < delta_time test_line = f__.readline() diff --git a/pyorbital/tests/test_geoloc.py b/pyorbital/tests/test_geoloc.py index e5797784..eceb9d33 100644 --- a/pyorbital/tests/test_geoloc.py +++ b/pyorbital/tests/test_geoloc.py @@ -23,10 +23,11 @@ """Test the geoloc module.""" from datetime import datetime + import numpy as np from pyorbital.geoloc import ScanGeometry, geodetic_lat, qrotate, subpoint -from pyorbital.geoloc_instrument_definitions import avhrr, viirs, amsua, mhs, hirs4, atms, ascat +from pyorbital.geoloc_instrument_definitions import amsua, ascat, atms, avhrr, hirs4, mhs, viirs class TestQuaternion: @@ -110,8 +111,8 @@ def test_scan_geometry(self): times = instrument.times(start_of_scan) assert times[0, 1] == start_of_scan - assert times[0, 0] == start_of_scan - np.timedelta64(100, 'ms') - assert times[0, 2] == start_of_scan + np.timedelta64(100, 'ms') + assert times[0, 0] == start_of_scan - np.timedelta64(100, "ms") + assert times[0, 2] == start_of_scan + np.timedelta64(100, "ms") def test_geodetic_lat(self): """Test the determination of the geodetic latitude.""" diff --git a/pyorbital/tests/test_orbital.py b/pyorbital/tests/test_orbital.py index 74e9dacb..1cc46b85 100644 --- a/pyorbital/tests/test_orbital.py +++ b/pyorbital/tests/test_orbital.py @@ -22,12 +22,14 @@ """Test the geoloc orbital.""" -import pytest import unittest -from unittest import mock from datetime import datetime, timedelta -import pytz +from unittest import mock + import numpy as np +import pytest +import pytz + from pyorbital import orbital eps_deg = 10e-3 @@ -45,7 +47,7 @@ def test_get_orbit_number(self): "92.4533 267.6830 14.19582686 11574") dobj = datetime(2012, 1, 18, 8, 4, 19) orbnum = sat.get_orbit_number(dobj) - self.assertEqual(orbnum, 1163) + assert orbnum == 1163 def test_sublonlat(self): """Test getting the sub-satellite position.""" @@ -59,12 +61,9 @@ def test_sublonlat(self): expected_lon = -68.199894472013213 expected_lat = 23.159747677881075 expected_alt = 392.01953430856935 - self.assertTrue(np.abs(lon - expected_lon) < eps_deg, - 'Calculation of 
sublon failed') - self.assertTrue(np.abs(lat - expected_lat) < eps_deg, - 'Calculation of sublat failed') - self.assertTrue(np.abs(alt - expected_alt) < eps_deg, - 'Calculation of altitude failed') + assert np.abs(lon - expected_lon) < eps_deg, "Calculation of sublon failed" + assert np.abs(lat - expected_lat) < eps_deg, "Calculation of sublat failed" + assert np.abs(alt - expected_alt) < eps_deg, "Calculation of altitude failed" def test_observer_look(self): """Test getting the observer look angles.""" @@ -77,10 +76,8 @@ def test_observer_look(self): az, el = sat.get_observer_look(d, -84.39733, 33.775867, 0) expected_az = 122.45169655331965 expected_el = 1.9800219611255456 - self.assertTrue(np.abs(az - expected_az) < eps_deg, - 'Calculation of azimut failed') - self.assertTrue(np.abs(el - expected_el) < eps_deg, - 'Calculation of elevation failed') + assert np.abs(az - expected_az) < eps_deg, "Calculation of azimut failed" + assert np.abs(el - expected_el) < eps_deg, "Calculation of elevation failed" def test_orbit_num_an(self): """Test getting orbit number - ascending node.""" @@ -90,7 +87,7 @@ def test_orbit_num_an(self): line2="2 29499 98.6804 312.6735 0001758 " "111.9178 248.2152 14.21501774254058") d = datetime(2011, 9, 14, 5, 30) - self.assertEqual(sat.get_orbit_number(d), 25437) + assert sat.get_orbit_number(d) == 25437 def test_orbit_num_non_an(self): """Test getting orbit number - not ascending node.""" @@ -99,8 +96,8 @@ def test_orbit_num_non_an(self): ".00000017 00000-0 27793-4 0 9819", line2="2 29499 98.6639 121.6164 0001449 " "71.9056 43.3132 14.21510544330271") - dt = np.timedelta64(98, 'm') - self.assertEqual(sat.get_orbit_number(sat.tle.epoch + dt), 33028) + dt = np.timedelta64(98, "m") + assert sat.get_orbit_number(sat.tle.epoch + dt) == 33028 def test_orbit_num_equator(self): """Test getting orbit numbers when being around equator.""" @@ -113,13 +110,13 @@ def test_orbit_num_equator(self): t2 = datetime(2013, 3, 2, 22, 2, 26) on1 = sat.get_orbit_number(t1) on2 = sat.get_orbit_number(t2) - self.assertEqual(on1, 6973) - self.assertEqual(on2, 6974) + assert on1 == 6973 + assert on2 == 6974 pos1, vel1 = sat.get_position(t1, normalize=False) pos2, vel2 = sat.get_position(t2, normalize=False) del vel1, vel2 - self.assertTrue(pos1[2] < 0) - self.assertTrue(pos2[2] > 0) + assert pos1[2] < 0 + assert pos2[2] > 0 def test_get_next_passes_apogee(self): """Regression test #22.""" @@ -128,12 +125,10 @@ def test_get_next_passes_apogee(self): line2 = "2 24793 86.3994 209.3241 0002020 " \ "89.8714 270.2713 14.34246429 90794" - orb = orbital.Orbital('IRIDIUM 7 [+]', line1=line1, line2=line2) + orb = orbital.Orbital("IRIDIUM 7 [+]", line1=line1, line2=line2) d = datetime(2018, 3, 7, 3, 30, 15) res = orb.get_next_passes(d, 1, 170.556, -43.368, 0.5, horizon=40) - self.assertTrue(abs( - res[0][2] - datetime(2018, 3, 7, 3, 48, 13, 178439)) < - timedelta(seconds=0.01)) + assert abs(res[0][2] - datetime(2018, 3, 7, 3, 48, 13, 178439)) < timedelta(seconds=0.01) def test_get_next_passes_tricky(self): """Check issue #34 for reference.""" @@ -143,33 +138,28 @@ def test_get_next_passes_tricky(self): line2 = "2 43125 097.5269 314.3317 0010735 "\ "157.6344 202.5362 15.23132245036381" - orb = orbital.Orbital('LEMUR-2-BROWNCOW', line1=line1, line2=line2) + orb = orbital.Orbital("LEMUR-2-BROWNCOW", line1=line1, line2=line2) d = datetime(2018, 9, 8) res = orb.get_next_passes(d, 72, -8.174163, 51.953319, 0.05, horizon=5) - self.assertTrue(abs( - res[0][2] - datetime(2018, 9, 8, 9, 5, 46, 375248)) < - 
timedelta(seconds=0.01)) - self.assertTrue(abs( - res[-1][2] - datetime(2018, 9, 10, 22, 15, 3, 143469)) < - timedelta(seconds=0.01)) + assert abs(res[0][2] - datetime(2018, 9, 8, 9, 5, 46, 375248)) < timedelta(seconds=0.01) + assert abs(res[-1][2] - datetime(2018, 9, 10, 22, 15, 3, 143469)) < timedelta(seconds=0.01) - self.assertTrue(len(res) == 15) + assert len(res) == 15 def test_get_next_passes_issue_22(self): """Check that max.""" - line1 = '1 28654U 05018A 21083.16603416 .00000102 00000-0 79268-4 0 9999' - line2 = '2 28654 99.0035 147.6583 0014816 159.4931 200.6838 14.12591533816498' + line1 = "1 28654U 05018A 21083.16603416 .00000102 00000-0 79268-4 0 9999" + line2 = "2 28654 99.0035 147.6583 0014816 159.4931 200.6838 14.12591533816498" orb = orbital.Orbital("NOAA 18", line1=line1, line2=line2) t = datetime(2021, 3, 9, 22) next_passes = orb.get_next_passes(t, 1, -15.6335, 27.762, 0.) rise, fall, max_elevation = next_passes[0] assert rise < max_elevation < fall - print(next_passes) - @mock.patch('pyorbital.orbital.Orbital.get_lonlatalt') + @mock.patch("pyorbital.orbital.Orbital.get_lonlatalt") def test_utc2local(self, get_lonlatalt): """Test converting UTC to local time.""" get_lonlatalt.return_value = -45, None, None @@ -178,20 +168,19 @@ def test_utc2local(self, get_lonlatalt): ".00000017 00000-0 27793-4 0 9819", line2="2 29499 98.6639 121.6164 0001449 " "71.9056 43.3132 14.21510544330271") - self.assertEqual(sat.utc2local(datetime(2009, 7, 1, 12)), - datetime(2009, 7, 1, 9)) + assert sat.utc2local(datetime(2009, 7, 1, 12)) == datetime(2009, 7, 1, 9) - @mock.patch('pyorbital.orbital.Orbital.utc2local') - @mock.patch('pyorbital.orbital.Orbital.get_orbit_number') + @mock.patch("pyorbital.orbital.Orbital.utc2local") + @mock.patch("pyorbital.orbital.Orbital.get_orbit_number") def test_get_equatorial_crossing_time(self, get_orbit_number, utc2local): """Test get the equatorial crossing time.""" def get_orbit_number_patched(utc_time, **kwargs): utc_time = np.datetime64(utc_time) - diff = (utc_time - np.datetime64('2009-07-01 12:38:12')) / np.timedelta64(7200, 's') + diff = (utc_time - np.datetime64("2009-07-01 12:38:12")) / np.timedelta64(7200, "s") return 1234 + diff get_orbit_number.side_effect = get_orbit_number_patched - utc2local.return_value = 'local_time' + utc2local.return_value = "local_time" sat = orbital.Orbital("METOP-A", line1="1 29499U 06044A 13060.48822809 " ".00000017 00000-0 27793-4 0 9819", @@ -202,20 +191,20 @@ def get_orbit_number_patched(utc_time, **kwargs): res = sat.get_equatorial_crossing_time(tstart=datetime(2009, 7, 1, 12), tend=datetime(2009, 7, 1, 13)) exp = datetime(2009, 7, 1, 12, 38, 12) - self.assertTrue((res - exp) < timedelta(seconds=0.01)) + assert res - exp < timedelta(seconds=0.01) # Descending node res = sat.get_equatorial_crossing_time(tstart=datetime(2009, 7, 1, 12), tend=datetime(2009, 7, 1, 14, 0), - node='descending') + node="descending") exp = datetime(2009, 7, 1, 13, 38, 12) - self.assertTrue((res - exp) < timedelta(seconds=0.01)) + assert res - exp < timedelta(seconds=0.01) # Conversion to local time res = sat.get_equatorial_crossing_time(tstart=datetime(2009, 7, 1, 12), tend=datetime(2009, 7, 1, 14), local_time=True) - self.assertEqual(res, 'local_time') + assert res == "local_time" class TestGetObserverLook(unittest.TestCase): @@ -250,8 +239,9 @@ def test_basic_numpy(self): def test_basic_dask(self): """Test with dask array inputs.""" - from pyorbital import orbital import dask.array as da + + from pyorbital import orbital sat_lon = 
da.from_array(self.sat_lon, chunks=2) sat_lat = da.from_array(self.sat_lat, chunks=2) sat_alt = da.from_array(self.sat_alt, chunks=2) @@ -266,9 +256,10 @@ def test_basic_dask(self): def test_xarray_with_numpy(self): """Test with xarray DataArray with numpy array as inputs.""" - from pyorbital import orbital import xarray as xr + from pyorbital import orbital + def _xarr_conv(input): return xr.DataArray(input) sat_lon = _xarr_conv(self.sat_lon) @@ -285,10 +276,11 @@ def _xarr_conv(input): def test_xarray_with_dask(self): """Test with xarray DataArray with dask array as inputs.""" - from pyorbital import orbital import dask.array as da import xarray as xr + from pyorbital import orbital + def _xarr_conv(input): return xr.DataArray(da.from_array(input, chunks=2)) sat_lon = _xarr_conv(self.sat_lon) @@ -339,14 +331,15 @@ def test_basic_numpy(self): azi, elev = orbital.get_observer_look(self.sat_lon, self.sat_lat, self.sat_alt, self.t, self.lon, self.lat, self.alt) - self.assertEqual(np.sum(np.isnan(azi)), 0) - self.assertFalse(np.isnan(azi).any()) + assert np.sum(np.isnan(azi)) == 0 + assert not np.isnan(azi).any() np.testing.assert_allclose(elev, self.exp_elev) def test_basic_dask(self): """Test with dask array inputs.""" - from pyorbital import orbital import dask.array as da + + from pyorbital import orbital sat_lon = da.from_array(self.sat_lon, chunks=2) sat_lat = da.from_array(self.sat_lat, chunks=2) sat_alt = da.from_array(self.sat_alt, chunks=2) @@ -356,15 +349,16 @@ def test_basic_dask(self): azi, elev = orbital.get_observer_look(sat_lon, sat_lat, sat_alt, self.t, lon, lat, alt) - self.assertEqual(np.sum(np.isnan(azi)), 0) - self.assertFalse(np.isnan(azi).any()) + assert np.sum(np.isnan(azi)) == 0 + assert not np.isnan(azi).any() np.testing.assert_allclose(elev.compute(), self.exp_elev) def test_xarray_with_numpy(self): """Test with xarray DataArray with numpy array as inputs.""" - from pyorbital import orbital import xarray as xr + from pyorbital import orbital + def _xarr_conv(input): return xr.DataArray(input) sat_lon = _xarr_conv(self.sat_lon) @@ -376,16 +370,17 @@ def _xarr_conv(input): azi, elev = orbital.get_observer_look(sat_lon, sat_lat, sat_alt, self.t, lon, lat, alt) - self.assertEqual(np.sum(np.isnan(azi)), 0) - self.assertFalse(np.isnan(azi).any()) + assert np.sum(np.isnan(azi)) == 0 + assert not np.isnan(azi).any() np.testing.assert_allclose(elev.data, self.exp_elev) def test_xarray_with_dask(self): """Test with xarray DataArray with dask array as inputs.""" - from pyorbital import orbital import dask.array as da import xarray as xr + from pyorbital import orbital + def _xarr_conv(input): return xr.DataArray(da.from_array(input, chunks=2)) sat_lon = _xarr_conv(self.sat_lon) @@ -397,8 +392,8 @@ def _xarr_conv(input): azi, elev = orbital.get_observer_look(sat_lon, sat_lat, sat_alt, self.t, lon, lat, alt) - self.assertEqual(np.sum(np.isnan(azi)), 0) - self.assertFalse(np.isnan(azi).any()) + assert np.sum(np.isnan(azi)) == 0 + assert not np.isnan(azi).any() np.testing.assert_allclose(elev.data.compute(), self.exp_elev) @@ -408,44 +403,46 @@ class TestRegressions(unittest.TestCase): def test_63(self): """Check that no runtimewarning is raised, #63.""" import warnings - from pyorbital.orbital import Orbital + from dateutil import parser - warnings.filterwarnings('error') + + from pyorbital.orbital import Orbital + warnings.filterwarnings("error") orb = Orbital("Suomi-NPP", line1="1 37849U 11061A 19292.84582509 .00000011 00000-0 25668-4 0 9997", line2="2 37849 98.7092 229.3263 
0000715 98.5313 290.6262 14.19554485413345") orb.get_next_passes(parser.parse("2019-10-21 16:00:00"), 12, 123.29736, -13.93763, 0) - warnings.filterwarnings('default') + warnings.filterwarnings("default") -@pytest.mark.parametrize('dtime', +@pytest.mark.parametrize("dtime", [datetime(2024, 6, 25, 11, 0, 18), datetime(2024, 6, 25, 11, 5, 0, 0, pytz.UTC), - np.datetime64('2024-06-25T11:10:00.000000') + np.datetime64("2024-06-25T11:10:00.000000") ] ) def test_get_last_an_time_scalar_input(dtime): """Test getting the time of the last ascending node - input time is a scalar.""" from pyorbital.orbital import Orbital orb = Orbital("NOAA-20", - line1='1 43013U 17073A 24176.73674251 .00000000 00000+0 11066-3 0 00014', - line2='2 43013 98.7060 114.5340 0001454 139.3958 190.7541 14.19599847341971') + line1="1 43013U 17073A 24176.73674251 .00000000 00000+0 11066-3 0 00014", + line2="2 43013 98.7060 114.5340 0001454 139.3958 190.7541 14.19599847341971") - expected = np.datetime64('2024-06-25T10:44:18.234375') + expected = np.datetime64("2024-06-25T10:44:18.234375") result = orb.get_last_an_time(dtime) - assert abs(expected - result) < np.timedelta64(1, 's') + assert abs(expected - result) < np.timedelta64(1, "s") -@pytest.mark.parametrize('dtime', - [datetime(2024, 6, 25, 11, 5, 0, 0, pytz.timezone('Europe/Stockholm')), +@pytest.mark.parametrize("dtime", + [datetime(2024, 6, 25, 11, 5, 0, 0, pytz.timezone("Europe/Stockholm")), ] ) def test_get_last_an_time_wrong_input(dtime): """Test getting the time of the last ascending node - wrong input.""" from pyorbital.orbital import Orbital orb = Orbital("NOAA-20", - line1='1 43013U 17073A 24176.73674251 .00000000 00000+0 11066-3 0 00014', - line2='2 43013 98.7060 114.5340 0001454 139.3958 190.7541 14.19599847341971') + line1="1 43013U 17073A 24176.73674251 .00000000 00000+0 11066-3 0 00014", + line2="2 43013 98.7060 114.5340 0001454 139.3958 190.7541 14.19599847341971") with pytest.raises(ValueError) as exec_info: _ = orb.get_last_an_time(dtime) diff --git a/pyorbital/tests/test_tlefile.py b/pyorbital/tests/test_tlefile.py index b7eccc36..a921b8b2 100644 --- a/pyorbital/tests/test_tlefile.py +++ b/pyorbital/tests/test_tlefile.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- # -# Copyright (c) 2014-2023 Pytroll Community +# Copyright (c) 2014-2024 Pytroll Community # # Author(s): # @@ -24,23 +24,26 @@ """Test TLE file reading, TLE downloading and stroging TLEs to database.""" -from pyorbital.tlefile import Tle -from pyorbital.tlefile import (_get_config_path, - read_platform_numbers, - _get_local_tle_path_from_env, - _get_uris_and_open_func, - check_is_platform_supported, - PKG_CONFIG_DIR) - -import logging import datetime +import logging +import os +import time import unittest -from unittest.mock import patch +from contextlib import suppress from unittest import mock +from unittest.mock import patch + import pytest -import os -from contextlib import suppress -import time + +from pyorbital.tlefile import ( + PKG_CONFIG_DIR, + Tle, + _get_config_path, + _get_local_tle_path_from_env, + _get_uris_and_open_func, + check_is_platform_supported, + read_platform_numbers, +) line0 = "ISS (ZARYA)" line1 = "1 25544U 98067A 08264.51782528 -.00002182 00000-0 -11606-4 0 2927" @@ -56,85 +59,85 @@ NOAA19_3LINES = "NOAA 19\n" + NOAA19_2LINES -tle_xml = '\n'.join( +tle_xml = "\n".join( ('', - '', - '', - '', - '', - '' + line1 + '', - '' + line2 + '', - '', - '', - '', - '', - '', - '', - '' + line1_2 + '', - '' + line2_2 + '', - '', - '', - '', - '')) + "", + 
"", + "", + "", + "" + line1 + "", + "" + line2 + "", + "", + "", + "", + "", + "", + "", + "" + line1_2 + "", + "" + line2_2 + "", + "", + "", + "", + "")) @pytest.fixture def fake_platforms_file(tmp_path): """Return file path to a fake platforms.txt file.""" - file_path = tmp_path / 'platforms.txt' - lines = ['# Some header lines - line 1\n', - '# Some header lines - line 2\n', - 'NOAA-21 54234\n', - 'NOAA-20 43013\n', - 'UNKNOWN SATELLITE 99999\n' + file_path = tmp_path / "platforms.txt" + lines = ["# Some header lines - line 1\n", + "# Some header lines - line 2\n", + "NOAA-21 54234\n", + "NOAA-20 43013\n", + "UNKNOWN SATELLITE 99999\n" ] - with open(file_path, 'w') as fpt: + with open(file_path, "w") as fpt: fpt.writelines(lines) - yield file_path + return file_path @pytest.fixture(scope="session") def fake_local_tles_dir(tmp_path_factory): """Make a list of fake tle files in a directory.""" - tle_dir = tmp_path_factory.mktemp('tle_files') - file_path = tle_dir / 'tle-202211180230.txt' + tle_dir = tmp_path_factory.mktemp("tle_files") + file_path = tle_dir / "tle-202211180230.txt" file_path.touch() time.sleep(1) - file_path = tle_dir / 'tle-202211180430.txt' + file_path = tle_dir / "tle-202211180430.txt" file_path.touch() time.sleep(1) - file_path = tle_dir / 'tle-202211180630.txt' + file_path = tle_dir / "tle-202211180630.txt" file_path.touch() time.sleep(1) - file_path = tle_dir / 'tle-202211180830.txt' + file_path = tle_dir / "tle-202211180830.txt" file_path.touch() - yield tle_dir + return tle_dir @pytest.fixture def mock_env_ppp_config_dir(monkeypatch): """Mock environment variable PPP_CONFIG_DIR.""" - monkeypatch.setenv('PPP_CONFIG_DIR', '/path/to/old/mpop/config/dir') + monkeypatch.setenv("PPP_CONFIG_DIR", "/path/to/old/mpop/config/dir") @pytest.fixture def mock_env_ppp_config_dir_missing(monkeypatch): """Mock that the environment variable PPP_CONFIG_DIR is missing.""" - monkeypatch.delenv('PPP_CONFIG_DIR', raising=False) + monkeypatch.delenv("PPP_CONFIG_DIR", raising=False) @pytest.fixture def mock_env_tles_missing(monkeypatch): """Mock that the environment variable TLES is missing.""" - monkeypatch.delenv('TLES', raising=False) + monkeypatch.delenv("TLES", raising=False) @pytest.fixture def mock_env_tles(monkeypatch, fake_local_tles_dir): """Mock environment variable TLES.""" - monkeypatch.setenv('TLES', os.path.join(fake_local_tles_dir, '*')) + monkeypatch.setenv("TLES", os.path.join(fake_local_tles_dir, "*")) def test_get_config_path_no_env_defined(caplog, mock_env_ppp_config_dir_missing): @@ -143,15 +146,15 @@ def test_get_config_path_no_env_defined(caplog, mock_env_ppp_config_dir_missing) res = _get_config_path() assert res == PKG_CONFIG_DIR - assert caplog.text == '' + assert caplog.text == "" def test_check_is_platform_supported_existing(caplog, mock_env_ppp_config_dir_missing): """Test the function to check if an existing platform is supported on default.""" with caplog.at_level(logging.INFO): - check_is_platform_supported('NOAA-21') + check_is_platform_supported("NOAA-21") - logoutput_lines = caplog.text.split('\n') + logoutput_lines = caplog.text.split("\n") expected1 = "Satellite NOAA-21 is supported. 
NORAD number: 54234" expected2 = "Satellite names and NORAD numbers are defined in {path}".format(path=PKG_CONFIG_DIR) @@ -162,11 +165,11 @@ def test_check_is_platform_supported_existing(caplog, mock_env_ppp_config_dir_mi def test_check_is_platform_supported_unknown(caplog, mock_env_ppp_config_dir_missing): """Test the function to check if an unknown platform is supported on default.""" - sat = 'UNKNOWN' + sat = "UNKNOWN" with caplog.at_level(logging.INFO): check_is_platform_supported(sat) - logoutput_lines = caplog.text.split('\n') + logoutput_lines = caplog.text.split("\n") expected1 = "Satellite {satellite} is NOT supported.".format(satellite=sat) expected2 = ("Please add it to a local copy of the platforms.txt file and put in " + @@ -178,19 +181,12 @@ def test_check_is_platform_supported_unknown(caplog, mock_env_ppp_config_dir_mis assert expected3 in logoutput_lines[2] -@patch( - 'pyorbital.version.get_versions', - return_value=dict([('version', '1.9.1+1.some-futur.dirty'), - ('full-revisionid', 'some-future-git-version-hash'), - ('dirty', True), - ('error', None), - ('date', '2023-01-20T09:37:30+0100') - ]) -) +@patch("pyorbital.get_version", + return_value="1.9.1+1.some-future.dirty") def test_get_config_path_ppp_config_set_but_not_pyorbital_future(mock, caplog, monkeypatch): """Test getting the config path.""" - monkeypatch.setenv('SATPY_CONFIG_PATH', '/path/to/satpy/etc') - monkeypatch.setenv('PPP_CONFIG_DIR', '/path/to/old/mpop/config/dir') + monkeypatch.setenv("SATPY_CONFIG_PATH", "/path/to/satpy/etc") + monkeypatch.setenv("PPP_CONFIG_DIR", "/path/to/old/mpop/config/dir") with caplog.at_level(logging.WARNING): res = _get_config_path() @@ -208,31 +204,31 @@ def test_get_config_path_ppp_config_set_but_not_pyorbital_is_deprecated(caplog, set but the deprecated (old) Satpy/MPOP one is set. """ - monkeypatch.setenv('SATPY_CONFIG_PATH', '/path/to/satpy/etc') - monkeypatch.setenv('PPP_CONFIG_DIR', '/path/to/old/mpop/config/dir') + monkeypatch.setenv("SATPY_CONFIG_PATH", "/path/to/satpy/etc") + monkeypatch.setenv("PPP_CONFIG_DIR", "/path/to/old/mpop/config/dir") with caplog.at_level(logging.WARNING): res = _get_config_path() - assert res == '/path/to/old/mpop/config/dir' + assert res == "/path/to/old/mpop/config/dir" - log_output = ('The use of PPP_CONFIG_DIR is deprecated and will be removed in version 1.9!' + - ' Please use PYORBITAL_CONFIG_PATH if you need a custom config path for pyorbital!') + log_output = ("The use of PPP_CONFIG_DIR is deprecated and will be removed in version 1.9!" 
+ + " Please use PYORBITAL_CONFIG_PATH if you need a custom config path for pyorbital!") assert log_output in caplog.text def test_get_config_path_ppp_config_set_and_pyorbital(caplog, monkeypatch): """Test getting the config path.""" - pyorbital_config_dir = '/path/to/pyorbital/config/dir' - monkeypatch.setenv('PYORBITAL_CONFIG_PATH', pyorbital_config_dir) - monkeypatch.setenv('PPP_CONFIG_DIR', '/path/to/old/mpop/config/dir') + pyorbital_config_dir = "/path/to/pyorbital/config/dir" + monkeypatch.setenv("PYORBITAL_CONFIG_PATH", pyorbital_config_dir) + monkeypatch.setenv("PPP_CONFIG_DIR", "/path/to/old/mpop/config/dir") with caplog.at_level(logging.WARNING): res = _get_config_path() assert res == pyorbital_config_dir - assert caplog.text == '' + assert caplog.text == "" def test_get_config_path_pyorbital_ppp_missing(caplog, monkeypatch, mock_env_ppp_config_dir_missing): @@ -240,8 +236,8 @@ def test_get_config_path_pyorbital_ppp_missing(caplog, monkeypatch, mock_env_ppp The old mpop PPP_CONFIG_PATH is not set but the PYORBITAL one is. """ - pyorbital_config_dir = '/path/to/pyorbital/config/dir' - monkeypatch.setenv('PYORBITAL_CONFIG_PATH', pyorbital_config_dir) + pyorbital_config_dir = "/path/to/pyorbital/config/dir" + monkeypatch.setenv("PYORBITAL_CONFIG_PATH", pyorbital_config_dir) with caplog.at_level(logging.DEBUG): res = _get_config_path() @@ -255,7 +251,7 @@ def test_get_config_path_pyorbital_ppp_missing(caplog, monkeypatch, mock_env_ppp def test_read_platform_numbers(fake_platforms_file): """Test reading the platform names and associated catalougue numbers.""" res = read_platform_numbers(str(fake_platforms_file)) - assert res == {'NOAA-21': '54234', 'NOAA-20': '43013', 'UNKNOWN SATELLITE': '99999'} + assert res == {"NOAA-21": "54234", "NOAA-20": "43013", "UNKNOWN SATELLITE": "99999"} def test_get_local_tle_path_tle_env_missing(mock_env_tles_missing): @@ -277,12 +273,12 @@ def test_get_uris_and_open_func_using_tles_env(caplog, fake_local_tles_dir, monk """ from collections.abc import Sequence - monkeypatch.setenv('TLES', str(os.path.join(fake_local_tles_dir, "*"))) + monkeypatch.setenv("TLES", str(os.path.join(fake_local_tles_dir, "*"))) with caplog.at_level(logging.DEBUG): uris, _ = _get_uris_and_open_func() assert isinstance(uris, Sequence) - assert uris[0] == str(fake_local_tles_dir / 'tle-202211180830.txt') + assert uris[0] == str(fake_local_tles_dir / "tle-202211180830.txt") log_message = "Reading TLE from {msg}".format(msg=str(fake_local_tles_dir)) assert log_message in caplog.text @@ -301,30 +297,30 @@ class TLETest(unittest.TestCase): def check_example(self, tle): """Check the *tle* instance against predetermined values.""" # line 1 - self.assertEqual(tle.satnumber, "25544") - self.assertEqual(tle.classification, "U") - self.assertEqual(tle.id_launch_year, "98") - self.assertEqual(tle.id_launch_number, "067") - self.assertEqual(tle.id_launch_piece.strip(), "A") - self.assertEqual(tle.epoch_year, "08") - self.assertEqual(tle.epoch_day, 264.51782528) + assert tle.satnumber == "25544" + assert tle.classification == "U" + assert tle.id_launch_year == "98" + assert tle.id_launch_number == "067" + assert tle.id_launch_piece.strip() == "A" + assert tle.epoch_year == "08" + assert tle.epoch_day == 264.51782528 epoch = (datetime.datetime(2008, 1, 1) + datetime.timedelta(days=264.51782528 - 1)) - self.assertEqual(tle.epoch, epoch) - self.assertEqual(tle.mean_motion_derivative, -.00002182) - self.assertEqual(tle.mean_motion_sec_derivative, 0.0) - self.assertEqual(tle.bstar, -.11606e-4) - 
self.assertEqual(tle.ephemeris_type, 0) - self.assertEqual(tle.element_number, 292) + assert tle.epoch == epoch + assert tle.mean_motion_derivative == -2.182e-05 + assert tle.mean_motion_sec_derivative == 0.0 + assert tle.bstar == -1.1606e-05 + assert tle.ephemeris_type == 0 + assert tle.element_number == 292 # line 2 - self.assertEqual(tle.inclination, 51.6416) - self.assertEqual(tle.right_ascension, 247.4627) - self.assertEqual(tle.excentricity, .0006703) - self.assertEqual(tle.arg_perigee, 130.5360) - self.assertEqual(tle.mean_anomaly, 325.0288) - self.assertEqual(tle.mean_motion, 15.72125391) - self.assertEqual(tle.orbit, 56353) + assert tle.inclination == 51.6416 + assert tle.right_ascension == 247.4627 + assert tle.excentricity == 0.0006703 + assert tle.arg_perigee == 130.536 + assert tle.mean_anomaly == 325.0288 + assert tle.mean_motion == 15.72125391 + assert tle.orbit == 56353 def test_from_line(self): """Test parsing from line elements.""" @@ -333,11 +329,11 @@ def test_from_line(self): def test_from_file(self): """Test reading and parsing from a file.""" + from os import close, remove, write from tempfile import mkstemp - from os import write, close, remove filehandle, filename = mkstemp() try: - write(filehandle, "\n".join([line0, line1, line2]).encode('utf-8')) + write(filehandle, "\n".join([line0, line1, line2]).encode("utf-8")) close(filehandle) tle = Tle("ISS (ZARYA)", filename) self.check_example(tle) @@ -346,11 +342,11 @@ def test_from_file(self): def test_from_file_with_hyphenated_platform_name(self): """Test reading and parsing from a file with a slightly different name.""" + from os import close, remove, write from tempfile import mkstemp - from os import write, close, remove filehandle, filename = mkstemp() try: - write(filehandle, NOAA19_3LINES.encode('utf-8')) + write(filehandle, NOAA19_3LINES.encode("utf-8")) close(filehandle) tle = Tle("NOAA-19", filename) assert tle.satnumber == "33591" @@ -359,11 +355,11 @@ def test_from_file_with_hyphenated_platform_name(self): def test_from_file_with_no_platform_name(self): """Test reading and parsing from a file with a slightly different name.""" + from os import close, remove, write from tempfile import mkstemp - from os import write, close, remove filehandle, filename = mkstemp() try: - write(filehandle, NOAA19_2LINES.encode('utf-8')) + write(filehandle, NOAA19_2LINES.encode("utf-8")) close(filehandle) tle = Tle("NOAA-19", filename) assert tle.satnumber == "33591" @@ -376,8 +372,8 @@ def test_from_mmam_xml(self): save_dir = TemporaryDirectory() with save_dir: - fname = os.path.join(save_dir.name, '20210420_Metop-B_ADMIN_MESSAGE_NO_127.xml') - with open(fname, 'w') as fid: + fname = os.path.join(save_dir.name, "20210420_Metop-B_ADMIN_MESSAGE_NO_127.xml") + with open(fname, "w") as fid: fid.write(tle_xml) tle = Tle("", tle_file=fname) self.check_example(tle) @@ -410,7 +406,7 @@ def test_init(self): """Test the initialization.""" assert self.dl.config is self.config - @mock.patch('pyorbital.tlefile.requests') + @mock.patch("pyorbital.tlefile.requests") def test_fetch_plain_tle_not_configured(self, requests): """Test downloading and a TLE file from internet.""" requests.get = mock.MagicMock() @@ -419,10 +415,10 @@ def test_fetch_plain_tle_not_configured(self, requests): # Not configured self.dl.config["downloaders"] = {} res = self.dl.fetch_plain_tle() - self.assertTrue(res == {}) + assert res == {} requests.get.assert_not_called() - @mock.patch('pyorbital.tlefile.requests') + @mock.patch("pyorbital.tlefile.requests") def 
test_fetch_plain_tle_two_sources(self, requests): """Test downloading and a TLE file from internet.""" requests.get = mock.MagicMock() @@ -432,16 +428,16 @@ def test_fetch_plain_tle_two_sources(self, requests): self.dl.config["downloaders"] = FETCH_PLAIN_TLE_CONFIG res = self.dl.fetch_plain_tle() - self.assertTrue("source_1" in res) - self.assertEqual(len(res["source_1"]), 3) - self.assertEqual(res["source_1"][0].line1, line1) - self.assertEqual(res["source_1"][0].line2, line2) - self.assertTrue("source_2" in res) - self.assertEqual(len(res["source_2"]), 1) - self.assertTrue(mock.call("mocked_url_1") in requests.get.mock_calls) - self.assertEqual(len(requests.get.mock_calls), 4) - - @mock.patch('pyorbital.tlefile.requests') + assert "source_1" in res + assert len(res["source_1"]) == 3 + assert res["source_1"][0].line1 == line1 + assert res["source_1"][0].line2 == line2 + assert "source_2" in res + assert len(res["source_2"]) == 1 + assert mock.call("mocked_url_1", timeout=15) in requests.get.mock_calls + assert len(requests.get.mock_calls) == 4 + + @mock.patch("pyorbital.tlefile.requests") def test_fetch_plain_tle_server_is_a_teapot(self, requests): """Test downloading a TLE file from internet.""" requests.get = mock.MagicMock() @@ -453,14 +449,15 @@ def test_fetch_plain_tle_server_is_a_teapot(self, requests): res = self.dl.fetch_plain_tle() # The sources are in the dict ... - self.assertEqual(len(res), 2) + assert len(res) == 2 # ... but there are no TLEs - self.assertEqual(len(res["source_1"]), 0) - self.assertEqual(len(res["source_2"]), 0) - self.assertTrue(mock.call("mocked_url_1") in requests.get.mock_calls) - self.assertEqual(len(requests.get.mock_calls), 4) + assert len(res["source_1"]) == 0 + assert len(res["source_2"]) == 0 - @mock.patch('pyorbital.tlefile.requests') + assert mock.call("mocked_url_1", timeout=15) in requests.get.mock_calls + assert len(requests.get.mock_calls) == 4 + + @mock.patch("pyorbital.tlefile.requests") def test_fetch_spacetrack_login_fails(self, requests): """Test downloading TLEs from space-track.org.""" mock_post = mock.MagicMock() @@ -469,7 +466,7 @@ def test_fetch_spacetrack_login_fails(self, requests): requests.Session.return_value.__enter__.return_value = mock_session self.dl.config["platforms"] = { - 25544: 'ISS' + 25544: "ISS" } self.dl.config["downloaders"] = FETCH_SPACETRACK_CONFIG @@ -477,13 +474,13 @@ def test_fetch_spacetrack_login_fails(self, requests): mock_post.return_value.status_code = 418 res = self.dl.fetch_spacetrack() # Empty list of TLEs is returned - self.assertTrue(res == []) + assert res == [] # The login was anyway attempted mock_post.assert_called_with( - 'https://www.space-track.org/ajaxauth/login', - data={'identity': 'username', 'password': 'passw0rd'}) + "https://www.space-track.org/ajaxauth/login", + data={"identity": "username", "password": "passw0rd"}) - @mock.patch('pyorbital.tlefile.requests') + @mock.patch("pyorbital.tlefile.requests") def test_fetch_spacetrack_get_fails(self, requests): """Test downloading TLEs from space-track.org.""" mock_post = mock.MagicMock() @@ -494,7 +491,7 @@ def test_fetch_spacetrack_get_fails(self, requests): requests.Session.return_value.__enter__.return_value = mock_session self.dl.config["platforms"] = { - 25544: 'ISS' + 25544: "ISS" } self.dl.config["downloaders"] = FETCH_SPACETRACK_CONFIG @@ -502,12 +499,12 @@ def test_fetch_spacetrack_get_fails(self, requests): mock_post.return_value.status_code = 200 mock_get.return_value.status_code = 418 res = self.dl.fetch_spacetrack() - 
self.assertTrue(res == []) + assert res == [] mock_get.assert_called_with("https://www.space-track.org/" "basicspacedata/query/class/tle_latest/" "ORDINAL/1/NORAD_CAT_ID/25544/format/tle") - @mock.patch('pyorbital.tlefile.requests') + @mock.patch("pyorbital.tlefile.requests") def test_fetch_spacetrack_success(self, requests): """Test downloading TLEs from space-track.org.""" mock_post = mock.MagicMock() @@ -517,9 +514,9 @@ def test_fetch_spacetrack_success(self, requests): mock_session.get = mock_get requests.Session.return_value.__enter__.return_value = mock_session - tle_text = '\n'.join((line0, line1, line2)) + tle_text = "\n".join((line0, line1, line2)) self.dl.config["platforms"] = { - 25544: 'ISS' + 25544: "ISS" } self.dl.config["downloaders"] = FETCH_SPACETRACK_CONFIG @@ -528,34 +525,34 @@ def test_fetch_spacetrack_success(self, requests): mock_get.return_value.status_code = 200 mock_get.return_value.text = tle_text res = self.dl.fetch_spacetrack() - self.assertEqual(len(res), 1) - self.assertEqual(res[0].line1, line1) - self.assertEqual(res[0].line2, line2) + assert len(res) == 1 + assert res[0].line1 == line1 + assert res[0].line2 == line2 def test_read_tle_files(self): """Test reading TLE files from a file system.""" from tempfile import TemporaryDirectory - tle_text = '\n'.join((line0, line1, line2)) + tle_text = "\n".join((line0, line1, line2)) save_dir = TemporaryDirectory() with save_dir: - fname = os.path.join(save_dir.name, 'tle_20200129_1600.txt') - with open(fname, 'w') as fid: + fname = os.path.join(save_dir.name, "tle_20200129_1600.txt") + with open(fname, "w") as fid: fid.write(tle_text) # Add a non-existent file, it shouldn't cause a crash - nonexistent = os.path.join(save_dir.name, 'not_here.txt') + nonexistent = os.path.join(save_dir.name, "not_here.txt") # Use a wildcard to collect files (passed to glob) - starred_fname = os.path.join(save_dir.name, 'tle*txt') + starred_fname = os.path.join(save_dir.name, "tle*txt") self.dl.config["downloaders"] = { "read_tle_files": { "paths": [fname, nonexistent, starred_fname] } } res = self.dl.read_tle_files() - self.assertEqual(len(res), 2) - self.assertEqual(res[0].line1, line1) - self.assertEqual(res[0].line2, line2) + assert len(res) == 2 + assert res[0].line1 == line1 + assert res[0].line2 == line2 def test_read_xml_admin_messages(self): """Test reading TLE files from a file system.""" @@ -563,13 +560,13 @@ def test_read_xml_admin_messages(self): save_dir = TemporaryDirectory() with save_dir: - fname = os.path.join(save_dir.name, '20210420_Metop-B_ADMIN_MESSAGE_NO_127.xml') - with open(fname, 'w') as fid: + fname = os.path.join(save_dir.name, "20210420_Metop-B_ADMIN_MESSAGE_NO_127.xml") + with open(fname, "w") as fid: fid.write(tle_xml) # Add a non-existent file, it shouldn't cause a crash - nonexistent = os.path.join(save_dir.name, 'not_here.txt') + nonexistent = os.path.join(save_dir.name, "not_here.txt") # Use a wildcard to collect files (passed to glob) - starred_fname = os.path.join(save_dir.name, '*.xml') + starred_fname = os.path.join(save_dir.name, "*.xml") self.dl.config["downloaders"] = { "read_xml_admin_messages": { "paths": [fname, nonexistent, starred_fname] @@ -579,17 +576,17 @@ def test_read_xml_admin_messages(self): # There are two sets of TLEs in the file. 
And as the same file is # parsed twice, 4 TLE objects are returned - self.assertEqual(len(res), 4) - self.assertEqual(res[0].line1, line1) - self.assertEqual(res[0].line2, line2) - self.assertEqual(res[1].line1, line1_2) - self.assertEqual(res[1].line2, line2_2) + assert len(res) == 4 + assert res[0].line1 == line1 + assert res[0].line2 == line2 + assert res[1].line1 == line1_2 + assert res[1].line2 == line2_2 def _get_req_response(code): req = mock.MagicMock() req.status_code = code - req.text = '\n'.join((line0, line1, line2)) + req.text = "\n".join((line0, line1, line2)) return req @@ -598,21 +595,21 @@ class TestSQLiteTLE(unittest.TestCase): def setUp(self): """Create a database instance.""" - from pyorbital.tlefile import SQLiteTLE - from pyorbital.tlefile import Tle from tempfile import TemporaryDirectory + from pyorbital.tlefile import SQLiteTLE, Tle + self.temp_dir = TemporaryDirectory() - self.db_fname = os.path.join(self.temp_dir.name, 'tle.db') + self.db_fname = os.path.join(self.temp_dir.name, "tle.db") self.platforms = {25544: "ISS"} self.writer_config = { - "output_dir": os.path.join(self.temp_dir.name, 'tle_dir'), + "output_dir": os.path.join(self.temp_dir.name, "tle_dir"), "filename_pattern": "tle_%Y%m%d_%H%M%S.%f.txt", "write_name": True, "write_always": False } self.db = SQLiteTLE(self.db_fname, self.platforms, self.writer_config) - self.tle = Tle('ISS', line1=line1, line2=line2) + self.tle = Tle("ISS", line1=line1, line2=line2) def tearDown(self): """Clean temporary files.""" @@ -621,73 +618,72 @@ def tearDown(self): def test_init(self): """Test that the init did what it should have.""" - from pyorbital.tlefile import table_exists, PLATFORM_NAMES_TABLE + from pyorbital.tlefile import PLATFORM_NAMES_TABLE, table_exists columns = [col.strip() for col in - PLATFORM_NAMES_TABLE.strip('()').split(',')] + PLATFORM_NAMES_TABLE.strip("()").split(",")] num_columns = len(columns) - self.assertTrue(os.path.exists(self.db_fname)) - self.assertTrue(table_exists(self.db.db, "platform_names")) - res = self.db.db.execute('select * from platform_names') + assert os.path.exists(self.db_fname) + assert table_exists(self.db.db, "platform_names") + res = self.db.db.execute("select * from platform_names") names = [description[0] for description in res.description] - self.assertEqual(len(names), num_columns) + assert len(names) == num_columns for col in columns: - self.assertTrue(col.split(' ')[0] in names) + assert col.split(" ")[0] in names def test_update_db(self): """Test updating database with new data.""" - from pyorbital.tlefile import (table_exists, SATID_TABLE, - ISO_TIME_FORMAT) + from pyorbital.tlefile import ISO_TIME_FORMAT, SATID_TABLE, table_exists # Get the column names columns = [col.strip() for col in - SATID_TABLE.replace("'{}' (", "").strip(')').split(',')] + SATID_TABLE.replace("'{}' (", "").strip(")").split(",")] # Platform number satid = str(list(self.platforms.keys())[0]) # Data from a platform that isn't configured self.db.platforms = {} - self.db.update_db(self.tle, 'foo') - self.assertFalse(table_exists(self.db.db, satid)) - self.assertFalse(self.db.updated) + self.db.update_db(self.tle, "foo") + assert not table_exists(self.db.db, satid) + assert not self.db.updated # Configured platform self.db.platforms = self.platforms - self.db.update_db(self.tle, 'foo') - self.assertTrue(table_exists(self.db.db, satid)) - self.assertTrue(self.db.updated) + self.db.update_db(self.tle, "foo") + assert table_exists(self.db.db, satid) + assert self.db.updated # Check that all the 
columns were added - res = self.db.db.execute("select * from '%s'" % satid) + res = self.db.db.execute(f"select * from '{satid:d}'") # noseq names = [description[0] for description in res.description] for col in columns: - self.assertTrue(col.split(' ')[0] in names) + assert col.split(" ")[0] in names # Check the data data = res.fetchall() - self.assertEqual(len(data), 1) + assert len(data) == 1 # epoch - self.assertEqual(data[0][0], '2008-09-20T12:25:40.104192') + assert data[0][0] == "2008-09-20T12:25:40.104192" # TLE - self.assertEqual(data[0][1], '\n'.join((line1, line2))) + assert data[0][1] == "\n".join((line1, line2)) # Date when the data were added should be close to current time date_added = datetime.datetime.strptime(data[0][2], ISO_TIME_FORMAT) now = datetime.datetime.utcnow() - self.assertTrue((now - date_added).total_seconds() < 1.0) + assert (now - date_added).total_seconds() < 1.0 # Source of the data - self.assertTrue(data[0][3] == 'foo') + assert data[0][3] == "foo" # Try to add the same data again. Nothing should change even # if the source is different if the epoch is the same - self.db.update_db(self.tle, 'bar') - res = self.db.db.execute("select * from '%s'" % satid) + self.db.update_db(self.tle, "bar") + res = self.db.db.execute(f"select * from '{satid:d}'") # noseq data = res.fetchall() - self.assertEqual(len(data), 1) + assert len(data) == 1 date_added2 = datetime.datetime.strptime(data[0][2], ISO_TIME_FORMAT) - self.assertEqual(date_added, date_added2) + assert date_added == date_added2 # Source of the data - self.assertTrue(data[0][3] == 'foo') + assert data[0][3] == "foo" def test_write_tle_txt(self): """Test reading data from the database and writing it to a file.""" @@ -695,7 +691,7 @@ def test_write_tle_txt(self): tle_dir = self.writer_config["output_dir"] # Put some data in the database - self.db.update_db(self.tle, 'foo') + self.db.update_db(self.tle, "foo") # Fake that the database hasn't been updated self.db.updated = False @@ -704,34 +700,34 @@ def test_write_tle_txt(self): self.db.write_tle_txt() # The output dir hasn't been created - self.assertFalse(os.path.exists(tle_dir)) + assert not os.path.exists(tle_dir) self.db.updated = True self.db.write_tle_txt() # The dir should be there - self.assertTrue(os.path.exists(tle_dir)) + assert os.path.exists(tle_dir) # There should be one file in the directory - files = glob.glob(os.path.join(tle_dir, 'tle_*txt')) - self.assertEqual(len(files), 1) + files = glob.glob(os.path.join(tle_dir, "tle_*txt")) + assert len(files) == 1 # The file should have been named with the date ('%' characters # not there anymore) - self.assertTrue('%' not in files[0]) + assert "%" not in files[0] # The satellite name should be in the file - with open(files[0], 'r') as fid: - data = fid.read().split('\n') - self.assertEqual(len(data), 3) - self.assertTrue('ISS' in data[0]) - self.assertEqual(data[1], line1) - self.assertEqual(data[2], line2) + with open(files[0], "r") as fid: + data = fid.read().split("\n") + assert len(data) == 3 + assert "ISS" in data[0] + assert data[1] == line1 + assert data[2] == line2 # Call the writing again, nothing should be written. 
In # real-life this assumes a re-run has been done without new # TLE data self.db.updated = False self.db.write_tle_txt() - files = glob.glob(os.path.join(tle_dir, 'tle_*txt')) - self.assertEqual(len(files), 1) + files = glob.glob(os.path.join(tle_dir, "tle_*txt")) + assert len(files) == 1 # Force writing with every call # Do not write the satellite name @@ -740,10 +736,10 @@ def test_write_tle_txt(self): # Wait a bit to ensure different filename time.sleep(2) self.db.write_tle_txt() - files = sorted(glob.glob(os.path.join(tle_dir, 'tle_*txt'))) - self.assertEqual(len(files), 2) - with open(files[1], 'r') as fid: - data = fid.read().split('\n') - self.assertEqual(len(data), 2) - self.assertEqual(data[0], line1) - self.assertEqual(data[1], line2) + files = sorted(glob.glob(os.path.join(tle_dir, "tle_*txt"))) + assert len(files) == 2 + with open(files[1], "r") as fid: + data = fid.read().split("\n") + assert len(data) == 2 + assert data[0] == line1 + assert data[1] == line2 diff --git a/pyorbital/tlefile.py b/pyorbital/tlefile.py index d2f04100..6a827e45 100644 --- a/pyorbital/tlefile.py +++ b/pyorbital/tlefile.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- # -# Copyright (c) 2011-2023 Pytroll Community +# Copyright (c) 2011-2024 Pytroll Community # # Author(s): # @@ -25,58 +25,63 @@ """Classes and functions for handling TLE files.""" +import datetime as dt +import glob import io import logging -import datetime as dt -from urllib.request import urlopen import os -import glob -import numpy as np -import requests import sqlite3 -from xml.etree import ElementTree as ET from itertools import zip_longest +from urllib.request import urlopen + +#from xml.etree import ElementTree as ET +import defusedxml.ElementTree as ET +import numpy as np +import requests -TLE_GROUPS = ('active', - 'weather', - 'resource', - 'cubesat', - 'stations', - 'sarsat', - 'noaa', - 'amateur', - 'engineering') - -TLE_URLS = [f'https://celestrak.org/NORAD/elements/gp.php?GROUP={group}&FORMAT=tle' +TLE_GROUPS = ("active", + "weather", + "resource", + "cubesat", + "stations", + "sarsat", + "noaa", + "amateur", + "engineering") + +TLE_URLS = [f"https://celestrak.org/NORAD/elements/gp.php?GROUP={group}&FORMAT=tle" for group in TLE_GROUPS] LOGGER = logging.getLogger(__name__) -PKG_CONFIG_DIR = os.path.join(os.path.realpath(os.path.dirname(__file__)), 'etc') +PKG_CONFIG_DIR = os.path.join(os.path.realpath(os.path.dirname(__file__)), "etc") + +class TleDownloadTimeoutError(Exception): + """TLE download timeout exception.""" def _check_support_limit_ppp_config_dir(): """Check the version where PPP_CONFIG_DIR will no longer be supported.""" - from pyorbital import version - return version.get_versions()['version'] >= '1.9' + from pyorbital import get_version + return get_version() >= "1.9" def _get_config_path(): """Get the config path for Pyorbital.""" - if 'PPP_CONFIG_DIR' in os.environ and 'PYORBITAL_CONFIG_PATH' not in os.environ: + if "PPP_CONFIG_DIR" in os.environ and "PYORBITAL_CONFIG_PATH" not in os.environ: if _check_support_limit_ppp_config_dir(): LOGGER.warning( - 'The use of PPP_CONFIG_DIR is no longer supported!' + - ' Please use PYORBITAL_CONFIG_PATH if you need a custom config path for pyorbital!') - LOGGER.debug('Using the package default for configuration: %s', PKG_CONFIG_DIR) + "The use of PPP_CONFIG_DIR is no longer supported!" 
+ + " Please use PYORBITAL_CONFIG_PATH if you need a custom config path for pyorbital!") + LOGGER.debug("Using the package default for configuration: %s", PKG_CONFIG_DIR) return PKG_CONFIG_DIR else: LOGGER.warning( - 'The use of PPP_CONFIG_DIR is deprecated and will be removed in version 1.9!' + - ' Please use PYORBITAL_CONFIG_PATH if you need a custom config path for pyorbital!') - pyorbital_config_path = os.getenv('PPP_CONFIG_DIR', PKG_CONFIG_DIR) + "The use of PPP_CONFIG_DIR is deprecated and will be removed in version 1.9!" + + " Please use PYORBITAL_CONFIG_PATH if you need a custom config path for pyorbital!") + pyorbital_config_path = os.getenv("PPP_CONFIG_DIR", PKG_CONFIG_DIR) else: - pyorbital_config_path = os.getenv('PYORBITAL_CONFIG_PATH', PKG_CONFIG_DIR) + pyorbital_config_path = os.getenv("PYORBITAL_CONFIG_PATH", PKG_CONFIG_DIR) LOGGER.debug("Path to the Pyorbital configuration (where e.g. platforms.txt is found): %s", str(pyorbital_config_path)) @@ -89,9 +94,9 @@ def get_platforms_filepath(): Check that the file exists or raise an error. """ config_path = _get_config_path() - platform_file = os.path.join(config_path, 'platforms.txt') + platform_file = os.path.join(config_path, "platforms.txt") if not os.path.isfile(platform_file): - platform_file = os.path.join(PKG_CONFIG_DIR, 'platforms.txt') + platform_file = os.path.join(PKG_CONFIG_DIR, "platforms.txt") if not os.path.isfile(platform_file): raise OSError("Platform file {filepath} does not exist!".format(filepath=platform_file)) @@ -102,15 +107,15 @@ def read_platform_numbers(filename, in_upper=False, num_as_int=False): """Read platform numbers from $PYORBITAL_CONFIG_PATH/platforms.txt.""" out_dict = {} - with open(filename, 'r') as fid: + with open(filename, "r") as fid: for row in fid: # skip comment lines - if not row.startswith('#'): + if not row.startswith("#"): parts = row.split() if len(parts) < 2: continue # The satellite name might have whitespace - platform = ' '.join(parts[:-1]) + platform = " ".join(parts[:-1]) num = parts[-1] if in_upper: platform = platform.upper() @@ -161,12 +166,17 @@ def read(platform, tle_file=None, line1=None, line2=None): """ return Tle(platform, tle_file=tle_file, line1=line1, line2=line2) +# req = urllib.request.Request('http://www.example.com') +# with urllib.request.urlopen(req) as response: +# the_page = response.read() def fetch(destination): """Fetch TLE from internet and save it to `destination`.""" with io.open(destination, mode="w", encoding="utf-8") as dest: for url in TLE_URLS: - response = urlopen(url) + if not url.lower().startswith("http"): + raise ValueError(f"{str(url)} is not accepted!") + response = urlopen(url) # nosec dest.write(response.read().decode("utf-8")) @@ -248,7 +258,7 @@ def _read_tle(self): if not tle: raise KeyError("Found no TLE entry for '%s'" % self._platform) - self._line1, self._line2 = tle.split('\n') + self._line1, self._line2 = tle.split("\n") def _parse_tle(self): """Parse values from TLE data.""" @@ -272,7 +282,7 @@ def _read_tle_decimal(rep): self.epoch_day = float(self._line1[20:32]) self.epoch = \ np.datetime64(dt.datetime.strptime(self.epoch_year, "%y") + - dt.timedelta(days=self.epoch_day - 1), 'us') + dt.timedelta(days=self.epoch_day - 1), "us") self.mean_motion_derivative = float(self._line1[33:43]) self.mean_motion_sec_derivative = _read_tle_decimal(self._line1[44:52]) self.bstar = _read_tle_decimal(self._line1[53:61]) @@ -295,20 +305,20 @@ def __str__(self): import pprint s_var = io.StringIO() d_var = dict(([(k, v) for k, v in - 
list(self.__dict__.items()) if k[0] != '_'])) + list(self.__dict__.items()) if k[0] != "_"])) pprint.pprint(d_var, s_var) return s_var.getvalue()[:-1] def _get_local_tle_path_from_env(): """Get the path to possible local TLE files using the environment variable.""" - return os.environ.get('TLES') + return os.environ.get("TLES") def _get_uris_and_open_func(tle_file=None): """Get the uri's and the adequate file open call for the TLE files.""" def _open(filename): - return io.open(filename, 'rb') + return io.open(filename, "rb") local_tle_path = _get_local_tle_path_from_env() @@ -337,13 +347,13 @@ def _open(filename): return uris, open_func -def _get_first_tle(uris, open_func, platform=''): +def _get_first_tle(uris, open_func, platform=""): return _get_tles_from_uris(uris, open_func, platform=platform, only_first=True) -def _get_tles_from_uris(uris, open_func, platform='', only_first=True): +def _get_tles_from_uris(uris, open_func, platform="", only_first=True): tles = [] - designator = "1 " + SATELLITES.get(platform, '') + designator = "1 " + SATELLITES.get(platform, "") for url in uris: fid = open_func(url) for l_0 in fid: @@ -375,7 +385,7 @@ def _get_tles_from_uris(uris, open_func, platform='', only_first=True): def _decode(itm): if isinstance(itm, str): return itm - return itm.decode('utf-8') + return itm.decode("utf-8") PLATFORM_NAMES_TABLE = "(satid text primary key, platform_name text)" @@ -402,7 +412,10 @@ def fetch_plain_tle(self): tles[source] = [] failures = [] for uri in sources[source]: - req = requests.get(uri) + try: + req = requests.get(uri, timeout=15) # 15 seconds + except requests.exceptions.Timeout: + raise TleDownloadTimeoutError(f"Failed to make request to {str(uri)} within 15 seconds!") if req.status_code == 200: tles[source] += _parse_tles_for_downloader((req.text,), io.StringIO) else: @@ -410,7 +423,7 @@ def fetch_plain_tle(self): if len(failures) > 0: logging.error( "Could not fetch TLEs from %s, %d failure(s): [%s]", - source, len(failures), ', '.join(failures)) + source, len(failures), ", ".join(failures)) logging.info("Downloaded %d TLEs from %s", len(tles[source]), source) return tles @@ -422,8 +435,8 @@ def fetch_spacetrack(self): download_url = ("https://www.space-track.org/basicspacedata/query/" "class/tle_latest/ORDINAL/1/NORAD_CAT_ID/%s/format/" "tle") - download_url = download_url % ','.join( - [str(key) for key in self.config['platforms']]) + download_url = download_url % ",".join( + [str(key) for key in self.config["platforms"]]) user = self.config["downloaders"]["fetch_spacetrack"]["user"] password = self.config["downloaders"]["fetch_spacetrack"]["password"] @@ -470,15 +483,15 @@ def read_xml_admin_messages(self): def _parse_tles_for_downloader(item, open_func): - return [Tle('', tle_file=io.StringIO(tle)) for tle in - _get_tles_from_uris(item, open_func, platform='', only_first=False)] + return [Tle("", tle_file=io.StringIO(tle)) for tle in + _get_tles_from_uris(item, open_func, platform="", only_first=False)] def collect_filenames(paths): """Collect all filenames from *paths*.""" fnames = [] for path in paths: - if '*' in path: + if "*" in path: fnames += glob.glob(path) else: if not os.path.exists(path): @@ -494,10 +507,10 @@ def read_tles_from_mmam_xml_files(paths): fnames = collect_filenames(paths) tles = [] for fname in fnames: - data = read_tle_from_mmam_xml_file(fname).split('\n') + data = read_tle_from_mmam_xml_file(fname).split("\n") for two_lines in _group_iterable_to_chunks(2, data): - tl_stream = io.StringIO('\n'.join(two_lines)) - 
tles.append(Tle('', tle_file=tl_stream)) + tl_stream = io.StringIO("\n".join(two_lines)) + tles.append(Tle("", tle_file=tl_stream)) return tles @@ -559,7 +572,7 @@ def update_db(self, tle, source): self.platforms[num], num) cmd = SATID_VALUES.format(num) epoch = tle.epoch.item().isoformat() - tle = '\n'.join([tle.line1, tle.line2]) + tle = "\n".join([tle.line1, tle.line2]) now = dt.datetime.utcnow().isoformat() try: with self.db: @@ -572,7 +585,7 @@ def update_db(self, tle, source): def write_tle_txt(self): """Write TLE data to a text file.""" - if not self.updated and not self.writer_config.get('write_always', + if not self.updated and not self.writer_config.get("write_always", False): return pattern = os.path.join(self.writer_config["output_dir"], @@ -588,9 +601,8 @@ def write_tle_txt(self): for satid, platform_name in self.platforms.items(): if self.writer_config.get("write_name", False): data.append(platform_name) - query = ("SELECT epoch, tle FROM '%s' ORDER BY " - "epoch DESC LIMIT 1" % satid) - epoch, tle = self.db.execute(query).fetchone() + query = f"SELECT epoch, tle FROM '{satid:d}' ORDER BY epoch DESC LIMIT 1" # noseq + epoch, tle = self.db.execute(query).fetchone() # nosec date_epoch = dt.datetime.strptime(epoch, ISO_TIME_FORMAT) tle_age = ( dt.datetime.utcnow() - date_epoch).total_seconds() / 3600. @@ -598,8 +610,8 @@ def write_tle_txt(self): satid, platform_name, int(tle_age)) data.append(tle) - with open(fname, 'w') as fid: - fid.write('\n'.join(data)) + with open(fname, "w") as fid: + fid.write("\n".join(data)) logging.info("Wrote %d TLEs to %s", len(data), fname) @@ -612,14 +624,14 @@ def table_exists(db, name): """Check if the table 'name' exists in the database.""" name = str(name) query = "SELECT 1 FROM sqlite_master WHERE type='table' and name=?" 
- return db.execute(query, (name,)).fetchone() is not None + return db.execute(query, (name,)).fetchone() is not None # nosec def main(): """Run a test TLE reading.""" - tle_data = read('Noaa-19') + tle_data = read("Noaa-19") print(tle_data) -if __name__ == '__main__': +if __name__ == "__main__": main() From 6d977dddbe31935edcb84082e36bcead008fd575 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 21 Nov 2024 14:55:22 +0100 Subject: [PATCH 68/89] Added missing package for CI tests Signed-off-by: Adam.Dybbroe --- continuous_integration/environment.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 8ddf082c..ab1473c9 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -24,6 +24,7 @@ dependencies: - pytest - pytest-cov - fsspec + - defusedxml - pip - pip: - trollsift From 965cae95c50b649b4dcc54915071012a491850c7 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 21 Nov 2024 15:19:54 +0100 Subject: [PATCH 69/89] Fix test Signed-off-by: Adam.Dybbroe --- pyorbital/tests/test_tlefile.py | 28 +++++++++++++++++----------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/pyorbital/tests/test_tlefile.py b/pyorbital/tests/test_tlefile.py index a921b8b2..a0f8b3d1 100644 --- a/pyorbital/tests/test_tlefile.py +++ b/pyorbital/tests/test_tlefile.py @@ -117,30 +117,31 @@ def fake_local_tles_dir(tmp_path_factory): @pytest.fixture -def mock_env_ppp_config_dir(monkeypatch): +def _mock_env_ppp_config_dir(monkeypatch): """Mock environment variable PPP_CONFIG_DIR.""" monkeypatch.setenv("PPP_CONFIG_DIR", "/path/to/old/mpop/config/dir") @pytest.fixture -def mock_env_ppp_config_dir_missing(monkeypatch): +def _mock_env_ppp_config_dir_missing(monkeypatch): """Mock that the environment variable PPP_CONFIG_DIR is missing.""" monkeypatch.delenv("PPP_CONFIG_DIR", raising=False) @pytest.fixture -def mock_env_tles_missing(monkeypatch): +def _mock_env_tles_missing(monkeypatch): """Mock that the environment variable TLES is missing.""" monkeypatch.delenv("TLES", raising=False) @pytest.fixture -def mock_env_tles(monkeypatch, fake_local_tles_dir): +def _mock_env_tles(monkeypatch, fake_local_tles_dir): """Mock environment variable TLES.""" monkeypatch.setenv("TLES", os.path.join(fake_local_tles_dir, "*")) -def test_get_config_path_no_env_defined(caplog, mock_env_ppp_config_dir_missing): +@pytest.mark.usefixtures("_mock_env_ppp_config_dir_missing") +def test_get_config_path_no_env_defined(caplog): """Test getting the config path.""" with caplog.at_level(logging.WARNING): res = _get_config_path() @@ -149,7 +150,8 @@ def test_get_config_path_no_env_defined(caplog, mock_env_ppp_config_dir_missing) assert caplog.text == "" -def test_check_is_platform_supported_existing(caplog, mock_env_ppp_config_dir_missing): +@pytest.mark.usefixtures("_mock_env_ppp_config_dir_missing") +def test_check_is_platform_supported_existing(caplog): """Test the function to check if an existing platform is supported on default.""" with caplog.at_level(logging.INFO): check_is_platform_supported("NOAA-21") @@ -163,7 +165,8 @@ def test_check_is_platform_supported_existing(caplog, mock_env_ppp_config_dir_mi assert expected2 in logoutput_lines[1] -def test_check_is_platform_supported_unknown(caplog, mock_env_ppp_config_dir_missing): +@pytest.mark.usefixtures("_mock_env_ppp_config_dir_missing") +def test_check_is_platform_supported_unknown(caplog): """Test the function to check if an unknown 
platform is supported on default.""" sat = "UNKNOWN" with caplog.at_level(logging.INFO): @@ -231,7 +234,8 @@ def test_get_config_path_ppp_config_set_and_pyorbital(caplog, monkeypatch): assert caplog.text == "" -def test_get_config_path_pyorbital_ppp_missing(caplog, monkeypatch, mock_env_ppp_config_dir_missing): +@pytest.mark.usefixtures("_mock_env_ppp_config_dir_missing") +def test_get_config_path_pyorbital_ppp_missing(caplog, monkeypatch): """Test getting the config path. The old mpop PPP_CONFIG_PATH is not set but the PYORBITAL one is. @@ -254,13 +258,15 @@ def test_read_platform_numbers(fake_platforms_file): assert res == {"NOAA-21": "54234", "NOAA-20": "43013", "UNKNOWN SATELLITE": "99999"} -def test_get_local_tle_path_tle_env_missing(mock_env_tles_missing): +@pytest.mark.usefixtures("_mock_env_tles_missing") +def test_get_local_tle_path_tle_env_missing(): """Test getting the path to local TLE files - env TLES missing.""" res = _get_local_tle_path_from_env() assert res is None -def test_get_local_tle_path(mock_env_tles, fake_local_tles_dir): +@pytest.mark.usefixtures("_mock_env_tles") +def test_get_local_tle_path(fake_local_tles_dir): """Test getting the path to local TLE files.""" res = _get_local_tle_path_from_env() assert res == os.path.join(fake_local_tles_dir, "*") @@ -640,7 +646,7 @@ def test_update_db(self): columns = [col.strip() for col in SATID_TABLE.replace("'{}' (", "").strip(")").split(",")] # Platform number - satid = str(list(self.platforms.keys())[0]) + satid = int(list(self.platforms.keys())[0]) # Data from a platform that isn't configured self.db.platforms = {} From 206e34bad5056d003eea7ae099876e3993bd3035 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 21 Nov 2024 09:43:28 -0600 Subject: [PATCH 70/89] Remove unused stickler config --- .stickler.yml | 4 ---- 1 file changed, 4 deletions(-) delete mode 100644 .stickler.yml diff --git a/.stickler.yml b/.stickler.yml deleted file mode 100644 index 1c51f637..00000000 --- a/.stickler.yml +++ /dev/null @@ -1,4 +0,0 @@ -linters: - flake8: - python: 3 - config: setup.cfg From b2bb7175910c55ca3cac6144a18e94805d9f4e7c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 21 Nov 2024 10:15:14 -0600 Subject: [PATCH 71/89] Add .bandit config --- .bandit | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .bandit diff --git a/.bandit b/.bandit new file mode 100644 index 00000000..c9a5f28b --- /dev/null +++ b/.bandit @@ -0,0 +1,3 @@ +[bandit] +skips: B506 +exclude: pyorbital/tests From 68488f0eb3dfafa5aaba06ee374ae432f50e68eb Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 21 Nov 2024 10:16:32 -0600 Subject: [PATCH 72/89] Fix nosec comment typo --- pyorbital/tlefile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyorbital/tlefile.py b/pyorbital/tlefile.py index 6a827e45..7e873962 100644 --- a/pyorbital/tlefile.py +++ b/pyorbital/tlefile.py @@ -601,7 +601,7 @@ def write_tle_txt(self): for satid, platform_name in self.platforms.items(): if self.writer_config.get("write_name", False): data.append(platform_name) - query = f"SELECT epoch, tle FROM '{satid:d}' ORDER BY epoch DESC LIMIT 1" # noseq + query = f"SELECT epoch, tle FROM '{satid:d}' ORDER BY epoch DESC LIMIT 1" # nosec epoch, tle = self.db.execute(query).fetchone() # nosec date_epoch = dt.datetime.strptime(epoch, ISO_TIME_FORMAT) tle_age = ( From 7fa732b506e7983f0470f61aa60279bd631df5cc Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 21 Nov 2024 17:17:57 +0100 Subject: [PATCH 73/89] Bugfix and remove unused inactive code 
Signed-off-by: Adam.Dybbroe --- pyorbital/tlefile.py | 3 --- pyproject.toml | 2 +- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/pyorbital/tlefile.py b/pyorbital/tlefile.py index 6a827e45..ac67130e 100644 --- a/pyorbital/tlefile.py +++ b/pyorbital/tlefile.py @@ -166,9 +166,6 @@ def read(platform, tle_file=None, line1=None, line2=None): """ return Tle(platform, tle_file=tle_file, line1=line1, line2=line2) -# req = urllib.request.Request('http://www.example.com') -# with urllib.request.urlopen(req) as response: -# the_page = response.read() def fetch(destination): """Fetch TLE from internet and save it to `destination`.""" diff --git a/pyproject.toml b/pyproject.toml index 5e229f7c..45300036 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ dependencies = ["numpy>=1.19.0", "scipy", "requests", "pytz", - "dateutil", + "python-dateutil", "defusedxml", ] readme = "README.md" From 3097d8176b293e8a52fd0708680d6ad4544e29f8 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 21 Nov 2024 10:18:16 -0600 Subject: [PATCH 74/89] Add mypy stub packages to precommit --- .pre-commit-config.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fe36218d..b2b8efa8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,6 +27,8 @@ repos: - types-setuptools - types-PyYAML - types-requests + - types-pytz + - types-python-dateutil args: ["--python-version", "3.10", "--ignore-missing-imports"] - repo: https://github.com/pycqa/isort rev: 5.13.2 From e42764a2dfa65b1e6a5d8c3b64960372a79811cd Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 21 Nov 2024 10:18:32 -0600 Subject: [PATCH 75/89] Fix whitespace issues in issue template --- .github/ISSUE_TEMPLATE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index 18c258ad..7831c592 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -6,7 +6,7 @@ ``` #### Problem description -[this should also explain **why** the current behaviour is a problem and why the +[this should also explain **why** the current behaviour is a problem and why the expected output is a better solution.] #### Expected Output From 721c7b58a37c268b51bb5fbc3b1dd4517ac65d43 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 21 Nov 2024 17:20:36 +0100 Subject: [PATCH 76/89] Increase test coverage Signed-off-by: Adam.Dybbroe --- pyorbital/tests/test_logging.py | 38 +++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 pyorbital/tests/test_logging.py diff --git a/pyorbital/tests/test_logging.py b/pyorbital/tests/test_logging.py new file mode 100644 index 00000000..35f56ccf --- /dev/null +++ b/pyorbital/tests/test_logging.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2024 Pytroll Community + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +"""Test the logging module.""" + +import logging + +from pyorbital.logger import get_logger, logging_off, logging_on + + +def test_logging_on_and_off(caplog): + """Test that switching logging on and off works.""" + logger = get_logger("pyorbital.spam") + logging_on() + with caplog.at_level(logging.WARNING): + logger.debug("I'd like to leave the army please, sir.") + logger.warning("Stop that! It's SPAM.") + assert "Stop that! It's SPAM" in caplog.text + assert "I'd like to leave the army please, sir." not in caplog.text + logging_off() + with caplog.at_level(logging.DEBUG): + logger.warning("You've got a nice army base here, Colonel.") + assert "You've got a nice army base here, Colonel." not in caplog.text From 4553a0c402aef22bcc48ed6dd03d2cf3d8f43465 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 21 Nov 2024 10:22:22 -0600 Subject: [PATCH 77/89] Restore pyorbital.__version__ --- pyorbital/__init__.py | 5 +---- pyorbital/tests/test_tlefile.py | 26 ++++++++++++-------------- pyorbital/tlefile.py | 27 ++++++++++----------------- 3 files changed, 23 insertions(+), 35 deletions(-) diff --git a/pyorbital/__init__.py b/pyorbital/__init__.py index 594d6eec..e5f39f2b 100644 --- a/pyorbital/__init__.py +++ b/pyorbital/__init__.py @@ -19,7 +19,7 @@ import numpy as np -from pyorbital import version +from pyorbital.version import __version__ # noqa def dt2np(utc_time): @@ -29,6 +29,3 @@ def dt2np(utc_time): except ValueError: return utc_time.astype("datetime64[ns]") -def get_version(): - """Return the Pyorbital version tag.""" - return version.__version__ diff --git a/pyorbital/tests/test_tlefile.py b/pyorbital/tests/test_tlefile.py index a0f8b3d1..6a2c8799 100644 --- a/pyorbital/tests/test_tlefile.py +++ b/pyorbital/tests/test_tlefile.py @@ -184,20 +184,18 @@ def test_check_is_platform_supported_unknown(caplog): assert expected3 in logoutput_lines[2] -@patch("pyorbital.get_version", - return_value="1.9.1+1.some-future.dirty") -def test_get_config_path_ppp_config_set_but_not_pyorbital_future(mock, caplog, monkeypatch): - """Test getting the config path.""" - monkeypatch.setenv("SATPY_CONFIG_PATH", "/path/to/satpy/etc") - monkeypatch.setenv("PPP_CONFIG_DIR", "/path/to/old/mpop/config/dir") - - with caplog.at_level(logging.WARNING): - res = _get_config_path() - - log_output = ("The use of PPP_CONFIG_DIR is no longer supported! " + - "Please use PYORBITAL_CONFIG_PATH if you need a custom config path for pyorbital!") - assert log_output in caplog.text - assert res == PKG_CONFIG_DIR +#def test_get_config_path_ppp_config_set_but_not_pyorbital_future(mock, caplog, monkeypatch): +# """Test getting the config path.""" +# monkeypatch.setenv("SATPY_CONFIG_PATH", "/path/to/satpy/etc") +# monkeypatch.setenv("PPP_CONFIG_DIR", "/path/to/old/mpop/config/dir") +# +# with caplog.at_level(logging.WARNING): +# res = _get_config_path() +# +# log_output = ("The use of PPP_CONFIG_DIR is no longer supported! 
" + +# "Please use PYORBITAL_CONFIG_PATH if you need a custom config path for pyorbital!") +# assert log_output in caplog.text +# assert res == PKG_CONFIG_DIR def test_get_config_path_ppp_config_set_but_not_pyorbital_is_deprecated(caplog, monkeypatch): diff --git a/pyorbital/tlefile.py b/pyorbital/tlefile.py index ffc7253f..e8f03052 100644 --- a/pyorbital/tlefile.py +++ b/pyorbital/tlefile.py @@ -60,26 +60,19 @@ class TleDownloadTimeoutError(Exception): """TLE download timeout exception.""" -def _check_support_limit_ppp_config_dir(): - """Check the version where PPP_CONFIG_DIR will no longer be supported.""" - from pyorbital import get_version - return get_version() >= "1.9" - - def _get_config_path(): """Get the config path for Pyorbital.""" if "PPP_CONFIG_DIR" in os.environ and "PYORBITAL_CONFIG_PATH" not in os.environ: - if _check_support_limit_ppp_config_dir(): - LOGGER.warning( - "The use of PPP_CONFIG_DIR is no longer supported!" + - " Please use PYORBITAL_CONFIG_PATH if you need a custom config path for pyorbital!") - LOGGER.debug("Using the package default for configuration: %s", PKG_CONFIG_DIR) - return PKG_CONFIG_DIR - else: - LOGGER.warning( - "The use of PPP_CONFIG_DIR is deprecated and will be removed in version 1.9!" + - " Please use PYORBITAL_CONFIG_PATH if you need a custom config path for pyorbital!") - pyorbital_config_path = os.getenv("PPP_CONFIG_DIR", PKG_CONFIG_DIR) + # XXX: Swap when pyorbital 1.9 is released + #LOGGER.warning( + # "The use of PPP_CONFIG_DIR is no longer supported!" + + # " Please use PYORBITAL_CONFIG_PATH if you need a custom config path for pyorbital!") + #LOGGER.debug("Using the package default for configuration: %s", PKG_CONFIG_DIR) + #return PKG_CONFIG_DIR + LOGGER.warning( + "The use of PPP_CONFIG_DIR is deprecated and will be removed in version 1.9!" + + " Please use PYORBITAL_CONFIG_PATH if you need a custom config path for pyorbital!") + pyorbital_config_path = os.getenv("PPP_CONFIG_DIR", PKG_CONFIG_DIR) else: pyorbital_config_path = os.getenv("PYORBITAL_CONFIG_PATH", PKG_CONFIG_DIR) From 38e2b3d4ecfd031802991ad484ed5343665155da Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 21 Nov 2024 10:27:12 -0600 Subject: [PATCH 78/89] Fix basic style issues in various files --- .github/workflows/deploy-sdist.yaml | 2 +- CHANGELOG.md | 2 +- README.md | 1 - changelog.rst | 3 --- doc/source/index.rst | 4 ++-- pyorbital/__init__.py | 1 - 6 files changed, 4 insertions(+), 9 deletions(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 23fa0ff8..410bb00c 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -22,4 +22,4 @@ jobs: uses: pypa/gh-action-pypi-publish@v1.12.2 with: user: __token__ - password: ${{ secrets.pypi_password }} \ No newline at end of file + password: ${{ secrets.pypi_password }} diff --git a/CHANGELOG.md b/CHANGELOG.md index 7f013ddb..67f97225 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -163,7 +163,7 @@ In this release 5 pull requests were closed. 
### Issues Closed * [Issue 63](https://github.com/pytroll/pyorbital/issues/63) - Runtime error in get_next_passes ([PR 64](https://github.com/pytroll/pyorbital/pull/64)) -* [Issue 62](https://github.com/pytroll/pyorbital/issues/62) - can this tool run +* [Issue 62](https://github.com/pytroll/pyorbital/issues/62) - can this tool run * [Issue 22](https://github.com/pytroll/pyorbital/issues/22) - get_next_passes returns max-elevation-time time not between rise & fall time ([PR 76](https://github.com/pytroll/pyorbital/pull/76)) In this release 3 issues were closed. diff --git a/README.md b/README.md index cc3102a6..87fe885e 100644 --- a/README.md +++ b/README.md @@ -12,4 +12,3 @@ This is the Pyorbital, a Python package for computing orbital parameters from TL files, and making various astronomical computations. It is part of the Pytroll project: http://pytroll.org - diff --git a/changelog.rst b/changelog.rst index 1c588c41..9824089e 100644 --- a/changelog.rst +++ b/changelog.rst @@ -531,6 +531,3 @@ v0.1.0 (2011-10-03) - Cleanup of astronomy file. [Martin Raspaud] - Added a readme file. [Martin Raspaud] - Added astronomy.py file. [Martin Raspaud] - - - diff --git a/doc/source/index.rst b/doc/source/index.rst index 45aee1ed..d93bd17b 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -15,7 +15,7 @@ the conda-forge conda channel. To install from PyPI in an existing environment: .. code-block:: bash pip install pyorbital - + Or in an existing conda-based environment: .. code-block:: bash @@ -31,7 +31,7 @@ from the latest in-development version on GitHub you can run: .. code-block:: bash pip install git+https://github.com/pytroll/pyorbital.git - + However, if you instead want to edit the source code and see the changes reflected when you run the code you can clone the git repository and install it in "editable" mode: diff --git a/pyorbital/__init__.py b/pyorbital/__init__.py index e5f39f2b..01cfa348 100644 --- a/pyorbital/__init__.py +++ b/pyorbital/__init__.py @@ -28,4 +28,3 @@ def dt2np(utc_time): return np.datetime64(utc_time) except ValueError: return utc_time.astype("datetime64[ns]") - From 2b8f3f98f10cf5bee925a6bc0499aa94d857915c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 21 Nov 2024 10:28:47 -0600 Subject: [PATCH 79/89] Ignore test data file from ruff formatting --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 45300036..77b5dd92 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,6 +64,7 @@ line_length = 120 [tool.ruff] line-length = 120 +extend-exclude = ["pyorbital/tests/SGP4-VER.TLE"] [tool.ruff.lint] # See https://docs.astral.sh/ruff/rules/ From bcef955f6143761ce82c8d6d47e2fa1ee836f555 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 21 Nov 2024 10:38:31 -0600 Subject: [PATCH 80/89] Exclude test data file from whitespace fixing --- .pre-commit-config.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b2b8efa8..db557ace 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,6 +10,7 @@ repos: rev: v5.0.0 hooks: - id: trailing-whitespace + exclude: pyorbital/tests/SGP4-VER.TLE - id: end-of-file-fixer - id: check-yaml args: [--unsafe] From d50ddd9e6005e6e39489322e425a9e34423c02d5 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 21 Nov 2024 10:42:33 -0600 Subject: [PATCH 81/89] Restore fetch_tles.py script in bin --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 
77b5dd92..4635135e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,6 +26,7 @@ classifiers = [ ] [project.scripts] +"fetch_tles.py" = "pyorbital.fetch_tles:run" fetch_tles = "pyorbital.fetch_tles:run" [project.urls] From ac018a038733e7120269bd5927ea62a396dce230 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 21 Nov 2024 17:51:31 +0100 Subject: [PATCH 82/89] Add test for printing content of the Tle object Signed-off-by: Adam.Dybbroe --- .github/workflows/deploy-sdist.yaml | 2 +- CHANGELOG.md | 2 +- README.md | 1 - changelog.rst | 3 -- doc/source/index.rst | 4 +- pyorbital/tests/SGP4-VER.TLE | 2 +- pyorbital/tests/test_tlefile.py | 68 ++++++++++++++++------------- 7 files changed, 43 insertions(+), 39 deletions(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 23fa0ff8..410bb00c 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -22,4 +22,4 @@ jobs: uses: pypa/gh-action-pypi-publish@v1.12.2 with: user: __token__ - password: ${{ secrets.pypi_password }} \ No newline at end of file + password: ${{ secrets.pypi_password }} diff --git a/CHANGELOG.md b/CHANGELOG.md index 7f013ddb..67f97225 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -163,7 +163,7 @@ In this release 5 pull requests were closed. ### Issues Closed * [Issue 63](https://github.com/pytroll/pyorbital/issues/63) - Runtime error in get_next_passes ([PR 64](https://github.com/pytroll/pyorbital/pull/64)) -* [Issue 62](https://github.com/pytroll/pyorbital/issues/62) - can this tool run +* [Issue 62](https://github.com/pytroll/pyorbital/issues/62) - can this tool run * [Issue 22](https://github.com/pytroll/pyorbital/issues/22) - get_next_passes returns max-elevation-time time not between rise & fall time ([PR 76](https://github.com/pytroll/pyorbital/pull/76)) In this release 3 issues were closed. diff --git a/README.md b/README.md index cc3102a6..87fe885e 100644 --- a/README.md +++ b/README.md @@ -12,4 +12,3 @@ This is the Pyorbital, a Python package for computing orbital parameters from TL files, and making various astronomical computations. It is part of the Pytroll project: http://pytroll.org - diff --git a/changelog.rst b/changelog.rst index 1c588c41..9824089e 100644 --- a/changelog.rst +++ b/changelog.rst @@ -531,6 +531,3 @@ v0.1.0 (2011-10-03) - Cleanup of astronomy file. [Martin Raspaud] - Added a readme file. [Martin Raspaud] - Added astronomy.py file. [Martin Raspaud] - - - diff --git a/doc/source/index.rst b/doc/source/index.rst index 45aee1ed..d93bd17b 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -15,7 +15,7 @@ the conda-forge conda channel. To install from PyPI in an existing environment: .. code-block:: bash pip install pyorbital - + Or in an existing conda-based environment: .. code-block:: bash @@ -31,7 +31,7 @@ from the latest in-development version on GitHub you can run: .. 
code-block:: bash pip install git+https://github.com/pytroll/pyorbital.git - + However, if you instead want to edit the source code and see the changes reflected when you run the code you can clone the git repository and install it in "editable" mode: diff --git a/pyorbital/tests/SGP4-VER.TLE b/pyorbital/tests/SGP4-VER.TLE index cfa31334..a13a1698 100644 --- a/pyorbital/tests/SGP4-VER.TLE +++ b/pyorbital/tests/SGP4-VER.TLE @@ -99,7 +99,7 @@ # # check error code 4 1 33333U 05037B 05333.02012661 .25992681 00000-0 24476-3 0 1534 2 33333 96.4736 157.9986 9950000 244.0492 110.6523 4.00004038 10708 0.0 150.0 5.00 -# # try and check error code 2 but this +# # try and check error code 2 but this 1 33334U 78066F 06174.85818871 .00000620 00000-0 10000-3 0 6809 2 33334 68.4714 236.1303 5602877 123.7484 302.5767 0.00001000 67521 0.0 1440.0 1.00 # # try to check error code 3 looks like ep never goes below zero, tied close to ecc diff --git a/pyorbital/tests/test_tlefile.py b/pyorbital/tests/test_tlefile.py index a0f8b3d1..e2a57338 100644 --- a/pyorbital/tests/test_tlefile.py +++ b/pyorbital/tests/test_tlefile.py @@ -45,12 +45,12 @@ read_platform_numbers, ) -line0 = "ISS (ZARYA)" -line1 = "1 25544U 98067A 08264.51782528 -.00002182 00000-0 -11606-4 0 2927" -line2 = "2 25544 51.6416 247.4627 0006703 130.5360 325.0288 15.72125391563537" +LINE0 = "ISS (ZARYA)" +LINE1 = "1 25544U 98067A 08264.51782528 -.00002182 00000-0 -11606-4 0 2927" +LINE2 = "2 25544 51.6416 247.4627 0006703 130.5360 325.0288 15.72125391563537" -line1_2 = "1 38771U 12049A 21137.30264622 .00000000 00000+0 -49996-5 0 00017" -line2_2 = "2 38771 98.7162 197.7716 0002383 106.1049 122.6344 14.21477797449453" +LINE1_2 = "1 38771U 12049A 21137.30264622 .00000000 00000+0 -49996-5 0 00017" +LINE2_2 = "2 38771 98.7162 197.7716 0002383 106.1049 122.6344 14.21477797449453" NOAA19_2LINES = """1 33591U 09005A 21355.91138073 .00000074 00000+0 65091-4 0 9998 @@ -65,16 +65,16 @@ "", "", "", - "" + line1 + "", - "" + line2 + "", + "" + LINE1 + "", + "" + LINE2 + "", "", "", "", "", "", "", - "" + line1_2 + "", - "" + line2_2 + "", + "" + LINE1_2 + "", + "" + LINE2_2 + "", "", "", "", @@ -330,7 +330,7 @@ def check_example(self, tle): def test_from_line(self): """Test parsing from line elements.""" - tle = Tle("ISS (ZARYA)", line1=line1, line2=line2) + tle = Tle("ISS (ZARYA)", line1=LINE1, line2=LINE2) self.check_example(tle) def test_from_file(self): @@ -339,7 +339,7 @@ def test_from_file(self): from tempfile import mkstemp filehandle, filename = mkstemp() try: - write(filehandle, "\n".join([line0, line1, line2]).encode("utf-8")) + write(filehandle, "\n".join([LINE0, LINE1, LINE2]).encode("utf-8")) close(filehandle) tle = Tle("ISS (ZARYA)", filename) self.check_example(tle) @@ -436,8 +436,8 @@ def test_fetch_plain_tle_two_sources(self, requests): res = self.dl.fetch_plain_tle() assert "source_1" in res assert len(res["source_1"]) == 3 - assert res["source_1"][0].line1 == line1 - assert res["source_1"][0].line2 == line2 + assert res["source_1"][0].line1 == LINE1 + assert res["source_1"][0].line2 == LINE2 assert "source_2" in res assert len(res["source_2"]) == 1 assert mock.call("mocked_url_1", timeout=15) in requests.get.mock_calls @@ -520,7 +520,7 @@ def test_fetch_spacetrack_success(self, requests): mock_session.get = mock_get requests.Session.return_value.__enter__.return_value = mock_session - tle_text = "\n".join((line0, line1, line2)) + tle_text = "\n".join((LINE0, LINE1, LINE2)) self.dl.config["platforms"] = { 25544: "ISS" } @@ -532,14 +532,14 @@ def 
test_fetch_spacetrack_success(self, requests): mock_get.return_value.text = tle_text res = self.dl.fetch_spacetrack() assert len(res) == 1 - assert res[0].line1 == line1 - assert res[0].line2 == line2 + assert res[0].line1 == LINE1 + assert res[0].line2 == LINE2 def test_read_tle_files(self): """Test reading TLE files from a file system.""" from tempfile import TemporaryDirectory - tle_text = "\n".join((line0, line1, line2)) + tle_text = "\n".join((LINE0, LINE1, LINE2)) save_dir = TemporaryDirectory() with save_dir: @@ -557,8 +557,8 @@ def test_read_tle_files(self): } res = self.dl.read_tle_files() assert len(res) == 2 - assert res[0].line1 == line1 - assert res[0].line2 == line2 + assert res[0].line1 == LINE1 + assert res[0].line2 == LINE2 def test_read_xml_admin_messages(self): """Test reading TLE files from a file system.""" @@ -583,16 +583,16 @@ def test_read_xml_admin_messages(self): # There are two sets of TLEs in the file. And as the same file is # parsed twice, 4 TLE objects are returned assert len(res) == 4 - assert res[0].line1 == line1 - assert res[0].line2 == line2 - assert res[1].line1 == line1_2 - assert res[1].line2 == line2_2 + assert res[0].line1 == LINE1 + assert res[0].line2 == LINE2 + assert res[1].line1 == LINE1_2 + assert res[1].line2 == LINE2_2 def _get_req_response(code): req = mock.MagicMock() req.status_code = code - req.text = "\n".join((line0, line1, line2)) + req.text = "\n".join((LINE0, LINE1, LINE2)) return req @@ -615,7 +615,7 @@ def setUp(self): "write_always": False } self.db = SQLiteTLE(self.db_fname, self.platforms, self.writer_config) - self.tle = Tle("ISS", line1=line1, line2=line2) + self.tle = Tle("ISS", line1=LINE1, line2=LINE2) def tearDown(self): """Clean temporary files.""" @@ -672,7 +672,7 @@ def test_update_db(self): # epoch assert data[0][0] == "2008-09-20T12:25:40.104192" # TLE - assert data[0][1] == "\n".join((line1, line2)) + assert data[0][1] == "\n".join((LINE1, LINE2)) # Date when the data were added should be close to current time date_added = datetime.datetime.strptime(data[0][2], ISO_TIME_FORMAT) now = datetime.datetime.utcnow() @@ -724,8 +724,8 @@ def test_write_tle_txt(self): data = fid.read().split("\n") assert len(data) == 3 assert "ISS" in data[0] - assert data[1] == line1 - assert data[2] == line2 + assert data[1] == LINE1 + assert data[2] == LINE2 # Call the writing again, nothing should be written. 
In # real-life this assumes a re-run has been done without new @@ -747,5 +747,13 @@ def test_write_tle_txt(self): with open(files[1], "r") as fid: data = fid.read().split("\n") assert len(data) == 2 - assert data[0] == line1 - assert data[1] == line2 + assert data[0] == LINE1 + assert data[1] == LINE2 + +def test_tle_instance_printing(): + """Test the print the Tle instance.""" + tle = Tle("ISS", line1=LINE1, line2=LINE2) + + expected = "{'arg_perigee': 130.536,\n 'bstar': -1.1606e-05,\n 'classification': 'U',\n 'element_number': 292,\n 'ephemeris_type': 0,\n 'epoch': np.datetime64('2008-09-20T12:25:40.104192'),\n 'epoch_day': 264.51782528,\n 'epoch_year': '08',\n 'excentricity': 0.0006703,\n 'id_launch_number': '067',\n 'id_launch_piece': 'A ',\n 'id_launch_year': '98',\n 'inclination': 51.6416,\n 'mean_anomaly': 325.0288,\n 'mean_motion': 15.72125391,\n 'mean_motion_derivative': -2.182e-05,\n 'mean_motion_sec_derivative': 0.0,\n 'orbit': 56353,\n 'right_ascension': 247.4627,\n 'satnumber': '25544'}" # noqa + + assert str(tle) == expected From 07a0f9b485031313ef88356bb94ec4ab4cdeeac4 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 21 Nov 2024 18:12:12 +0100 Subject: [PATCH 83/89] Fix some ruff issues Signed-off-by: Adam.Dybbroe --- pyorbital/orbital.py | 1 + pyorbital/tests/test_orbital.py | 22 ++++++++++------------ pyorbital/tests/test_tlefile.py | 1 - 3 files changed, 11 insertions(+), 13 deletions(-) diff --git a/pyorbital/orbital.py b/pyorbital/orbital.py index bba8bd98..6dc76053 100644 --- a/pyorbital/orbital.py +++ b/pyorbital/orbital.py @@ -170,6 +170,7 @@ def get_last_an_time(self, utc_time): """Calculate time of last ascending node relative to the specified time.""" # Propagate backwards to ascending node dt = np.timedelta64(10, "m") + t_old = np.datetime64(_get_tz_unaware_utctime(utc_time)) t_new = t_old - dt pos0, vel0 = self.get_position(t_old, normalize=False) diff --git a/pyorbital/tests/test_orbital.py b/pyorbital/tests/test_orbital.py index 1cc46b85..3f4f032d 100644 --- a/pyorbital/tests/test_orbital.py +++ b/pyorbital/tests/test_orbital.py @@ -260,8 +260,8 @@ def test_xarray_with_numpy(self): from pyorbital import orbital - def _xarr_conv(input): - return xr.DataArray(input) + def _xarr_conv(input_array): + return xr.DataArray(input_array) sat_lon = _xarr_conv(self.sat_lon) sat_lat = _xarr_conv(self.sat_lat) sat_alt = _xarr_conv(self.sat_alt) @@ -281,8 +281,8 @@ def test_xarray_with_dask(self): from pyorbital import orbital - def _xarr_conv(input): - return xr.DataArray(da.from_array(input, chunks=2)) + def _xarr_conv(input_array): + return xr.DataArray(da.from_array(input_array, chunks=2)) sat_lon = _xarr_conv(self.sat_lon) sat_lat = _xarr_conv(self.sat_lat) sat_alt = _xarr_conv(self.sat_alt) @@ -359,8 +359,8 @@ def test_xarray_with_numpy(self): from pyorbital import orbital - def _xarr_conv(input): - return xr.DataArray(input) + def _xarr_conv(input_array): + return xr.DataArray(input_array) sat_lon = _xarr_conv(self.sat_lon) sat_lat = _xarr_conv(self.sat_lat) sat_alt = _xarr_conv(self.sat_alt) @@ -381,8 +381,8 @@ def test_xarray_with_dask(self): from pyorbital import orbital - def _xarr_conv(input): - return xr.DataArray(da.from_array(input, chunks=2)) + def _xarr_conv(input_array): + return xr.DataArray(da.from_array(input_array, chunks=2)) sat_lon = _xarr_conv(self.sat_lon) sat_lat = _xarr_conv(self.sat_lat) sat_alt = _xarr_conv(self.sat_alt) @@ -444,8 +444,6 @@ def test_get_last_an_time_wrong_input(dtime): line1="1 43013U 17073A 24176.73674251 .00000000 
00000+0 11066-3 0 00014", line2="2 43013 98.7060 114.5340 0001454 139.3958 190.7541 14.19599847341971") - with pytest.raises(ValueError) as exec_info: - _ = orb.get_last_an_time(dtime) - expected = "UTC time expected! Parsing a timezone aware datetime object requires it to be UTC!" - assert str(exec_info.value) == expected + with pytest.raises(ValueError, match=expected) as exec_info: + _ = orb.get_last_an_time(dtime) diff --git a/pyorbital/tests/test_tlefile.py b/pyorbital/tests/test_tlefile.py index 418ef44e..c04572d1 100644 --- a/pyorbital/tests/test_tlefile.py +++ b/pyorbital/tests/test_tlefile.py @@ -31,7 +31,6 @@ import unittest from contextlib import suppress from unittest import mock -from unittest.mock import patch import pytest From b52bc2d07eb9d301e7e55835c89bbdb792916f80 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 21 Nov 2024 11:23:15 -0600 Subject: [PATCH 84/89] Remove unnecessary dateutil dependency --- .pre-commit-config.yaml | 1 - pyorbital/tests/test_orbital.py | 4 +--- pyproject.toml | 1 - 3 files changed, 1 insertion(+), 5 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index db557ace..ae519516 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,6 @@ repos: - types-PyYAML - types-requests - types-pytz - - types-python-dateutil args: ["--python-version", "3.10", "--ignore-missing-imports"] - repo: https://github.com/pycqa/isort rev: 5.13.2 diff --git a/pyorbital/tests/test_orbital.py b/pyorbital/tests/test_orbital.py index 3f4f032d..0e75cc83 100644 --- a/pyorbital/tests/test_orbital.py +++ b/pyorbital/tests/test_orbital.py @@ -404,14 +404,12 @@ def test_63(self): """Check that no runtimewarning is raised, #63.""" import warnings - from dateutil import parser - from pyorbital.orbital import Orbital warnings.filterwarnings("error") orb = Orbital("Suomi-NPP", line1="1 37849U 11061A 19292.84582509 .00000011 00000-0 25668-4 0 9997", line2="2 37849 98.7092 229.3263 0000715 98.5313 290.6262 14.19554485413345") - orb.get_next_passes(parser.parse("2019-10-21 16:00:00"), 12, 123.29736, -13.93763, 0) + orb.get_next_passes(datetime(2019, 10, 21, 16, 0, 0), 12, 123.29736, -13.93763, 0) warnings.filterwarnings("default") diff --git a/pyproject.toml b/pyproject.toml index 4635135e..788f65ac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,6 @@ dependencies = ["numpy>=1.19.0", "scipy", "requests", "pytz", - "python-dateutil", "defusedxml", ] readme = "README.md" From 5b861618df5b6390afcaa67d0af337003a420e8d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 21 Nov 2024 11:23:54 -0600 Subject: [PATCH 85/89] Add version.py to gitignore --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index 7a2d2e6d..629beb40 100644 --- a/.gitignore +++ b/.gitignore @@ -39,3 +39,6 @@ nosetests.xml # rope .ropeproject + +pyorbital/version.py + From 8add986dc7f4c9aa168fa45a74be0756e5ed9a71 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 21 Nov 2024 18:25:38 +0100 Subject: [PATCH 86/89] Fix further ruff issues. 
Signed-off-by: Adam.Dybbroe --- pyorbital/geoloc_instrument_definitions.py | 18 +++++++++++------- pyorbital/tests/test_astronomy.py | 9 ++++++++- pyorbital/tests/test_orbital.py | 2 +- 3 files changed, 20 insertions(+), 9 deletions(-) diff --git a/pyorbital/geoloc_instrument_definitions.py b/pyorbital/geoloc_instrument_definitions.py index 28cfb2a8..3c8f311d 100644 --- a/pyorbital/geoloc_instrument_definitions.py +++ b/pyorbital/geoloc_instrument_definitions.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2013 - 2021 PyTroll Community +# Copyright (c) 2013 - 2021, 2024 PyTroll Community # Author(s): @@ -107,6 +107,7 @@ def avhrr_gac(scan_times, scan_points, def avhrr_all_geom(scans_nb): + """Get all the AVHRR scan points.""" # we take all pixels scan_points = np.arange(2048) return avhrr(scans_nb, scan_points) @@ -118,7 +119,7 @@ def avhrr_all_geom(scans_nb): def avhrr_edge_geom(scans_nb): - # we take only edge pixels + """Getting the AVHRR scan edges only.""" scan_points = np.array([0, 2047]) return avhrr(scans_nb, scan_points) @@ -129,7 +130,7 @@ def avhrr_edge_geom(scans_nb): def avhrr_40_geom(scans_nb): - # we take only every 40th pixel + """Description of the AVHRR scan in terms of every 40th pixel per line.""" scan_points = np.arange(24, 2048, 40) return avhrr(scans_nb, scan_points) @@ -143,6 +144,7 @@ def avhrr_40_geom(scans_nb): def viirs(scans_nb, scan_indices=slice(0, None), chn_pixels=6400, scan_lines=32, scan_step=1): """Describe VIIRS instrument geometry, I-band by default. + VIIRS scans several lines simultaneously (there are 16 detectors for each M-band, 32 detectors for each I-band) so the scan angles (and times) are two-dimensional arrays, contrary to AVHRR for example. @@ -196,7 +198,7 @@ def viirs(scans_nb, scan_indices=slice(0, None), def viirs_edge_geom(scans_nb): - # we take only edge pixels + """Definition of the VIIRS scane edges.""" scan_indices = [0, -1] return viirs(scans_nb, scan_indices) @@ -358,7 +360,8 @@ def hirs4(scans_nb, scan_points=None): ################################################################ def atms(scans_nb, scan_points=None): - """Describe ATMS instrument geometry + """Describe ATMS instrument geometry. + See: - https://dtcenter.org/com-GSI/users/docs/presentations/2013_workshop/ @@ -507,9 +510,10 @@ def olci(scans_nb, scan_points=None): def ascat(scan_nb, scan_points=None): - """ASCAT make two scans one to the left and one to the right of the - sub-satellite track. + """Describing the ASCAT scanning geometry. + make two scans one to the left and one to the right of the sub-satellite + track. """ if scan_points is None: scan_len = 42 # samples per scan diff --git a/pyorbital/tests/test_astronomy.py b/pyorbital/tests/test_astronomy.py index 6eba33a8..8e4895ba 100644 --- a/pyorbital/tests/test_astronomy.py +++ b/pyorbital/tests/test_astronomy.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2013, 2014, 2022 Pytroll Community +# Copyright (c) 2013, 2014, 2022, 2024 Pytroll Community # Author(s): @@ -20,6 +20,9 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
+"""Unit testing the Astronomy methods and functions.""" + + from datetime import datetime import dask.array as da @@ -36,21 +39,25 @@ def _create_dask_array(input_list: list, dtype: npt.DTypeLike) -> da.Array: + """Create a dummy dask array for testing.""" np_arr = np.array(input_list, dtype=dtype) return da.from_array(np_arr) def _create_xarray_numpy(input_list: list, dtype: npt.DTypeLike) -> DataArray: + """Create a dummy xarray DataArray for testing.""" np_arr = np.array(input_list, dtype=dtype) return DataArray(np_arr) def _create_xarray_dask(input_list: list, dtype: npt.DTypeLike) -> DataArray: + """Create a dummy daskified xarray DataArray for testing.""" dask_arr = _create_dask_array(input_list, dtype) return DataArray(dask_arr) class TestAstronomy: + """Testing the Astronomy class.""" @pytest.mark.parametrize( ("dt", "exp_jdays", "exp_j2000"), diff --git a/pyorbital/tests/test_orbital.py b/pyorbital/tests/test_orbital.py index 3f4f032d..37d2bc6a 100644 --- a/pyorbital/tests/test_orbital.py +++ b/pyorbital/tests/test_orbital.py @@ -445,5 +445,5 @@ def test_get_last_an_time_wrong_input(dtime): line2="2 43013 98.7060 114.5340 0001454 139.3958 190.7541 14.19599847341971") expected = "UTC time expected! Parsing a timezone aware datetime object requires it to be UTC!" - with pytest.raises(ValueError, match=expected) as exec_info: + with pytest.raises(ValueError, match=expected): _ = orb.get_last_an_time(dtime) From b4c594c13922e9caa6823a6144bc5615f9ece5c9 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 21 Nov 2024 11:32:29 -0600 Subject: [PATCH 87/89] Fix whitespace in gitignore --- .gitignore | 1 - 1 file changed, 1 deletion(-) diff --git a/.gitignore b/.gitignore index 629beb40..f9221bba 100644 --- a/.gitignore +++ b/.gitignore @@ -41,4 +41,3 @@ nosetests.xml .ropeproject pyorbital/version.py - From 830f2edc7ca6493ebb82c9e6a7b665ee8fb74046 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 21 Nov 2024 12:00:20 -0600 Subject: [PATCH 88/89] Reduce complexity of get_next_passes method --- pyorbital/orbital.py | 142 ++++++++++++++++++++++--------------------- 1 file changed, 74 insertions(+), 68 deletions(-) diff --git a/pyorbital/orbital.py b/pyorbital/orbital.py index 6dc76053..3cf99928 100644 --- a/pyorbital/orbital.py +++ b/pyorbital/orbital.py @@ -27,6 +27,7 @@ import logging import warnings from datetime import datetime, timedelta +from functools import partial import numpy as np import pytz @@ -352,60 +353,6 @@ def get_next_passes(self, utc_time, length, lon, lat, alt, tol=0.001, horizon=0) :return: [(rise-time, fall-time, max-elevation-time), ...] 
""" - def elevation(minutes): - """Compute the elevation.""" - return self.get_observer_look(utc_time + - timedelta( - minutes=np.float64(minutes)), - lon, lat, alt)[1] - horizon - - def elevation_inv(minutes): - """Compute the inverse of elevation.""" - return -elevation(minutes) - - def get_root(fun, start, end, tol=0.01): - """Root finding scheme.""" - x_0 = end - x_1 = start - fx_0 = fun(end) - fx_1 = fun(start) - if abs(fx_0) < abs(fx_1): - fx_0, fx_1 = fx_1, fx_0 - x_0, x_1 = x_1, x_0 - - x_n = optimize.brentq(fun, x_0, x_1) - return x_n - - def get_max_parab(fun, start, end, tol=0.01): - """Successive parabolic interpolation.""" - a = float(start) - c = float(end) - b = (a + c) / 2.0 - - f_a = fun(a) - f_b = fun(b) - f_c = fun(c) - - x = b - with np.errstate(invalid="raise"): - while True: - try: - x = x - 0.5 * (((b - a) ** 2 * (f_b - f_c) - - (b - c) ** 2 * (f_b - f_a)) / - ((b - a) * (f_b - f_c) - (b - c) * (f_b - f_a))) - except FloatingPointError: - return b - if abs(b - x) <= tol: - return x - f_x = fun(x) - # sometimes the estimation diverges... return best guess - if f_x > f_b: - logger.info("Parabolic interpolation did not converge, returning best guess so far.") - return b - - a, b, c = (a + x) / 2.0, x, (x + c) / 2.0 - f_a, f_b, f_c = fun(a), f_x, fun(c) - # every minute times = utc_time + np.array([timedelta(minutes=minutes) for minutes in range(length * 60)]) @@ -413,9 +360,11 @@ def get_max_parab(fun, start, end, tol=0.01): zcs = np.where(np.diff(np.sign(elev)))[0] res = [] risetime = None + risemins = None + elev_func = partial(self._elevation, utc_time, lon, lat, alt, horizon) + elev_inv_func = partial(self._elevation_inv, utc_time, lon, lat, alt, horizon) for guess in zcs: - horizon_mins = get_root( - elevation, guess, guess + 1.0, tol=tol / 60.0) + horizon_mins = _get_root(elev_func, guess, guess + 1.0, tol=tol / 60.0) horizon_time = utc_time + timedelta(minutes=horizon_mins) if elev[guess] < 0: risetime = horizon_time @@ -423,18 +372,18 @@ def get_max_parab(fun, start, end, tol=0.01): else: falltime = horizon_time fallmins = horizon_mins - if risetime: - int_start = max(0, int(np.floor(risemins))) - int_end = min(len(elev), int(np.ceil(fallmins) + 1)) - middle = int_start + np.argmax(elev[int_start:int_end]) - highest = utc_time + \ - timedelta(minutes=get_max_parab( - elevation_inv, - max(risemins, middle - 1), min(fallmins, middle + 1), - tol=tol / 60.0 - )) - res += [(risetime, falltime, highest)] - risetime = None + if risetime is None: + continue + int_start = max(0, int(np.floor(risemins))) + int_end = min(len(elev), int(np.ceil(fallmins) + 1)) + middle = int_start + np.argmax(elev[int_start:int_end]) + highest = utc_time + \ + timedelta(minutes=_get_max_parab( + elev_inv_func, + max(risemins, middle - 1), min(fallmins, middle + 1), + tol=tol / 60.0 + )) + res += [(risetime, falltime, highest)] return res def _get_time_at_horizon(self, utc_time, obslon, obslat, **kwargs): @@ -563,6 +512,63 @@ def _nprime(time_f): return tcross + def _elevation(self, utc_time, lon, lat, alt, horizon, minutes): + """Compute the elevation.""" + return self.get_observer_look(utc_time + + timedelta(minutes=np.float64(minutes)), + lon, lat, alt)[1] - horizon + + + def _elevation_inv(self, utc_time, lon, lat, alt, horizon, minutes): + """Compute the inverse of elevation.""" + return -self._elevation(utc_time, lon, lat, alt, horizon, minutes) + + +def _get_root(fun, start, end, tol=0.01): + """Root finding scheme.""" + x_0 = end + x_1 = start + fx_0 = fun(end) + fx_1 = fun(start) + 
if abs(fx_0) < abs(fx_1): + fx_0, fx_1 = fx_1, fx_0 + x_0, x_1 = x_1, x_0 + + x_n = optimize.brentq(fun, x_0, x_1) + return x_n + + +def _get_max_parab(fun, start, end, tol=0.01): + """Successive parabolic interpolation.""" + a = float(start) + c = float(end) + b = (a + c) / 2.0 + + f_a = fun(a) + f_b = fun(b) + f_c = fun(c) + + x = b + with np.errstate(invalid="raise"): + while True: + try: + x = x - 0.5 * (((b - a) ** 2 * (f_b - f_c) + - (b - c) ** 2 * (f_b - f_a)) / + ((b - a) * (f_b - f_c) - (b - c) * (f_b - f_a))) + except FloatingPointError: + return b + if abs(b - x) <= tol: + return x + f_x = fun(x) + # sometimes the estimation diverges... return best guess + if f_x > f_b: + logger.info("Parabolic interpolation did not converge, returning best guess so far.") + return b + + a, b, c = (a + x) / 2.0, x, (x + c) / 2.0 + f_a, f_b, f_c = fun(a), f_x, fun(c) + + class OrbitElements(object): """Class holding the orbital elements.""" From e27a1d48fab4e036df0ff599de10cfbb8a88de5e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 21 Nov 2024 12:10:51 -0600 Subject: [PATCH 89/89] Reduce complexity --- pyorbital/orbital.py | 2 +- pyorbital/tests/test_aiaa.py | 91 +++++++++++++++++++----------------- 2 files changed, 48 insertions(+), 45 deletions(-) diff --git a/pyorbital/orbital.py b/pyorbital/orbital.py index 3cf99928..aaf2d0ae 100644 --- a/pyorbital/orbital.py +++ b/pyorbital/orbital.py @@ -625,7 +625,7 @@ def __init__(self, tle): class _SGDP4(object): """Class for the SGDP4 computations.""" - def __init__(self, orbit_elements): + def __init__(self, orbit_elements): # noqa: C901 """Initialize class.""" self.mode = None diff --git a/pyorbital/tests/test_aiaa.py b/pyorbital/tests/test_aiaa.py index 290c7187..9ec2596b 100644 --- a/pyorbital/tests/test_aiaa.py +++ b/pyorbital/tests/test_aiaa.py @@ -98,49 +98,52 @@ def test_aiaa(self): if test_line.startswith("1 "): line1 = test_line if test_line.startswith("2 "): - line2 = test_line[:69] - times = str.split(test_line[69:]) - times = np.arange(float(times[0]), - float(times[1]) + 1, - float(times[2])) - if test_name.startswith("# SL-14 DEB"): - # FIXME: we have to handle decaying satellites! 
- test_line = f__.readline() - continue - - try: - o = LineOrbital("unknown", line1, line2) - except NotImplementedError: - test_line = f__.readline() - continue - except ChecksumError: - assert test_line.split()[1] in ["33333", "33334", "33335"] - for delay in times: - try: - test_time = delay.astype( - "timedelta64[m]") + o.tle.epoch - pos, vel = o.get_position(test_time, False) - res = get_results( - int(o.tle.satnumber), float(delay)) - except NotImplementedError: - # Skipping deep-space - break - # except ValueError, e: - # from warnings import warn - # warn(test_name + ' ' + str(e)) - # break - - delta_pos = 5e-6 # km = 5 mm - delta_vel = 5e-9 # km/s = 5 um/s - delta_time = 1e-3 # 1 millisecond - assert abs(res[0] - pos[0]) < delta_pos - assert abs(res[1] - pos[1]) < delta_pos - assert abs(res[2] - pos[2]) < delta_pos - assert abs(res[3] - vel[0]) < delta_vel - assert abs(res[4] - vel[1]) < delta_vel - assert abs(res[5] - vel[2]) < delta_vel - if res[6] is not None: - dt = astronomy._days(res[6] - test_time) * 24 * 60 - assert abs(dt) < delta_time + _check_line2(f__, test_name, line1, test_line) test_line = f__.readline() + + +def _check_line2(f__, test_name: str, line1: str, test_line: str) -> None: + line2 = test_line[:69] + times_str = str.split(test_line[69:]) + times = np.arange(float(times_str[0]), + float(times_str[1]) + 1, + float(times_str[2])) + if test_name.startswith("# SL-14 DEB"): + # FIXME: we have to handle decaying satellites! + return + + try: + o = LineOrbital("unknown", line1, line2) + except NotImplementedError: + return + except ChecksumError: + assert test_line.split()[1] in ["33333", "33334", "33335"] + return + + for delay in times: + try: + test_time = delay.astype("timedelta64[m]") + o.tle.epoch + pos, vel = o.get_position(test_time, False) + res = get_results( + int(o.tle.satnumber), float(delay)) + except NotImplementedError: + # Skipping deep-space + break + # except ValueError, e: + # from warnings import warn + # warn(test_name + ' ' + str(e)) + # break + + delta_pos = 5e-6 # km = 5 mm + delta_vel = 5e-9 # km/s = 5 um/s + delta_time = 1e-3 # 1 millisecond + assert abs(res[0] - pos[0]) < delta_pos + assert abs(res[1] - pos[1]) < delta_pos + assert abs(res[2] - pos[2]) < delta_pos + assert abs(res[3] - vel[0]) < delta_vel + assert abs(res[4] - vel[1]) < delta_vel + assert abs(res[5] - vel[2]) < delta_vel + if res[6] is not None: + dt = astronomy._days(res[6] - test_time) * 24 * 60 + assert abs(dt) < delta_time
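
Editorial note (not part of the patches above): the refactor keeps _elevation and _elevation_inv as private methods, moves _get_root and _get_max_parab to module level, and splits the per-TLE checks of the AIAA test into _check_line2, leaving the noqa: C901 on _SGDP4.__init__ as the only remaining complexity waiver. The sketch below is a minimal, self-contained illustration of the two-step pass refinement those helpers implement: a coarse scan brackets the horizon crossings, a bracketing root finder (brentq, which _get_root wraps) pins down rise and set, and successive parabolic interpolation locates the culmination. It is not pyorbital code: elevation_profile, parabolic_max, the 15-minute sinusoidal pass, the 5-degree horizon offset and the tolerances are assumptions chosen purely for demonstration, with parabolic_max mirroring the _get_max_parab helper shown in the diff.

import numpy as np
from scipy import optimize


def elevation_profile(minutes):
    """Hypothetical elevation above the horizon (degrees) for a 15-minute pass."""
    return 40.0 * np.sin(np.pi * minutes / 15.0) - 5.0


def parabolic_max(fun, start, end, tol=0.01):
    """Minimise fun on [start, end] by successive parabolic interpolation.

    The caller passes the negated elevation, so the minimum of fun is the
    elevation maximum (the same convention as _get_max_parab above).
    """
    a, c = float(start), float(end)
    b = (a + c) / 2.0
    f_a, f_b, f_c = fun(a), fun(b), fun(c)
    x = b
    with np.errstate(invalid="raise"):
        while True:
            try:
                # Vertex of the parabola through (a, f_a), (b, f_b), (c, f_c).
                x = x - 0.5 * (((b - a) ** 2 * (f_b - f_c)
                                - (b - c) ** 2 * (f_b - f_a)) /
                               ((b - a) * (f_b - f_c) - (b - c) * (f_b - f_a)))
            except FloatingPointError:
                # Degenerate parabola (0/0): settle for the best guess so far.
                return b
            if abs(b - x) <= tol:
                return x
            f_x = fun(x)
            if f_x > f_b:
                # The estimate diverged; keep the best bracket midpoint found.
                return b
            a, b, c = (a + x) / 2.0, x, (x + c) / 2.0
            f_a, f_b, f_c = fun(a), f_x, fun(c)


# Step 1: coarse one-minute scan, locating sign changes of the elevation,
# just as get_next_passes does before refining them.
minutes = np.arange(0.0, 16.0)
elev = elevation_profile(minutes)
crossings = np.where(np.diff(np.sign(elev)))[0]

# Step 2: refine each crossing with the root finder, then search for the
# culmination between the refined rise and set times.
rise, fall = [optimize.brentq(elevation_profile, m, m + 1.0) for m in crossings]
peak = parabolic_max(lambda m: -elevation_profile(m), rise, fall, tol=1e-3)
print(f"rise {rise:.2f} min, culmination {peak:.2f} min "
      f"({elevation_profile(peak):.1f} deg), set {fall:.2f} min")

Running the sketch prints rise, culmination and set times of roughly 0.60, 7.50 and 14.40 minutes for the synthetic pass. Keeping the helpers at module level is what makes this kind of isolated exercise (and unit testing) possible without constructing an Orbital instance, in line with the complexity reduction the last commit applies to the AIAA test.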