[pre-commit.ci] pre-commit autoupdate #572

Merged · 2 commits · Jan 29, 2024
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -2,7 +2,7 @@
 repos:
   # Normalise all Python code. (Black + isort + pyupgrade + autoflake)
   - repo: https://github.com/Zac-HD/shed
-    rev: 2023.6.1
+    rev: 2024.1.1
     hooks:
       - id: shed
   # Python Linting
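The `rev` bump above is the only hand-relevant change in this PR; every hunk below is mechanical reformatting produced by the updated hook. shed bundles the tools named in the config comment (Black, isort, pyupgrade, autoflake), and the 2024.1.1 release evidently picks up Black's 2024 stable style, which is what the rest of the diff reflects. To reproduce locally: `pre-commit autoupdate` bumps each hook's `rev` to its latest tag, and `pre-commit run --all-files` re-applies the hooks across the repository.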
1 change: 0 additions & 1 deletion cubedash/_filters.py
@@ -2,7 +2,6 @@
 Common global filters for templates.
 """

-
 import calendar
 import logging
 from datetime import datetime
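This hunk, together with the single `+` line in each test and utility module further down, reflects one rule: the new style normalises the gap between a module docstring and the first statement to exactly one blank line. A minimal before/after sketch on a hypothetical module:

    # before.py — zero (or, as in _filters.py above, several) blank lines
    """Frobnicate the datacube."""
    import os

    # after.py — as rewritten by the updated hook
    """Frobnicate the datacube."""

    import os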
8 changes: 5 additions & 3 deletions cubedash/_model.py
@@ -38,9 +38,11 @@
 if os.getenv("SENTRY_DSN"):
     sentry_sdk.init(
         dsn=os.getenv("SENTRY_DSN"),
-        environment=os.getenv("SENTRY_ENV_TAG")
-        if os.getenv("SENTRY_ENV_TAG")
-        else "dev-explorer",
+        environment=(
+            os.getenv("SENTRY_ENV_TAG")
+            if os.getenv("SENTRY_ENV_TAG")
+            else "dev-explorer"
+        ),
         integrations=[
             FlaskIntegration(),
         ],
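This is the most common pattern in the diff (it recurs in cubedash/_pages.py, cubedash/_stac.py, cubedash/logs.py, and cubedash/summary/_stores.py): a conditional expression that spans several lines is now wrapped in its own parentheses instead of hanging bare off the keyword argument. The two layouts are semantically identical; a runnable sketch using the names from this hunk:

    import os

    # The parenthesised layout produced by the new style; behaviour unchanged.
    environment = (
        os.getenv("SENTRY_ENV_TAG")
        if os.getenv("SENTRY_ENV_TAG")
        else "dev-explorer"
    )

    # Aside (not part of this PR): since both branches treat an empty string
    # like an unset variable, this is equivalent to
    #   environment = os.getenv("SENTRY_ENV_TAG") or "dev-explorer"
    print(environment)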
12 changes: 6 additions & 6 deletions cubedash/_pages.py
@@ -116,9 +116,11 @@ def product_page(
         datasets_geojson=None,  # _model.get_datasets_geojson(product_name, year, month, day),
         footprint_geojson=_model.get_footprint_geojson(product_name, year, month, day),
         product=product,
-        product_region_info=_model.STORE.get_product_region_info(product_name)
-        if region_geojson
-        else None,
+        product_region_info=(
+            _model.STORE.get_product_region_info(product_name)
+            if region_geojson
+            else None
+        ),
         # Summary for the whole product
         product_summary=product_summary,
         # Summary for the users' currently selected filters.
@@ -425,9 +427,7 @@ def timeline_page(product_name: str):
     return redirect(url_for("product_page", product_name=product_name))


-def _load_product(
-    product_name, year, month, day
-) -> Tuple[
+def _load_product(product_name, year, month, day) -> Tuple[
     DatasetType,
     ProductSummary,
     TimePeriodOverview,
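The `_load_product` hunk shows a second rule: when a signature overflows only because of its return annotation, the parameter list now stays on one line and the split moves inside the annotation's brackets. A sketch with hypothetical names and types:

    from typing import Tuple

    # Old layout split the short parameter list across lines; the new layout
    # splits inside the return annotation instead, as in _load_product above.
    def load_counts(product_name: str, year: int, month: int) -> Tuple[
        str,
        int,
    ]:
        return product_name, year

    print(load_counts("ls8_ard", 2024, 1))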
28 changes: 16 additions & 12 deletions cubedash/_stac.py
@@ -196,18 +196,22 @@ def as_stac_item(dataset: DatasetItem) -> pystac.Item:
                 "odc:processing_datetime": utc(dataset.creation_time),
             }
         ),
-        measurements={
-            name: _band_to_measurement(
-                b,
-                dataset_location=ds.uris[0] if ds is not None and ds.uris else None,
-            )
-            for name, b in ds.measurements.items()
-        }
-        if ds is not None
-        else {},
-        accessories=_accessories_from_eo1(ds.metadata_doc)
-        if ds is not None
-        else {},
+        measurements=(
+            {
+                name: _band_to_measurement(
+                    b,
+                    dataset_location=(
+                        ds.uris[0] if ds is not None and ds.uris else None
+                    ),
+                )
+                for name, b in ds.measurements.items()
+            }
+            if ds is not None
+            else {}
+        ),
+        accessories=(
+            _accessories_from_eo1(ds.metadata_doc) if ds is not None else {}
+        ),
         # TODO: Fill in lineage. The datacube API only gives us full datasets, which is
         # expensive. We only need a list of IDs here.
         lineage={},
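Beyond pure style, the parentheses added in `as_stac_item` fix a real readability trap: in the old layout it was easy to misread which expression the trailing `if ds is not None` / `else {}` belonged to. Parenthesised, it is explicit that the whole dict comprehension is the `if` arm. A self-contained sketch (names hypothetical):

    ds = None  # the backing ODC dataset may be absent

    measurements = (
        {name: band for name, band in ds.measurements.items()}
        if ds is not None
        else {}
    )
    print(measurements)  # {} — the comprehension is never evaluated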
1 change: 1 addition & 0 deletions cubedash/_stac_legacy.py
@@ -1,6 +1,7 @@
 """
 Legacy redirects to maintain old stac api URLs
 """
+
 import json

 import flask
6 changes: 3 additions & 3 deletions cubedash/_utils.py
@@ -726,9 +726,9 @@ def get_property_priority(ordered_properties: List, keyval):
         for type_, source_dataset_doc in ordered_metadata["lineage"][
             "source_datasets"
         ].items():
-            ordered_metadata["lineage"]["source_datasets"][
-                type_
-            ] = prepare_document_formatting(source_dataset_doc)
+            ordered_metadata["lineage"]["source_datasets"][type_] = (
+                prepare_document_formatting(source_dataset_doc)
+            )

     # Products have an embedded metadata doc (subset of dataset metadata)
     if "metadata" in ordered_metadata:
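Here the rule covers assignments whose target is a subscript: rather than splitting inside the square brackets on the left, the new style parenthesises the right-hand side. A sketch with a stand-in for the real helper (shown split for illustration; the formatter only splits when the line actually overflows):

    def prepare_document_formatting(doc: dict) -> dict:
        # Stand-in: order the document keys deterministically.
        return dict(sorted(doc.items()))

    source_datasets = {"level1": {"b": 2, "a": 1}}
    for type_, source_dataset_doc in source_datasets.items():
        source_datasets[type_] = (
            prepare_document_formatting(source_dataset_doc)
        )
    print(source_datasets)  # {'level1': {'a': 1, 'b': 2}}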
1 change: 1 addition & 0 deletions cubedash/gunicorn_config.py
@@ -1,5 +1,6 @@
 """Gunicorn config for Prometheus internal metrics
 """
+
 import os


8 changes: 5 additions & 3 deletions cubedash/logs.py
@@ -47,9 +47,11 @@ def lenient_json_dump(obj, *args, **kwargs):
         structlog.processors.StackInfoRenderer(),
         structlog.processors.format_exc_info,
         # Coloured output if to terminal, otherwise json
-        BytesConsoleRenderer()
-        if not write_as_json
-        else structlog.processors.JSONRenderer(serializer=lenient_json_dump),
+        (
+            BytesConsoleRenderer()
+            if not write_as_json
+            else structlog.processors.JSONRenderer(serializer=lenient_json_dump)
+        ),
     ]

     hide_logging_levels = {
1 change: 1 addition & 0 deletions cubedash/run.py
@@ -7,6 +7,7 @@
     as DATACUBE_ENVIRONMENT)

 """
+
 from textwrap import dedent

 import click
32 changes: 20 additions & 12 deletions cubedash/summary/_stores.py
@@ -1347,9 +1347,11 @@ def search_items(
                 dataset_id=r.id,
                 bbox=_box2d_to_bbox(r.bbox) if r.bbox else None,
                 product_name=self.index.products.get(r.dataset_type_ref).name,
-                geometry=_get_shape(r.geometry, self._get_srid_name(r.geometry.srid))
-                if r.geometry is not None
-                else None,
+                geometry=(
+                    _get_shape(r.geometry, self._get_srid_name(r.geometry.srid))
+                    if r.geometry is not None
+                    else None
+                ),
                 region_code=r.region_code,
                 creation_time=r.creation_time,
                 center_time=r.center_time,
@@ -1814,16 +1816,22 @@ def _summary_from_row(res, product_name, grouping_timezone=default_timezone):
         region_dataset_counts=region_dataset_counts,
         timeline_period=res["timeline_period"],
         # : Range
-        time_range=Range(
-            res["time_earliest"].astimezone(grouping_timezone)
-            if res["time_earliest"]
-            else res["time_earliest"],
-            res["time_latest"].astimezone(grouping_timezone)
-            if res["time_latest"]
-            else res["time_latest"],
-        )
-        if res["time_earliest"]
-        else None,
+        time_range=(
+            Range(
+                (
+                    res["time_earliest"].astimezone(grouping_timezone)
+                    if res["time_earliest"]
+                    else res["time_earliest"]
+                ),
+                (
+                    res["time_latest"].astimezone(grouping_timezone)
+                    if res["time_latest"]
+                    else res["time_latest"]
+                ),
+            )
+            if res["time_earliest"]
+            else None
+        ),
         # shapely.geometry.base.BaseGeometry
         footprint_geometry=(
             None
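The `time_range` rewrite is the most deeply nested case: each `Range` argument and each branch of the outer conditional now sits in its own parentheses, but the logic is untouched. For reference, a hypothetical helper expressing the same logic imperatively (not part of the PR; the namedtuple stands in for the real Range type):

    from collections import namedtuple
    from datetime import datetime, timezone

    Range = namedtuple("Range", ["begin", "end"])

    def time_range_of(earliest, latest, tz):
        # Mirrors the expression above: None when there is no earliest time,
        # otherwise a Range with each present endpoint localised.
        if not earliest:
            return None
        return Range(
            earliest.astimezone(tz),
            latest.astimezone(tz) if latest else latest,
        )

    print(time_range_of(None, None, timezone.utc))  # None
    print(time_range_of(datetime.now(timezone.utc), None, timezone.utc))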
1 change: 1 addition & 0 deletions cubedash/summary/show.py
@@ -5,6 +5,7 @@
 Useful for testing Explorer-generated summaries from
 scripts and the command-line.
 """
+
 import sys
 import time
 from textwrap import dedent
1 change: 1 addition & 0 deletions integration_tests/dumpdatasets.py
@@ -1,6 +1,7 @@
 """
 Util script to dump datasets from a datacube for use as test data.
 """
+
 import gzip
 import random
 from datetime import datetime
1 change: 1 addition & 0 deletions integration_tests/test_arb_crs.py
@@ -2,6 +2,7 @@
 Test utility method for creating valid EPSG code based CRS from
 possible WKT String
 """
+
 from pyproj import CRS

 from cubedash._utils import infer_crs
1 change: 1 addition & 0 deletions integration_tests/test_dataset_maturity.py
@@ -3,6 +3,7 @@
 - 4 datasets have maturity level: interim
 - 16 datasets have maturity level: final
 """
+
 from pathlib import Path

 import pytest
1 change: 1 addition & 0 deletions integration_tests/test_filter_geom.py
@@ -1,6 +1,7 @@
 """
 Unit test for re-cursive geometry filtering
 """
+
 import json
 from pathlib import Path

1 change: 1 addition & 0 deletions integration_tests/test_page_loads.py
@@ -1,6 +1,7 @@
 """
 Tests that load pages and check the contained text.
 """
+
 import json
 from datetime import datetime
 from io import StringIO
1 change: 1 addition & 0 deletions integration_tests/test_raw_yaml.py
@@ -4,6 +4,7 @@
 - odc-product.yaml (cli command: datacube product)
 - odc-metadata.yaml (cli command: datacube dataset)
 """
+
 import tempfile

 import datacube.scripts.cli_app
1 change: 1 addition & 0 deletions integration_tests/test_resolved_uri.py
@@ -1,6 +1,7 @@
 """
 Unit test for various app.config["CUBEDASH_DATA_S3_REGION"]
 """
+
 import pytest
 from flask import Flask, current_app

1 change: 1 addition & 0 deletions integration_tests/test_s2_l2a_footprint.py
@@ -1,6 +1,7 @@
 """
 Tests that load pages and check the contained text.
 """
+
 from datetime import datetime
 from pathlib import Path

1 change: 1 addition & 0 deletions integration_tests/test_summarise_data.py
@@ -3,6 +3,7 @@

 And then check their statistics match expected.
 """
+
 from datetime import datetime, timedelta
 from uuid import UUID

1 change: 1 addition & 0 deletions integration_tests/test_utc_tst.py
@@ -1,6 +1,7 @@
 """
 Tests that load pages and check the contained text.
 """
+
 from datetime import datetime
 from pathlib import Path

1 change: 1 addition & 0 deletions integration_tests/test_wagl_data.py
@@ -1,6 +1,7 @@
 """
 Tests that load pages and check the contained text.
 """
+
 from datetime import datetime

 import pytest