Replace shed with ruff
Based on issues in #581
jeremyh committed Apr 8, 2024
1 parent fae3121 commit ed70628
Showing 23 changed files with 134 additions and 97 deletions.
29 changes: 6 additions & 23 deletions .pre-commit-config.yaml
@@ -1,28 +1,11 @@
 ---
 repos:
-  # Normalise all Python code. (Black + isort + pyupgrade + autoflake)
-  - repo: https://github.com/Zac-HD/shed
-    rev: 2024.3.1
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.3.5
     hooks:
-      - id: shed
-  # Python Linting
-  - repo: https://github.com/pycqa/flake8
-    rev: 7.0.0
-    hooks:
-      - id: flake8
-        exclude: ^docs/
-        additional_dependencies:
-          - flake8-bugbear  # Lint-checks too opinionated for flake8 proper
-          - flake8-builtins  # Don't allow built-in names like list
-          - flake8-coding  # Only UTF-8
-          - flake8-debugger  # Don't commit debugger calls
-          - flake8-executable  # Check shebangs and executable permissions
-          - flake8-logging-format  # Use log arguments, not string format
-          - flake8-pep3101  # Don't use old string % formatting
-          - flake8-pytest-style  # Avoid common pytest mistakes
-          - flake8-pytest  # Use plain assert, not unittest assertions
-          - flake8-rst-docstrings  # docstring should be valid ReST
-          - pep8-naming  # Follow pep8 naming rules (eg. function names lowercase)
+      - id: ruff
+        args: [--fix, --show-fixes, --output-format, grouped]
+      - id: ruff-format
   # Lint Python snippets embedded in Markdown (using flake8)
   - repo: https://github.com/johnfraney/flake8-markdown
     rev: v0.5.0
@@ -41,7 +24,7 @@ repos:
         args: ['-c', '.yamllint']
   # Common pre-commit checks
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.5.0
+    rev: v4.6.0
     hooks:
       - id: check-added-large-files  # We don't want huge files. (Cut down test data!)
         args: ['--maxkb=2000']
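Note: shed bundled Black, isort, pyupgrade and autoflake (per the deleted comment above), while the deleted flake8 plugin list carried the lint rules; ruff and ruff-format together reimplement most of both. The rough ruff equivalents of the removed plugins are the B (bugbear), A (builtins), T10 (debugger), EXE (executable), G (logging-format), PT (pytest-style) and N (pep8-naming) rule families; selecting them happens in the project's ruff configuration, which is not part of this diff, and a few plugins such as flake8-coding and flake8-rst-docstrings appear to have no direct ruff counterpart and are simply dropped. As a hypothetical illustration (not code from this repo), a snippet that several of those families would flag under either toolchain:

    import logging

    def process(list):                 # A002: argument name shadows the `list` builtin
        logging.info("got %s" % list)  # G002: logging call uses a %-formatted string
        breakpoint()                   # T100: debugger invocation left in committed code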
3 changes: 2 additions & 1 deletion cubedash/_audit.py
@@ -8,7 +8,8 @@
 from datacube.model import Range
 from flask import Blueprint, Response, redirect, url_for
 
-from . import _model, _utils as utils
+from . import _model
+from . import _utils as utils
 
 _LOG = logging.getLogger(__name__)
 bp = Blueprint(
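Note: the recurring import rewrites in this commit (here and in _dataset.py, _filters.py, _stac.py, generate.py, and the summary modules) match isort's default combine_as_imports = false behaviour, which ruff reproduces in its isort-compatible I rules: a name imported with an "as" alias is forced onto its own from-import line. The same convention applied to stdlib imports, purely as an illustration:

    from os import path            # unaliased names may share an import line
    from os import getcwd as cwd   # an "as" alias gets a line of its own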
3 changes: 2 additions & 1 deletion cubedash/_dataset.py
@@ -4,7 +4,8 @@
 import flask
 from flask import Blueprint, abort, url_for
 
-from . import _model, _utils as utils
+from . import _model
+from . import _utils as utils
 
 _LOG = logging.getLogger(__name__)
 bp = Blueprint(
7 changes: 3 additions & 4 deletions cubedash/_filters.py
@@ -18,7 +18,8 @@
 from orjson import orjson
 from shapely.geometry import MultiPolygon
 
-from . import _model, _utils, _utils as utils
+from . import _model, _utils
+from . import _utils as utils
 
 # How far to step the number when the user hits up/down.
 NUMERIC_STEP_SIZE = {
@@ -173,9 +174,7 @@ def _format_albers_area(shape: MultiPolygon):
 @bp.app_template_filter("query_value")
 def _format_query_value(val):
     if isinstance(val, Range):
-        return "{} to {}".format(
-            _format_query_value(val.begin), _format_query_value(val.end)
-        )
+        return f"{_format_query_value(val.begin)} to {_format_query_value(val.end)}"
     if isinstance(val, datetime):
         return _format_datetime(val)
     if val is None:
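Note: the str.format-to-f-string rewrite above is the fix pyupgrade performed under shed, and that ruff provides as rule UP032. A self-contained illustration:

    begin, end = "2019-01-01", "2019-02-01"

    # Flagged by UP032: a positional str.format call with simple arguments...
    old = "{} to {}".format(begin, end)

    # ...is rewritten to the equivalent f-string:
    new = f"{begin} to {end}"

    assert old == new == "2019-01-01 to 2019-02-01"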
8 changes: 6 additions & 2 deletions cubedash/_pages.py
@@ -28,6 +28,8 @@
     _product,
     _stac,
     _stac_legacy,
+)
+from . import (
     _utils as utils,
 )
 from ._utils import as_rich_json, get_sorted_product_summaries
Expand Down Expand Up @@ -160,7 +162,7 @@ def legacy_search_page(
@app.route("/products/<product_name>/datasets/<int:year>")
@app.route("/products/<product_name>/datasets/<int:year>/<int:month>")
@app.route("/products/<product_name>/datasets/<int:year>/<int:month>/<int:day>")
def search_page( # noqa: C901
def search_page(
product_name: str = None, year: int = None, month: int = None, day: int = None
):
(
@@ -427,7 +429,9 @@ def timeline_page(product_name: str):
     return redirect(url_for("product_page", product_name=product_name))
 
 
-def _load_product(product_name, year, month, day) -> Tuple[
+def _load_product(
+    product_name, year, month, day
+) -> Tuple[
     DatasetType,
     ProductSummary,
     TimePeriodOverview,
3 changes: 2 additions & 1 deletion cubedash/_product.py
@@ -3,7 +3,8 @@
 
 from flask import Blueprint, Response, abort, redirect, url_for
 
-from cubedash import _model, _utils, _utils as utils
+from cubedash import _model, _utils
+from cubedash import _utils as utils
 
 _LOG = logging.getLogger(__name__)
 bp = Blueprint("product", __name__)
10 changes: 6 additions & 4 deletions cubedash/_stac.py
@@ -1,7 +1,8 @@
 import json
 import logging
 import uuid
-from datetime import datetime, time as dt_time, timedelta
+from datetime import datetime, timedelta
+from datetime import time as dt_time
 from functools import partial
 from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union
 
@@ -10,7 +11,8 @@
 from datacube.model import Dataset, Range
 from datacube.utils import DocReader, parse_time
 from dateutil.tz import tz
-from eodatasets3 import serialise, stac as eo3stac
+from eodatasets3 import serialise
+from eodatasets3 import stac as eo3stac
 from eodatasets3.model import AccessoryDoc, DatasetDoc, MeasurementDoc, ProductDoc
 from eodatasets3.properties import Eo3Dict
 from eodatasets3.utils import is_doc_eo3
@@ -288,7 +290,7 @@ def field_path_row(key, value):
     elif key == "sat_row":
         kind = "landsat:wrs_row"
     else:
-        raise ValueError(f"Path/row kind {repr(key)}")
+        raise ValueError(f"Path/row kind {key!r}")
 
     # If there's only one value in the range, return it.
     if isinstance(value, Range):
@@ -623,7 +625,7 @@ def _handle_fields_extension(
             keys=inc.split("."),
             # get corresponding field from item
             # disallow default to avoid None values being inserted
-            func=lambda _: _get_property(inc, item, no_default=True),  # noqa: B023
+            func=lambda _: _get_property(inc, item, no_default=True),
         )
 
     for exc in fields.get("exclude") or []:
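Note: the {repr(key)} to {key!r} change above (and the matching one in generate.py below) corresponds to ruff's RUF010 fix, which replaces an explicit str()/repr() call inside an f-string with the equivalent conversion flag. The output is identical:

    key = "sat_path"

    msg_old = f"Path/row kind {repr(key)}"  # RUF010 flags the explicit repr() call
    msg_new = f"Path/row kind {key!r}"      # the !r conversion flag is equivalent

    assert msg_old == msg_new == "Path/row kind 'sat_path'"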
4 changes: 2 additions & 2 deletions cubedash/_utils.py
@@ -18,7 +18,7 @@
 import datacube.drivers.postgres._schema
 import eodatasets3.serialise
 import flask
-import numpy
+import numpy as np
 import shapely.geometry
 import shapely.validation
 import structlog
@@ -578,7 +578,7 @@ def as_yaml(*o, content_type="text/yaml", downloadable_filename_prefix: str = None):
 
     # TODO: remove the two functions once eo-datasets fix is released
     def _represent_float(self, value):
-        text = numpy.format_float_scientific(value)
+        text = np.format_float_scientific(value)
         return self.represent_scalar("tag:yaml.org,2002:float", text)
 
     def dumps_yaml(yml, stream, *docs) -> None:
10 changes: 6 additions & 4 deletions cubedash/generate.py
@@ -52,6 +52,7 @@
 """
 
 import collections
+import multiprocessing
 import re
@@ -64,7 +65,8 @@
 
 import click
 import structlog
-from click import secho as click_secho, style
+from click import secho as click_secho
+from click import style
 from datacube.config import LocalConfig
 from datacube.index import Index, index_connect
 from datacube.model import DatasetType
@@ -75,7 +77,7 @@
     GenerateResult,
     SummaryStore,
     TimePeriodOverview,
-    UnsupportedWKTProductCRS,
+    UnsupportedWKTProductCRSError,
 )
 from cubedash.summary._stores import DEFAULT_EPSG
 from cubedash.summary._summarise import DEFAULT_TIMEZONE
@@ -132,7 +134,7 @@ def print_status(product_name=None, year=None, month=None, day=None, summary=None):
             minimum_change_scan_window=settings.minimum_change_scan_window,
         )
         return product_name, result, updated_summary
-    except UnsupportedWKTProductCRS as e:
+    except UnsupportedWKTProductCRSError as e:
         log.warning("product.unsupported", reason=e.reason)
         return product_name, GenerateResult.UNSUPPORTED, None
     except Exception:
@@ -230,7 +232,7 @@ def _load_products(index: Index, product_names) -> List[DatasetType]:
             p.name for p in index.products.get_all()
         )
         raise click.BadParameter(
-            f"Unknown product {repr(product_name)}.\n\n"
+            f"Unknown product {product_name!r}.\n\n"
             f"Possibilities:\n\t{possible_product_names}",
             param_hint="product_names",
         )
3 changes: 1 addition & 2 deletions cubedash/gunicorn_config.py
@@ -1,5 +1,4 @@
-"""Gunicorn config for Prometheus internal metrics
-"""
+"""Gunicorn config for Prometheus internal metrics"""
 
 import os
 
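Note: joining the two-line docstring matches pydocstyle's D200 check ("one-line docstring should fit on one line"), one of the rule families ruff can enforce with an auto-fix, assuming the project selects it. In miniature:

    def before():
        """Gunicorn config for Prometheus internal metrics
        """

    def after():
        """Gunicorn config for Prometheus internal metrics"""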
4 changes: 2 additions & 2 deletions cubedash/summary/__init__.py
@@ -1,4 +1,4 @@
-from ._extents import RegionInfo, UnsupportedWKTProductCRS
+from ._extents import RegionInfo, UnsupportedWKTProductCRSError
 from ._model import TimePeriodOverview
 from ._stores import (
     DatasetItem,
@@ -18,5 +18,5 @@
     "RegionInfo",
     "SummaryStore",
     "TimePeriodOverview",
-    "UnsupportedWKTProductCRS",
+    "UnsupportedWKTProductCRSError",
 )
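Note: the UnsupportedWKTProductCRS to UnsupportedWKTProductCRSError rename here (and SchemaNotRefreshable to SchemaNotRefreshableError in _schema.py below) follows pep8-naming's N818 check, enforced by ruff: classes raised as exceptions should be named with an Error suffix. In miniature:

    class SchemaNotRefreshable(Exception): ...       # flagged by N818
    class SchemaNotRefreshableError(Exception): ...  # satisfies N818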
17 changes: 10 additions & 7 deletions cubedash/summary/_extents.py
@@ -40,6 +40,8 @@
 
 from cubedash._utils import (
     ODC_DATASET as DATASET,
+)
+from cubedash._utils import (
     alchemy_engine,
     expects_eo3_metadata_type,
     infer_crs,
@@ -54,7 +56,7 @@
 ]
 
 
-class UnsupportedWKTProductCRS(NotImplementedError):
+class UnsupportedWKTProductCRSError(NotImplementedError):
     """We can't, within Postgis, support arbitrary WKT CRSes at the moment."""
 
     def __init__(self, reason: str) -> None:
@@ -83,7 +85,7 @@ def get_dataset_extent_alchemy_expression(md: MetadataType, default_crs: str = None):
         [
             # If we have geometry, use it as the polygon.
             (
-                doc[["geometry"]] != None,
+                doc[["geometry"]].is_not(None),
                 func.ST_GeomFromGeoJSON(doc[["geometry"]], type_=Geometry),
             )
         ],
@@ -99,7 +101,7 @@ def get_dataset_extent_alchemy_expression(md: MetadataType, default_crs: str = None):
         [
             # If we have valid_data offset, use it as the polygon.
             (
-                doc[valid_data_offset] != None,
+                doc[valid_data_offset].is_not(None),
                 func.ST_GeomFromGeoJSON(doc[valid_data_offset], type_=Geometry),
             )
         ],
@@ -168,7 +170,7 @@ def get_dataset_srid_alchemy_expression(md: MetadataType, default_crs: str = None):
         # HACK: Change default CRS with inference
         inferred_crs = infer_crs(default_crs)
         if inferred_crs is None:
-            raise UnsupportedWKTProductCRS(
+            raise UnsupportedWKTProductCRSError(
                 f"WKT Product CRSes are not currently well supported, and "
                 f"we can't infer this product's one. "
                 f"(Ideally use an auth-name format for CRS, such as 'EPSG:1234') "
@@ -319,7 +321,8 @@ def refresh_spatial_extents(
         "spatial_deletion_full_scan",
     )
     changed += engine.execute(
-        DATASET_SPATIAL.delete().where(
+        DATASET_SPATIAL.delete()
+        .where(
             DATASET_SPATIAL.c.dataset_type_ref == product.id,
         )
         # Where it doesn't exist in the ODC dataset table.
@@ -926,7 +929,7 @@ def get_sample_dataset(*product_names: str, index: Index = None) -> Iterable[Dict]:
                 DATASET.c.dataset_type_ref
                 == bindparam("product_ref", product.id, type_=SmallInteger)
             )
-            .where(DATASET.c.archived == None)
+            .where(DATASET.c.archived.is_(None))
             .limit(1)
         )
         .fetchone()
@@ -968,7 +971,7 @@ def get_mapped_crses(*product_names: str, index: Index = None) -> Iterable[Dict]:
             ]
         )
         .where(DATASET.c.dataset_type_ref == product.id)
-        .where(DATASET.c.archived == None)
+        .where(DATASET.c.archived.is_(None))
         .limit(1)
     )
     .fetchone()
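Note: the `== None` to `.is_(None)` and `!= None` to `.is_not(None)` changes in this file silence the E711 "comparison to None" check without altering behaviour: SQLAlchemy overloads ==/!= on column expressions to emit IS NULL / IS NOT NULL, but a linter reading the Python source cannot see that overload. A minimal sketch (assumes SQLAlchemy 1.4+, where is_not() exists; the table is illustrative, not this project's schema):

    import sqlalchemy as sa

    metadata = sa.MetaData()
    dataset = sa.Table("dataset", metadata, sa.Column("archived", sa.DateTime))

    # Both forms compile to "... WHERE dataset.archived IS NULL":
    implicit = sa.select(dataset).where(dataset.c.archived == None)  # noqa: E711
    explicit = sa.select(dataset).where(dataset.c.archived.is_(None))

    assert str(implicit) == str(explicit)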
8 changes: 5 additions & 3 deletions cubedash/summary/_schema.py
@@ -12,7 +12,6 @@
     Column,
     Date,
     DateTime,
-    Enum as SqlEnum,
     ForeignKey,
     Index,
     Integer,
@@ -26,6 +25,9 @@
     func,
     select,
 )
+from sqlalchemy import (
+    Enum as SqlEnum,
+)
 from sqlalchemy.dialects import postgresql as postgres
 from sqlalchemy.engine import Engine
 from sqlalchemy.exc import ProgrammingError
@@ -278,7 +280,7 @@ def is_compatible_generate_schema(engine: Engine) -> bool:
     return is_latest and pg_column_exists(engine, ODC_DATASET.fullname, "updated")
 
 
-class SchemaNotRefreshable(Exception):
+class SchemaNotRefreshableError(Exception):
     """The schema is not set-up for running product refreshes"""
 
     ...
@@ -368,7 +370,7 @@ def check_or_update_odc_schema(engine: Engine):
         _utils.install_timestamp_trigger(engine)
     except ProgrammingError as e:
         # We don't have permission.
-        raise SchemaNotRefreshable(
+        raise SchemaNotRefreshableError(
             dedent(
                 """
                 Missing update triggers.
7 changes: 4 additions & 3 deletions cubedash/summary/_stores.py
@@ -25,7 +25,8 @@
 import structlog
 from cachetools.func import lru_cache, ttl_cache
 from dateutil import tz
-from geoalchemy2 import WKBElement, shape as geo_shape
+from geoalchemy2 import WKBElement
+from geoalchemy2 import shape as geo_shape
 from geoalchemy2.shape import from_shape, to_shape
 from shapely.geometry.base import BaseGeometry
 from sqlalchemy import DDL, String, and_, exists, func, literal, or_, select, union_all
@@ -713,7 +714,7 @@ def _find_product_fixed_metadata(
             ]
         )
         .select_from(ODC_DATASET)
-        .where(ODC_DATASET.c.id.in_([r for r, in dataset_samples]))
+        .where(ODC_DATASET.c.id.in_([r for (r,) in dataset_samples]))
     ).fetchall()
     assert len(result) == 1
 
@@ -1633,7 +1634,7 @@ def _mark_product_refresh_completed(
         )
         self._product.cache_clear()
 
-    @lru_cache()  # noqa: B019
+    @lru_cache()
     def _get_srid_name(self, srid: int):
         """
         Convert an internal postgres srid key to a string auth code: eg: 'EPSG:1234'
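Note: the suppression comments dropped throughout this commit (# noqa: B019 here and in _summarise.py, # noqa: B023 in _stac.py, # noqa: C901 in _pages.py) were presumably reported as unused under the ruff rule set the project now selects; ruff's RUF100 check flags stale noqa directives. B019 itself warns that functools.lru_cache on a method keeps every `self` it has seen alive via the cache. A sketch of the hazard (illustrative, not this project's code):

    import functools

    class Store:
        @functools.lru_cache()           # B019: the cache key includes `self`,
        def srid_name(self, srid: int):  # so each instance is held by a strong
            return f"EPSG:{srid}"        # reference and never garbage-collected

For a long-lived singleton such as a summary store the trade-off is usually acceptable, which may be why the original code suppressed the warning rather than removing the decorator.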
5 changes: 3 additions & 2 deletions cubedash/summary/_summarise.py
@@ -9,7 +9,8 @@
 from cachetools.func import lru_cache
 from datacube.model import Range
 from dateutil import tz
-from geoalchemy2 import Geometry, shape as geo_shape
+from geoalchemy2 import Geometry
+from geoalchemy2 import shape as geo_shape
 from sqlalchemy import and_, func, or_, select
 from sqlalchemy.dialects.postgresql import TSTZRANGE
 from sqlalchemy.sql import ColumnElement
@@ -238,7 +239,7 @@ def _where(
         )
         return begin_time, end_time, where_clause
 
-    @lru_cache()  # noqa: B019
+    @lru_cache()
     def _get_srid_name(self, srid: int):
         """
         Convert an internal postgres srid key to a string auth code: eg: 'EPSG:1234'
(Diffs for the remaining 8 changed files were not loaded in this view.)