Skip to content

Commit

Permalink
MAINT Update to ruff for linting (#69)
Browse files Browse the repository at this point in the history
  • Loading branch information
rth authored Apr 12, 2024
1 parent 8da3a87 commit fb0383c
Show file tree
Hide file tree
Showing 6 changed files with 42 additions and 71 deletions.
27 changes: 7 additions & 20 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,33 +1,20 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
rev: v4.6.0
hooks:
- id: check-yaml
- id: end-of-file-fixer
- id: trailing-whitespace


- repo: https://github.com/psf/black
rev: 23.3.0
hooks:
- id: black

- repo: https://github.com/asottile/pyupgrade
rev: v3.3.1
hooks:
- id: pyupgrade
args: ["--py38-plus"]

- repo: https://github.com/hadialqattan/pycln
rev: "v2.1.3"
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: "v0.3.7"
hooks:
- id: pycln
args: [--config=pyproject.toml]
stages: [manual]
- id: ruff
args: [--fix]
- id: ruff-format


- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.1.1
rev: v1.9.0
hooks:
- id: mypy
files: ".+.py"
Expand Down
10 changes: 3 additions & 7 deletions doc/sphinxext/github_link.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ def _linkcode_resolve(domain, info, package, url_fmt, revision):
return

class_name = info["fullname"].split(".")[0]
if type(class_name) != str:
    if not isinstance(class_name, str):
# Python 2 only
class_name = class_name.encode("utf-8")
module = __import__(info["module"], fromlist=[class_name])
Expand All @@ -60,16 +60,12 @@ def _linkcode_resolve(domain, info, package, url_fmt, revision):
if not fn:
return

fn = os.path.relpath(
fn, start=os.path.dirname(__import__(package).__file__)
)
fn = os.path.relpath(fn, start=os.path.dirname(__import__(package).__file__))
try:
lineno = inspect.getsourcelines(obj)[1]
except Exception:
lineno = ""
return url_fmt.format(
revision=revision, package=package, path=fn, lineno=lineno
)
return url_fmt.format(revision=revision, package=package, path=fn, lineno=lineno)


def make_linkcode_resolve(package, url_fmt):
Expand Down
30 changes: 9 additions & 21 deletions pgeocode.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
import urllib.request
import warnings
from io import BytesIO
from typing import Any, Tuple, List, Optional
from typing import Any, List, Optional, Tuple
from zipfile import ZipFile

import numpy as np
Expand Down Expand Up @@ -226,11 +226,9 @@ def __init__(self, country: str = "fr", unique: bool = True):
country = country.upper()
if country not in COUNTRIES_VALID:
raise ValueError(
(
"country={} is not a known country code. "
"See the README for a list of supported "
"countries"
).format(country)
f"country={country} is not a known country code. "
"See the README for a list of supported "
"countries"
)
if country == "AR":
warnings.warn(
Expand Down Expand Up @@ -259,9 +257,7 @@ def _get_data(country: str) -> Tuple[str, pd.DataFrame]:
keep_default_na=False,
)
else:
download_urls = [
val.format(country=country) for val in DOWNLOAD_URL
]
download_urls = [val.format(country=country) for val in DOWNLOAD_URL]
with _open_extract_cycle_url(download_urls, country) as fh:
data = pd.read_csv(
fh,
Expand Down Expand Up @@ -344,9 +340,7 @@ def query_postal_code(self, codes):
codes = pd.DataFrame(codes, columns=["postal_code"])

codes = self._normalize_postal_code(codes)
response = pd.merge(
codes, self._data_frame, on="postal_code", how="left"
)
response = pd.merge(codes, self._data_frame, on="postal_code", how="left")
if self.unique and single_entry:
response = response.iloc[0]
return response
Expand Down Expand Up @@ -387,9 +381,7 @@ def query_location(
return contains_matches.iloc[:top_k]

if fuzzy_threshold is not None:
fuzzy_matches = self._fuzzy_search(
name, col, threshold=fuzzy_threshold
)
fuzzy_matches = self._fuzzy_search(name, col, threshold=fuzzy_threshold)
if len(fuzzy_matches) > 0:
return fuzzy_matches.iloc[:top_k]

Expand All @@ -400,9 +392,7 @@ def _str_contains_search(self, text: str, col: str) -> pd.DataFrame:
match_mask.fillna(False, inplace=True)
return self._data[match_mask]

def _fuzzy_search(
self, text: str, col: str, threshold: float = 80
) -> pd.DataFrame:
def _fuzzy_search(self, text: str, col: str, threshold: float = 80) -> pd.DataFrame:
try:
# thefuzz is not required to install pgeocode,
# it is an optional dependency for enabling fuzzy search
Expand All @@ -413,9 +403,7 @@ def _fuzzy_search(
"It can be installed with: pip install thefuzz[speedup]"
) from err

fuzzy_scores = self._data[col].apply(
lambda x: fuzz.ratio(str(x), text)
)
fuzzy_scores = self._data[col].apply(lambda x: fuzz.ratio(str(x), text))
return self._data[fuzzy_scores >= threshold]


Expand Down
24 changes: 17 additions & 7 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,8 +1,23 @@
[build-system]
requires = ["setuptools>=42", "wheel"]

[tool.black]
line-length = 79
[tool.ruff]
target-version = "py38"

[tool.ruff.lint]
select = [
"B904", # bugbear (Within an except clause, raise exceptions with raise ... from err)
"B905", # bugbear (zip() without an explicit strict= parameter set.)
# "C9", # mccabe complexity
"E", # pycodestyles
"W", # pycodestyles
"F", # pyflakes
"I", # isort
"PLC", # pylint conventions
"PLE", # pylint errors
"UP", # pyupgrade
]
ignore = ["E402", "E501", "E731", "E741"]

[tool.mypy]
python_version = "3.8"
Expand All @@ -15,11 +30,6 @@ disallow_untyped_defs = false
disallow_incomplete_defs = true
no_implicit_optional = true

[tool.pycln]
all = true

[tool.isort]
profile = "black"

[tool.tox]
legacy_tox_ini = """
Expand Down
5 changes: 1 addition & 4 deletions setup.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import io
import os
import re

Expand All @@ -16,9 +15,7 @@ def read(*names, **kwargs):

def find_version(*file_paths):
version_file = read(*file_paths)
version_match = re.search(
r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M
)
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M)
if version_match:
return version_match.group(1)

Expand Down
17 changes: 5 additions & 12 deletions test_pgeocode.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,7 @@ def test_haversine_distance():
try:
from geopy.distance import great_circle
except ImportError:
raise pytest.skip("scikit-learn not installed")
        raise pytest.skip("geopy not installed") from None

rng = np.random.RandomState(42)

Expand Down Expand Up @@ -213,8 +213,7 @@ def test_open_extract_url(httpserver):
"download_url",
[
"https://download.geonames.org/export/zip/{country}.zip",
"https://symerio.github.io/postal-codes-data/data/"
"geonames/{country}.txt",
"https://symerio.github.io/postal-codes-data/data/" "geonames/{country}.txt",
],
ids=["geonames", "gitlab-pages"],
)
Expand All @@ -228,13 +227,9 @@ def test_cdn(temp_dir, monkeypatch, download_url):

def test_url_returns_404(httpserver, monkeypatch, temp_dir):
download_url = "/fr.gzip"
httpserver.expect_oneshot_request(download_url).respond_with_data(
"", status=404
)
httpserver.expect_oneshot_request(download_url).respond_with_data("", status=404)

monkeypatch.setattr(
pgeocode, "DOWNLOAD_URL", [httpserver.url_for(download_url)]
)
monkeypatch.setattr(pgeocode, "DOWNLOAD_URL", [httpserver.url_for(download_url)])
# Nominatim("fr")
with pytest.raises(urllib.error.HTTPError, match="HTTP Error 404"):
Nominatim("fr")
Expand All @@ -243,9 +238,7 @@ def test_url_returns_404(httpserver, monkeypatch, temp_dir):

def test_first_url_fails(httpserver, monkeypatch, temp_dir):
download_url = "/IE.txt"
httpserver.expect_oneshot_request(download_url).respond_with_data(
"", status=404
)
httpserver.expect_oneshot_request(download_url).respond_with_data("", status=404)

monkeypatch.setattr(
pgeocode,
Expand Down

0 comments on commit fb0383c

Please sign in to comment.