Switch linting/formatting to ruff, consolidate configurations to toml #177

Closed · wants to merge 11 commits
8 changes: 4 additions & 4 deletions .azure-pipelines/azure-pipelines.yml
@@ -26,11 +26,11 @@ stages:
       python .azure-pipelines/syntax-validation.py
     displayName: Syntax validation

-  # Run flake8 validation on a shallow clone
+  # Run Ruff validation on a shallow clone
   - bash: |
-      pip install --disable-pip-version-check flake8
-      python .azure-pipelines/flake8-validation.py
-    displayName: Flake8 validation
+      pip install --disable-pip-version-check ruff
+      python .azure-pipelines/lint-validation.py
+    displayName: Ruff validation

 - stage: build
   displayName: Build
.azure-pipelines/{flake8-validation.py → lint-validation.py}
@@ -3,29 +3,30 @@
 import os
 import subprocess

-# Flake8 validation
+# Ruff validation
 failures = 0
 try:
-    flake8 = subprocess.run(
+    process = subprocess.run(
         [
-            "flake8",
+            "ruff",
+            "check",
             "--exit-zero",
         ],
         capture_output=True,
         check=True,
-        encoding="latin-1",
+        encoding="utf-8",
         timeout=300,
     )
 except (subprocess.CalledProcessError, subprocess.TimeoutExpired) as e:
     print(
-        "##vso[task.logissue type=error;]flake8 validation failed with",
+        "##vso[task.logissue type=error;]Ruff validation failed with",
         str(e.__class__.__name__),
     )
     print(e.stdout)
     print(e.stderr)
-    print("##vso[task.complete result=Failed;]flake8 validation failed")
+    print("##vso[task.complete result=Failed;]Ruff validation failed")
     exit()
-for line in flake8.stdout.split("\n"):
+for line in process.stdout.split("\n"):
     if ":" not in line:
         continue
     filename, lineno, column, error = line.split(":", maxsplit=3)
@@ -38,5 +39,5 @@
     )

 if failures:
-    print(f"##vso[task.logissue type=warning]Found {failures} flake8 violation(s)")
-    print(f"##vso[task.complete result=Failed;]Found {failures} flake8 violation(s)")
+    print(f"##vso[task.logissue type=warning]Found {failures} Ruff violation(s)")
+    print(f"##vso[task.complete result=Failed;]Found {failures} Ruff violation(s)")
16 changes: 0 additions & 16 deletions .bumpversion.cfg

This file was deleted.

2 changes: 0 additions & 2 deletions .mypy.ini

This file was deleted.

39 changes: 8 additions & 31 deletions .pre-commit-config.yaml
@@ -1,7 +1,7 @@
 repos:
 # Syntax validation and some basic sanity checks
 - repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.1.0
+  rev: v4.6.0
   hooks:
   - id: check-merge-conflict
   - id: check-ast
@@ -11,40 +11,17 @@ repos:
     args: ['--maxkb=200']
   - id: check-yaml

-# Automatically sort imports
-- repo: https://github.com/PyCQA/isort
-  rev: 5.12.0
+- repo: https://github.com/charliermarsh/ruff-pre-commit
+  rev: v0.4.8
   hooks:
-  - id: isort
-    args: [
-      '-a', 'from __future__ import annotations',        # 3.7-3.11
-      '--rm', 'from __future__ import absolute_import',  # -3.0
-      '--rm', 'from __future__ import division',         # -3.0
-      '--rm', 'from __future__ import generator_stop',   # -3.7
-      '--rm', 'from __future__ import generators',       # -2.3
-      '--rm', 'from __future__ import nested_scopes',    # -2.2
-      '--rm', 'from __future__ import print_function',   # -3.0
-      '--rm', 'from __future__ import unicode_literals', # -3.0
-      '--rm', 'from __future__ import with_statement',   # -2.6
-    ]
-
-# Automatic source code formatting
-- repo: https://github.com/psf/black
-  rev: 22.3.0
-  hooks:
-  - id: black
-    args: [--safe, --quiet]
-
-# Linting
-- repo: https://github.com/PyCQA/flake8
-  rev: 4.0.1
-  hooks:
-  - id: flake8
-    additional_dependencies: ['flake8-comprehensions==3.8.0']
+  - id: ruff
+    args: [--fix, --exit-non-zero-on-fix, --show-fixes]
+  - id: ruff-format
+    types: [file]

 # Type checking
 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v0.910
+  rev: v1.10.0
   hooks:
   - id: mypy
     files: 'src/.*\.py$'
129 changes: 126 additions & 3 deletions pyproject.toml
@@ -1,10 +1,133 @@
 [build-system]
-requires = ["setuptools >= 40.6.0", "wheel"]
+requires = ["setuptools >= 61.2"]
 build-backend = "setuptools.build_meta"

-[tool.isort]
-profile = "black"
+[project]
+name = "workflows"
+version = "2.27"
+description = "Data processing in distributed environments"
+readme = "README.rst"
+classifiers = [
+    "Development Status :: 5 - Production/Stable",
+    "Intended Audience :: Developers",
+    "License :: OSI Approved :: BSD License",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.8",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Operating System :: OS Independent",
+    "Topic :: Software Development :: Libraries :: Python Modules",
+]
+requires-python = ">=3.8"
+dependencies = [
+    "bidict",
+    "pika",
+    "setuptools",
+    "stomp-py>=7",
+]
+
+
+[[project.authors]]
+name = "Diamond Light Source - Scientific Software et al."
+email = "[email protected]"
+
+[project.license]
+text = "BSD"
+
+[project.urls]
+Download = "https://github.com/DiamondLightSource/python-workflows/releases"
+Documentation = "https://github.com/DiamondLightSource/python-workflows"
+GitHub = "https://github.com/DiamondLightSource/python-workflows"
+Bug-Tracker = "https://github.com/DiamondLightSource/python-workflows/issues"
+
+[project.optional-dependencies]
+prometheus = [
+    "prometheus-client",
+]
+
+[project.entry-points."libtbx.dispatcher.script"]
+"workflows.validate_recipe" = "workflows.validate_recipe"
+
+[project.entry-points."libtbx.precommit"]
+workflows = "workflows"
+
+[project.entry-points."workflows.services"]
+SampleConsumer = "workflows.services.sample_consumer:SampleConsumer"
+SampleProducer = "workflows.services.sample_producer:SampleProducer"
+SampleTxn = "workflows.services.sample_transaction:SampleTxn"
+SampleTxnProducer = "workflows.services.sample_transaction:SampleTxnProducer"
+
+[project.entry-points."workflows.transport"]
+PikaTransport = "workflows.transport.pika_transport:PikaTransport"
+StompTransport = "workflows.transport.stomp_transport:StompTransport"
+OfflineTransport = "workflows.transport.offline_transport:OfflineTransport"
+
+[project.entry-points."zocalo.configuration.plugins"]
+pika = "workflows.util.zocalo.configuration:Pika"
+stomp = "workflows.util.zocalo.configuration:Stomp"
+transport = "workflows.util.zocalo.configuration:DefaultTransport"
+
+[project.scripts]
+"workflows.validate_recipe" = "workflows.recipe.validate:main"
+
+[tool.setuptools]
+zip-safe = false
+include-package-data = true
+license-files = [
+    "LICENSE",
+]
+
+[tool.setuptools.package-dir]
+"" = "src"
+
+[tool.setuptools.packages.find]
+where = [
+    "src",
+]
+namespaces = false

 [tool.pytest.ini_options]
 addopts = "-ra"
 required_plugins = "pytest-timeout"
+
+[tool.ruff.lint]
+select = ["E", "F", "W", "C4", "I"]
+unfixable = ["F841"]
+# E501 line too long (handled by formatter)
+# E741 Ambiguous variable name (We have lots of meaningful I, L, l)
+ignore = ["E501", "E741"]
+
+[tool.ruff.lint.per-file-ignores]
+# "**/__init__.py" = ["F401"]
+
+[tool.ruff.lint.isort]
+known-first-party = ["workflows"]
+required-imports = ["from __future__ import annotations"]
+
+[tool.mypy]
+no_implicit_optional = true
+mypy_path = "src/"
+
+[tool.bumpversion]
+current_version = "2.27"
+tag = true
+commit = true
+
+parse = "(?P<major>\\d+)\\.(?P<minor>\\d+)"
+serialize = ["{major}.{minor}"]
+search = "{current_version}"
+replace = "{new_version}"
+
+# regex = false
+# ignore_missing_version = false
+# ignore_missing_files = false
+# sign_tags = false
+# tag_name = "v{new_version}"
+# tag_message = "Bump version: {current_version} → {new_version}"
+# allow_dirty = false
+# message = "Bump version: {current_version} → {new_version}"
+# commit_args = ""
+
+[[tool.bumpversion.files]]
+filename = "src/workflows/__init__.py"
82 changes: 0 additions & 82 deletions setup.cfg

This file was deleted.

9 changes: 0 additions & 9 deletions setup.py

This file was deleted.

2 changes: 1 addition & 1 deletion src/workflows/contrib/start_service.py
@@ -153,7 +153,7 @@ def on_transport_preparation_hook():
             transport_object = true_transport_factory()
             return self.on_transport_preparation(transport_object) or transport_object

-        transport_factory = on_transport_preparation_hook
+        transport_factory = on_transport_preparation_hook  # type: ignore

         # When service name is specified, check if service exists or can be derived
         if options.service and options.service not in known_services:
3 changes: 2 additions & 1 deletion src/workflows/frontend/__init__.py
@@ -368,7 +368,8 @@ def get_status(self):

     def exponential_backoff(self):
         """A function that keeps waiting longer and longer the more rapidly it is called.
-        It can be used to increasingly slow down service starts when they keep failing."""
+        It can be used to increasingly slow down service starts when they keep failing.
+        """
         last_service_switch = self._service_starttime
         if not last_service_switch:
             return
2 changes: 1 addition & 1 deletion src/workflows/recipe/recipe.py
@@ -334,7 +334,7 @@ def translate(x):
             new_recipe[idx]["error"] = translate(new_recipe[idx]["error"])

         # Join 'start' nodes
-        for (idx, param) in other.recipe["start"]:
+        for idx, param in other.recipe["start"]:
             new_recipe["start"].append((translate(idx), param))

         # Join 'error' nodes