MAINT: address Ruff linting issues
redeboer committed Feb 12, 2024
1 parent 0ff3114 commit b871700
Showing 15 changed files with 48 additions and 26 deletions.
4 changes: 2 additions & 2 deletions docs/_extend_docstrings.py
@@ -98,10 +98,10 @@ def _append_to_docstring(class_type: Union[Callable, Type], appended_text: str)
 _IMAGE_DIR = "_images"


-def _graphviz_to_image(
+def _graphviz_to_image(  # noqa: PLR0917
     dot: str,
     options: Optional[Dict[str, str]] = None,
-    format: str = "svg",  # noqa: A002
+    format: str = "svg",
     indent: int = 0,
     caption: str = "",
     label: str = "",
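Note on PLR0917: this is Ruff's too-many-positional-arguments rule from the pylint set; the commit silences it with `# noqa` rather than changing the signature. A minimal sketch of the usual alternative — making trailing parameters keyword-only so they no longer count as positional — with a hypothetical name and signature, not the project's code:

from typing import Dict, Optional


def graphviz_to_image(  # hypothetical signature, for illustration only
    dot: str,
    *,  # parameters after the bare star are keyword-only, so PLR0917 no longer fires
    options: Optional[Dict[str, str]] = None,
    image_format: str = "svg",  # renamed so it does not shadow the builtin (A002)
    indent: int = 0,
    caption: str = "",
    label: str = "",
) -> str:
    """Sketch only: pretend to render a Graphviz string to an image."""
    return f"{dot!r} -> {image_format} (indent={indent}, caption={caption!r}, label={label!r}, options={options})"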
2 changes: 1 addition & 1 deletion docs/conf.py
@@ -13,7 +13,7 @@
 )

 sys.path.insert(0, os.path.abspath("."))
-from _extend_docstrings import extend_docstrings
+from _extend_docstrings import extend_docstrings  # noqa: PLC2701


 def create_constraints_inventory() -> None:
14 changes: 13 additions & 1 deletion docs/usage.ipynb
@@ -197,7 +197,7 @@
    },
    "outputs": [],
    "source": [
-    "subset = pdg.filter(lambda p: p.spin in [2.5, 3.5, 4.5] and p.name.startswith(\"N\"))\n",
+    "subset = pdg.filter(lambda p: p.spin in {2.5, 3.5, 4.5} and p.name.startswith(\"N\"))\n",
     "subset.names"
    ]
   },
@@ -280,6 +280,18 @@
    "display_name": "Python 3 (ipykernel)",
    "language": "python",
    "name": "python3"
   },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.18"
+  }
  },
  "nbformat": 4,
8 changes: 8 additions & 0 deletions pyproject.toml
@@ -248,6 +248,10 @@ split-on-trailing-comma = false
     "S113",
     "T201",
 ]
+"docs/_extend_docstrings.py" = [
+    "A002",
+    "PLC0415",
+]
 "docs/conf.py" = [
     "A001",
     "D100",
@@ -257,6 +261,7 @@ split-on-trailing-comma = false
     "D",
     "INP001",
     "PGH001",
+    "PLC2701",
     "PLR0913",
     "PLR2004",
     "PLR6301",
@@ -268,6 +273,9 @@ split-on-trailing-comma = false
 [tool.ruff.lint.pydocstyle]
 convention = "google"

+[tool.ruff.lint.pylint]
+allow-dunder-method-names = ["_repr_pretty_"]
+
 [tool.tomlsort]
 all = false
 ignore_case = true
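Note on the new `[tool.ruff.lint.pylint]` table: `allow-dunder-method-names` is the setting behind Ruff's bad-dunder-method-name check (PLW3201), which would otherwise flag `_repr_pretty_` as a misspelled dunder. `_repr_pretty_` is the hook that IPython's pretty printer calls when rendering an object in a notebook. A minimal sketch of the pattern, with a hypothetical class name:

class ReactionSummary:  # hypothetical class, for illustration only
    def __init__(self, name: str) -> None:
        self.name = name

    def _repr_pretty_(self, p, cycle: bool) -> None:
        # IPython passes a pretty printer `p`; `cycle` guards against recursive display.
        p.text("ReactionSummary(...)" if cycle else f"ReactionSummary({self.name!r})")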
4 changes: 2 additions & 2 deletions src/qrules/__init__.py
@@ -56,7 +56,7 @@
 from .transition import EdgeSettings, ProblemSet, ReactionInfo, StateTransitionManager


-def check_reaction_violations(  # noqa: C901
+def check_reaction_violations(  # noqa: C901, PLR0917
     initial_state: Union[StateDefinition, Sequence[StateDefinition]],
     final_state: Sequence[StateDefinition],
     mass_conservation_factor: Optional[float] = 3.0,
@@ -252,7 +252,7 @@ def check_edge_qn_conservation() -> Set[FrozenSet[str]]:
     return violations


-def generate_transitions(
+def generate_transitions(  # noqa: PLR0917
     initial_state: Union[StateDefinition, Sequence[StateDefinition]],
     final_state: Sequence[StateDefinition],
     allowed_intermediate_particles: Optional[List[str]] = None,
2 changes: 1 addition & 1 deletion src/qrules/combinatorics.py
@@ -38,7 +38,7 @@
 """A `.Transition` with only initial and final state information."""


-class _KinematicRepresentation:
+class _KinematicRepresentation:  # noqa: PLW1641
     def __init__(
         self,
         final_state: Optional[Union[List[List[str]], List[str]]] = None,
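Note on PLW1641 (eq-without-hash): the rule fires when a class defines `__eq__` without `__hash__`, because Python then sets `__hash__` to `None` and instances become unhashable. `_KinematicRepresentation` defines its own `__eq__`, so the commit opts for a `# noqa`, presumably because hashability is not needed here. A sketch of what the rule asks for in the general case, with a hypothetical class:

class KinematicKey:  # hypothetical class, for illustration only
    def __init__(self, final_state: tuple) -> None:
        self.final_state = final_state

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, KinematicKey):
            return NotImplemented
        return self.final_state == other.final_state

    def __hash__(self) -> int:
        # Defining __hash__ next to __eq__ is exactly what PLW1641 checks for.
        return hash(self.final_state)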
4 changes: 2 additions & 2 deletions src/qrules/io/__init__.py
@@ -124,7 +124,7 @@ def load(filename: str) -> object:
         if file_extension == "json":
             definition = json.load(stream)
             return fromdict(definition)
-        if file_extension in ["yaml", "yml"]:
+        if file_extension in {"yaml", "yml"}:
             definition = yaml.load(stream, Loader=yaml.SafeLoader)
             return fromdict(definition)
         msg = f'No loader defined for file type "{file_extension}"'
@@ -148,7 +148,7 @@ def write(instance: object, filename: str) -> None:
         if file_extension == "json":
             json.dump(asdict(instance), stream, indent=2, cls=JSONSetEncoder)
             return
-        if file_extension in ["yaml", "yml"]:
+        if file_extension in {"yaml", "yml"}:
             yaml.dump(
                 asdict(instance),
                 stream,
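Note on the `in [...]` → `in {...}` edits in this file (and in the notebook and test below): they match Ruff's literal-membership rule (PLR6201), which prefers a set literal for membership tests against constants — it reads as a membership check and is an O(1) lookup. For example:

file_extension = "yml"

if file_extension in ["yaml", "yml"]:  # list literal: what PLR6201 flags
    print("matched via list")
if file_extension in {"yaml", "yml"}:  # set literal: preferred form
    print("matched via set")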
2 changes: 1 addition & 1 deletion src/qrules/io/_dict.py
@@ -106,7 +106,7 @@ def build_topology(definition: dict) -> Topology:


 def validate_particle_collection(instance: dict) -> None:
-    import jsonschema
+    import jsonschema  # noqa: PLC0415

     jsonschema.validate(instance=instance, schema=__SCHEMA_PARTICLES)

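Note on PLC0415 (import-outside-top-level): the rule flags imports placed inside a function body. The function-local imports kept in this commit look deliberate — deferring an import until it is actually needed — so they are suppressed with `# noqa` instead of being moved to module level. A sketch of the pattern with a hypothetical helper:

def validate_definition(instance: dict, schema: dict) -> None:  # hypothetical helper
    # Imported lazily, only when validation actually runs:
    import jsonschema  # noqa: PLC0415

    jsonschema.validate(instance=instance, schema=schema)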
14 changes: 7 additions & 7 deletions src/qrules/particle.py
@@ -54,7 +54,7 @@ def _to_float(value: SupportsFloat) -> float:

 @total_ordering
 @frozen(eq=False, hash=True, order=False)
-class Spin:
+class Spin:  # noqa: PLW1641
     """Safe, immutable data container for spin **with projection**."""

     magnitude: float = field(converter=_to_float)
@@ -242,7 +242,7 @@ def _get_name_root(name: str) -> str:
 ParticleWithSpin = Tuple[Particle, float]


-class ParticleCollection(abc.MutableSet):
+class ParticleCollection(abc.MutableSet):  # noqa: PLW1641
     """Searchable collection of immutable `.Particle` instances."""

     def __init__(self, particles: Optional[Iterable[Particle]] = None) -> None:
@@ -359,13 +359,13 @@ def find(self, search_term: Union[int, str]) -> Particle:
         """Search for a particle by either name (`str`) or PID (`int`)."""
         if isinstance(search_term, str):
             particle_name = search_term
-            return self.__getitem__(particle_name)
+            return self[particle_name]
         if isinstance(search_term, int):
             if search_term not in self.__pid_to_name:
                 msg = f"No particle with PID {search_term}"
                 raise KeyError(msg)
             particle_name = self.__pid_to_name[search_term]
-            return self.__getitem__(particle_name)
+            return self[particle_name]
         msg = f"Cannot search for a search term of type {type(search_term)}"
         raise NotImplementedError(msg)

@@ -402,7 +402,7 @@ def names(self) -> List[str]:
         return [p.name for p in sorted(self)]


-def create_particle(
+def create_particle(  # noqa: PLR0917
     template_particle: Particle,
     name: Optional[str] = None,
     latex: Optional[str] = None,
@@ -504,7 +504,7 @@ def load_pdg() -> ParticleCollection:
     PDG info is imported from the `scikit-hep/particle
     <https://github.com/scikit-hep/particle>`_ package.
     """
-    from particle import Particle as PdgDatabase
+    from particle import Particle as PdgDatabase  # noqa: PLC0415

     all_pdg_particles = PdgDatabase.findall(
         lambda item: item.charge is not None
@@ -658,7 +658,7 @@ def __filter_quark_content(pdg_particle: "PdgDatabase") -> str:


 def __create_parity(parity_enum: "enums.Parity") -> Optional[Parity]:
-    from particle.particle import enums
+    from particle.particle import enums  # noqa: PLC0415

     if parity_enum is None or parity_enum == enums.Parity.u:
         return None
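Note on the `self.__getitem__(...)` → `self[...]` edits above: they correspond to Ruff's unnecessary-dunder-call rule (PLC2801); subscription syntax dispatches to the same `__getitem__` method and reads more naturally. For example:

pids = {"gamma": 22, "pi0": 111}

pid = pids.__getitem__("gamma")  # explicit dunder call: what PLC2801 flags
pid = pids["gamma"]              # same lookup through the subscription syntax
print(pid)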
4 changes: 2 additions & 2 deletions src/qrules/quantum_numbers.py
@@ -25,14 +25,14 @@ def _check_plus_minus(_: Any, __: attrs.Attribute, value: Any) -> None:
             f" {type(value).__name__}"
         )
         raise TypeError(msg)
-    if value not in [-1, +1]:
+    if value not in {-1, +1}:
         msg = f"Parity can only be +1 or -1, not {value}"
         raise ValueError(msg)


 @total_ordering
 @frozen(eq=False, hash=True, order=False, repr=False)
-class Parity:
+class Parity:  # noqa: PLW1641
     value: int = field(validator=[instance_of(int), _check_plus_minus])

     def __eq__(self, other: object) -> bool:
2 changes: 1 addition & 1 deletion src/qrules/settings.py
@@ -113,7 +113,7 @@ def from_str(description: str) -> "InteractionType":
 ]


-def create_interaction_settings(
+def create_interaction_settings(  # noqa: PLR0917
     formalism: str,
     particle_db: ParticleCollection,
     nbody_topology: bool = False,
4 changes: 3 additions & 1 deletion src/qrules/solving.py
@@ -549,7 +549,9 @@ def __clear(self) -> None:
         self.__problem = Problem(BacktrackingSolver(True))
         self.__scoresheet = Scoresheet()

-    def __initialize_constraints(self, problem_set: QNProblemSet) -> None:
+    def __initialize_constraints(  # noqa: PLR0914
+        self, problem_set: QNProblemSet
+    ) -> None:
         """Initialize all of the constraints for this graph.

         For each interaction node a set of independent constraints/conservation laws are
4 changes: 2 additions & 2 deletions src/qrules/system_control.py
@@ -154,7 +154,7 @@ def check(
 class GammaCheck(InteractionDeterminator):
     """Conservation check for photons."""

-    def check(
+    def check(  # noqa: PLR6301
         self,
         in_states: List[ParticleWithSpin],
         out_states: List[ParticleWithSpin],
@@ -171,7 +171,7 @@ def check(
 class LeptonCheck(InteractionDeterminator):
     """Conservation check lepton numbers."""

-    def check(
+    def check(  # noqa: PLR6301
         self,
         in_states: List[ParticleWithSpin],
         out_states: List[ParticleWithSpin],
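Note on PLR6301 (no-self-use): the rule flags instance methods that never use `self`. The `check` methods above implement the `InteractionDeterminator` interface, so they stay instance methods and the rule is silenced with `# noqa`. Where no interface is involved, a `@staticmethod` removes the warning — a sketch with a hypothetical check class:

from typing import List, Tuple


class ParticleCountCheck:  # hypothetical class, for illustration only
    @staticmethod
    def check(
        in_states: List[Tuple[str, float]],
        out_states: List[Tuple[str, float]],
    ) -> bool:
        # No instance state is needed, so a staticmethod satisfies PLR6301.
        return len(in_states) <= len(out_states)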
4 changes: 2 additions & 2 deletions src/qrules/transition.py
@@ -236,7 +236,7 @@ class StateTransitionManager:
     .. seealso:: :doc:`/usage/reaction` and `.generate_transitions`
     """

-    def __init__(  # noqa: C901, PLR0912
+    def __init__(  # noqa: C901, PLR0912, PLR0917
         self,
         initial_state: Sequence[StateDefinition],
         final_state: Sequence[StateDefinition],
@@ -430,7 +430,7 @@ def create_problem_sets(self) -> Dict[float, List[ProblemSet]]:
         ]
         return _group_by_strength(problem_sets)

-    def __determine_graph_settings(  # noqa: C901
+    def __determine_graph_settings(  # noqa: C901, PLR0914
         self, topology: Topology, initial_facts: "InitialFacts"
     ) -> List[GraphSettings]:
         weak_edge_settings, _ = self.interaction_type_settings[InteractionType.WEAK]
2 changes: 1 addition & 1 deletion tests/unit/io/test_dot.py
@@ -210,7 +210,7 @@ def test_collapse_graphs(
     assert len(collapsed_graphs) == 1
     graph = next(iter(collapsed_graphs))
     edge_id = next(iter(graph.topology.intermediate_edge_ids))
-    f_resonances = pdg.filter(lambda p: p.name in ["f(0)(980)", "f(0)(1500)"])
+    f_resonances = pdg.filter(lambda p: p.name in {"f(0)(980)", "f(0)(1500)"})
     intermediate_states = graph.states[edge_id]
     assert isinstance(intermediate_states, tuple)
     assert all(isinstance(i, Particle) for i in intermediate_states)
