diff --git a/docs/_extend_docstrings.py b/docs/_extend_docstrings.py
index b03f97d5..bb8b4710 100644
--- a/docs/_extend_docstrings.py
+++ b/docs/_extend_docstrings.py
@@ -98,10 +98,10 @@ def _append_to_docstring(class_type: Union[Callable, Type], appended_text: str)
 _IMAGE_DIR = "_images"
 
 
-def _graphviz_to_image(
+def _graphviz_to_image(  # noqa: PLR0917
     dot: str,
     options: Optional[Dict[str, str]] = None,
-    format: str = "svg",  # noqa: A002
+    format: str = "svg",
     indent: int = 0,
     caption: str = "",
     label: str = "",
diff --git a/docs/conf.py b/docs/conf.py
index b70edd4c..36d7ced2 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -13,7 +13,7 @@
 )
 sys.path.insert(0, os.path.abspath("."))
 
-from _extend_docstrings import extend_docstrings
+from _extend_docstrings import extend_docstrings  # noqa: PLC2701
 
 
 def create_constraints_inventory() -> None:
diff --git a/docs/usage.ipynb b/docs/usage.ipynb
index f300d085..a85dc837 100644
--- a/docs/usage.ipynb
+++ b/docs/usage.ipynb
@@ -197,7 +197,7 @@
    },
    "outputs": [],
    "source": [
-    "subset = pdg.filter(lambda p: p.spin in [2.5, 3.5, 4.5] and p.name.startswith(\"N\"))\n",
+    "subset = pdg.filter(lambda p: p.spin in {2.5, 3.5, 4.5} and p.name.startswith(\"N\"))\n",
     "subset.names"
    ]
   },
@@ -280,6 +280,18 @@
    "display_name": "Python 3 (ipykernel)",
    "language": "python",
    "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.18"
   }
  },
 "nbformat": 4,
diff --git a/pyproject.toml b/pyproject.toml
index 3908dd7d..4428f4e1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -248,6 +248,10 @@ split-on-trailing-comma = false
     "S113",
     "T201",
 ]
+"docs/_extend_docstrings.py" = [
+    "A002",
+    "PLC0415",
+]
 "docs/conf.py" = [
     "A001",
     "D100",
@@ -257,6 +261,7 @@ split-on-trailing-comma = false
     "D",
     "INP001",
     "PGH001",
+    "PLC2701",
     "PLR0913",
     "PLR2004",
     "PLR6301",
@@ -268,6 +273,9 @@ split-on-trailing-comma = false
 [tool.ruff.lint.pydocstyle]
 convention = "google"
 
+[tool.ruff.lint.pylint]
+allow-dunder-method-names = ["_repr_pretty_"]
+
 [tool.tomlsort]
 all = false
 ignore_case = true
diff --git a/src/qrules/__init__.py b/src/qrules/__init__.py
index 94ce938f..80ee9032 100644
--- a/src/qrules/__init__.py
+++ b/src/qrules/__init__.py
@@ -56,7 +56,7 @@
 from .transition import EdgeSettings, ProblemSet, ReactionInfo, StateTransitionManager
 
 
-def check_reaction_violations(  # noqa: C901
+def check_reaction_violations(  # noqa: C901, PLR0917
     initial_state: Union[StateDefinition, Sequence[StateDefinition]],
     final_state: Sequence[StateDefinition],
     mass_conservation_factor: Optional[float] = 3.0,
@@ -252,7 +252,7 @@ def check_edge_qn_conservation() -> Set[FrozenSet[str]]:
     return violations
 
 
-def generate_transitions(
+def generate_transitions(  # noqa: PLR0917
     initial_state: Union[StateDefinition, Sequence[StateDefinition]],
     final_state: Sequence[StateDefinition],
     allowed_intermediate_particles: Optional[List[str]] = None,
diff --git a/src/qrules/combinatorics.py b/src/qrules/combinatorics.py
index a3d78dc9..936cd314 100644
--- a/src/qrules/combinatorics.py
+++ b/src/qrules/combinatorics.py
@@ -38,7 +38,7 @@
 """A `.Transition` with only initial and final state information."""
 
 
-class _KinematicRepresentation:
+class _KinematicRepresentation:  # noqa: PLW1641
     def __init__(
         self,
         final_state: Optional[Union[List[List[str]], List[str]]] = None,
diff --git a/src/qrules/io/__init__.py b/src/qrules/io/__init__.py
index 0cf8118f..b8018591 100644
--- a/src/qrules/io/__init__.py
+++ b/src/qrules/io/__init__.py
@@ -124,7 +124,7 @@ def load(filename: str) -> object:
         if file_extension == "json":
             definition = json.load(stream)
             return fromdict(definition)
-        if file_extension in ["yaml", "yml"]:
+        if file_extension in {"yaml", "yml"}:
             definition = yaml.load(stream, Loader=yaml.SafeLoader)
             return fromdict(definition)
     msg = f'No loader defined for file type "{file_extension}"'
@@ -148,7 +148,7 @@ def write(instance: object, filename: str) -> None:
         if file_extension == "json":
             json.dump(asdict(instance), stream, indent=2, cls=JSONSetEncoder)
             return
-        if file_extension in ["yaml", "yml"]:
+        if file_extension in {"yaml", "yml"}:
             yaml.dump(
                 asdict(instance),
                 stream,
diff --git a/src/qrules/io/_dict.py b/src/qrules/io/_dict.py
index a887ccfd..015a0236 100644
--- a/src/qrules/io/_dict.py
+++ b/src/qrules/io/_dict.py
@@ -106,7 +106,7 @@ def build_topology(definition: dict) -> Topology:
 
 
 def validate_particle_collection(instance: dict) -> None:
-    import jsonschema
+    import jsonschema  # noqa: PLC0415
 
     jsonschema.validate(instance=instance, schema=__SCHEMA_PARTICLES)
 
diff --git a/src/qrules/particle.py b/src/qrules/particle.py
index b976f2c2..202c71b5 100644
--- a/src/qrules/particle.py
+++ b/src/qrules/particle.py
@@ -54,7 +54,7 @@ def _to_float(value: SupportsFloat) -> float:
 
 @total_ordering
 @frozen(eq=False, hash=True, order=False)
-class Spin:
+class Spin:  # noqa: PLW1641
     """Safe, immutable data container for spin **with projection**."""
 
     magnitude: float = field(converter=_to_float)
@@ -242,7 +242,7 @@ def _get_name_root(name: str) -> str:
 ParticleWithSpin = Tuple[Particle, float]
 
 
-class ParticleCollection(abc.MutableSet):
+class ParticleCollection(abc.MutableSet):  # noqa: PLW1641
     """Searchable collection of immutable `.Particle` instances."""
 
     def __init__(self, particles: Optional[Iterable[Particle]] = None) -> None:
@@ -359,13 +359,13 @@ def find(self, search_term: Union[int, str]) -> Particle:
         """Search for a particle by either name (`str`) or PID (`int`)."""
         if isinstance(search_term, str):
             particle_name = search_term
-            return self.__getitem__(particle_name)
+            return self[particle_name]
         if isinstance(search_term, int):
             if search_term not in self.__pid_to_name:
                 msg = f"No particle with PID {search_term}"
                 raise KeyError(msg)
             particle_name = self.__pid_to_name[search_term]
-            return self.__getitem__(particle_name)
+            return self[particle_name]
         msg = f"Cannot search for a search term of type {type(search_term)}"
         raise NotImplementedError(msg)
 
@@ -402,7 +402,7 @@ def names(self) -> List[str]:
         return [p.name for p in sorted(self)]
 
 
-def create_particle(
+def create_particle(  # noqa: PLR0917
     template_particle: Particle,
     name: Optional[str] = None,
     latex: Optional[str] = None,
@@ -504,7 +504,7 @@ def load_pdg() -> ParticleCollection:
     PDG info is imported from the `scikit-hep/particle
     <https://github.com/scikit-hep/particle>`_ package.
     """
-    from particle import Particle as PdgDatabase
+    from particle import Particle as PdgDatabase  # noqa: PLC0415
 
     all_pdg_particles = PdgDatabase.findall(
         lambda item: item.charge is not None
@@ -658,7 +658,7 @@ def __filter_quark_content(pdg_particle: "PdgDatabase") -> str:
 
 
 def __create_parity(parity_enum: "enums.Parity") -> Optional[Parity]:
-    from particle.particle import enums
+    from particle.particle import enums  # noqa: PLC0415
 
     if parity_enum is None or parity_enum == enums.Parity.u:
         return None
diff --git a/src/qrules/quantum_numbers.py b/src/qrules/quantum_numbers.py
index dbce48c9..1666b94c 100644
--- a/src/qrules/quantum_numbers.py
+++ b/src/qrules/quantum_numbers.py
@@ -25,14 +25,14 @@ def _check_plus_minus(_: Any, __: attrs.Attribute, value: Any) -> None:
             f" {type(value).__name__}"
         )
         raise TypeError(msg)
-    if value not in [-1, +1]:
+    if value not in {-1, +1}:
         msg = f"Parity can only be +1 or -1, not {value}"
         raise ValueError(msg)
 
 
 @total_ordering
 @frozen(eq=False, hash=True, order=False, repr=False)
-class Parity:
+class Parity:  # noqa: PLW1641
     value: int = field(validator=[instance_of(int), _check_plus_minus])
 
     def __eq__(self, other: object) -> bool:
diff --git a/src/qrules/settings.py b/src/qrules/settings.py
index 2ea977f8..05cc64f8 100644
--- a/src/qrules/settings.py
+++ b/src/qrules/settings.py
@@ -113,7 +113,7 @@ def from_str(description: str) -> "InteractionType":
 ]
 
 
-def create_interaction_settings(
+def create_interaction_settings(  # noqa: PLR0917
     formalism: str,
     particle_db: ParticleCollection,
     nbody_topology: bool = False,
diff --git a/src/qrules/solving.py b/src/qrules/solving.py
index 03d49101..00835a0b 100644
--- a/src/qrules/solving.py
+++ b/src/qrules/solving.py
@@ -549,7 +549,9 @@ def __clear(self) -> None:
         self.__problem = Problem(BacktrackingSolver(True))
         self.__scoresheet = Scoresheet()
 
-    def __initialize_constraints(self, problem_set: QNProblemSet) -> None:
+    def __initialize_constraints(  # noqa: PLR0914
+        self, problem_set: QNProblemSet
+    ) -> None:
         """Initialize all of the constraints for this graph.
 
         For each interaction node a set of independent constraints/conservation laws are
diff --git a/src/qrules/system_control.py b/src/qrules/system_control.py
index aca90bdf..7b11b551 100644
--- a/src/qrules/system_control.py
+++ b/src/qrules/system_control.py
@@ -154,7 +154,7 @@ def check(
 class GammaCheck(InteractionDeterminator):
     """Conservation check for photons."""
 
-    def check(
+    def check(  # noqa: PLR6301
         self,
         in_states: List[ParticleWithSpin],
         out_states: List[ParticleWithSpin],
@@ -171,7 +171,7 @@ def check(
 class LeptonCheck(InteractionDeterminator):
     """Conservation check lepton numbers."""
 
-    def check(
+    def check(  # noqa: PLR6301
         self,
         in_states: List[ParticleWithSpin],
         out_states: List[ParticleWithSpin],
diff --git a/src/qrules/transition.py b/src/qrules/transition.py
index 0c881707..4f0d161c 100644
--- a/src/qrules/transition.py
+++ b/src/qrules/transition.py
@@ -236,7 +236,7 @@ class StateTransitionManager:
     .. seealso:: :doc:`/usage/reaction` and `.generate_transitions`
     """
 
-    def __init__(  # noqa: C901, PLR0912
+    def __init__(  # noqa: C901, PLR0912, PLR0917
         self,
         initial_state: Sequence[StateDefinition],
         final_state: Sequence[StateDefinition],
@@ -430,7 +430,7 @@ def create_problem_sets(self) -> Dict[float, List[ProblemSet]]:
         ]
         return _group_by_strength(problem_sets)
 
-    def __determine_graph_settings(  # noqa: C901
+    def __determine_graph_settings(  # noqa: C901, PLR0914
         self, topology: Topology, initial_facts: "InitialFacts"
     ) -> List[GraphSettings]:
         weak_edge_settings, _ = self.interaction_type_settings[InteractionType.WEAK]
diff --git a/tests/unit/io/test_dot.py b/tests/unit/io/test_dot.py
index c124436d..b65e6ce1 100644
--- a/tests/unit/io/test_dot.py
+++ b/tests/unit/io/test_dot.py
@@ -210,7 +210,7 @@ def test_collapse_graphs(
     assert len(collapsed_graphs) == 1
     graph = next(iter(collapsed_graphs))
     edge_id = next(iter(graph.topology.intermediate_edge_ids))
-    f_resonances = pdg.filter(lambda p: p.name in ["f(0)(980)", "f(0)(1500)"])
+    f_resonances = pdg.filter(lambda p: p.name in {"f(0)(980)", "f(0)(1500)"})
     intermediate_states = graph.states[edge_id]
     assert isinstance(intermediate_states, tuple)
     assert all(isinstance(i, Particle) for i in intermediate_states)