
Commit

format and clip
rsokl committed Sep 7, 2024
1 parent e3120fa commit c50b3df
Showing 15 changed files with 43 additions and 31 deletions.
8 changes: 4 additions & 4 deletions .pre-commit-config.yaml
@@ -1,13 +1,13 @@
 repos:
   - repo: https://github.com/psf/black
-    rev: 22.10.0
+    rev: 24.8.0
     hooks:
       - id: black
   - repo: https://github.com/PyCQA/flake8
-    rev: 6.0.0
+    rev: 7.0.0
     hooks:
       - id: flake8
-  - repo: https://github.com/pre-commit/mirrors-isort
-    rev: v5.10.1
+  - repo: https://github.com/pycqa/isort
+    rev: 5.13.2
     hooks:
       - id: isort
2 changes: 1 addition & 1 deletion setup.cfg
@@ -52,7 +52,7 @@ extras = rnn
 
 
 [testenv:min_numpy]
-deps = numpy==1.20
+deps = numpy==1.24
     {[testenv]deps}
 basepython = python3.9
 commands = pytest -n auto --hypothesis-profile ci \
1 change: 1 addition & 0 deletions src/mygrad/_utils/graph_tracking.py
@@ -1,6 +1,7 @@
 """
 Provides user interface for suspending computational graph tracking and back-propagation
 """
+
 from functools import wraps
 from typing import Any, Callable, TypeVar, cast
 
1 change: 1 addition & 0 deletions src/mygrad/_utils/lock_management.py
@@ -1,6 +1,7 @@
 """
 Provides utilities responsible for locking/releasing array writeability.
 """
+
 import os
 from collections import Counter, defaultdict
 from typing import (
3 changes: 1 addition & 2 deletions src/mygrad/math/misc/funcs.py
@@ -4,7 +4,6 @@
 from numpy import ndarray
 
 import mygrad as mg
-from mygrad._numpy_version import NP_IS_V2
 from mygrad.math.misc.ops import MatMul
 from mygrad.tensor_base import Tensor, implements_numpy_override
 from mygrad.typing import ArrayLike, DTypeLikeReals, Mask
@@ -502,7 +501,7 @@ def clip(
     Tensor([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
     >>> mg.clip(a, [3, 4, 1, 1, 1, 4, 4, 4, 4, 4], 8)
     Tensor([3, 4, 2, 3, 4, 5, 6, 7, 8, 8])"""
-    if not NP_IS_V2 and a_min is None and a_max is None:
+    if np.__version__ < "2.1.0" and a_min is None and a_max is None:  # pragma: no cover
         raise ValueError("`a_min` and `a_max` cannot both be set to `None`")
 
     if a_min is not None:
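
A note on the new guard: it implies that `np.clip` tolerates both bounds being `None` on NumPy >= 2.1 (matching the array-API-style `clip` signature), so the explicit `ValueError` is only needed on older releases. One caveat is that lexicographic comparison of `np.__version__` can misorder releases, e.g. "2.10.0" sorts before "2.9.0". A minimal sketch of a sturdier check, assuming the third-party `packaging` library is acceptable (it is not used by this commit):

import numpy as np
from packaging.version import Version

# Hypothetical alternative to the string comparison above -- not part of
# this commit. Version() compares release components numerically, so it
# cannot misrank "2.10.0" against "2.9.0" the way str comparison can.
NP_PRE_2_1 = Version(np.__version__) < Version("2.1.0")
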
1 change: 1 addition & 0 deletions src/mygrad/operation_base.py
@@ -1,6 +1,7 @@
 """
 Defines the base class for mathematical operations capable of back-propagating
 gradients to their input tensors."""
+
 from abc import ABC, abstractmethod
 from numbers import Real
 from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union
8 changes: 5 additions & 3 deletions src/mygrad/tensor_manip/tensor_joining/ops.py
@@ -52,9 +52,11 @@ def backward_var(self, grad, index, **kwargs) -> np.ndarray:
 
         return grad[
             tuple(
-                slice(None, None, None)
-                if dim != self.axis
-                else slice(self.indices[index], self.indices[index + 1])
+                (
+                    slice(None, None, None)
+                    if dim != self.axis
+                    else slice(self.indices[index], self.indices[index + 1])
+                )
                 for dim in range(var.data.ndim)
             )
         ]
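
For context: `backward_var` here routes the upstream gradient of a concatenation back to input `index` by taking a full slice on every axis except `self.axis`, where it takes that input's own extent from the cumulative offsets in `self.indices`. A minimal NumPy sketch of that logic, with the function and argument names assumed for illustration:

import numpy as np

def concat_backward(grad, indices, index, axis):
    # indices are cumulative offsets, e.g. [0, n0, n0 + n1, ...]; the slab
    # between indices[index] and indices[index + 1] belongs to input `index`.
    sl = tuple(
        slice(indices[index], indices[index + 1]) if dim == axis else slice(None)
        for dim in range(grad.ndim)
    )
    return grad[sl]

a, b = np.ones((2, 3)), np.ones((4, 3))
grad = np.arange(18.0).reshape(6, 3)  # upstream gradient of concatenate([a, b], axis=0)
assert concat_backward(grad, [0, 2, 6], 0, axis=0).shape == (2, 3)
assert concat_backward(grad, [0, 2, 6], 1, axis=0).shape == (4, 3)
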
1 change: 1 addition & 0 deletions tests/custom_strategies/__init__.py
@@ -1,4 +1,5 @@
 """ Custom hypothesis search strategies """
+
 import math
 from collections import defaultdict
 from functools import lru_cache, partial, reduce
1 change: 1 addition & 0 deletions tests/math/binary/test_binary_funcs.py
@@ -1,4 +1,5 @@
 """ Test all binary arithmetic operations, checks for appropriate broadcast behavior"""
+
 from functools import partial
 from numbers import Number
 from typing import Any, Dict
1 change: 1 addition & 0 deletions tests/nnet/layers/test_conv.py
@@ -1,4 +1,5 @@
 """ Test conv fwd-prop and back-prop for ND convs"""
+
 from typing import Tuple
 
 import hypothesis.extra.numpy as hnp
8 changes: 5 additions & 3 deletions tests/nnet/layers/test_gru.py
@@ -39,9 +39,11 @@ def test_nonconstant_s0_raises(s0, dropout: float, out_constant: bool):
     Uz, Ur, Uh = Tensor(np.random.rand(3, C, D))
     bz, br, bh = Tensor(np.random.rand(3, D))
 
-    with does_not_raise() if (
-        out_constant or s0 is None or isinstance(s0, np.ndarray) or s0.constant
-    ) else pytest.raises(ValueError):
+    with (
+        does_not_raise()
+        if (out_constant or s0 is None or isinstance(s0, np.ndarray) or s0.constant)
+        else pytest.raises(ValueError)
+    ):
         gru(
             X,
             Uz,
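
The reflowed `with` statement is the standard conditionally-expect-an-exception idiom: enter a no-op context when the inputs should be accepted, and `pytest.raises` when they should not. A self-contained sketch of the pattern, assuming `does_not_raise` is `contextlib.nullcontext` (as in the pytest docs):

from contextlib import nullcontext as does_not_raise

import pytest

def reciprocal(x: float) -> float:
    return 1.0 / x  # raises ZeroDivisionError when x == 0

@pytest.mark.parametrize("x", [0.0, 2.0])
def test_reciprocal(x):
    # expect an error only for the invalid input
    with pytest.raises(ZeroDivisionError) if x == 0 else does_not_raise():
        reciprocal(x)
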
12 changes: 6 additions & 6 deletions tests/tensor_base/test_chainrule.py
@@ -209,9 +209,11 @@ def test_linear_graph(
     # dL/d2 = dL/d4 * d4/d3 * d3/d2 * d2/d1
     _check_grad(
         v1,
-        None
-        if (v4.constant or v3.constant or v2.constant)
-        else grad * (2 * np.exp(v2.data)) * (2 * v1.data),
+        (
+            None
+            if (v4.constant or v3.constant or v2.constant)
+            else grad * (2 * np.exp(v2.data)) * (2 * v1.data)
+        ),
     )
 
     # check the backprop clears graph & clear graph always propagates through the graph
@@ -501,9 +503,7 @@ def test_dynamic_interesting_graph(
     )
     _check_grad(v3, v3_grad)
 
-    v2_grad = (
-        None if (v5.constant or v4.constant) else grad * v3.data**2 * v1.data**2
-    )
+    v2_grad = None if (v5.constant or v4.constant) else grad * v3.data**2 * v1.data**2
     _check_grad(v2, v2_grad)
 
     v1_grad = None if v5.constant else grad * v4.data * v3.data
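
The quantity checked above is a chain of local derivatives, dL/dv1 = dL/dv4 * dv4/dv3 * dv3/dv2 * dv2/dv1, with the `None` branch covering the case where backprop is skipped because an upstream tensor is constant. The factors `2 * np.exp(v2.data)` and `2 * v1.data` are consistent with a graph such as v2 = v1**2 followed by v3 = 2*exp(v2); a small standalone MyGrad example of that bookkeeping (an assumed graph, not necessarily the test's exact one):

import mygrad as mg
import numpy as np

v1 = mg.tensor(1.5)
v2 = v1 ** 2         # dv2/dv1 = 2 * v1
v3 = 2 * mg.exp(v2)  # dv3/dv2 = 2 * exp(v2)
v3.backward()        # seeds dL/dv3 = 1 and back-propagates

assert np.isclose(v1.grad, (2 * np.exp(v2.data)) * (2 * v1.data))
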
16 changes: 9 additions & 7 deletions tests/tensor_ops/test_setitem.py
@@ -415,13 +415,15 @@ def test_setitem_bool_basic_index():
     index_strat=lambda x: hnp.arrays(shape=(2, 3), dtype=bool).map(
         lambda _x: (_x[0], _x[1])
    ),
-    value_strat=lambda o: hnp.arrays(
-        shape=broadcastable_shapes(o.shape, max_dims=o.ndim, max_side=max(o.shape)),
-        dtype=float,
-        elements=st.floats(-10.0, 10.0),
-    )
-    if o.shape and o.size
-    else st.floats(-10.0, 10.0).map(np.asarray),
+    value_strat=lambda o: (
+        hnp.arrays(
+            shape=broadcastable_shapes(o.shape, max_dims=o.ndim, max_side=max(o.shape)),
+            dtype=float,
+            elements=st.floats(-10.0, 10.0),
+        )
+        if o.shape and o.size
+        else st.floats(-10.0, 10.0).map(np.asarray)
+    ),
 )
 def test_setitem_bool_axes_index():
     """index consists of boolean arrays specified for each axis"""
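
Unrolled, the reparenthesized `value_strat` reads: when the target array has a nontrivial, nonempty shape, draw a broadcast-compatible float array; otherwise fall back to a 0-D float array. An equivalent sketch as a named function, assuming `broadcastable_shapes` is `hypothesis.extra.numpy`'s:

import hypothesis.extra.numpy as hnp
import hypothesis.strategies as st
import numpy as np

def value_strat(o: np.ndarray) -> st.SearchStrategy:
    if o.shape and o.size:  # o is neither 0-D nor empty
        return hnp.arrays(
            shape=hnp.broadcastable_shapes(
                o.shape, max_dims=o.ndim, max_side=max(o.shape)
            ),
            dtype=float,
            elements=st.floats(-10.0, 10.0),
        )
    return st.floats(-10.0, 10.0).map(np.asarray)
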
1 change: 0 additions & 1 deletion tests/utils/test_utils.py
@@ -1,6 +1,5 @@
 """ Test `numerical_gradient`, `numerical_derivative`, and `broadcast_check`"""
 
-
 import hypothesis.extra.numpy as hnp
 import hypothesis.strategies as st
 import numpy as np
10 changes: 6 additions & 4 deletions tests/wrappers/uber.py
@@ -302,10 +302,12 @@ def wrapper(shapes: hnp.BroadcastableShapes, constant, data: st.DataObject):
         if self.permit_0d_array_as_float:
             # potentially cast a 0D array as a float
             arrs = tuple(
-                arr.item()
-                if arr.ndim == 0
-                and data.draw(st.booleans(), label=f"arr-{n} to float")
-                else arr
+                (
+                    arr.item()
+                    if arr.ndim == 0
+                    and data.draw(st.booleans(), label=f"arr-{n} to float")
+                    else arr
+                )
                 for n, arr in enumerate(arrs)
             )
 
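
The hugged conditional here may swap any 0-D array in `arrs` for its Python scalar, with the choice itself drawn from Hypothesis so both code paths get exercised. Minus the draw, the cast is plain `ndarray.item()`; a tiny sketch:

import numpy as np

arrs = (np.asarray(3.0), np.ones((2, 2)))
# deterministic variant of the wrapper's cast: every 0-D array becomes a scalar
arrs = tuple(arr.item() if arr.ndim == 0 else arr for arr in arrs)
print(type(arrs[0]))  # <class 'float'>
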
