
Pyre fixes for common.py [3/n] (pytorch#1424)
Summary:

Rewriting from D64259572 after BE week

Reviewed By: cyrjano

Differential Revision: D65011997
csauper authored and facebook-github-bot committed Oct 28, 2024
1 parent 2c1c281 commit 8e23bb5
Showing 1 changed file with 13 additions and 24 deletions.
captum/_utils/common.py: 13 additions, 24 deletions
@@ -278,22 +278,13 @@ def _format_additional_forward_args(additional_forward_args: None) -> None: ...
 
 @overload
 def _format_additional_forward_args(
-    # pyre-fixme[24]: Generic type `tuple` expects at least 1 type parameter.
-    additional_forward_args: Union[Tensor, Tuple]
-    # pyre-fixme[24]: Generic type `tuple` expects at least 1 type parameter.
-) -> Tuple: ...
+    additional_forward_args: Union[object, Tuple[object, ...]]
+) -> Tuple[object, ...]: ...
 
 
-@overload
-def _format_additional_forward_args(  # type: ignore
-    # pyre-fixme[2]: Parameter annotation cannot be `Any`.
-    additional_forward_args: Any,
-    # pyre-fixme[24]: Generic type `tuple` expects at least 1 type parameter.
-) -> Union[None, Tuple]: ...
-
-
-# pyre-fixme[24]: Generic type `tuple` expects at least 1 type parameter.
-def _format_additional_forward_args(additional_forward_args: Any) -> Union[None, Tuple]:
+def _format_additional_forward_args(
+    additional_forward_args: Union[object, Tuple[object, ...], None]
+) -> Union[None, Tuple[object, ...]]:
     if additional_forward_args is not None and not isinstance(
         additional_forward_args, tuple
     ):
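
For context, a minimal sketch (not part of this diff) of how the reworked signature is meant to behave for callers. It assumes captum is installed, that the private helper stays importable from captum._utils.common, and that the body below the visible context keeps wrapping non-tuple values into a one-element tuple, as the isinstance check suggests:

from captum._utils.common import _format_additional_forward_args

# None passes through unchanged (first overload).
assert _format_additional_forward_args(None) is None

# A bare object is wrapped into a one-element tuple (object -> Tuple[object, ...]).
assert _format_additional_forward_args(5) == (5,)

# An existing tuple is returned unchanged.
args = (5, "mask")
assert _format_additional_forward_args(args) == args
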
@@ -853,8 +844,7 @@ def _register_backward_hook(
     module: Module,
     # pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
     hook: Callable,
-    # pyre-fixme[2]: Parameter annotation cannot be `Any`.
-    attr_obj: Any,
+    attr_obj: Union[object, None],
 ) -> List[torch.utils.hooks.RemovableHandle]:
     grad_out: Dict[device, Tensor] = {}
 
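
The `attr_obj` change follows the same pattern as the hunk above: `Any` silently permits any operation, while `object` only allows what every Python value supports, so misuse surfaces at type-check time instead of at runtime. A small illustrative sketch with hypothetical functions, not captum code:

from typing import Any, Union


def takes_any(attr_obj: Any) -> None:
    _ = attr_obj.some_attribute  # accepted by the checker even if it always fails at runtime


def takes_object(attr_obj: Union[object, None]) -> None:
    if attr_obj is not None:
        # _ = attr_obj.some_attribute  # rejected: `object` declares no such attribute
        print(type(attr_obj).__name__)  # only operations valid for every object pass
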
@@ -864,10 +854,9 @@ def forward_hook(
         out: Union[Tensor, Tuple[Tensor, ...]],
     ) -> None:
         nonlocal grad_out
         grad_out = {}
 
-        # pyre-fixme[53]: Captured variable `grad_out` is not annotated.
         def output_tensor_hook(output_grad: Tensor) -> None:
+            nonlocal grad_out
             grad_out[output_grad.device] = output_grad
 
         if isinstance(out, tuple):
@@ -878,12 +867,12 @@ def output_tensor_hook(output_grad: Tensor) -> None:
         else:
             out.register_hook(output_tensor_hook)
 
-    # pyre-fixme[3]: Return type must be annotated.
-    # pyre-fixme[2]: Parameter must be annotated.
-    def pre_hook(module, inp):
-        # pyre-fixme[53]: Captured variable `module` is not annotated.
-        # pyre-fixme[3]: Return type must be annotated.
-        def input_tensor_hook(input_grad: Tensor):
+    def pre_hook(module: Module, inp: Union[Tensor, Tuple[Tensor, ...]]) -> Tensor:
+        def input_tensor_hook(
+            input_grad: Tensor,
+        ) -> Union[None, Tensor, Tuple[Tensor, ...]]:
+            nonlocal grad_out
+
             if len(grad_out) == 0:
                 return
             hook_out = hook(module, input_grad, grad_out[input_grad.device])
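
The hook-related hunks trade pyre-fixme[53] suppressions for explicit `nonlocal` declarations on the captured `grad_out` dict. Below is a standalone sketch of that closure pattern under the same names; it is illustrative only, not the captum implementation, and the declarations are optional at runtime here (the dict is only mutated in place) but make the capture explicit:

from typing import Callable, Dict, Tuple

from torch import Tensor, device


def make_grad_hooks() -> Tuple[Callable[[Tensor], None], Callable[[Tensor], None]]:
    # Shared per-device gradient cache, annotated once in the enclosing scope.
    grad_out: Dict[device, Tensor] = {}

    def output_tensor_hook(output_grad: Tensor) -> None:
        # `nonlocal` states that this name refers to the annotated dict above,
        # rather than an implicitly captured (and, to Pyre, unannotated) variable.
        nonlocal grad_out
        grad_out[output_grad.device] = output_grad

    def input_tensor_hook(input_grad: Tensor) -> None:
        nonlocal grad_out
        if len(grad_out) == 0:
            return
        # A real hook would combine input_grad with the stored output gradient here.
        _ = grad_out[input_grad.device]

    return output_tensor_hook, input_tensor_hook
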
