Skip to content

Commit 6d1e15a

Browse files
Vivek Miglani and facebook-github-bot
authored and committed
Fix layer activation pyre fixme issues
Differential Revision: D67706972
1 parent ab11352 commit 6d1e15a

File tree

1 file changed

+1
-4
lines changed

1 file changed

+1
-4
lines changed

captum/attr/_core/layer/layer_activation.py

Lines changed: 1 addition & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -20,8 +20,7 @@ class LayerActivation(LayerAttribution):
2020

2121
def __init__(
2222
self,
23-
# pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
24-
forward_func: Callable,
23+
forward_func: Callable[..., Union[int, float, Tensor]],
2524
layer: ModuleOrModuleList,
2625
device_ids: Union[None, List[int]] = None,
2726
) -> None:
@@ -132,8 +131,6 @@ def attribute(
132131
)
133132
else:
134133
return [
135-
# pyre-fixme[6]: For 2nd argument expected `Tuple[Tensor, ...]` but
136-
# got `Tensor`.
137134
_format_output(len(single_layer_eval) > 1, single_layer_eval)
138135
for single_layer_eval in layer_eval
139136
]

0 commit comments

Comments (0)