Skip to content

Commit 973a071

Browse files
generatedunixname89002005307016facebook-github-bot
authored and committed
Add type error suppressions for upcoming upgrade (#1345)
Summary: Pull Request resolved: #1345 Reviewed By: MaggieMoss Differential Revision: D62550750 fbshipit-source-id: 6e4fa7f92bb16cb5ab80739d0dd72895a7f4f40d
1 parent ecfa677 commit 973a071

File tree

6 files changed

+22
-0
lines changed

6 files changed

+22
-0
lines changed

captum/attr/_core/dataloader_attr.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -369,6 +369,9 @@ def attribute(
369369
assert len(input_roles) == len(inputs), (
370370
"input_roles must have the same size as the return of the dataloader,",
371371
f"length of input_roles is {len(input_roles)} ",
372+
# pyre-fixme[6]: For 1st argument expected
373+
# `pyre_extensions.ReadOnly[Sized]` but got
374+
# `Optional[typing.Tuple[typing.Any, ...]]`.
372375
f"whereas the length of dataloader return is {len(inputs)}",
373376
)
374377

@@ -395,6 +398,9 @@ def attribute(
395398
"Baselines must have the same size as the return of the dataloader ",
396399
"that need attribution",
397400
f"length of baseline is {len(baselines)} ",
401+
# pyre-fixme[6]: For 1st argument expected
402+
# `pyre_extensions.ReadOnly[Sized]` but got
403+
# `Optional[typing.Tuple[typing.Any, ...]]`.
398404
f'whereas the length of dataloader return with role "0" is {len(inputs)}',
399405
)
400406

@@ -413,6 +419,9 @@ def attribute(
413419
"Feature mask must have the same size as the return of the dataloader ",
414420
"that need attribution",
415421
f"length of feature_mask is {len(feature_mask)} ",
422+
# pyre-fixme[6]: For 1st argument expected
423+
# `pyre_extensions.ReadOnly[Sized]` but got
424+
# `Optional[typing.Tuple[typing.Any, ...]]`.
416425
f'whereas the length of dataloader return with role "0" is {len(inputs)}',
417426
)
418427

captum/attr/_core/deep_lift.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -833,6 +833,9 @@ def attribute( # type: ignore
833833
" with more than one example but found: {}."
834834
" If baselines are provided in shape of scalars or with a single"
835835
" baseline example, `DeepLift`"
836+
# pyre-fixme[16]: Item `Callable` of `Union[(...) ->
837+
# TensorOrTupleOfTensorsGeneric, TensorOrTupleOfTensorsGeneric]` has no
838+
# attribute `__getitem__`.
836839
" approach can be used instead.".format(baselines[0])
837840
)
838841

captum/attr/_core/gradient_shap.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -275,6 +275,9 @@ def attribute(
275275
# attribute `__getitem__`.
276276
assert isinstance(baselines[0], torch.Tensor), (
277277
"Baselines distribution has to be provided in a form "
278+
# pyre-fixme[16]: Item `Callable` of `Union[(...) ->
279+
# TensorOrTupleOfTensorsGeneric, TensorOrTupleOfTensorsGeneric]` has no
280+
# attribute `__getitem__`.
278281
"of a torch.Tensor {}.".format(baselines[0])
279282
)
280283

captum/attr/_core/layer/layer_gradient_shap.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -308,6 +308,8 @@ def attribute(
308308
# TensorOrTupleOfTensorsGeneric]` has no attribute `__getitem__`.
309309
assert isinstance(baselines[0], torch.Tensor), (
310310
"Baselines distribution has to be provided in a form "
311+
# pyre-fixme[16]: Item `Callable` of `Union[(...) -> Any,
312+
# TensorOrTupleOfTensorsGeneric]` has no attribute `__getitem__`.
311313
"of a torch.Tensor {}.".format(baselines[0])
312314
)
313315

captum/attr/_utils/attribution.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -43,6 +43,7 @@ def __init__(self, forward_func: Callable) -> None:
4343
self.forward_func = forward_func
4444

4545
# pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
46+
# pyre-fixme[13]: Attribute `attribute` is never initialized.
4647
attribute: Callable
4748
r"""
4849
This method computes and returns the attribution values for each input tensor.
@@ -74,6 +75,7 @@ def __init__(self, forward_func: Callable) -> None:
7475
"""
7576

7677
# pyre-fixme[24] Generic type `Callable` expects 2 type parameters.
78+
# pyre-fixme[13]: Attribute `attribute_future` is never initialized.
7779
attribute_future: Callable
7880

7981
r"""
@@ -126,6 +128,7 @@ def has_convergence_delta(self) -> bool:
126128
return False
127129

128130
# pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
131+
# pyre-fixme[13]: Attribute `compute_convergence_delta` is never initialized.
129132
compute_convergence_delta: Callable
130133
r"""
131134
The attribution algorithms which derive `Attribution` class and provide
@@ -504,6 +507,7 @@ def __init__(
504507
InternalAttribution.__init__(self, forward_func, layer, device_ids)
505508

506509
# pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
510+
# pyre-fixme[13]: Attribute `attribute` is never initialized.
507511
attribute: Callable
508512
r"""
509513
This method computes and returns the neuron attribution values for each

captum/concept/_core/concept.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -74,6 +74,7 @@ def __init__(self, model: Module) -> None:
7474
self.model = model
7575

7676
# pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
77+
# pyre-fixme[13]: Attribute `interpret` is never initialized.
7778
interpret: Callable
7879
r"""
7980
An abstract interpret method that performs concept-based model interpretability

0 commit comments

Comments (0)