Skip to content

Commit e62b9aa

Browse files
yonigozlan and Cyrilvallez
authored and committed
Fix self.dropout_p is not defined for SamAttention/Sam2Attention (#40667)
Fix dropout_p is not defined for SamAttention/Sam2Attention
1 parent c58d7d7 commit e62b9aa

File tree

5 files changed

+5
-5
lines changed

5 files changed

+5
-5
lines changed

src/transformers/models/sam/modeling_sam.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -262,7 +262,7 @@ def forward(
262262
key,
263263
value,
264264
attention_mask=attention_similarity,
265-
dropout=0.0 if not self.training else self.dropout_p,
265+
dropout=0.0,
266266
scaling=self.scaling,
267267
is_causal=self.is_causal,
268268
**kwargs,

src/transformers/models/sam2/modeling_sam2.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -893,7 +893,7 @@ def forward(
893893
key,
894894
value,
895895
attention_mask=attention_similarity,
896-
dropout=0.0 if not self.training else self.dropout_p,
896+
dropout=0.0,
897897
scaling=self.scaling,
898898
is_causal=self.is_causal,
899899
**kwargs,

src/transformers/models/sam2/modular_sam2.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -940,7 +940,7 @@ def forward(
940940
key,
941941
value,
942942
attention_mask=attention_similarity,
943-
dropout=0.0 if not self.training else self.dropout_p,
943+
dropout=0.0,
944944
scaling=self.scaling,
945945
is_causal=self.is_causal,
946946
**kwargs,

src/transformers/models/sam2_video/modeling_sam2_video.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -477,7 +477,7 @@ def forward(
477477
key,
478478
value,
479479
attention_mask=attention_similarity,
480-
dropout=0.0 if not self.training else self.dropout_p,
480+
dropout=0.0,
481481
scaling=self.scaling,
482482
is_causal=self.is_causal,
483483
**kwargs,

src/transformers/models/sam_hq/modeling_sam_hq.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -674,7 +674,7 @@ def forward(
674674
key,
675675
value,
676676
attention_mask=attention_similarity,
677-
dropout=0.0 if not self.training else self.dropout_p,
677+
dropout=0.0,
678678
scaling=self.scaling,
679679
is_causal=self.is_causal,
680680
**kwargs,

0 commit comments

Comments
 (0)