Skip to content

Commit e0ed41e

Browse files
committed
Bug fix: in `_all_to_all_dim_exchange`, obtain the process-group world size via `torch.distributed.get_world_size(group)` instead of `funcol.get_world_size(group)`.
1 parent 4b71777 commit e0ed41e

File tree

1 file changed

+1
-1
lines changed

1 file changed

+1
-1
lines changed

src/diffusers/models/attention_dispatch.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1026,7 +1026,7 @@ def _all_to_all_single(x: torch.Tensor, group) -> torch.Tensor:
10261026
return x
10271027

10281028
def _all_to_all_dim_exchange(x: torch.Tensor, scatter_idx: int = 2, gather_idx: int = 1, group=None) -> torch.Tensor:
1029-
group_world_size = funcol.get_world_size(group)
1029+
group_world_size = torch.distributed.get_world_size(group)
10301030

10311031
if scatter_idx == 2 and gather_idx == 1:
10321032
B, S_LOCAL, H, D = x.shape

0 commit comments

Comments
 (0)