1 parent 0bdde92 commit 1d6f8e2
test/float8/test_base.py
@@ -53,7 +53,7 @@
 is_cuda_8_9 = torch.cuda.is_available() and torch.cuda.get_device_capability() >= (8, 9)

 def bitwise_identical(a: Float8Tensor, b: Float8Tensor) -> bool:
-    assert torch.all(a._data == b._data).item(), "scales are not identical"
+    assert torch.all(a._scale == b._scale).item(), "scales are not identical"
     assert torch.all(a._data == b._data).item(), "data is not identical"
     return True

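For context on why the change matters: before the fix, both assertions compared `_data`, so two tensors with identical raw data but different scales would still pass `bitwise_identical`. Below is a minimal, self-contained sketch of that failure mode. It uses a hypothetical `FakeFloat8` dataclass as a stand-in, not the real `Float8Tensor` constructor, and reimplements the fixed helper locally for illustration.

```python
import torch
from dataclasses import dataclass


@dataclass
class FakeFloat8:
    # Hypothetical stand-in for Float8Tensor, holding only the two
    # attributes the helper inspects.
    _data: torch.Tensor
    _scale: torch.Tensor


def bitwise_identical(a, b) -> bool:
    # Fixed version: first check scales, then raw data.
    assert torch.all(a._scale == b._scale).item(), "scales are not identical"
    assert torch.all(a._data == b._data).item(), "data is not identical"
    return True


data = torch.randint(0, 255, (4,), dtype=torch.uint8)
x = FakeFloat8(_data=data, _scale=torch.tensor(1.0))
y = FakeFloat8(_data=data.clone(), _scale=torch.tensor(2.0))

# Same _data, different _scale: the pre-fix helper (which compared _data
# in both assertions) would have returned True here; the fixed one raises.
try:
    bitwise_identical(x, y)
except AssertionError as e:
    print(e)  # scales are not identical
```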