1 parent b967ebd commit e7e329b
test/prototype/test_awq.py
@@ -84,7 +84,7 @@ def test_awq_loading(device, qdtype):
     assert awq_save_load_out is not None
     assert torch.allclose(awq_out, awq_save_load_out, atol = 1e-2)
 
-@pytest.mark.skipif(not TORCH_VERSION_AT_LEAST_2_3,reason="torch.uint(2-7) requires torch2.3+")
+@pytest.mark.skipif(not TORCH_VERSION_AT_LEAST_2_5,reason="requires nightly pytorch")
 @pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available")
 def test_save_weights_only():
     dataset_size = 100
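
For context, `TORCH_VERSION_AT_LEAST_2_5` is a boolean flag the test module imports to gate tests on the installed PyTorch version. Below is a minimal, hypothetical sketch of how such a gate can be built and combined with `pytest.mark.skipif`; the `packaging`-based flag definition and the example test are illustrative assumptions, not the repository's actual helper.

# Hypothetical sketch of a PyTorch version gate (assumes the `packaging` package is installed).
# Comparing against a ".dev0" pre-release also matches nightly builds such as 2.5.0.dev20240901.
import pytest
import torch
from packaging.version import parse

TORCH_VERSION_AT_LEAST_2_5 = parse(torch.__version__) >= parse("2.5.0.dev0")

@pytest.mark.skipif(not TORCH_VERSION_AT_LEAST_2_5, reason="requires nightly pytorch")
@pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA not available")
def test_version_gate_example():
    # Runs only on PyTorch >= 2.5 (including nightlies) with a CUDA device present.
    assert torch.ones(1, device="cuda").item() == 1.0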