From 74d6498cf655392223d685147bb22efcfe0876fc Mon Sep 17 00:00:00 2001
From: Rahul Tuli
Date: Wed, 27 Nov 2024 14:13:47 +0000
Subject: [PATCH] Run float8 test only if CUDA is available and device
 capability is greater than 9.0

---
 tests/test_utils/test_semi_structured_conversions.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/tests/test_utils/test_semi_structured_conversions.py b/tests/test_utils/test_semi_structured_conversions.py
index e74722fb..a477bf29 100644
--- a/tests/test_utils/test_semi_structured_conversions.py
+++ b/tests/test_utils/test_semi_structured_conversions.py
@@ -21,7 +21,12 @@
 
 
 def supported_dtypes():
-    return [torch.int8, torch.float16, torch.bfloat16, torch.float8_e4m3fn]
+    dtypes = [torch.int8, torch.float16, torch.bfloat16]
+    if torch.cuda.is_available():
+        major, minor = torch.cuda.get_device_capability()
+        if major > 9 or (major == 9 and minor > 0):
+            dtypes += [torch.float8_e4m3fn]
+    return dtypes
 
 
 def get_random_mat(M, K, dtype):
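
For reference, a minimal standalone sketch of the capability gate this patch adds to supported_dtypes(); the helper name fp8_supported is hypothetical and not part of the patched test file:

import torch


def fp8_supported() -> bool:
    # Mirror the patched check: float8_e4m3fn is only exercised when a
    # CUDA device with compute capability greater than 9.0 is present.
    if not torch.cuda.is_available():
        return False
    major, minor = torch.cuda.get_device_capability()
    return major > 9 or (major == 9 and minor > 0)


if __name__ == "__main__":
    print("float8_e4m3fn tests enabled:", fp8_supported())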