Skip to content

Commit

Permalink
[Bugfix] Fix broken CPU compressed-tensors test (vllm-project#11338)
Browse files Browse the repository at this point in the history
Signed-off-by: Isotr0py <[email protected]>
  • Loading branch information
Isotr0py authored Dec 19, 2024
1 parent cdf22af commit 276738c
Showing 1 changed file with 2 additions and 4 deletions.
6 changes: 2 additions & 4 deletions vllm/model_executor/layers/quantization/utils/w8a8_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,7 @@


def sparse_cutlass_supported() -> bool:
# sparse cutlass is not supported on ROCm
if current_platform.is_rocm():
if not current_platform.is_cuda():
return False

capability_tuple = current_platform.get_device_capability()
Expand All @@ -22,8 +21,7 @@ def sparse_cutlass_supported() -> bool:


def cutlass_fp8_supported() -> bool:
# cutlass is not supported on ROCm
if current_platform.is_rocm():
if not current_platform.is_cuda():
return False

capability_tuple = current_platform.get_device_capability()
Expand Down

0 comments on commit 276738c

Please sign in to comment.