Skip to content

Commit

Permalink
[Test][Autotuner] Skip use_cuda_graph for non cuda devices (#169)
Browse files Browse the repository at this point in the history
This commit skips the test on non-CUDA devices when it is expected to
use_cuda_graph.

Signed-off-by: Dmitrii Makarenko <[email protected]>
  • Loading branch information
Devjiu authored Oct 25, 2024
1 parent 0f6dcd7 commit 51427ed
Showing 1 changed file with 6 additions and 0 deletions.
6 changes: 6 additions & 0 deletions python/test/unit/runtime/test_autotuner.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,19 @@
import triton.language as tl
import pytest

from triton._internal_testing import is_cuda


def do_bench(kernel_call, quantiles):
    """Benchmark *kernel_call* with a minimal warmup/rep budget.

    Thin wrapper over ``triton.testing.do_bench`` that pins ``warmup=1``
    and ``rep=1`` so autotuner unit tests run fast rather than producing
    accurate timings.
    """
    bench_kwargs = {"quantiles": quantiles, "warmup": 1, "rep": 1}
    return triton.testing.do_bench(kernel_call, **bench_kwargs)


@pytest.mark.parametrize('use_cuda_graph', [False, True])
def test_kwargs(use_cuda_graph: bool, device: str):

if not is_cuda() and use_cuda_graph:
pytest.skip("Use cuda graph without cuda looks strange")

M, N = 1024, 16
src = torch.randn(M * N, device=device)
dst = torch.empty(M * N, device=device)
Expand Down

0 comments on commit 51427ed

Please sign in to comment.