From e299a59f3af40bfcaa414f17d948ab2da501706b Mon Sep 17 00:00:00 2001
From: Alex Barghi <105237337+alexbarghi-nv@users.noreply.github.com>
Date: Tue, 9 Jul 2024 15:39:04 -0400
Subject: [PATCH] [FIX] Skip Distributed Sampler Tests if PyTorch with CUDA is
 not Available (#4518)

Some nightly tests are failing because there is no CUDA-supporting version of
PyTorch available (as expected, i.e. on CUDA 11.4). Instead, the CPU version
of PyTorch gets installed, and the test crashes when attempting to set the
CUDA allocator. This PR disables those tests when only a CPU version of
PyTorch is available to prevent this from happening.

Authors:
  - Alex Barghi (https://github.com/alexbarghi-nv)

Approvers:
  - Vyas Ramasubramani (https://github.com/vyasr)
  - Rick Ratzel (https://github.com/rlratzel)

URL: https://github.com/rapidsai/cugraph/pull/4518
---
 python/cugraph/cugraph/tests/sampling/test_dist_sampler.py | 7 +++++--
 .../cugraph/cugraph/tests/sampling/test_dist_sampler_mg.py | 7 +++++--
 2 files changed, 10 insertions(+), 4 deletions(-)

diff --git a/python/cugraph/cugraph/tests/sampling/test_dist_sampler.py b/python/cugraph/cugraph/tests/sampling/test_dist_sampler.py
index 88589429e85..965f731d328 100644
--- a/python/cugraph/cugraph/tests/sampling/test_dist_sampler.py
+++ b/python/cugraph/cugraph/tests/sampling/test_dist_sampler.py
@@ -32,9 +32,12 @@
 torch = import_optional("torch")
 
 if not isinstance(torch, MissingModule):
-    from rmm.allocators.torch import rmm_torch_allocator
+    if torch.cuda.is_available():
+        from rmm.allocators.torch import rmm_torch_allocator
 
-    torch.cuda.change_current_allocator(rmm_torch_allocator)
+        torch.cuda.change_current_allocator(rmm_torch_allocator)
+    else:
+        pytest.skip("CUDA-enabled PyTorch is unavailable", allow_module_level=True)
 
 
 @pytest.fixture
diff --git a/python/cugraph/cugraph/tests/sampling/test_dist_sampler_mg.py b/python/cugraph/cugraph/tests/sampling/test_dist_sampler_mg.py
index 324811e3368..a1c32938994 100644
--- a/python/cugraph/cugraph/tests/sampling/test_dist_sampler_mg.py
+++ b/python/cugraph/cugraph/tests/sampling/test_dist_sampler_mg.py
@@ -37,9 +37,12 @@
 torch = import_optional("torch")
 
 if __name__ == "__main__" and not isinstance(torch, MissingModule):
-    from rmm.allocators.torch import rmm_torch_allocator
+    if torch.cuda.is_available():
+        from rmm.allocators.torch import rmm_torch_allocator
 
-    torch.cuda.change_current_allocator(rmm_torch_allocator)
+        torch.cuda.change_current_allocator(rmm_torch_allocator)
+    else:
+        pytest.skip("CUDA-enabled PyTorch is unavailable", allow_module_level=True)
 
 
 def karate_mg_graph(rank, world_size):
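
For reference, the guard added in both test modules follows the standard pytest module-level skip pattern. Below is a minimal, self-contained sketch of that pattern; it substitutes `pytest.importorskip` for cuGraph's `import_optional`/`MissingModule` helpers, and the test function name is illustrative only.

```python
# Minimal sketch of the module-level guard used in the patch above.
# Assumptions: standalone pytest module; pytest.importorskip stands in for
# cuGraph's import_optional/MissingModule; the test below is a placeholder.
import pytest

# Skip the entire module if PyTorch is not installed at all.
torch = pytest.importorskip("torch")

if torch.cuda.is_available():
    # Route PyTorch's CUDA allocations through RMM so tensors and cuGraph
    # draw from the same memory pool.
    from rmm.allocators.torch import rmm_torch_allocator

    torch.cuda.change_current_allocator(rmm_torch_allocator)
else:
    # A CPU-only PyTorch build cannot set a CUDA allocator; skip every test
    # in this module instead of crashing at collection time.
    pytest.skip("CUDA-enabled PyTorch is unavailable", allow_module_level=True)


def test_cuda_allocator_guard():
    # Only runs when a CUDA-enabled PyTorch build is present.
    assert torch.cuda.is_available()
```

The `allow_module_level=True` argument is what lets `pytest.skip` be called at import time rather than inside a test, so the whole file is reported as skipped instead of erroring during collection.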