From a2b5fc67935699bdb3f36619fb7bbe8b14763c9e Mon Sep 17 00:00:00 2001 From: Chaitanya Lakhchaura <90706514+ZenithFlux@users.noreply.github.com> Date: Thu, 21 Mar 2024 17:30:39 +0530 Subject: [PATCH] fix: fixed frontend test `test_torch_unfold` for paddle backend by excluding bfloat16(#28601) --- .../test_nn/test_functional/test_convolution_functions.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/ivy_tests/test_ivy/test_frontends/test_torch/test_nn/test_functional/test_convolution_functions.py b/ivy_tests/test_ivy/test_frontends/test_torch/test_nn/test_functional/test_convolution_functions.py index de8d285f5d7d3..6c69aa5b0dddd 100644 --- a/ivy_tests/test_ivy/test_frontends/test_torch/test_nn/test_functional/test_convolution_functions.py +++ b/ivy_tests/test_ivy/test_frontends/test_torch/test_nn/test_functional/test_convolution_functions.py @@ -523,6 +523,9 @@ def test_torch_unfold( backend_fw, ): dtype, vals, kernel_shape, dilations, strides, padding = dtype_vals + # TODO add bfloat16 to unsupported dtypes of the tested function + if backend_fw == "paddle": + assume("bfloat16" not in dtype[0]) helpers.test_frontend_function( input_dtypes=dtype, backend_to_test=backend_fw,