From b305cf4a51ab94c9ec2191f0adc01a2684125967 Mon Sep 17 00:00:00 2001
From: Aditya Raj <109805994+zeus2x7@users.noreply.github.com>
Date: Sat, 2 Sep 2023 00:22:56 +0530
Subject: [PATCH] added sigmoid_focal_loss to paddle frontend and refactored
 mse_loss in paddle frontend (#22585)

---
 .../frontends/paddle/nn/functional/loss.py    | 51 ++++++++++++-
 .../test_nn/test_functional/test_loss.py      | 75 +++++++++++++++++++
 2 files changed, 122 insertions(+), 4 deletions(-)

diff --git a/ivy/functional/frontends/paddle/nn/functional/loss.py b/ivy/functional/frontends/paddle/nn/functional/loss.py
index 6494559d30614..6802e97929cf4 100644
--- a/ivy/functional/frontends/paddle/nn/functional/loss.py
+++ b/ivy/functional/frontends/paddle/nn/functional/loss.py
@@ -252,16 +252,13 @@ def margin_ranking_loss(input, other, label, margin=0.0, reduction="mean", name=
     return out
 
 
-@with_supported_dtypes({"2.4.2 and below": ("float32", "float64")}, "paddle")
+@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
 @inputs_to_ivy_arrays
 def mse_loss(input, label, reduction="mean", name=None):
     reduction = _get_reduction_func(reduction)
     ret = ivy.square(input - label)
     ret = reduction(ret)
 
-    if ret.shape == ():
-        ret = ret.expand_dims()
-
     return paddle.to_tensor(ret)
 
 
@@ -298,6 +295,52 @@ def nll_loss(
     return output
 
 
+@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
+@to_ivy_arrays_and_back
+def sigmoid_focal_loss(
+    logit,
+    label,
+    normalizer=None,
+    alpha=0.25,
+    gamma=2.0,
+    reduction="sum",
+    name=None,
+):
+    if reduction not in ["sum", "mean", "none"]:
+        raise ValueError(
+            "The value of 'reduction' in sigmoid_focal_loss should be 'sum', 'mean' or"
+            f" 'none', but received {reduction}, which is not allowed."
+        )
+
+    if normalizer is not None and normalizer.ndim > 1:
+        raise ValueError(
+            "Expected zero or one dimension of normalizer in sigmoid_focal_loss but"
+            f" got {normalizer.ndim}."
+        )
+
+    if not isinstance(logit, ivy.Array):
+        logit = ivy.array(logit)
+
+    if not isinstance(label, ivy.Array):
+        label = ivy.array(label)
+
+    pred = ivy.sigmoid(logit)
+    loss = -(
+        label * alpha * ivy.pow((1 - pred), gamma) * ivy.log(pred)
+        + (1 - label) * (1 - alpha) * ivy.pow(pred, gamma) * ivy.log(1 - pred)
+    )
+
+    if normalizer is not None:
+        loss /= normalizer
+
+    if reduction == "sum":
+        return ivy.sum(loss)
+    elif reduction == "mean":
+        return ivy.mean(loss)
+
+    return loss
+
+
 @with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
 @to_ivy_arrays_and_back
 def smooth_l1_loss(
diff --git a/ivy_tests/test_ivy/test_frontends/test_paddle/test_nn/test_functional/test_loss.py b/ivy_tests/test_ivy/test_frontends/test_paddle/test_nn/test_functional/test_loss.py
index 93113b66ca481..884449aa63187 100644
--- a/ivy_tests/test_ivy/test_frontends/test_paddle/test_nn/test_functional/test_loss.py
+++ b/ivy_tests/test_ivy/test_frontends/test_paddle/test_nn/test_functional/test_loss.py
@@ -468,6 +468,81 @@ def test_paddle_nll_loss(
     )
 
 
+@handle_frontend_test(
+    fn_tree="paddle.nn.functional.sigmoid_focal_loss",
+    dtype_and_x=helpers.dtype_and_values(
+        available_dtypes=helpers.get_dtypes("float"),
+        num_arrays=1,
+        shared_dtype=False,
+        min_num_dims=1,
+        min_dim_size=1,
+    ),
+    dtype_and_normalizer=helpers.dtype_and_values(
+        available_dtypes=helpers.get_dtypes("float"),
+        num_arrays=1,
+        shared_dtype=True,
+        min_num_dims=1,
+        min_dim_size=1,
+        max_num_dims=1,
+        max_dim_size=1,
+    ),
+    dtype_and_labels=helpers.dtype_and_values(
+        available_dtypes=helpers.get_dtypes("float"),
+        num_arrays=1,
+        shared_dtype=False,
+        min_num_dims=1,
+        min_dim_size=1,
+        min_value=0,
+        max_value=1,
+    ),
+    alpha=st.floats(
+        min_value=0.0,
+        max_value=1.0,
+    ),
+    gamma=st.floats(
+        min_value=0.0,
+        max_value=5.0,
+    ),
+    reduction=st.sampled_from(["mean", "sum", "none"]),
+)
+def test_paddle_sigmoid_focal_loss(
+    dtype_and_x,
+    dtype_and_normalizer,
+    dtype_and_labels,
+    alpha,
+    gamma,
+    reduction,
+    on_device,
+    fn_tree,
+    frontend,
+    test_flags,
+    backend_fw,
+):
+    x_dtype, x = dtype_and_x
+    normalizer_dtype, normalizer = dtype_and_normalizer
+    label_dtype, labels = dtype_and_labels
+    normalizer = [norm.reshape(-1) for norm in normalizer]
+    labels = ivy.array(labels, dtype=ivy.int64)
+    helpers.test_frontend_function(
+        input_dtypes=[ivy.int64]
+        + [ivy.float64]
+        + x_dtype
+        + normalizer_dtype
+        + label_dtype,
+        backend_to_test=backend_fw,
+        frontend=frontend,
+        test_flags=test_flags,
+        fn_tree=fn_tree,
+        on_device=on_device,
+        logit=x[0],
+        label=labels[0],
+        alpha=alpha,
+        gamma=gamma,
+        normalizer=normalizer[0],
+        reduction=reduction,
+    )
+
+
 # smooth_l1_loss
 @handle_frontend_test(
     fn_tree="paddle.nn.functional.smooth_l1_loss",
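
Note (not part of the patch): a minimal usage sketch of the new frontend
function. It assumes the paddle frontend re-exports sigmoid_focal_loss under
paddle.nn.functional, as the test's fn_tree suggests, and that a numpy
backend is installed; the input values are illustrative only.

    import ivy
    import ivy.functional.frontends.paddle as paddle_frontend

    # Assumption: numpy is available as an ivy backend in this environment.
    ivy.set_backend("numpy")

    # Hypothetical inputs: pre-sigmoid scores for three samples, with
    # binary ground-truth labels.
    logit = ivy.array([0.97, 0.91, 0.03], dtype="float32")
    label = ivy.array([1.0, 0.0, 1.0], dtype="float32")

    # Defaults from the patch: alpha=0.25, gamma=2.0, reduction="sum".
    loss_sum = paddle_frontend.nn.functional.sigmoid_focal_loss(logit, label)

    # Per-element losses divided by a one-element normalizer, no reduction.
    normalizer = ivy.array([3.0], dtype="float32")
    loss_none = paddle_frontend.nn.functional.sigmoid_focal_loss(
        logit, label, normalizer=normalizer, reduction="none"
    )
    print(loss_sum, loss_none)

The pow(..., gamma) factor in the implementation down-weights examples whose
prediction already agrees with the label, which is the usual motivation for
focal loss on class-imbalanced detection tasks.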