Skip to content

Commit

Permalink
Added glu activation to Paddle frontend
Browse files Browse the repository at this point in the history
  • Loading branch information
arshPratap committed Sep 1, 2023
1 parent 0a729b5 commit 27aca1e
Show file tree
Hide file tree
Showing 2 changed files with 35 additions and 0 deletions.
7 changes: 7 additions & 0 deletions ivy/functional/frontends/paddle/nn/functional/activation.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,13 @@ def gelu(x, approximate=False, name=None):
return ivy.gelu(x, approximate=approximate)


@with_supported_dtypes({"2.4.2 and below": ("float32", "float64")}, "paddle")
@to_ivy_arrays_and_back
def glu(x, axis=-1, name=None):
    """Gated Linear Unit.

    Splits ``x`` into two equal halves along ``axis`` and returns the
    first half gated by the sigmoid of the second:
    ``first * sigmoid(second)``. The size of ``x`` along ``axis`` must be
    even for the split to succeed. ``name`` is accepted only for Paddle
    API compatibility and is unused.
    """
    first_half, gate = ivy.split(x, num_or_size_splits=2, axis=axis)
    return ivy.multiply(first_half, ivy.sigmoid(gate))


@with_supported_dtypes({"2.4.2 and below": ("float32", "float64")}, "paddle")
@to_ivy_arrays_and_back
def gumbel_softmax(x, temperature=1.0, hard=False, axis=-1, name=None):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,34 @@ def test_paddle_gelu(
)


# glu
@handle_frontend_test(
    fn_tree="paddle.nn.functional.glu",
    dtype_and_input=helpers.dtype_and_values(
        # The frontend restricts glu to float dtypes ("float32"/"float64"),
        # so sampling "valid" dtypes (ints, bools, ...) would generate
        # inputs the function rejects.
        available_dtypes=helpers.get_dtypes("float"),
        # glu splits the input in two along the (default) last axis, which
        # requires an even dimension size; pinning every dim to 2
        # guarantees the split is always valid.
        min_num_dims=1,
        min_dim_size=2,
        max_dim_size=2,
    ),
)
def test_paddle_glu(
    *,
    dtype_and_input,
    on_device,
    backend_fw,
    fn_tree,
    frontend,
    test_flags,
):
    """Compare the Paddle-frontend glu against the ground-truth framework."""
    input_dtype, x = dtype_and_input
    helpers.test_frontend_function(
        input_dtypes=input_dtype,
        backend_to_test=backend_fw,
        frontend=frontend,
        test_flags=test_flags,
        fn_tree=fn_tree,
        on_device=on_device,
        x=x[0],
    )


# gumbel_softmax
@handle_frontend_test(
fn_tree="paddle.nn.functional.gumbel_softmax",
Expand Down

0 comments on commit 27aca1e

Please sign in to comment.