Added glu activation to Paddle frontend
arshPratap committed Sep 5, 2023
1 parent 17ffca9 commit 2422948
Showing 2 changed files with 52 additions and 0 deletions.
11 changes: 11 additions & 0 deletions ivy/functional/frontends/paddle/nn/functional/activation.py
@@ -40,6 +40,17 @@ def gelu(x, approximate=False, name=None):
    return ivy.gelu(x, approximate=approximate)


@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
@to_ivy_arrays_and_back
def glu(x, axis=-1, name=None):
    size = x.shape[axis]
    ivy.utils.assertions.check_equal(
        size % 2, 0, message="axis size must be divisible by 2", as_array=False
    )
    a, b = ivy.split(x, num_or_size_splits=2, axis=axis)
    return ivy.multiply(a, ivy.sigmoid(b))


@with_supported_dtypes({"2.4.2 and below": ("float32", "float64")}, "paddle")
@to_ivy_arrays_and_back
def gumbel_softmax(x, temperature=1.0, hard=False, axis=-1, name=None):
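The new function halves the input along `axis` and gates one half with the sigmoid of the other, i.e. GLU(x) = a * sigmoid(b). Below is a minimal usage sketch (not part of the commit); it assumes a NumPy backend is installed and that `glu` is re-exported from the functional package like the neighbouring activations:

# Illustrative usage sketch -- not from the commit. Assumes `glu` is
# re-exported from ivy.functional.frontends.paddle.nn.functional.
import ivy
from ivy.functional.frontends.paddle.nn.functional import glu

ivy.set_backend("numpy")

# The split axis must have even size: a (2, 4) input is halved into
# two (2, 2) tensors a and b, and the result a * sigmoid(b) is (2, 2).
x = ivy.array([[1.0, 2.0, 3.0, 4.0],
               [5.0, 6.0, 7.0, 8.0]])
out = glu(x, axis=-1)
print(out.shape)  # (2, 2)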
41 changes: 41 additions & 0 deletions ivy_tests/test_ivy/test_frontends/test_paddle/test_nn/test_functional/test_activation.py
@@ -128,6 +128,47 @@ def test_paddle_gelu(
    )


# glu
@handle_frontend_test(
    fn_tree="paddle.nn.functional.glu",
    dtype_and_x=helpers.dtype_and_values(
        available_dtypes=helpers.get_dtypes("float"),
        large_abs_safety_factor=2,
        small_abs_safety_factor=2,
        safety_factor_scale="linear",
        min_value=-2,
        min_num_dims=1,
        min_dim_size=4,
        max_dim_size=4,
    ),
    axis=helpers.ints(min_value=-1, max_value=0),
    test_with_out=st.just(False),
)
def test_paddle_glu(
    *,
    dtype_and_x,
    axis,
    on_device,
    backend_fw,
    fn_tree,
    frontend,
    test_flags,
):
    input_dtype, x = dtype_and_x
    helpers.test_frontend_function(
        input_dtypes=input_dtype,
        backend_to_test=backend_fw,
        frontend=frontend,
        test_flags=test_flags,
        fn_tree=fn_tree,
        on_device=on_device,
        rtol=1e-01,
        atol=1e-01,
        x=x[0],
        axis=axis,
    )
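For intuition about the values the harness compares, here is a standalone NumPy reference (illustrative only, not part of the commit) that mirrors the frontend's split-then-gate logic:

# Standalone reference sketch -- illustrative only, not from the commit.
import numpy as np

def glu_reference(x: np.ndarray, axis: int = -1) -> np.ndarray:
    a, b = np.split(x, 2, axis=axis)  # halve the input along `axis`
    return a / (1.0 + np.exp(-b))     # equivalent to a * sigmoid(b)

x = np.array([[1.0, 2.0, 3.0, 4.0]])
print(glu_reference(x))  # approx. [[0.9526 1.9640]], i.e. [1*sigmoid(3), 2*sigmoid(4)]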


# gumbel_softmax
@handle_frontend_test(
    fn_tree="paddle.nn.functional.gumbel_softmax",
