Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

random_uniform #22981

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 6 additions & 2 deletions ivy/functional/backends/paddle/experimental/manipulation.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,8 +148,12 @@ def pad(
)


pad.partial_mixed_handler = lambda *args, mode="constant", constant_values=0, reflect_type="even", **kwargs: _check_paddle_pad(
mode, reflect_type, args[1], args[0].shape, constant_values, 3
# Mixed-function dispatch hook: presumably _check_paddle_pad returns True when
# the paddle-native `pad` can handle this mode/reflect_type/pad-width
# combination, and False to fall back to the compositional implementation —
# TODO confirm against _check_paddle_pad (not visible here).
# args[0] is the input array, args[1] the pad widths; the trailing 3 looks
# like a paddle-specific rank/dimension limit — verify.
pad.partial_mixed_handler = (
    lambda *args, mode="constant", constant_values=0, reflect_type="even", **kwargs: (
        _check_paddle_pad(
            mode, reflect_type, args[1], args[0].shape, constant_values, 3
        )
    )
)


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -294,8 +294,10 @@ def pad(
)


pad.partial_mixed_handler = lambda *args, mode="constant", constant_values=0, reflect_type="even", **kwargs: _check_tf_pad(
args[0].shape, args[1], mode, constant_values, reflect_type
# Mixed-function dispatch hook: presumably _check_tf_pad decides whether the
# tensorflow-native `pad` supports this mode/reflect_type/pad-width
# combination (True) or the compositional fallback should run (False) —
# TODO confirm against _check_tf_pad (not visible here).
# args[0] is the input array, args[1] the pad widths.
pad.partial_mixed_handler = (
    lambda *args, mode="constant", constant_values=0, reflect_type="even", **kwargs: (
        _check_tf_pad(args[0].shape, args[1], mode, constant_values, reflect_type)
    )
)


Expand Down
6 changes: 4 additions & 2 deletions ivy/functional/backends/torch/experimental/manipulation.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,8 +110,10 @@ def pad(
).squeeze(0)


pad.partial_mixed_handler = lambda *args, mode="constant", constant_values=0, reflect_type="even", **kwargs: _check_torch_pad(
mode, reflect_type, args[1], args[0].shape, constant_values
# Mixed-function dispatch hook: presumably _check_torch_pad decides whether
# the torch-native `pad` supports this mode/reflect_type/pad-width
# combination (True) or the compositional fallback should run (False) —
# TODO confirm against _check_torch_pad (not visible here).
# args[0] is the input array, args[1] the pad widths.
pad.partial_mixed_handler = (
    lambda *args, mode="constant", constant_values=0, reflect_type="even", **kwargs: (
        _check_torch_pad(mode, reflect_type, args[1], args[0].shape, constant_values)
    )
)


Expand Down
16 changes: 8 additions & 8 deletions ivy/functional/frontends/numpy/random/functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,14 @@ def gumbel(loc=0.0, scale=1.0, size=None):
return x


@to_ivy_arrays_and_back
@from_zero_dim_arrays_to_scalar
def laplace(loc=0.0, scale=1.0, size=None):
    """Draw samples from the Laplace distribution via inverse-CDF sampling.

    Parameters
    ----------
    loc
        Position of the distribution peak. Default 0.0.
    scale
        Exponential decay (diversity) parameter. Default 1.0.
    size
        Output shape; None yields a single sample.

    Returns
    -------
    Samples of dtype float64 with shape ``size``.
    """
    # BUG FIX: the uniform draw must cover [0, 1); `high` was 0.0, which made
    # every sample identically 0 and fed log(0) into the transform below.
    u = ivy.random_uniform(low=0.0, high=1.0, shape=size, dtype="float64")
    # Inverse CDF of the Laplace distribution applied to the uniform draw.
    u = loc - scale * ivy.sign(u - 0.5) * ivy.log(1 - 2 * ivy.abs(u - 0.5))
    return u


@to_ivy_arrays_and_back
@from_zero_dim_arrays_to_scalar
def logistic(loc=0.0, scale=1.0, size=None):
Expand Down Expand Up @@ -266,11 +274,3 @@ def weibull(a, size=None):
return 0
u = ivy.random_uniform(low=0.0, high=1.0, shape=size, dtype="float64")
return ivy.pow(-ivy.log(1 - u), 1 / a)


@to_ivy_arrays_and_back
@from_zero_dim_arrays_to_scalar
def laplace(loc=0.0, scale=1.0, size=None):
    """Draw samples from the Laplace distribution via inverse-CDF sampling.

    Parameters
    ----------
    loc
        Position of the distribution peak. Default 0.0.
    scale
        Exponential decay (diversity) parameter. Default 1.0.
    size
        Output shape; None yields a single sample.

    Returns
    -------
    Samples of dtype float64 with shape ``size``.
    """
    # BUG FIX: the uniform draw must cover [0, 1); `high` was 0.0, which made
    # every sample identically 0 and fed log(0) into the transform below.
    u = ivy.random_uniform(low=0.0, high=1.0, shape=size, dtype="float64")
    # Inverse CDF of the Laplace distribution applied to the uniform draw.
    u = loc - scale * ivy.sign(u - 0.5) * ivy.log(1 - 2 * ivy.abs(u - 0.5))
    return u
45 changes: 22 additions & 23 deletions ivy/functional/frontends/paddle/fft.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,28 @@ def fftshift(x, axes=None, name=None):
return roll


@with_supported_dtypes(
    {"2.5.1 and below": ("complex64", "complex128")},
    "paddle",
)
@to_ivy_arrays_and_back
def hfft(x, n=None, axis=-1, norm="backward", name=None):
    """FFT of a signal with Hermitian symmetry, producing a real spectrum.

    Parameters
    ----------
    x
        Input array (complex64 / complex128).
    n
        Output length along ``axis``; defaults to ``2 * (m - 1)`` where ``m``
        is the length of ``x`` along ``axis``.
    axis
        Axis over which to compute the transform. Default -1.
    norm
        Normalization mode passed through to the FFT. Default "backward".
    name
        Unused; kept for paddle API compatibility.
    """
    if n is None:
        # Hermitian-symmetric input of length m expands to 2*(m - 1) real
        # output points by default.
        n = 2 * (x.shape[axis] - 1)
    return ivy.real(ivy.fft(x, axis, n=n, norm=norm))


@with_supported_dtypes(
{"2.5.1 and below": ("complex64", "complex128")},
"paddle",
Expand Down Expand Up @@ -84,28 +106,6 @@ def ifftshift(x, axes=None, name=None):
return roll


@with_supported_dtypes(
    {"2.5.1 and below": ("complex64", "complex128")},
    "paddle",
)
@to_ivy_arrays_and_back
def hfft(x, n=None, axis=-1, norm="backward", name=None):
    """Compute the FFT of a signal that has Hermitian symmetry, resulting in a real
    spectrum.

    ``n`` defaults to ``2 * (m - 1)`` where ``m`` is the length of ``x`` along
    ``axis``; ``name`` is unused and kept only for paddle API compatibility.
    """
    # Determine the input shape and axis length
    input_shape = x.shape
    input_len = input_shape[axis]

    # Calculate n if not provided: a Hermitian-symmetric input of length m
    # expands to 2*(m - 1) real output points.
    if n is None:
        n = 2 * (input_len - 1)

    # Perform the FFT along the specified axis
    result = ivy.fft(x, axis, n=n, norm=norm)

    # Discard the (numerically negligible) imaginary part of the result.
    return ivy.real(result)


@with_supported_dtypes(
{"2.5.1 and below": ("complex64", "complex128")},
"paddle",
Expand All @@ -122,4 +122,3 @@ def irfft(x, n=None, axis=-1.0, norm="backward", name=None):
if ivy.isreal(x):
time_domain = ivy.real(time_domain)
return time_domain

9 changes: 9 additions & 0 deletions ivy/functional/frontends/paddle/tensor/creation.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,6 +144,15 @@ def ones_like(x, /, *, dtype=None, name=None):
return ivy.ones_like(x, dtype=dtype)


# ToDo: closes issue #22965
@with_supported_dtypes(
    # BUG FIX: with_supported_dtypes requires the version->dtypes mapping as
    # its first argument (cf. every other use in this file); it was being
    # called with only "paddle", which mis-invokes the decorator.
    {"2.5.1 and below": ("float32", "float64")},
    "paddle",
)
@to_ivy_arrays_and_back
def random_uniform(low, high, shape, device=None, dtype=None):
    """Return a tensor of the given ``shape`` filled with values drawn
    uniformly from the half-open interval ``[low, high)``.

    Parameters
    ----------
    low
        Lower bound of the sampling interval (inclusive).
    high
        Upper bound of the sampling interval (exclusive).
    shape
        Shape of the returned tensor.
    device
        Device on which to place the result; backend default if None.
    dtype
        Output dtype; backend default if None.
    """
    return ivy.random_uniform(
        low=low, high=high, shape=shape, device=device, dtype=dtype
    )


@to_ivy_arrays_and_back
def to_tensor(data, /, *, dtype=None, place=None, stop_gradient=True):
array = ivy.array(data, dtype=dtype, device=place)
Expand Down
2 changes: 1 addition & 1 deletion ivy/functional/frontends/torch/comparison_ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -290,7 +290,7 @@ def topk(input, k, dim=None, largest=True, sorted=True, *, out=None):


gt = greater
ne = not_equal
ge = greater_equal
le = less_equal
lt = less
ne = not_equal
2 changes: 1 addition & 1 deletion ivy_tests/test_ivy/helpers/available_frameworks.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
# A list of available backends that can be used for testing.


def _available_frameworks(path='/opt/fw/'):
def _available_frameworks(path="/opt/fw/"):
ret = []
for backend in ["numpy", "jax", "tensorflow", "torch", "paddle"]:
if find_spec(backend) is not None:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -354,6 +354,51 @@ def test_numpy_gumbel(
)


# laplace
@handle_frontend_test(
    fn_tree="numpy.random.laplace",
    input_dtypes=helpers.get_dtypes("float", full=False),
    loc=st.floats(
        min_value=0,
        exclude_min=True,
        allow_nan=False,
        allow_infinity=False,
        width=32,
    ),
    scale=st.floats(
        min_value=0,
        exclude_min=True,
        allow_nan=False,
        allow_infinity=False,
        width=32,
    ),
    size=helpers.get_shape(allow_none=True),
    test_with_out=st.just(False),
)
def test_numpy_laplace(
    input_dtypes,
    loc,
    scale,
    size,
    frontend,
    backend_fw,
    test_flags,
    fn_tree,
    on_device,
):
    # The output is random, so value comparison is disabled
    # (test_values=False); this only exercises the call end to end.
    helpers.test_frontend_function(
        input_dtypes=input_dtypes,
        backend_to_test=backend_fw,
        frontend=frontend,
        test_flags=test_flags,
        fn_tree=fn_tree,
        on_device=on_device,
        test_values=False,
        loc=loc,
        scale=scale,
        size=size,
    )


# logistic
@handle_frontend_test(
fn_tree="numpy.random.logistic",
Expand Down Expand Up @@ -1056,48 +1101,3 @@ def test_numpy_weibull(
a=a,
size=size,
)


# laplace
@handle_frontend_test(
    fn_tree="numpy.random.laplace",
    input_dtypes=helpers.get_dtypes("float", full=False),
    loc=st.floats(
        allow_nan=False,
        allow_infinity=False,
        width=32,
        min_value=0,
        exclude_min=True,
    ),
    scale=st.floats(
        allow_nan=False,
        allow_infinity=False,
        width=32,
        min_value=0,
        exclude_min=True,
    ),
    size=helpers.get_shape(allow_none=True),
    test_with_out=st.just(False),
)
def test_numpy_laplace(
    input_dtypes,
    size,
    frontend,
    test_flags,
    fn_tree,
    on_device,
    backend_fw,
    loc,
    scale,
):
    # Output is random, so exact-value comparison is disabled
    # (test_values=False); this exercises the frontend call end to end.
    helpers.test_frontend_function(
        input_dtypes=input_dtypes,
        backend_to_test=backend_fw,
        test_flags=test_flags,
        frontend=frontend,
        fn_tree=fn_tree,
        on_device=on_device,
        test_values=False,
        loc=loc,
        scale=scale,
        size=size,
    )
Original file line number Diff line number Diff line change
Expand Up @@ -559,6 +559,37 @@ def test_paddle_ones_like(
)


# random_uniform
@handle_frontend_test(
    fn_tree="paddle.random_uniform",
    dtype_and_shape=helpers.dtype_and_values(
        available_dtypes=helpers.get_dtypes("valid"), ret_shape=True
    ),
    test_with_out=st.just(False),
)
def test_paddle_random_uniform(
    *,
    dtype_and_shape,
    on_device,
    fn_tree,
    backend_fw,
    frontend,
    test_flags,
):
    # Only the dtype and shape are needed; the sampled values are discarded.
    input_dtype, _, shape = dtype_and_shape

    # BUG FIX: the frontend signature is random_uniform(low, high, shape, ...)
    # with no defaults for low/high, so omitting them made every example fail
    # with a TypeError before the backend was ever exercised.
    helpers.test_frontend_function(
        input_dtypes=input_dtype,
        backend_to_test=backend_fw,
        frontend=frontend,
        test_flags=test_flags,
        fn_tree=fn_tree,
        on_device=on_device,
        test_values=False,  # output is random; only check call/shape plumbing
        low=0.0,
        high=1.0,
        shape=shape,
    )


# Tests #
# ----- #

Expand Down Expand Up @@ -768,7 +799,6 @@ def test_paddle_zeros(
available_dtypes=helpers.get_dtypes("valid"),
),
dtype=helpers.get_dtypes("valid"),
test_with_out=st.just(False),
)
def test_paddle_zeros_like(
dtype_and_x,
Expand Down
23 changes: 0 additions & 23 deletions ivy_tests/test_ivy/test_functional/test_core/test_manipulation.py
Original file line number Diff line number Diff line change
Expand Up @@ -658,29 +658,6 @@ def test_stack(*, dtypes_arrays, axis, test_flags, backend_fw, fn_name, on_devic
)


# stack
@handle_test(
fn_tree="functional.ivy.stack",
dtypes_arrays=_stack_helper(),
axis=helpers.get_axis(
shape=st.shared(helpers.get_shape(min_num_dims=1), key="values_shape"),
force_int=True,
),
)
def test_stack(*, dtypes_arrays, axis, test_flags, backend_fw, fn_name, on_device):
dtypes, arrays = dtypes_arrays

helpers.test_function(
input_dtypes=dtypes,
test_flags=test_flags,
backend_to_test=backend_fw,
fn_name=fn_name,
on_device=on_device,
arrays=arrays,
axis=axis,
)


# swapaxes
@handle_test(
fn_tree="functional.ivy.swapaxes",
Expand Down