Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: fix TPU support in the torch backend for unsupported data types #28739

Open
wants to merge 5 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion ivy/functional/backends/paddle/linear_algebra.py
Original file line number Diff line number Diff line change
Expand Up @@ -692,4 +692,4 @@ def vector_to_skew_symmetric_matrix(
row2 = paddle.concat((a3s, zs, -a1s), -1)
row3 = paddle.concat((-a2s, a1s, zs), -1)
# BS x 3 x 3
return paddle.concat((row1, row2, row3), -2)
return paddle.concat((row1, row2, row3), -2)
68 changes: 60 additions & 8 deletions ivy/functional/backends/paddle/random.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
import ivy.functional.backends.paddle as paddle_backend
from typing import Optional, Union, Sequence

# local
import ivy
from paddle.device import core
from ivy.functional.ivy.random import (
Expand All @@ -24,7 +23,6 @@
# Extra #
# ------#


@with_unsupported_device_and_dtypes(
{"2.6.0 and below": {"cpu": ("int8",)}},
backend_version,
Expand All @@ -45,19 +43,17 @@ def random_uniform(
low = paddle.cast(low, "float32") if isinstance(low, paddle.Tensor) else low
high = paddle.cast(high, "float32") if isinstance(high, paddle.Tensor) else high
shape = _check_bounds_and_get_shape(low, high, shape).shape
# Set range and seed
rng = high - low
if seed:
_ = paddle.seed(seed)
random_base = paddle.uniform(shape, min=0.0, max=1.0)

return paddle_backend.add(paddle_backend.multiply(random_base, rng), low).cast(
dtype
)
return paddle_backend.add(paddle_backend.multiply(random_base, rng), low).cast(dtype)


@with_unsupported_dtypes(
{"2.6.0 and below": ("float16", "int16", "int8")}, backend_version
{"2.6.0 and below": ("float16", "int16", "int8")},
backend_version,
)
def random_normal(
*,
Expand Down Expand Up @@ -155,10 +151,66 @@ def shuffle(
) -> paddle.Tensor:
if seed:
_ = paddle.seed(seed)
# Use Paddle's randperm function to generate shuffled indices
indices = paddle.randperm(x.ndim, dtype="int64")
if paddle.is_complex(x):
shuffled_real = paddle.index_select(x.real(), indices, axis=axis)
shuffled_imag = paddle.index_select(x.imag(), indices, axis=axis)
return paddle.complex(shuffled_real, shuffled_imag)
return paddle.index_select(x, indices, axis=axis)


# New Random Distribution Functions
# -----------------------------------

def random_exponential(
    *,
    scale: Union[float, paddle.Tensor],
    shape: Optional[Union[ivy.NativeShape, Sequence[int]]] = None,
    dtype: paddle.dtype,
    seed: Optional[int] = None,
) -> paddle.Tensor:
    """Draw samples from an exponential distribution.

    Parameters
    ----------
    scale
        Scale parameter (1 / rate) of the distribution; must be positive.
    shape
        Output shape; when ``None`` it is derived from ``scale``.
    dtype
        Output data type.
    seed
        Optional RNG seed for reproducibility.

    Returns
    -------
    paddle.Tensor
        Samples of the requested shape, cast to ``dtype``.
    """
    _check_valid_scale(scale)
    shape = _check_bounds_and_get_shape(scale, None, shape).shape
    if seed:
        paddle.seed(seed)
    # Paddle exposes only the in-place ``Tensor.exponential_`` and no
    # functional ``paddle.exponential(scale, shape)``, so sample via the
    # inverse CDF: if U ~ Uniform(0, 1) then -scale * log(1 - U) ~ Exp(scale).
    u = paddle.uniform(shape, min=0.0, max=1.0)
    # log1p(-u) is numerically safer than log(1 - u) for u near 0.
    return paddle_backend.multiply(-paddle.log1p(-u), scale).cast(dtype)


def random_poisson(
    *,
    lam: Union[float, paddle.Tensor],
    shape: Optional[Union[ivy.NativeShape, Sequence[int]]] = None,
    dtype: paddle.dtype,
    seed: Optional[int] = None,
) -> paddle.Tensor:
    """Draw samples from a Poisson distribution with rate ``lam``.

    Parameters
    ----------
    lam
        Rate parameter(s); broadcastable to ``shape``.
    shape
        Output shape; when ``None`` it is derived from ``lam``.
    dtype
        Output data type.
    seed
        Optional RNG seed for reproducibility.

    Returns
    -------
    paddle.Tensor
        Samples of the requested shape, cast to ``dtype``.
    """
    shape = _check_bounds_and_get_shape(lam, None, shape).shape
    if seed:
        paddle.seed(seed)
    # ``paddle.poisson`` takes a tensor of rates (one per output element) and
    # has no separate shape argument, so broadcast lam to the target shape.
    lam_t = lam if isinstance(lam, paddle.Tensor) else paddle.to_tensor(lam)
    rates = paddle.broadcast_to(lam_t.cast("float32"), shape)
    return paddle.poisson(rates).cast(dtype)


def random_bernoulli(
    *,
    p: Union[float, paddle.Tensor],
    shape: Optional[Union[ivy.NativeShape, Sequence[int]]] = None,
    dtype: paddle.dtype,
    seed: Optional[int] = None,
) -> paddle.Tensor:
    """Draw samples from a Bernoulli distribution with success probability ``p``.

    Parameters
    ----------
    p
        Probability of a 1; broadcastable to ``shape``.
    shape
        Output shape; when ``None`` it is derived from ``p``.
    dtype
        Output data type.
    seed
        Optional RNG seed for reproducibility.

    Returns
    -------
    paddle.Tensor
        0/1 samples of the requested shape, cast to ``dtype``.
    """
    shape = _check_bounds_and_get_shape(p, None, shape).shape
    if seed:
        paddle.seed(seed)
    # ``paddle.bernoulli``'s second positional parameter is ``name``, not a
    # shape, and it requires a tensor of probabilities — broadcast ``p`` to
    # the target shape and sample element-wise.
    p_t = p if isinstance(p, paddle.Tensor) else paddle.to_tensor(p)
    probs = paddle.broadcast_to(p_t.cast("float32"), shape)
    return paddle.bernoulli(probs).cast(dtype)


def random_beta(
    *,
    alpha: Union[float, paddle.Tensor],
    beta: Union[float, paddle.Tensor],
    shape: Optional[Union[ivy.NativeShape, Sequence[int]]] = None,
    dtype: paddle.dtype,
    seed: Optional[int] = None,
) -> paddle.Tensor:
    """Draw samples from a Beta(alpha, beta) distribution.

    Parameters
    ----------
    alpha
        First concentration parameter; broadcastable to ``shape``.
    beta
        Second concentration parameter; broadcastable to ``shape``.
    shape
        Output shape; when ``None`` it is derived from ``alpha``/``beta``.
    dtype
        Output data type.
    seed
        Optional RNG seed for reproducibility.

    Returns
    -------
    paddle.Tensor
        Samples in (0, 1) of the requested shape, cast to ``dtype``.
    """
    shape = _check_bounds_and_get_shape(alpha, beta, shape).shape
    if seed:
        paddle.seed(seed)
    # There is no functional ``paddle.beta``; sample through
    # ``paddle.distribution.Beta``. Broadcasting the concentrations to the
    # target shape first makes the batch shape equal to ``shape``, so an
    # empty sample shape yields exactly the requested output shape.
    alpha_t = alpha if isinstance(alpha, paddle.Tensor) else paddle.to_tensor(alpha)
    beta_t = beta if isinstance(beta, paddle.Tensor) else paddle.to_tensor(beta)
    a = paddle.broadcast_to(alpha_t.cast("float32"), shape)
    b = paddle.broadcast_to(beta_t.cast("float32"), shape)
    return paddle.distribution.Beta(a, b).sample().cast(dtype)
Loading
Loading