Commit: Update cache files [skip ci]

ivy-dev-bot committed Aug 4, 2024 · 1 parent 91e7d06 · commit a526999

Showing 157 changed files with 2,372 additions and 2,667 deletions.
@@ -1,4 +1,4 @@
-import ivy.functional.frontends.torch.nn.functional as F
+import ivy.functional.frontends.torch as torch

import typing

@@ -9,4 +9,4 @@ class Translated_AdaptiveAvgPool2d(Translated__AdaptiveAvgPoolNd):
output_size: typing.Any

def forward(self, input):
-        return F.adaptive_avg_pool2d(input, self.output_size)
+        return torch.nn.functional.adaptive_avg_pool2d(input, self.output_size)
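The same mechanical change repeats across most files in this commit: the aliased functional import is dropped in favor of a single top-level frontend import, and every call site switches to the fully qualified path. A minimal before/after sketch of the pattern, using the pooling call from this hunk:

# Before: functional namespace bound to a short alias
import ivy.functional.frontends.torch.nn.functional as F
# out = F.adaptive_avg_pool2d(input, output_size)

# After: one top-level frontend import, fully qualified call sites
import ivy.functional.frontends.torch as torch
# out = torch.nn.functional.adaptive_avg_pool2d(input, output_size)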
@@ -1,4 +1,4 @@
-import ivy.functional.frontends.torch.nn.functional as F
+import ivy.functional.frontends.torch as torch

import typing

@@ -38,7 +38,7 @@ def __init__(
self.divisor_override = divisor_override

def forward(self, input):
-        return F.avg_pool2d(
+        return torch.nn.functional.avg_pool2d(
input,
self.kernel_size,
self.stride,
@@ -1,4 +1,4 @@
-import ivy.functional.frontends.torch.nn.functional as F
+import ivy.functional.frontends.torch as torch

from .Translated__NormBase import Translated__NormBase

@@ -45,13 +45,11 @@ def forward(self, input):
passed when the update should occur (i.e. in training mode when they are tracked), or when buffer stats are
used for normalization (i.e. in eval mode when buffers are not None).
"""
-        return F.batch_norm(
+        return torch.nn.functional.batch_norm(
input,
-            (
-                self.running_mean
-                if not self.training or self.track_running_stats
-                else None
-            ),
+            self.running_mean
+            if not self.training or self.track_running_stats
+            else None,
self.running_var if not self.training or self.track_running_stats else None,
self.weight,
self.bias,
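A standalone sketch of the buffer-selection conditional above (function name hypothetical): buffers are handed to batch_norm either so they can be updated (training mode with tracking enabled) or so they can be used for normalization (eval mode); otherwise batch statistics are used.

# Hypothetical standalone version of the conditional shown in this hunk
def select_running_stats(training, track_running_stats, running_mean, running_var):
    use_buffers = not training or track_running_stats
    return (
        running_mean if use_buffers else None,
        running_var if use_buffers else None,
    )

# training without tracking: batch statistics are used, buffers stay None
assert select_running_stats(True, False, [0.0], [1.0]) == (None, None)
# eval mode: stored running statistics are passed through
assert select_running_stats(False, False, [0.0], [1.0]) == ([0.0], [1.0])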
@@ -1,7 +1,9 @@
-import ivy.functional.frontends.torch.nn.functional as F
+import ivy.functional.frontends.torch as torch

from .Translated__ConvNd import Translated__ConvNd
-from .helpers import Translated_parse
+from .helpers import Translated__ntuple
+
+_pair = Translated__ntuple(2, "_pair")


class Translated_Conv2d(Translated__ConvNd):
@@ -20,10 +22,10 @@ def __init__(
dtype=None,
):
factory_kwargs = {"device": device, "dtype": dtype}
-        kernel_size_ = Translated_parse(kernel_size)
-        stride_ = Translated_parse(stride)
-        padding_ = padding if isinstance(padding, str) else Translated_parse(padding)
-        dilation_ = Translated_parse(dilation)
+        kernel_size_ = _pair(kernel_size)
+        stride_ = _pair(stride)
+        padding_ = padding if isinstance(padding, str) else _pair(padding)
+        dilation_ = _pair(dilation)
super().__init__(
in_channels,
out_channels,
@@ -32,7 +34,7 @@ def __init__(
padding_,
dilation_,
False,
-            Translated_parse(0),
+            _pair(0),
groups,
bias,
padding_mode,
@@ -41,18 +43,18 @@

def _conv_forward(self, input, weight, bias):
if self.padding_mode != "zeros":
-            return F.conv2d(
-                F.pad(
+            return torch.nn.functional.conv2d(
+                torch.nn.functional.pad(
input, self._reversed_padding_repeated_twice, mode=self.padding_mode
),
weight,
bias,
self.stride,
-                Translated_parse(0),
+                _pair(0),
self.dilation,
self.groups,
)
-        return F.conv2d(
+        return torch.nn.functional.conv2d(
input, weight, bias, self.stride, self.padding, self.dilation, self.groups
)
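When padding_mode is not "zeros", the input is padded explicitly first and the convolution then runs with zero padding (_pair(0)). The pad call's padding argument uses last-dimension-first ordering with each side listed separately, which matches what the Translated__reverse_repeat_tuple helper (shown in the helpers file below) produces from the per-dimension padding tuple. A runnable sketch:

# Helper reproduced from the helpers hunk below
def Translated__reverse_repeat_tuple(t, n):
    return tuple(x for x in reversed(t) for _ in range(n))

# padding=(1, 2) over (H, W) becomes (2, 2, 1, 1):
# W-left, W-right, H-top, H-bottom, the order pad() consumes
assert Translated__reverse_repeat_tuple((1, 2), 2) == (2, 2, 1, 1)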

@@ -1,8 +1,8 @@
import ivy.functional.frontends.torch as torch
import ivy.functional.frontends.torch.nn as nn

-import math
import typing
+import math
from typing import Optional

from .helpers import Translated__calculate_fan_in_and_fan_out
@@ -4,6 +4,16 @@
import warnings


+def Translated__ntuple(n, name="parse"):
+    def parse(x):
+        if isinstance(x, collections.abc.Iterable):
+            return tuple(x)
+        return tuple(repeat(x, n))
+
+    parse.__name__ = name
+    return parse


def Translated__reverse_repeat_tuple(t, n):
return tuple(x for x in reversed(t) for _ in range(n))

@@ -87,10 +97,3 @@ def Translated__no_grad_uniform_(tensor, a, b, generator=None):

def Translated_uniform_(tensor, a=0.0, b=1.0, generator=None):
return Translated__no_grad_uniform_(tensor, a, b, generator)


-def Translated_parse(x):
-    n = 2
-    if isinstance(x, collections.abc.Iterable):
-        return tuple(x)
-    return tuple(repeat(x, n))
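The new Translated__ntuple factory replaces the fixed-arity Translated_parse: each module now binds its own arity once (_pair for the 2-D modules, _single in the transposed-conv base class below). A quick runnable sketch of its behavior:

import collections.abc
from itertools import repeat

# Factory as defined in the hunk above
def Translated__ntuple(n, name="parse"):
    def parse(x):
        if isinstance(x, collections.abc.Iterable):
            return tuple(x)
        return tuple(repeat(x, n))

    parse.__name__ = name
    return parse

_pair = Translated__ntuple(2, "_pair")
assert _pair(3) == (3, 3)          # a scalar broadcasts to every spatial dim
assert _pair((1, 2)) == (1, 2)     # an iterable passes through as a tuple
assert _pair.__name__ == "_pair"   # named for clearer reprs and tracebacks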
@@ -1,7 +1,9 @@
-import ivy.functional.frontends.torch.nn.functional as F
+import ivy.functional.frontends.torch as torch

from .Translated__ConvTransposeNd import Translated__ConvTransposeNd
-from .helpers import Translated_parse
+from .helpers import Translated__ntuple
+
+_pair = Translated__ntuple(2, "_pair")


class Translated_ConvTranspose2d(Translated__ConvTransposeNd):
@@ -21,11 +23,11 @@ def __init__(
dtype=None,
):
factory_kwargs = {"device": device, "dtype": dtype}
-        kernel_size = Translated_parse(kernel_size)
-        stride = Translated_parse(stride)
-        padding = Translated_parse(padding)
-        dilation = Translated_parse(dilation)
-        output_padding = Translated_parse(output_padding)
+        kernel_size = _pair(kernel_size)
+        stride = _pair(stride)
+        padding = _pair(padding)
+        dilation = _pair(dilation)
+        output_padding = _pair(output_padding)
super().__init__(
in_channels,
out_channels,
@@ -57,7 +59,7 @@ def forward(self, input, output_size=None):
num_spatial_dims,
self.dilation,
)
-        return F.conv_transpose2d(
+        return torch.nn.functional.conv_transpose2d(
input,
self.weight,
self.bias,
@@ -1,5 +1,7 @@
from .Translated__ConvNd import Translated__ConvNd
-from .helpers import Translated_parse
+from .helpers import Translated__ntuple
+
+_single = Translated__ntuple(1, "_single")


class Translated__ConvTransposeNd(Translated__ConvNd):
@@ -50,7 +52,7 @@ def _output_padding(
dilation=None,
):
if output_size is None:
-            ret = Translated_parse(self.output_padding)
+            ret = _single(self.output_padding)
else:
has_batch_dim = input.dim() == num_spatial_dims + 2
num_non_spatial_dims = 2 if has_batch_dim else 1
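Note the arity is 1 here even though the module may handle 2-D output_padding: iterables pass through Translated__ntuple's parse unchanged, so _single only normalizes a scalar to a tuple while per-dimension tuples are preserved. A sketch, reusing the factory shown in the helpers above:

# Assuming Translated__ntuple is importable from the helpers shown above
_single = Translated__ntuple(1, "_single")

assert _single(0) == (0,)         # scalar becomes a 1-tuple
assert _single((1, 2)) == (1, 2)  # an existing tuple passes through intact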
@@ -4,6 +4,16 @@
import warnings


+def Translated__ntuple(n, name="parse"):
+    def parse(x):
+        if isinstance(x, collections.abc.Iterable):
+            return tuple(x)
+        return tuple(repeat(x, n))
+
+    parse.__name__ = name
+    return parse


def Translated__reverse_repeat_tuple(t, n):
return tuple(x for x in reversed(t) for _ in range(n))

@@ -87,10 +97,3 @@ def Translated__no_grad_uniform_(tensor, a, b, generator=None):

def Translated_uniform_(tensor, a=0.0, b=1.0, generator=None):
return Translated__no_grad_uniform_(tensor, a, b, generator)


-def Translated_parse(x):
-    n = 1
-    if isinstance(x, collections.abc.Iterable):
-        return tuple(x)
-    return tuple(repeat(x, n))
@@ -1,8 +1,8 @@
-import ivy.functional.frontends.torch.nn.functional as F
+import ivy.functional.frontends.torch as torch

from .Translated__DropoutNd import Translated__DropoutNd


class Translated_Dropout2d(Translated__DropoutNd):
def forward(self, input):
-        return F.dropout2d(input, self.p, self.training, self.inplace)
+        return torch.nn.functional.dropout2d(input, self.p, self.training, self.inplace)
@@ -1,9 +1,8 @@
import ivy.functional.frontends.torch as torch
import ivy.functional.frontends.torch.nn as nn
-import ivy.functional.frontends.torch.nn.functional as F

-import typing
import numbers
+import typing

from .helpers import Translated_ones_
from .helpers import Translated_zeros_
@@ -53,7 +52,7 @@ def reset_parameters(self):
Translated_zeros_(self.bias)

def forward(self, input):
-        return F.layer_norm(
+        return torch.nn.functional.layer_norm(
input, self.normalized_shape, self.weight, self.bias, self.eps
)

@@ -1,9 +1,8 @@
import ivy.functional.frontends.torch as torch
import ivy.functional.frontends.torch.nn as nn
-import ivy.functional.frontends.torch.nn.functional as F

-import math
import typing
+import math

from .helpers import Translated__calculate_fan_in_and_fan_out
from .helpers import Translated_kaiming_uniform_
@@ -40,7 +39,7 @@ def reset_parameters(self):
Translated_uniform_(self.bias, -bound, bound)

def forward(self, input):
-        return F.linear(input, self.weight, self.bias)
+        return torch.nn.functional.linear(input, self.weight, self.bias)

def extra_repr(self):
return f"in_features={self.in_features}, out_features={self.out_features}, bias={self.bias is not None}"
@@ -1,4 +1,4 @@
-import ivy.functional.frontends.torch.nn.functional as F
+import ivy.functional.frontends.torch as torch

import typing

@@ -12,7 +12,7 @@ class Translated_MaxPool2d(Translated__MaxPoolNd):
dilation: typing.Any

def forward(self, input):
-        return F.max_pool2d(
+        return torch.nn.functional.max_pool2d(
input,
self.kernel_size,
self.stride,
@@ -1,8 +1,8 @@
import ivy.functional.frontends.torch.nn as nn

import typing
-from collections import OrderedDict
from collections import abc as container_abcs
+from collections import OrderedDict


class Translated_ModuleDict(nn.Module):
@@ -1,10 +1,11 @@
+import ivy.functional.frontends.torch as torch
import ivy.functional.frontends.torch.nn as nn

-import typing
import operator
-from collections import OrderedDict
+import typing
from typing import overload
from itertools import islice
+from collections import OrderedDict


class Translated_Sequential(nn.Module):
@@ -142,8 +143,10 @@ def append(self, module):
return self

def insert(self, index, module):
-        if not isinstance(module, nn.Module):
-            raise AssertionError(f"module should be of type: {nn.Module}")
+        if not isinstance(module, torch.nn.modules.module.Module):
+            raise AssertionError(
+                f"module should be of type: {torch.nn.modules.module.Module}"
+            )
n = len(self._modules)
if not -n <= index <= n:
raise IndexError(f"Index out of range: {index}")
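The surrounding insert method validates the index the same way list.insert does, accepting negative positions in [-n, n]. A standalone sketch of just that check (function name hypothetical):

# Hypothetical standalone version of the bounds check above
def check_insert_index(index, n):
    if not -n <= index <= n:
        raise IndexError(f"Index out of range: {index}")

check_insert_index(0, 3)   # ok: prepend
check_insert_index(3, 3)   # ok: append
check_insert_index(-3, 3)  # ok: negative index counted from the end
try:
    check_insert_index(4, 3)
except IndexError as exc:
    assert "Index out of range" in str(exc)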
@@ -47,11 +47,9 @@ def forward(self, input):
"""
normalized, self.running_mean, self.running_var = ivy_batch_norm_frnt(
input,
-            (
-                self.running_mean
-                if not self.training or self.track_running_stats
-                else None
-            ),
+            self.running_mean
+            if not self.training or self.track_running_stats
+            else None,
self.running_var if not self.training or self.track_running_stats else None,
self.weight,
self.bias,
@@ -1,7 +1,9 @@
from .ivy__ConvNd import ivy__ConvNd
+from .ivy__helpers import ivy__ntuple
from .ivy__helpers import ivy_conv2d_frnt
from .ivy__helpers import ivy_pad_frnt
-from .ivy__helpers import ivy_parse
+
+_pair = ivy__ntuple(2, "_pair")


class ivy_Conv2d(ivy__ConvNd):
@@ -20,10 +22,10 @@ def __init__(
dtype=None,
):
factory_kwargs = {"device": device, "dtype": dtype}
-        kernel_size_ = ivy_parse(kernel_size)
-        stride_ = ivy_parse(stride)
-        padding_ = padding if isinstance(padding, str) else ivy_parse(padding)
-        dilation_ = ivy_parse(dilation)
+        kernel_size_ = _pair(kernel_size)
+        stride_ = _pair(stride)
+        padding_ = padding if isinstance(padding, str) else _pair(padding)
+        dilation_ = _pair(dilation)
super().__init__(
in_channels,
out_channels,
@@ -32,7 +34,7 @@ def __init__(
padding_,
dilation_,
False,
-            ivy_parse(0),
+            _pair(0),
groups,
bias,
padding_mode,
@@ -48,7 +50,7 @@ def _conv_forward(self, input, weight, bias):
weight,
bias,
self.stride,
-            ivy_parse(0),
+            _pair(0),
self.dilation,
self.groups,
)