Commit 0493945: var -> std
lucidrains committed Dec 17, 2020 (1 parent c60d03e)
Showing 3 changed files with 8 additions and 8 deletions.
README.md: 2 changes (1 addition, 1 deletion)
@@ -25,7 +25,7 @@ model = TabTransformer(
     dim_out = 1,                         # binary prediction, but could be anything
     depth = 6,                           # depth, paper recommended 6
     heads = 8,                           # heads, paper recommends 8
-    continuous_mean_var = cont_mean_var  # (optional) - normalize the continuous values before layer norm
+    continuous_mean_std = cont_mean_std  # (optional) - normalize the continuous values before layer norm
 )

 x_categ = torch.randint(0, 5, (1, 5)) # category values, from 0 - max number of categories, in the order as passed into the constructor above
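For context, a minimal sketch of how the continuous statistics passed as continuous_mean_std might be computed before constructing the model; the training tensor and the column count of 10 are assumptions for illustration, not part of this commit:

import torch

# hypothetical continuous training data: 1000 rows, 10 continuous columns
x_cont_train = torch.randn(1000, 10)

# per-column mean and standard deviation, stacked into shape (num_continuous, 2)
cont_mean_std = torch.stack(
    (x_cont_train.mean(dim = 0), x_cont_train.std(dim = 0)),
    dim = -1
)
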
setup.py: 2 changes (1 addition, 1 deletion)
@@ -3,7 +3,7 @@
 setup(
   name = 'tab-transformer-pytorch',
   packages = find_packages(),
-  version = '0.0.3',
+  version = '0.0.4',
   license='MIT',
   description = 'Tab Transformer - Pytorch',
   author = 'Phil Wang',
tab_transformer_pytorch/tab_transformer_pytorch.py: 12 changes (6 additions, 6 deletions)
@@ -83,7 +83,7 @@ def __init__(
         dim_head = 16,
         dim_out = 1,
         mlp_hidden_mults = (4, 2),
-        continuous_mean_var = None
+        continuous_mean_std = None
     ):
         super().__init__()
         assert all(map(lambda n: n > 0, categories)), 'number of each category must be positive'
@@ -103,8 +103,8 @@ def __init__(

         # continuous

-        assert continuous_mean_var.shape == (num_continuous, 2), f'continuous_mean_var must have a shape of ({num_continuous}, 2) where the last dimension contains the mean and variance respectively'
-        self.register_buffer('continuous_mean_var', continuous_mean_var)
+        assert continuous_mean_std.shape == (num_continuous, 2), f'continuous_mean_std must have a shape of ({num_continuous}, 2) where the last dimension contains the mean and standard deviation respectively'
+        self.register_buffer('continuous_mean_std', continuous_mean_std)

         self.norm = nn.LayerNorm(num_continuous)
         self.num_continuous = num_continuous
@@ -147,9 +147,9 @@ def forward(self, x_categ, x_cont):

         assert x_cont.shape[1] == self.num_continuous, f'you must pass in {self.num_continuous} values for your continuous input'

-        if exists(self.continuous_mean_var):
-            mean, var = self.continuous_mean_var.unbind(dim = -1)
-            x_cont = (x_cont - mean) / var
+        if exists(self.continuous_mean_std):
+            mean, std = self.continuous_mean_std.unbind(dim = -1)
+            x_cont = (x_cont - mean) / std

         normed_cont = self.norm(x_cont)
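A standalone sketch of the normalization path this hunk changes: the (num_continuous, 2) buffer is split into mean and std, the continuous values are standardized, and layer norm is applied afterwards. The batch size, column count, and identity statistics below are assumptions for illustration:

import torch
import torch.nn as nn

num_continuous = 10
x_cont = torch.randn(4, num_continuous)  # a batch of raw continuous features

# identity statistics (mean 0, std 1), shape (num_continuous, 2)
cont_mean_std = torch.stack(
    (torch.zeros(num_continuous), torch.ones(num_continuous)),
    dim = -1
)

mean, std = cont_mean_std.unbind(dim = -1)           # each of shape (num_continuous,)
x_cont = (x_cont - mean) / std                       # standardize with the provided statistics
normed_cont = nn.LayerNorm(num_continuous)(x_cont)   # layer norm, as in the model's forward pass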
