add extra assert for continuous values
lucidrains committed Dec 17, 2020
1 parent e1c23b0 commit 6a20af9
Showing 2 changed files with 6 additions and 3 deletions.
2 changes: 1 addition & 1 deletion setup.py
@@ -3,7 +3,7 @@
 setup(
   name = 'tab-transformer-pytorch',
   packages = find_packages(),
-  version = '0.0.2',
+  version = '0.0.3',
   license='MIT',
   description = 'Tab Transformer - Pytorch',
   author = 'Phil Wang',
7 changes: 5 additions & 2 deletions tab_transformer_pytorch/tab_transformer_pytorch.py
@@ -106,6 +106,9 @@ def __init__(
             assert continuous_mean_var.shape == (num_continuous, 2), f'continuous_mean_var must have a shape of ({num_continuous}, 2) where the last dimension contains the mean and variance respectively'
         self.register_buffer('continuous_mean_var', continuous_mean_var)
 
+        self.norm = nn.LayerNorm(num_continuous)
+        self.num_continuous = num_continuous
+
         # attention layers
 
         self.layers = nn.ModuleList([])
@@ -116,8 +119,6 @@
             Residual(PreNorm(dim, FeedForward(dim))),
         ]))
 
-        self.norm = nn.LayerNorm(num_continuous)
-
         # mlp to logits
 
         input_size = (dim * self.num_categories) + num_continuous
@@ -144,6 +145,8 @@ def forward(self, x_categ, x_cont):
 
         flat_categ = x.flatten(1)
 
+        assert x_cont.shape[1] == self.num_continuous, f'you must pass in {self.num_continuous} values for your continuous input'
+
         if exists(self.continuous_mean_var):
             mean, var = self.continuous_mean_var.unbind(dim = -1)
             x_cont = (x_cont - mean) / var
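For context, here is a minimal sketch of how the new forward-time assert surfaces. The constructor arguments below follow the project README and are illustrative only; they are not part of this diff:

import torch
from tab_transformer_pytorch import TabTransformer

model = TabTransformer(
    categories = (10, 5, 6, 5, 8),    # number of unique values per categorical column
    num_continuous = 10,              # the count now stored as self.num_continuous in __init__
    dim = 32,
    depth = 6,
    heads = 8,
    dim_out = 1
)

x_categ = torch.randint(0, 5, (1, 5))   # one row of categorical indices
x_cont = torch.randn(1, 8)              # wrong width: 8 columns instead of 10

# previously this mismatch surfaced later as a less obvious shape error during
# normalization; with this commit it fails immediately with:
# AssertionError: you must pass in 10 values for your continuous input
pred = model(x_categ, x_cont)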
