
Commit dbd4b5a
add a positional encoding MLP to the mix, helps with low-d curled up data potentially
karpathy committed Dec 12, 2019
1 parent fce68b6 commit dbd4b5a
Showing 1 changed file with 30 additions and 0 deletions.
30 changes: 30 additions & 0 deletions nflib/nets.py
@@ -20,6 +20,21 @@ def __init__(self, n):
    def forward(self, x):
        return self.p.expand(x.size(0), self.p.size(1))

class PositionalEncoder(nn.Module):
    """
    Each dimension of the input gets expanded out with sins/coses
    to "carve" out the space. Useful in low-dimensional cases with
    tightly "curled up" data.
    """
    def __init__(self, freqs=(.5,1,2,4,8)):
        super().__init__()
        self.freqs = freqs

    def forward(self, x):
        sines = [torch.sin(x * f) for f in self.freqs]
        coses = [torch.cos(x * f) for f in self.freqs]
        out = torch.cat(sines + coses, dim=1)
        return out

class MLP(nn.Module):
""" a simple 4-layer MLP """
@@ -38,6 +53,21 @@ def __init__(self, nin, nout, nh):
    def forward(self, x):
        return self.net(x)

class PosEncMLP(nn.Module):
    """
    Position Encoded MLP, where the first layer performs position encoding.
    Each dimension of the input gets transformed to len(freqs)*2 dimensions
    using a fixed transformation of sin/cos of given frequencies.
    """
    def __init__(self, nin, nout, nh, freqs=(.5,1,2,4,8)):
        super().__init__()
        self.net = nn.Sequential(
            PositionalEncoder(freqs),
            MLP(nin * len(freqs) * 2, nout, nh),
        )
    def forward(self, x):
        return self.net(x)

class ARMLP(nn.Module):
""" a 4-layer auto-regressive MLP, wrapper around MADE net """

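A minimal usage sketch of the two new modules (not part of the commit; it assumes the nflib package is importable and that nh is the hidden width of the underlying 4-layer MLP):

import torch
from nflib.nets import PositionalEncoder, PosEncMLP

x = torch.randn(16, 2)                  # a batch of 16 two-dimensional points

enc = PositionalEncoder(freqs=(.5, 1, 2, 4, 8))
z = enc(x)                              # each dim expanded to sin/cos at 5 frequencies
print(z.shape)                          # torch.Size([16, 20]) = 2 dims * 5 freqs * 2

net = PosEncMLP(nin=2, nout=2, nh=24)   # fixed position encoding, then the 4-layer MLP
y = net(x)
print(y.shape)                          # torch.Size([16, 2])

The fixed sin/cos features spread tightly "curled up" low-dimensional data across len(freqs)*2 times as many dimensions before the MLP sees it, which is the intent stated in the commit message.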
