
Commit 261d2c4
fix bug with encoder relative pos emb
lucidrains committed Nov 5, 2020
1 parent e033224 commit 261d2c4
Showing 2 changed files with 2 additions and 2 deletions.
setup.py (1 addition, 1 deletion)
@@ -3,7 +3,7 @@
 setup(
   name = 'x-transformers',
   packages = find_packages(exclude=['examples']),
-  version = '0.0.19',
+  version = '0.0.20',
   license='MIT',
   description = 'X-Transformers - Pytorch',
   author = 'Phil Wang',
x_transformers/x_transformers.py (1 addition, 1 deletion)
@@ -239,7 +239,7 @@ def __init__(self, dim, depth, dim_head = 64, heads = 8, use_scalenorm = False,
         super().__init__()
         self.dim = dim
         self.layers = nn.ModuleList([])
-        self.rel_pos = RelativePositionBias(causal = True) if rel_pos_bias else None
+        self.rel_pos = RelativePositionBias() if rel_pos_bias else None
 
         norm_class = ScaleNorm if use_scalenorm else nn.LayerNorm
         prenorm_fn = partial(PreNorm, dim, norm_class = norm_class)
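
The change matters because an encoder attends bidirectionally. Below is a rough, self-contained sketch of the T5-style relative position bucketing that RelativePositionBias is based on; the function name, parameters, and defaults here are illustrative assumptions rather than the repository's exact code. With causal = True, every key to the right of the query is clamped into the same bucket, so an encoder built that way loses all distance information for tokens ahead of the current position; without the causal clamp, offsets on each side keep distinct buckets.

import math
import torch

def relative_position_bucket(relative_position, causal = False, num_buckets = 32, max_distance = 128):
    # relative_position = key_position - query_position
    ret = 0
    n = -relative_position

    if causal:
        # decoder-style: keys to the right of the query all collapse into bucket 0
        n = torch.clamp(n, min = 0)
    else:
        # encoder-style: reserve half the buckets for each direction
        num_buckets //= 2
        ret += (n < 0).long() * num_buckets
        n = torch.abs(n)

    # small offsets get exact buckets, larger offsets are binned logarithmically
    max_exact = num_buckets // 2
    is_small = n < max_exact

    val_if_large = max_exact + (
        torch.log(n.float() / max_exact + 1e-6) / math.log(max_distance / max_exact) * (num_buckets - max_exact)
    ).long()
    val_if_large = torch.min(val_if_large, torch.full_like(val_if_large, num_buckets - 1))

    return ret + torch.where(is_small, n, val_if_large)

# query at position 2, keys at positions 0..4
rel_pos = torch.arange(5) - 2
print(relative_position_bucket(rel_pos, causal = True))   # tensor([2, 1, 0, 0, 0])  -> offsets to the right are indistinguishable
print(relative_position_bucket(rel_pos, causal = False))  # tensor([ 2,  1,  0, 17, 18]) -> each side keeps distinct buckets

The one-line fix above simply drops the causal = True argument, so the encoder's relative position bias presumably falls back to the bidirectional behavior sketched here.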
