From 261d2c4db1c7856d9ecb3fe51573a098a1d03712 Mon Sep 17 00:00:00 2001
From: Phil Wang
Date: Thu, 5 Nov 2020 09:40:01 -0800
Subject: [PATCH] fix bug with encoder relative pos emb

---
 setup.py                         | 2 +-
 x_transformers/x_transformers.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/setup.py b/setup.py
index 16dd40de..14e5926e 100644
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@
 setup(
   name = 'x-transformers',
   packages = find_packages(exclude=['examples']),
-  version = '0.0.19',
+  version = '0.0.20',
   license='MIT',
   description = 'X-Transformers - Pytorch',
   author = 'Phil Wang',
diff --git a/x_transformers/x_transformers.py b/x_transformers/x_transformers.py
index d7465d5b..b88acc56 100644
--- a/x_transformers/x_transformers.py
+++ b/x_transformers/x_transformers.py
@@ -239,7 +239,7 @@ def __init__(self, dim, depth, dim_head = 64, heads = 8, use_scalenorm = False,
         super().__init__()
         self.dim = dim
         self.layers = nn.ModuleList([])
-        self.rel_pos = RelativePositionBias(causal = True) if rel_pos_bias else None
+        self.rel_pos = RelativePositionBias() if rel_pos_bias else None
         norm_class = ScaleNorm if use_scalenorm else nn.LayerNorm
         prenorm_fn = partial(PreNorm, dim, norm_class = norm_class)
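
The bug being fixed: `AttentionLayers` is also used for bidirectional encoders, yet it always built `RelativePositionBias(causal = True)`, which collapses every relative position pointing at a future token into a single bucket. The sketch below shows a T5-style relative position bucketing function (illustrative names and defaults, not necessarily the library's exact code) to make the effect of the `causal` flag concrete.

import math
import torch

def relative_position_bucket(relative_position, causal = False, num_buckets = 32, max_distance = 128):
    # relative_position[i, j] = j - i (key position minus query position)
    ret = 0
    n = -relative_position
    if not causal:
        # bidirectional: half the buckets cover keys after the query, half before
        num_buckets //= 2
        ret += (n < 0).long() * num_buckets
        n = torch.abs(n)
    else:
        # causal: every future key (n < 0) is clamped into bucket 0
        n = torch.max(n, torch.zeros_like(n))

    # nearby positions get exact buckets, distant ones share log-spaced buckets
    max_exact = num_buckets // 2
    is_small = n < max_exact
    val_if_large = max_exact + (
        torch.log(n.float() / max_exact) / math.log(max_distance / max_exact) * (num_buckets - max_exact)
    ).long()
    val_if_large = torch.min(val_if_large, torch.full_like(val_if_large, num_buckets - 1))
    return ret + torch.where(is_small, n, val_if_large)

q_pos = torch.arange(4)[:, None]
k_pos = torch.arange(4)[None, :]
rel = k_pos - q_pos
print(relative_position_bucket(rel, causal = True))   # every j > i entry lands in bucket 0, same as the diagonal
print(relative_position_bucket(rel, causal = False))  # distinct buckets in both directions

With `causal = True`, an encoder's attention bias cannot tell a key one position ahead from one ten positions ahead, which is what dropping the flag in the one-line change above corrects.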