Skip to content

Commit

Permalink
fix IndexError when `mems` is empty: give `first` an optional default and use it for the rotary position embedding memory-length lookup
Browse files Browse the repository at this point in the history
  • Loading branch information
lucidrains committed Dec 10, 2024
1 parent f5d3907 commit 5231e5d
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 5 deletions.
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
setup(
name = 'x-transformers',
packages = find_packages(exclude=['examples']),
version = '1.42.25',
version = '1.42.26',
license='MIT',
description = 'X-Transformers - Pytorch',
author = 'Phil Wang',
Expand Down
7 changes: 3 additions & 4 deletions x_transformers/x_transformers.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,8 +51,8 @@ def default(val, d):
return val
return d() if callable(d) else d

def first(it, default = None):
    """Return the first element of sequence *it*.

    Fixes an IndexError on empty sequences: previously `it[0]` was
    returned unconditionally. Now *default* (None unless given) is
    returned when *it* is empty — backward compatible for all
    non-empty inputs.
    """
    return it[0] if len(it) > 0 else default
def first(it, default = None):
    """Return the first element of sequence *it*, or *default* when empty."""
    if len(it) > 0:
        return it[0]
    return default

def is_empty(x):
    """True when sized container *x* holds no elements."""
    return not len(x)
Expand Down Expand Up @@ -1357,7 +1357,6 @@ def forward(
k = k * self.qk_norm_k_scale

if exists(rotary_pos_emb):

freqs, xpos_scale = rotary_pos_emb
q_xpos_scale, k_xpos_scale = (xpos_scale, xpos_scale ** -1.) if exists(xpos_scale) else (1., 1.)

Expand Down Expand Up @@ -1989,7 +1988,7 @@ def forward(

if exists(self.rotary_pos_emb):
if not exists(rotary_pos_emb):
maybe_mem = mems[0] # todo - handle edge case where different layers get different memory lengths. don't think this will ever come up but who knows
maybe_mem = first(mems, None) # todo - handle edge case where different layers get different memory lengths. don't think this will ever come up but who knows
mem_len = maybe_mem.shape[1] if exists(maybe_mem) else 0

if not exists(pos):
Expand Down

0 comments on commit 5231e5d

Please sign in to comment.