
Commit 10350b1

make sure rotary embedding positions are cast to same type as qk

lucidrains committed Nov 6, 2021 (1 parent: 2cbc36b)

Showing 2 changed files with 2 additions and 1 deletion.
reformer_pytorch/reformer_pytorch.py (1 addition, 0 deletions)

```diff
@@ -654,6 +654,7 @@ def rotate_every_two(x):
     return rearrange(x, '... d j -> ... (d j)')

 def apply_rotary_pos_emb(qk, sinu_pos):
+    sinu_pos = sinu_pos.type(qk.dtype)
     sinu_pos = rearrange(sinu_pos, '() n (j d) -> n j d', j = 2)
     sin, cos = sinu_pos.unbind(dim = -2)
     sin, cos = map(lambda t: repeat(t, 'n d -> n (d j)', j = 2), (sin, cos))
```
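The one-line change fixes a mixed-precision mismatch: the sinusoidal position tensor is typically created and buffered in float32, while under half-precision training qk arrives as float16, so PyTorch's type promotion would push the rotary output back to float32 (or an op could reject the mixed dtypes). Casting sinu_pos to qk.dtype up front keeps the whole computation in the query/key dtype. The sketch below shows the patched function in runnable form; only the diffed lines are verbatim from this commit, and the rotate_every_two body plus the final return line are reconstructed from the standard rotary-embedding pattern, so treat those as assumptions.

```python
# Minimal sketch of the patched function in context. Assumes einops is
# installed; rotate_every_two and the final return line follow the usual
# rotary-embedding pattern (qk * cos + rotate_every_two(qk) * sin) and are
# not part of this diff.
import torch
from einops import rearrange, repeat

def rotate_every_two(x):
    # Pair up adjacent feature dims and rotate each pair: (x1, x2) -> (-x2, x1).
    x = rearrange(x, '... (d j) -> ... d j', j = 2)
    x1, x2 = x.unbind(dim = -1)
    x = torch.stack((-x2, x1), dim = -1)
    return rearrange(x, '... d j -> ... (d j)')

def apply_rotary_pos_emb(qk, sinu_pos):
    # The fix: the position buffer is float32, but under mixed precision
    # qk may be float16, so cast before mixing the two tensors.
    sinu_pos = sinu_pos.type(qk.dtype)
    sinu_pos = rearrange(sinu_pos, '() n (j d) -> n j d', j = 2)
    sin, cos = sinu_pos.unbind(dim = -2)
    sin, cos = map(lambda t: repeat(t, 'n d -> n (d j)', j = 2), (sin, cos))
    return (qk * cos) + (rotate_every_two(qk) * sin)

# Hypothetical usage: half-precision qk against a float32 position buffer
# now stays in float16 end to end.
qk = torch.randn(1, 8, 64, dtype = torch.float16)   # (batch, seq, dim)
sinu_pos = torch.randn(1, 8, 64)                    # float32, (1, n, j * d), j = 2
out = apply_rotary_pos_emb(qk, sinu_pos)
assert out.dtype == torch.float16
```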
setup.py (1 addition, 1 deletion)

```diff
@@ -3,7 +3,7 @@
 setup(
   name = 'reformer_pytorch',
   packages = find_packages(exclude=['examples', 'pretraining']),
-  version = '1.4.3',
+  version = '1.4.4',
   license='MIT',
   description = 'Reformer, the Efficient Transformer, Pytorch',
   author = 'Phil Wang',
```
