
Commit

memories should not have relative positions with each other, just far away from main tokens
lucidrains committed Oct 2, 2023
1 parent 9261e86 commit 974f43d
Showing 2 changed files with 7 additions and 5 deletions.
setup.py: 1 addition & 1 deletion

@@ -3,7 +3,7 @@
 setup(
   name = 'voicebox-pytorch',
   packages = find_packages(exclude=[]),
-  version = '0.2.2',
+  version = '0.2.3',
   license='MIT',
   description = 'Voicebox - Pytorch',
   author = 'Phil Wang',
voicebox_pytorch/voicebox_pytorch.py: 6 additions & 4 deletions

@@ -383,10 +383,12 @@ def forward(

         # rotary embeddings

-        main_positions = torch.arange(seq_len, device = self.device, dtype = torch.long)
-        register_positions = torch.arange(self.num_register_tokens, device = self.device, dtype = torch.long)
-        register_positions -= 10000
-        positions = torch.cat((register_positions, main_positions))
+        positions = seq_len
+
+        if self.has_register_tokens:
+            main_positions = torch.arange(seq_len, device = self.device, dtype = torch.long)
+            register_positions = torch.full((self.num_register_tokens,), -10000, device = self.device, dtype = torch.long)
+            positions = torch.cat((register_positions, main_positions))

         rotary_emb = self.rotary_emb(positions)
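
In effect, the register tokens previously carried consecutive positions (merely offset by -10000), so the rotary embeddings still encoded a relative order among them; after this commit they all share a single far-away position, matching the commit message. Below is a minimal standalone sketch of the before/after position tensors (the small seq_len and num_register_tokens values are illustrative only, not taken from the library):

import torch

seq_len = 4
num_register_tokens = 3

# before: registers got consecutive positions shifted by -10000,
# so rotary embeddings still saw relative offsets among them
old_register_positions = torch.arange(num_register_tokens) - 10000   # tensor([-10000, -9999, -9998])

# after: every register shares the same far-away position
new_register_positions = torch.full((num_register_tokens,), -10000)  # tensor([-10000, -10000, -10000])

main_positions = torch.arange(seq_len)                                # tensor([0, 1, 2, 3])
positions = torch.cat((new_register_positions, main_positions))       # tensor([-10000, -10000, -10000, 0, 1, 2, 3])

Either way the registers sit far from the main token positions; only the new form makes them positionally indistinguishable from one another.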
