Skip to content

Commit

Permalink
hack around some inplace error, also make sure for openai clip text e…
Browse files Browse the repository at this point in the history
…ncoding, only tokens after eos_id are masked out
  • Loading branch information
lucidrains committed Jul 13, 2022
1 parent b207321 commit f988207
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 3 deletions.
8 changes: 6 additions & 2 deletions dalle2_pytorch/dalle2_pytorch.py
Original file line number Diff line number Diff line change
Expand Up @@ -278,6 +278,7 @@ def __init__(
import clip
openai_clip, preprocess = clip.load(name)
super().__init__(openai_clip)
self.eos_id = 49407 # for handling 0 being also '!'

text_attention_final = self.find_layer('ln_final')
self.handle = text_attention_final.register_forward_hook(self._hook)
Expand Down Expand Up @@ -316,7 +317,10 @@ def max_text_len(self):
@torch.no_grad()
def embed_text(self, text):
text = text[..., :self.max_text_len]
text_mask = text != 0

is_eos_id = (text == self.eos_id)
text_mask_excluding_eos = is_eos_id.cumsum(dim = -1) == 0
text_mask = F.pad(text_mask_excluding_eos, (1, -1), value = True)
assert not self.cleared

text_embed = self.clip.encode_text(text)
Expand Down Expand Up @@ -900,7 +904,7 @@ def forward(
null_text_embeds = self.null_text_embed.to(text_encodings.dtype)

text_encodings = torch.where(
rearrange(mask, 'b n -> b n 1'),
rearrange(mask, 'b n -> b n 1').clone(),
text_encodings,
null_text_embeds
)
Expand Down
2 changes: 1 addition & 1 deletion dalle2_pytorch/version.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = '0.23.7'
__version__ = '0.23.8'

0 comments on commit f988207

Please sign in to comment.