
Commit c6527a4
Set t5_attention_mask for new comfy versions
city96 committed Oct 24, 2024
1 parent 193c2fd commit c6527a4
Showing 1 changed file with 4 additions and 0 deletions.
PixArt/nodes.py (4 additions, 0 deletions)
@@ -252,6 +252,10 @@ def split(self, sd3_clip, padding):
         # override special tokens
         tmp.t5xxl.special_tokens = copy.deepcopy(clip.cond_stage_model.t5xxl.special_tokens)
         tmp.t5xxl.special_tokens.pop("end") # make sure empty tokens match
+
+        # add attn mask opt if present in original
+        if hasattr(sd3_clip.cond_stage_model, "t5_attention_mask"):
+            tmp.t5_attention_mask = False
 
         # tokenizer
         tok = SD3Tokenizer()
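
The guarded assignment follows a common forward-compatibility pattern: the flag is set on the split-off encoder only when the original clip model exposes it, so older comfy versions that lack t5_attention_mask are left untouched. Below is a minimal, self-contained sketch of that pattern; LegacyClipModel, NewClipModel, and split_encoder are hypothetical stand-ins for illustration, not part of this repository or ComfyUI.

# Minimal sketch of the hasattr-guarded attribute copy from the diff above.
# LegacyClipModel / NewClipModel are hypothetical stand-ins for the ComfyUI
# text encoder wrappers; only the guard itself mirrors the committed code.
from types import SimpleNamespace

class LegacyClipModel:
    """Older comfy versions: no t5_attention_mask attribute."""

class NewClipModel:
    """Newer comfy versions expose an attention-mask toggle."""
    t5_attention_mask = True

def split_encoder(cond_stage_model):
    tmp = SimpleNamespace()  # stand-in for the split-off T5-only clip object
    # add attn mask opt if present in original
    if hasattr(cond_stage_model, "t5_attention_mask"):
        tmp.t5_attention_mask = False
    return tmp

old = split_encoder(LegacyClipModel())
new = split_encoder(NewClipModel())
print(hasattr(old, "t5_attention_mask"))  # False: legacy path is unchanged
print(new.t5_attention_mask)              # False: mask disabled on the copy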
