Skip to content

Commit

Permalink
fix attention couple + deep shrink causing an error at some resolutions
Browse files Browse the repository at this point in the history
  • Loading branch information
kohya-ss committed Apr 3, 2024
1 parent 2258a1b commit b748b48
Showing 1 changed file with 2 additions and 3 deletions.
5 changes: 2 additions & 3 deletions networks/lora.py
Original file line number Diff line number Diff line change
Expand Up @@ -247,14 +247,13 @@ def get_mask_for_x(self, x):
area = x.size()[1]

mask = self.network.mask_dic.get(area, None)
if mask is None:
# raise ValueError(f"mask is None for resolution {area}")
if mask is None or len(x.size()) == 2:
# emb_layers in SDXL doesn't have mask
# if "emb" not in self.lora_name:
# print(f"mask is None for resolution {self.lora_name}, {area}, {x.size()}")
mask_size = (1, x.size()[1]) if len(x.size()) == 2 else (1, *x.size()[1:-1], 1)
return torch.ones(mask_size, dtype=x.dtype, device=x.device) / self.network.num_sub_prompts
if len(x.size()) != 4:
if len(x.size()) == 3:
mask = torch.reshape(mask, (1, -1, 1))
return mask

Expand Down

0 comments on commit b748b48

Please sign in to comment.