Skip to content

Commit 930c8fd

Browse files
Commit message: fix incorrect attention head dimension in AttnProcessor2_0 (huggingface#4154)
fix inner_dim
1 parent 6b1abba commit 930c8fd

File tree

1 file changed

+1
-1
lines changed

1 file changed

+1
-1
lines changed

src/diffusers/models/attention_processor.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1096,7 +1096,6 @@ def __call__(
10961096
batch_size, sequence_length, _ = (
10971097
hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape
10981098
)
1099-
inner_dim = hidden_states.shape[-1]
11001099

11011100
if attention_mask is not None:
11021101
attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size)
@@ -1117,6 +1116,7 @@ def __call__(
11171116
key = attn.to_k(encoder_hidden_states)
11181117
value = attn.to_v(encoder_hidden_states)
11191118

1119+
inner_dim = key.shape[-1]
11201120
head_dim = inner_dim // attn.heads
11211121

11221122
query = query.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)

0 commit comments

Comments (0)