Skip to content

Commit 8bf80fc

Browse files
disable num attention heads (huggingface#3969)
* disable num attention heads * finish
1 parent 45f6d52 commit 8bf80fc

File tree

4 files changed

+23
-0
lines changed

4 files changed

+23
-0
lines changed

src/diffusers/models/unet_2d_condition.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -211,6 +211,11 @@ def __init__(
211211

212212
self.sample_size = sample_size
213213

214+
if num_attention_heads is not None:
215+
raise ValueError(
216+
"At the moment it is not possible to define the number of attention heads via `num_attention_heads` because of a naming issue as described in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131. Passing `num_attention_heads` will only be supported in diffusers v0.19."
217+
)
218+
214219
# If `num_attention_heads` is not defined (which is the case for most models)
215220
# it will default to `attention_head_dim`. This looks weird upon first reading it and it is.
216221
# The reason for this behavior is to correct for incorrectly named variables that were introduced

src/diffusers/models/unet_2d_condition_flax.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -133,6 +133,11 @@ def setup(self):
133133
block_out_channels = self.block_out_channels
134134
time_embed_dim = block_out_channels[0] * 4
135135

136+
if self.num_attention_heads is not None:
137+
raise ValueError(
138+
"At the moment it is not possible to define the number of attention heads via `num_attention_heads` because of a naming issue as described in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131. Passing `num_attention_heads` will only be supported in diffusers v0.19."
139+
)
140+
136141
# If `num_attention_heads` is not defined (which is the case for most models)
137142
# it will default to `attention_head_dim`. This looks weird upon first reading it and it is.
138143
# The reason for this behavior is to correct for incorrectly named variables that were introduced

src/diffusers/models/unet_3d_condition.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -114,6 +114,11 @@ def __init__(
114114

115115
self.sample_size = sample_size
116116

117+
if num_attention_heads is not None:
118+
raise NotImplementedError(
119+
"At the moment it is not possible to define the number of attention heads via `num_attention_heads` because of a naming issue as described in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131. Passing `num_attention_heads` will only be supported in diffusers v0.19."
120+
)
121+
117122
# If `num_attention_heads` is not defined (which is the case for most models)
118123
# it will default to `attention_head_dim`. This looks weird upon first reading it and it is.
119124
# The reason for this behavior is to correct for incorrectly named variables that were introduced

src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -307,6 +307,14 @@ def __init__(
307307

308308
self.sample_size = sample_size
309309

310+
if num_attention_heads is not None:
311+
raise ValueError(
312+
"At the moment it is not possible to define the number of attention heads via `num_attention_heads`"
313+
" because of a naming issue as described in"
314+
" https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131. Passing"
315+
" `num_attention_heads` will only be supported in diffusers v0.19."
316+
)
317+
310318
# If `num_attention_heads` is not defined (which is the case for most models)
311319
# it will default to `attention_head_dim`. This looks weird upon first reading it and it is.
312320
# The reason for this behavior is to correct for incorrectly named variables that were introduced

0 commit comments

Comments (0)