src/diffusers/models/unet_2d_condition.py (5 additions & 0 deletions)
@@ -211,6 +211,11 @@ def __init__(

         self.sample_size = sample_size

+        if num_attention_heads is not None:
+            raise ValueError(
+                "At the moment it is not possible to define the number of attention heads via `num_attention_heads` because of a naming issue as described in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131. Passing `num_attention_heads` will only be supported in diffusers v0.19."
+            )
+
         # If `num_attention_heads` is not defined (which is the case for most models)
         # it will default to `attention_head_dim`. This looks weird upon first reading it and it is.
         # The reason for this behavior is to correct for incorrectly named variables that were introduced
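The comment block above describes the fallback without showing it; as a rough sketch (the names match the parameters visible in this diff, but the exact line is assumed rather than quoted from the PR), the behavior it describes amounts to:

    # Assumed sketch of the fallback the comments describe: when `num_attention_heads`
    # is not passed, the value given as `attention_head_dim` is reused as the head count.
    num_attention_heads = num_attention_heads or attention_head_dim

With the guard added above, `num_attention_heads` is always None at this point, so the fallback always resolves to `attention_head_dim` until the rename lands in v0.19.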
src/diffusers/models/unet_2d_condition_flax.py (5 additions & 0 deletions)
@@ -133,6 +133,11 @@ def setup(self):
         block_out_channels = self.block_out_channels
         time_embed_dim = block_out_channels[0] * 4

+        if self.num_attention_heads is not None:
+            raise ValueError(
+                "At the moment it is not possible to define the number of attention heads via `num_attention_heads` because of a naming issue as described in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131. Passing `num_attention_heads` will only be supported in diffusers v0.19."
+            )
+
         # If `num_attention_heads` is not defined (which is the case for most models)
         # it will default to `attention_head_dim`. This looks weird upon first reading it and it is.
         # The reason for this behavior is to correct for incorrectly named variables that were introduced
src/diffusers/models/unet_3d_condition.py (5 additions & 0 deletions)
@@ -114,6 +114,11 @@ def __init__(

         self.sample_size = sample_size

+        if num_attention_heads is not None:
+            raise NotImplementedError(
+                "At the moment it is not possible to define the number of attention heads via `num_attention_heads` because of a naming issue as described in https://github.com/huggingface/diffusers/issues/2011#issuecomment-1547958131. Passing `num_attention_heads` will only be supported in diffusers v0.19."
+            )
+
         # If `num_attention_heads` is not defined (which is the case for most models)
         # it will default to `attention_head_dim`. This looks weird upon first reading it and it is.
         # The reason for this behavior is to correct for incorrectly named variables that were introduced
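Taken together, the practical effect of these guards for callers looks roughly like the sketch below (hypothetical usage, not taken from this PR; note the 2D UNets raise ValueError while the 3D UNet raises NotImplementedError):

    from diffusers import UNet2DConditionModel

    # Passing the new argument is rejected until the naming issue is resolved.
    try:
        UNet2DConditionModel(num_attention_heads=8)
    except ValueError:
        pass  # "... Passing `num_attention_heads` will only be supported in diffusers v0.19."

    # The existing (historically mis-named) argument keeps working and still sets the head count.
    model = UNet2DConditionModel(attention_head_dim=8)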