Commit 4e14032

Make the pad_to_patch_size function work on multi-dimensional inputs.
1 parent 59d58b1 commit 4e14032

1 file changed (+6 −3 lines)

comfy/ldm/common_dit.py

Lines changed: 6 additions & 3 deletions
@@ -4,9 +4,12 @@
 def pad_to_patch_size(img, patch_size=(2, 2), padding_mode="circular"):
     if padding_mode == "circular" and (torch.jit.is_tracing() or torch.jit.is_scripting()):
         padding_mode = "reflect"
-    pad_h = (patch_size[0] - img.shape[-2] % patch_size[0]) % patch_size[0]
-    pad_w = (patch_size[1] - img.shape[-1] % patch_size[1]) % patch_size[1]
-    return torch.nn.functional.pad(img, (0, pad_w, 0, pad_h), mode=padding_mode)
+
+    pad = ()
+    for i in range(img.ndim - 2):
+        pad = (0, (patch_size[i] - img.shape[i + 2] % patch_size[i]) % patch_size[i]) + pad
+
+    return torch.nn.functional.pad(img, pad, mode=padding_mode)
 
 try:
     rms_norm_torch = torch.nn.functional.rms_norm
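The rewritten loop builds the pad tuple from the trailing dimensions, prepending one (0, pad) pair per dimension so that the padding for the last dimension comes first, which is the order torch.nn.functional.pad expects. This lets the same function pad 4D image latents and 5D video latents to a multiple of the patch size. Below is a minimal, self-contained sketch: the function body mirrors the diff above, while the tensor shapes and patch sizes are illustrative and not taken from the commit.

import torch
import torch.nn.functional as F

def pad_to_patch_size(img, patch_size=(2, 2), padding_mode="circular"):
    # One (0, pad) pair per trailing dimension, prepended so the padding for
    # the last dimension comes first, matching torch.nn.functional.pad's order.
    pad = ()
    for i in range(img.ndim - 2):
        pad = (0, (patch_size[i] - img.shape[i + 2] % patch_size[i]) % patch_size[i]) + pad
    return F.pad(img, pad, mode=padding_mode)

# 4D image latent (batch, channels, height, width): pads H and W up to multiples of 2.
x = torch.randn(1, 4, 31, 33)
print(pad_to_patch_size(x).shape)                        # torch.Size([1, 4, 32, 34])

# 5D video latent (batch, channels, frames, height, width) with a 3-element patch size.
v = torch.randn(1, 16, 5, 31, 33)
print(pad_to_patch_size(v, patch_size=(1, 2, 2)).shape)  # torch.Size([1, 16, 5, 32, 34])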
