Skip to content

Commit d1d0b8a

Browse files
authored
Don't use bare prints in a library (huggingface#3991)
1 parent 04ddad4 commit d1d0b8a

File tree

1 file changed

+4
-4
lines changed

1 file changed

+4
-4
lines changed

src/diffusers/pipelines/stable_diffusion/convert_from_ckpt.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -391,8 +391,8 @@ def convert_ldm_unet_checkpoint(
391391

392392
# at least 100 parameters have to start with `model_ema` in order for the checkpoint to be EMA
393393
if sum(k.startswith("model_ema") for k in keys) > 100 and extract_ema:
394-
print(f"Checkpoint {path} has both EMA and non-EMA weights.")
395-
print(
394+
logger.warning(f"Checkpoint {path} has both EMA and non-EMA weights.")
395+
logger.warning(
396396
"In this conversion only the EMA weights are extracted. If you want to instead extract the non-EMA"
397397
" weights (useful to continue fine-tuning), please make sure to remove the `--extract_ema` flag."
398398
)
@@ -402,7 +402,7 @@ def convert_ldm_unet_checkpoint(
402402
unet_state_dict[key.replace(unet_key, "")] = checkpoint.pop(flat_ema_key)
403403
else:
404404
if sum(k.startswith("model_ema") for k in keys) > 100:
405-
print(
405+
logger.warning(
406406
"In this conversion only the non-EMA weights are extracted. If you want to instead extract the EMA"
407407
" weights (usually better for inference), please make sure to add the `--extract_ema` flag."
408408
)
@@ -1183,7 +1183,7 @@ def download_from_original_stable_diffusion_ckpt(
11831183
if "global_step" in checkpoint:
11841184
global_step = checkpoint["global_step"]
11851185
else:
1186-
print("global_step key not found in model")
1186+
logger.warning("global_step key not found in model")
11871187
global_step = None
11881188

11891189
# NOTE: this while loop isn't great but this controlnet checkpoint has one additional

0 commit comments

Comments (0)