
Commit 0cfcb5a

fix lr/d*lr is not logged with prodigy in finetune
1 parent c7fd336 commit 0cfcb5a

File tree

1 file changed: +1 −1 lines changed


fine_tune.py

Lines changed: 1 addition & 1 deletion
@@ -397,7 +397,7 @@ def fn_recursive_set_mem_eff(module: torch.nn.Module):
             current_loss = loss.detach().item()  # this is an average, so it should be independent of batch size
             if args.logging_dir is not None:
                 logs = {"loss": current_loss, "lr": float(lr_scheduler.get_last_lr()[0])}
-                if args.optimizer_type.lower().startswith("DAdapt".lower()) or args.optimizer_type.lower() == "Prodigy":  # tracking d*lr value
+                if args.optimizer_type.lower().startswith("DAdapt".lower()) or args.optimizer_type.lower() == "Prodigy".lower():  # tracking d*lr value
                     logs["lr/d*lr"] = (
                         lr_scheduler.optimizers[0].param_groups[0]["d"] * lr_scheduler.optimizers[0].param_groups[0]["lr"]
                     )
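
Why the one-character change matters: `args.optimizer_type` is lowercased before the comparison, but the old code compared it against the mixed-case literal `"Prodigy"`, so the branch never ran and the `lr/d*lr` metric was never logged for Prodigy in fine-tuning. Prodigy and D-Adaptation optimizers keep their adaptive step-size estimate `d` in each param group, so `d * lr` is the effective learning rate this metric tracks. Below is a minimal sketch of just the fixed condition; `should_track_d_lr` is a hypothetical helper for illustration, not part of fine_tune.py.

```python
# Minimal sketch (not the repository's code) showing the case-sensitivity bug and fix.
def should_track_d_lr(optimizer_type: str) -> bool:
    opt = optimizer_type.lower()
    # After the fix, both sides of the equality are lowercase, so the comparison
    # is case-insensitive, matching how the DAdapt prefix check already behaved.
    return opt.startswith("DAdapt".lower()) or opt == "Prodigy".lower()

assert should_track_d_lr("Prodigy")      # was False before the fix
assert should_track_d_lr("DAdaptAdam")   # already worked
assert not should_track_d_lr("AdamW")    # unchanged
```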
