Skip to content

Commit 3238786

Browse files
apaszke authored and soumith committed
Improve optimizer error messages
1 parent 07ebbcb commit 3238786

File tree

1 file changed

+4
-0
lines changed

1 file changed

+4
-0
lines changed

torch/optim/optimizer.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,8 @@ def __init__(self, params, defaults):
2424

2525
self.state = defaultdict(dict)
2626
self.param_groups = list(params)
27+
if len(self.param_groups) == 0:
28+
raise ValueError("optimizer got an empty parameter list")
2729
if not isinstance(self.param_groups[0], dict):
2830
self.param_groups = [{'params': self.param_groups}]
2931

@@ -50,6 +52,8 @@ def __init__(self, params, defaults):
5052
if not param.requires_grad:
5153
raise ValueError("optimizing a parameter that doesn't "
5254
"require gradients")
55+
if param.creator is not None:
56+
raise ValueError("can't optimize a non-leaf Variable")
5357

5458
def __getstate__(self):
5559
return {

0 commit comments

Comments (0)