
Commit 010536e
Author: kuangliu

Remove Variable

1 parent: b1cf0f1
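
The change is mechanical across the model files below: since PyTorch 0.4 merged Variable into Tensor, wrapping inputs in torch.autograd.Variable is no longer needed and forward passes take plain tensors. A minimal sketch of the before/after pattern, using a hypothetical stand-in network rather than one of this repo's models:

import torch
import torch.nn as nn

# Stand-in model, used only to illustrate the API change applied in this commit.
net = nn.Sequential(nn.Linear(8, 16), nn.ReLU(), nn.Linear(16, 2))
x = torch.randn(4, 8)

# Old style (PyTorch < 0.4): inputs had to be wrapped first.
#   from torch.autograd import Variable
#   y = net(Variable(x))

# New style: tensors carry autograd state themselves, so pass them directly.
y = net(x)
print(y.size())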

12 files changed: +28, -43 lines


models/densenet.py

Lines changed: 3 additions & 5 deletions
@@ -5,8 +5,6 @@
 import torch.nn as nn
 import torch.nn.functional as F

-from torch.autograd import Variable
-

 class Bottleneck(nn.Module):
     def __init__(self, in_planes, growth_rate):
@@ -100,10 +98,10 @@ def DenseNet161():
 def densenet_cifar():
     return DenseNet(Bottleneck, [6,12,24,16], growth_rate=12)

-def test_densenet():
+def test():
     net = densenet_cifar()
     x = torch.randn(1,3,32,32)
-    y = net(Variable(x))
+    y = net(x)
     print(y)

-# test_densenet()
+# test()

models/dpn.py

Lines changed: 1 addition & 3 deletions
@@ -3,8 +3,6 @@
 import torch.nn as nn
 import torch.nn.functional as F

-from torch.autograd import Variable
-

 class Bottleneck(nn.Module):
     def __init__(self, last_planes, in_planes, out_planes, dense_depth, stride, first_layer):
@@ -93,7 +91,7 @@ def DPN92():

 def test():
     net = DPN92()
-    x = Variable(torch.randn(1,3,32,32))
+    x = torch.randn(1,3,32,32)
     y = net(x)
     print(y)


models/googlenet.py

Lines changed: 8 additions & 6 deletions
@@ -3,8 +3,6 @@
 import torch.nn as nn
 import torch.nn.functional as F

-from torch.autograd import Variable
-

 class Inception(nn.Module):
     def __init__(self, in_planes, n1x1, n3x3red, n3x3, n5x5red, n5x5, pool_planes):
@@ -99,7 +97,11 @@ def forward(self, x):
         out = self.linear(out)
         return out

-# net = GoogLeNet()
-# x = torch.randn(1,3,32,32)
-# y = net(Variable(x))
-# print(y.size())
+
+def test():
+    net = GoogLeNet()
+    x = torch.randn(1,3,32,32)
+    y = net(x)
+    print(y.size())
+
+# test()

models/mobilenet.py

Lines changed: 1 addition & 3 deletions
@@ -7,8 +7,6 @@
 import torch.nn as nn
 import torch.nn.functional as F

-from torch.autograd import Variable
-

 class Block(nn.Module):
     '''Depthwise conv + Pointwise conv'''
@@ -57,7 +55,7 @@ def forward(self, x):
 def test():
     net = MobileNet()
     x = torch.randn(1,3,32,32)
-    y = net(Variable(x))
+    y = net(x)
     print(y.size())

 # test()

models/mobilenetv2.py

Lines changed: 1 addition & 3 deletions
@@ -7,8 +7,6 @@
 import torch.nn as nn
 import torch.nn.functional as F

-from torch.autograd import Variable
-

 class Block(nn.Module):
     '''expand + depthwise + pointwise'''
@@ -81,7 +79,7 @@ def forward(self, x):

 def test():
     net = MobileNetV2()
-    x = Variable(torch.randn(2,3,32,32))
+    x = torch.randn(2,3,32,32)
     y = net(x)
     print(y.size())


models/pnasnet.py

Lines changed: 1 addition & 4 deletions
@@ -6,8 +6,6 @@
 import torch.nn as nn
 import torch.nn.functional as F

-from torch.autograd import Variable
-

 class SepConv(nn.Module):
     '''Separable Convolution.'''
@@ -120,8 +118,7 @@ def PNASNetB():

 def test():
     net = PNASNetB()
-    print(net)
-    x = Variable(torch.randn(1,3,32,32))
+    x = torch.randn(1,3,32,32)
     y = net(x)
     print(y)


models/preact_resnet.py

Lines changed: 1 addition & 3 deletions
@@ -8,8 +8,6 @@
 import torch.nn as nn
 import torch.nn.functional as F

-from torch.autograd import Variable
-

 class PreActBlock(nn.Module):
     '''Pre-activation version of the BasicBlock.'''
@@ -114,7 +112,7 @@ def PreActResNet152():

 def test():
     net = PreActResNet18()
-    y = net(Variable(torch.randn(1,3,32,32)))
+    y = net(torch.randn(1,3,32,32))
     print(y.size())

 # test()

models/resnet.py

Lines changed: 1 addition & 3 deletions
@@ -10,8 +10,6 @@
 import torch.nn as nn
 import torch.nn.functional as F

-from torch.autograd import Variable
-

 class BasicBlock(nn.Module):
     expansion = 1
@@ -117,7 +115,7 @@ def ResNet152():

 def test():
     net = ResNet18()
-    y = net(Variable(torch.randn(1,3,32,32)))
+    y = net(torch.randn(1,3,32,32))
     print(y.size())

 # test()

models/resnext.py

Lines changed: 1 addition & 3 deletions
@@ -6,8 +6,6 @@
 import torch.nn as nn
 import torch.nn.functional as F

-from torch.autograd import Variable
-

 class Block(nn.Module):
     '''Grouped convolution block.'''
@@ -91,7 +89,7 @@ def ResNeXt29_32x4d():
 def test_resnext():
     net = ResNeXt29_2x64d()
     x = torch.randn(1,3,32,32)
-    y = net(Variable(x))
+    y = net(x)
     print(y.size())

 # test_resnext()

models/senet.py

Lines changed: 1 addition & 3 deletions
@@ -6,8 +6,6 @@
 import torch.nn as nn
 import torch.nn.functional as F

-from torch.autograd import Variable
-

 class BasicBlock(nn.Module):
     def __init__(self, in_planes, planes, stride=1):
@@ -117,7 +115,7 @@ def SENet18():

 def test():
     net = SENet18()
-    y = net(Variable(torch.randn(1,3,32,32)))
+    y = net(torch.randn(1,3,32,32))
     print(y.size())

 # test()
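
With Variable gone, each model file keeps a small smoke test that feeds a random CIFAR-sized tensor through the network. A usage sketch, assuming the repository root is on the import path so the models package resolves:

# Assumption: run from the repository root, where models/ is a package.
from models.resnet import test

test()  # builds ResNet18 and prints the output size for a random 1x3x32x32 input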

0 commit comments
