Skip to content

Commit 62d5ca5

Browse files
t-visoumith
authored and committed
Add linear layer to time series prediction
As is, the final network output is modulated with a tanh nonlinearity. This is undesirable. As a simple, realistic fix we add a final linear layer.
1 parent fad7759 commit 62d5ca5

File tree

1 file changed

+9
-6
lines changed

1 file changed

+9
-6
lines changed

time_sequence_prediction/train.py

Lines changed: 9 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -12,23 +12,26 @@ class Sequence(nn.Module):
1212
def __init__(self):
    """Build a two-layer LSTM sequence model with a linear readout.

    Layers:
        lstm1:  LSTMCell mapping a 1-dim input step to a 51-dim hidden state.
        lstm2:  LSTMCell stacked on lstm1 (51-dim in, 51-dim out).
        linear: final projection from the 51-dim hidden state to a scalar
                prediction — added so the network output is not squashed by
                the LSTM's internal tanh nonlinearity.
    """
    super(Sequence, self).__init__()
    self.lstm1 = nn.LSTMCell(1, 51)
    self.lstm2 = nn.LSTMCell(51, 51)
    self.linear = nn.Linear(51, 1)
1617

1718
def forward(self, input, future = 0):
    """Run the stacked LSTM over *input* and optionally predict ahead.

    Args:
        input:  2-D tensor, assumed shape (batch, seq_len) — it is chunked
            along dim=1 into size-1 steps fed to LSTMCell(1, 51).
        future: number of additional steps to predict autoregressively by
            feeding each prediction back in as the next input (default 0).

    Returns:
        Tensor of shape (batch, seq_len + future): one scalar prediction
        per observed step plus one per future step.
    """
    outputs = []
    # Zero-initialized hidden/cell states for both LSTM layers.
    h_t = Variable(torch.zeros(input.size(0), 51).double(), requires_grad=False)
    c_t = Variable(torch.zeros(input.size(0), 51).double(), requires_grad=False)
    h_t2 = Variable(torch.zeros(input.size(0), 51).double(), requires_grad=False)
    c_t2 = Variable(torch.zeros(input.size(0), 51).double(), requires_grad=False)

    # Teacher-forced pass over the observed sequence: each time step goes
    # through both LSTM layers, then a linear readout to a scalar.
    for i, input_t in enumerate(input.chunk(input.size(1), dim=1)):
        h_t, c_t = self.lstm1(input_t, (h_t, c_t))
        h_t2, c_t2 = self.lstm2(h_t, (h_t2, c_t2))
        output = self.linear(h_t2)
        outputs += [output]
    # NOTE(review): if input has zero time steps and future > 0, `output`
    # below is unbound — assumes seq_len >= 1; confirm with callers.
    for i in range(future):# if we should predict the future
        # Feed the previous prediction back in as the next input step.
        h_t, c_t = self.lstm1(output, (h_t, c_t))
        h_t2, c_t2 = self.lstm2(h_t, (h_t2, c_t2))
        output = self.linear(h_t2)
        outputs += [output]
    # Stack the per-step (batch, 1) outputs into (batch, steps).
    outputs = torch.stack(outputs, 1).squeeze(2)
    return outputs
3437

0 commit comments

Comments
 (0)