|
1 | 1 | import torch |
2 | 2 |
|
3 | 3 | from ..function import Function |
| 4 | +from ..variable import Variable |
4 | 5 |
|
5 | 6 |
|
6 | 7 | class Diag(Function): |
7 | 8 |
|
8 | | - def __init__(self, diagonal_idx=0): |
9 | | - super(Diag, self).__init__() |
10 | | - self.diagonal_idx = diagonal_idx |
11 | | - |
12 | | - def forward(self, input): |
13 | | - return input.diag(self.diagonal_idx) |
| 9 | + @staticmethod |
| 10 | + def forward(ctx, input, diagonal_idx=0): |
| 11 | + ctx.diagonal_idx = diagonal_idx |
| 12 | + return input.diag(ctx.diagonal_idx) |
14 | 13 |
|
15 | | - def backward(self, grad_output): |
16 | | - return grad_output.diag(self.diagonal_idx) |
| 14 | + @staticmethod |
| 15 | + def backward(ctx, grad_output): |
| 16 | + return grad_output.diag(ctx.diagonal_idx), None |
17 | 17 |
|
18 | 18 |
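
The hunk above shows the recurring pattern of this PR: per-call `__init__` state moves onto `ctx`, and call sites switch from instantiation to `.apply`. A minimal sketch of the new call convention plus a finite-difference check, assuming the pre-0.4 Variable API this file targets and that `Diag` is importable from this module:

```python
import torch
from torch.autograd import Variable, gradcheck

x = Variable(torch.randn(4, 4).double(), requires_grad=True)

# Old style constructed a Function object per call: Diag(1)(x).
# New style routes through .apply, passing diagonal_idx positionally:
y = Diag.apply(x, 1)  # first super-diagonal of x

# gradcheck compares the analytical backward (grad_output.diag(1))
# against finite differences; the int is closed over so that only
# the Variable input is perturbed.
print(gradcheck(lambda t: Diag.apply(t, 1), (x,)))
```

The trailing `None` returned from `backward` is required by the new-style API: one gradient per `forward` argument, with `None` for the non-differentiable `diagonal_idx`. The same pattern applies to `Tril` and `Triu` below.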
|
19 | 19 | class Tril(Function): |
20 | 20 |
|
21 | | - def __init__(self, diagonal_idx=0): |
22 | | - super(Tril, self).__init__() |
23 | | - self.diagonal_idx = diagonal_idx |
24 | | - |
25 | | - def forward(self, input): |
26 | | - return input.tril(self.diagonal_idx) |
| 21 | + @staticmethod |
| 22 | + def forward(ctx, input, diagonal_idx=0): |
| 23 | + ctx.diagonal_idx = diagonal_idx |
| 24 | + return input.tril(ctx.diagonal_idx) |
27 | 25 |
|
28 | | - def backward(self, grad_output): |
29 | | - return grad_output.tril(self.diagonal_idx) |
| 26 | + @staticmethod |
| 27 | + def backward(ctx, grad_output): |
| 28 | + return grad_output.tril(ctx.diagonal_idx), None |
30 | 29 |
|
31 | 30 |
|
32 | 31 | class Triu(Function): |
33 | 32 |
|
34 | | - def __init__(self, diagonal_idx=0): |
35 | | - super(Triu, self).__init__() |
36 | | - self.diagonal_idx = diagonal_idx |
37 | | - |
38 | | - def forward(self, input): |
39 | | - return input.triu(self.diagonal_idx) |
| 33 | + @staticmethod |
| 34 | + def forward(ctx, input, diagonal_idx=0): |
| 35 | + ctx.diagonal_idx = diagonal_idx |
| 36 | + return input.triu(ctx.diagonal_idx) |
40 | 37 |
|
41 | | - def backward(self, grad_output): |
42 | | - return grad_output.triu(self.diagonal_idx) |
| 38 | + @staticmethod |
| 39 | + def backward(ctx, grad_output): |
| 40 | + return grad_output.triu(ctx.diagonal_idx), None |
43 | 41 |
|
44 | 42 |
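
`Tril` and `Triu` are linear masking operations: `x.tril(k)` equals `x * M` for a fixed 0/1 mask `M`, so the Jacobian is `diag(M.view(-1))` and the backward is just the same mask applied to `grad_output`. A quick check, under the same Variable-era assumptions:

```python
import torch
from torch.autograd import Variable

x = Variable(torch.randn(3, 3), requires_grad=True)
Tril.apply(x, 0).sum().backward()

# With an all-ones upstream gradient, x.grad is the mask itself:
# ones on and below the main diagonal, zeros strictly above it.
print(x.grad)
```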
|
45 | 43 | class Trace(Function): |
46 | 44 |
|
47 | | - def forward(self, input): |
48 | | - self.isize = input.size() |
49 | | - return input.new((input.trace(),)) |
| 45 | + @staticmethod |
| 46 | + def forward(ctx, input): |
| 47 | + ctx.isize = input.size() |
| 48 | + return input.new((input.trace(),)) |
50 | 49 |
|
51 | | - def backward(self, grad_output): |
52 | | - isize = self.isize |
53 | | - grad_input = grad_output.new(isize).zero_() |
54 | | - grad_input.view(-1)[::(isize[1] + 1)] = grad_output[0] |
55 | | - return grad_input |
| 50 | + @staticmethod |
| 51 | + def backward(ctx, grad_output): |
| 52 | + isize = ctx.isize |
| 53 | + min_size = min(isize) |
| 54 | + grad_input = Variable(grad_output.data.new(isize).zero_()).view(-1) |
| 55 | + grad_input[::(isize[1] + 1)] = grad_output.expand(min_size) |
| 56 | + return grad_input.view(isize) |
56 | 57 |
|
57 | 58 |
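
Since `trace(X) = sum_i X[i, i]` is linear, the backward scatters `grad_output` onto the main diagonal of a zero matrix; the flattened write with stride `isize[1] + 1` implements this, because in a row-major m x n matrix flat index `i * (n + 1)` is element `(i, i)`. A minimal check, under the same assumptions:

```python
import torch
from torch.autograd import Variable

x = Variable(torch.randn(3, 3), requires_grad=True)
Trace.apply(x).backward()

# d trace(x) / dx is the identity matrix: ones on the diagonal.
print(x.grad)
```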
|
58 | 59 | class Cross(Function): |
59 | 60 |
|
60 | | - def __init__(self, dim=-1): |
61 | | - self.dim = dim |
62 | | - |
63 | | - def forward(self, input, other): |
64 | | - self.save_for_backward(input, other) |
65 | | - return torch.cross(input, other, self.dim) |
| 61 | + @staticmethod |
| 62 | + def forward(ctx, input, other, dim=-1): |
| 63 | + ctx.dim = dim |
| 64 | + ctx.save_for_backward(input, other) |
| 65 | + return torch.cross(input, other, ctx.dim) |
66 | 66 |
|
67 | | - def backward(self, grad_output): |
68 | | - input, other = self.saved_tensors |
69 | | - grad_input = torch.cross(other, grad_output, self.dim) |
70 | | - grad_other = torch.cross(grad_output, input, self.dim) |
71 | | - return grad_input, grad_other |
| 67 | + @staticmethod |
| 68 | + def backward(ctx, grad_output): |
| 69 | + input, other = ctx.saved_variables |
| 70 | + grad_input = other.cross(grad_output, ctx.dim) |
| 71 | + grad_other = grad_output.cross(input, ctx.dim) |
| 72 | + return grad_input, grad_other, None |
72 | 73 |
|
73 | 74 |
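
The backward here follows from the scalar triple product: for `c = a x b` with upstream gradient `g`, `g . (da x b) = da . (b x g)` and `g . (a x db) = db . (g x a)`, giving `grad_input = other x grad_output` and `grad_other = grad_output x input` exactly as written. A finite-difference check, under the same Variable-era assumptions:

```python
import torch
from torch.autograd import Variable, gradcheck

a = Variable(torch.randn(5, 3).double(), requires_grad=True)
b = Variable(torch.randn(5, 3).double(), requires_grad=True)

# Verifies grad_a = b x g and grad_b = g x a against finite
# differences; dim is closed over so only the Variables are perturbed.
print(gradcheck(lambda u, v: Cross.apply(u, v, -1), (a, b)))
```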
|
74 | 75 | class Inverse(Function): |
|