 from keras.layers import Embedding, SimpleRNN, Dense
 from keras.preprocessing import sequence

-from utils4e import (softmax1D, conv1D, gaussian_kernel, element_wise_product, vector_add, random_weights,
+from utils4e import (conv1D, gaussian_kernel, element_wise_product, vector_add, random_weights,
                      scalar_vector_product, map_vector, mean_squared_error_loss)


@@ -46,6 +46,9 @@ def function(self, x):
     def derivative(self, x):
         return NotImplementedError

+    def __call__(self, x):
+        return self.function(x)
+

 class Sigmoid(Activation):

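The new __call__ hook lets an activation object be applied like a plain function. A minimal usage sketch (assuming Sigmoid.function is the usual logistic 1 / (1 + exp(-x)), as elsewhere in this module):

    act = Sigmoid()
    act(0)                   # 0.5, same as act.function(0)
    act.derivative(act(0))   # 0.25, slope of the logistic at its midpoint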
@@ -56,7 +59,7 @@ def derivative(self, value):
         return value * (1 - value)


-class Relu(Activation):
+class ReLU(Activation):

     def function(self, x):
         return max(0, x)
@@ -65,13 +68,28 @@ def derivative(self, value):
         return 1 if value > 0 else 0


-class Elu(Activation):
+class ELU(Activation):
+
+    def __init__(self, alpha=0.01):
+        self.alpha = alpha

-    def function(self, x, alpha=0.01):
-        return x if x > 0 else alpha * (np.exp(x) - 1)
+    def function(self, x):
+        return x if x > 0 else self.alpha * (np.exp(x) - 1)

-    def derivative(self, value, alpha=0.01):
-        return 1 if value > 0 else alpha * np.exp(value)
+    def derivative(self, value):
+        return 1 if value > 0 else self.alpha * np.exp(value)
+
+
+class LeakyReLU(Activation):
+
+    def __init__(self, alpha=0.01):
+        self.alpha = alpha
+
+    def function(self, x):
+        return max(x, self.alpha * x)
+
+    def derivative(self, value):
+        return 1 if value > 0 else self.alpha


 class Tanh(Activation):
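Moving alpha into the constructor configures the coefficient once per instance instead of passing it on every call. A short sketch of the intended use (values are approximate):

    elu = ELU(alpha=0.1)
    elu(-1.0)                # 0.1 * (e**-1 - 1), about -0.063
    elu.derivative(-1.0)     # 0.1 * e**-1, about 0.037

    leaky = LeakyReLU()      # default alpha=0.01
    leaky(-2.0)              # max(-2.0, -0.02) == -0.02
    leaky.derivative(-2.0)   # 0.01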
@@ -83,13 +101,31 @@ def derivative(self, value):
         return 1 - (value ** 2)


-class LeakyRelu(Activation):
+class SoftMax(Activation):
+
+    def function(self, x):
+        return np.exp(x) / np.sum(np.exp(x))
+
+    def derivative(self, x):
+        return np.ones_like(x)
+
+
+class SoftPlus(Activation):

-    def function(self, x, alpha=0.01):
-        return x if x > 0 else alpha * x
+    def function(self, x):
+        return np.log(1. + np.exp(x))
+
+    def derivative(self, x):
+        return 1. / (1. + np.exp(-x))

-    def derivative(self, value, alpha=0.01):
-        return 1 if value > 0 else alpha
+
+class Linear(Activation):
+
+    def function(self, x):
+        return x
+
+    def derivative(self, x):
+        return np.ones_like(x)


 class InputLayer(Layer):
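A quick sketch of the three new activations on a NumPy vector: SoftMax normalizes its input to a probability distribution, SoftPlus is a smooth approximation of ReLU, and Linear is the identity (outputs are approximate):

    x = np.array([1.0, 2.0, 3.0])
    SoftMax()(x)             # array([0.090, 0.245, 0.665]), sums to 1
    SoftPlus()(x)            # array([1.313, 2.127, 3.049])
    Linear()(x)              # returns x unchanged
    Linear().derivative(x)   # array([1., 1., 1.])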
@@ -112,9 +148,9 @@ class OutputLayer(Layer):
     def __init__(self, size=3):
         super().__init__(size)

-    def forward(self, inputs):
+    def forward(self, inputs, activation=SoftMax):
         assert len(self.nodes) == len(inputs)
-        res = softmax1D(inputs)
+        res = activation().function(inputs)
         for node, val in zip(self.nodes, res):
             node.value = val
         return res
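forward now takes the activation class as a parameter, defaulting to SoftMax, instead of hard-coding softmax1D, so the output nonlinearity can be swapped per call. A hedged usage sketch, assuming the Layer base class builds `size` nodes as in the rest of the module:

    layer = OutputLayer(size=3)
    layer.forward([1.0, 2.0, 3.0])                     # softmax over the raw scores
    layer.forward([1.0, 2.0, 3.0], activation=Linear)  # identity output, no squashing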