@@ -27,17 +27,17 @@ def __init__(self, input_dim=(1, 28, 28),
        # Weight initialization===========
        # Number of connections each neuron in a layer has to neurons in the previous layer (TODO: compute this automatically)
        pre_node_nums = np.array([1*3*3, 16*3*3, 16*3*3, 32*3*3, 32*3*3, 64*3*3, 64*4*4, hidden_size])
-        wight_init_scales = np.sqrt(2.0 / pre_node_nums)  # recommended initial values when using ReLU
+        weight_init_scales = np.sqrt(2.0 / pre_node_nums)  # recommended initial values when using ReLU

        self.params = {}
        pre_channel_num = input_dim[0]
        for idx, conv_param in enumerate([conv_param_1, conv_param_2, conv_param_3, conv_param_4, conv_param_5, conv_param_6]):
-            self.params['W' + str(idx+1)] = wight_init_scales[idx] * np.random.randn(conv_param['filter_num'], pre_channel_num, conv_param['filter_size'], conv_param['filter_size'])
+            self.params['W' + str(idx+1)] = weight_init_scales[idx] * np.random.randn(conv_param['filter_num'], pre_channel_num, conv_param['filter_size'], conv_param['filter_size'])
            self.params['b' + str(idx+1)] = np.zeros(conv_param['filter_num'])
            pre_channel_num = conv_param['filter_num']
-        self.params['W7'] = wight_init_scales[6] * np.random.randn(64*4*4, hidden_size)
+        self.params['W7'] = weight_init_scales[6] * np.random.randn(64*4*4, hidden_size)
        self.params['b7'] = np.zeros(hidden_size)
-        self.params['W8'] = wight_init_scales[7] * np.random.randn(hidden_size, output_size)
+        self.params['W8'] = weight_init_scales[7] * np.random.randn(hidden_size, output_size)
        self.params['b8'] = np.zeros(output_size)

        # Layer creation===========
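
The renamed weight_init_scales implements He initialization: each weight matrix is drawn from a standard normal and scaled by sqrt(2 / fan_in), where fan_in is the corresponding entry of pre_node_nums. A minimal, standalone sketch of that scaling is below; the he_init helper, the fixed RNG seed, and the assumption that the first conv layer has 16 filters of size 3x3 (as the 1*3*3 and 16*3*3 entries suggest) are illustrative and not part of the repository's code.

import numpy as np

def he_init(fan_in, shape, rng=np.random.default_rng(0)):
    # Scale a standard-normal draw by sqrt(2 / fan_in) so that the variance
    # of ReLU activations stays roughly constant from layer to layer.
    return np.sqrt(2.0 / fan_in) * rng.standard_normal(shape)

# Example: the first conv layer has 1*3*3 = 9 incoming connections per output
# unit, with (assumed) 16 filters of shape (1, 3, 3).
W1 = he_init(1 * 3 * 3, (16, 1, 3, 3))
print(W1.std())  # close to sqrt(2/9), i.e. about 0.47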