@@ -64,7 +64,7 @@ class ModelBuilder():
     def weights_init(self, m):
         classname = m.__class__.__name__
         if classname.find('Conv') != -1:
-            nn.init.kaiming_normal(m.weight.data)
+            nn.init.kaiming_normal_(m.weight.data)
         elif classname.find('BatchNorm') != -1:
             m.weight.data.fill_(1.)
             m.bias.data.fill_(1e-4)
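The only change in this hunk is the trailing underscore: PyTorch 0.4 renamed the in-place initializers, and the old `nn.init.kaiming_normal` spelling now just emits a deprecation warning before delegating to the new name. A self-contained sketch of the same hook (the two-layer `net` is a made-up stand-in, not from this repo):

    import torch.nn as nn

    def weights_init(m):
        classname = m.__class__.__name__
        if classname.find('Conv') != -1:
            # in-place variant; the un-suffixed name is deprecated since 0.4
            nn.init.kaiming_normal_(m.weight.data)
        elif classname.find('BatchNorm') != -1:
            m.weight.data.fill_(1.)
            m.bias.data.fill_(1e-4)

    # hypothetical module, just to exercise the hook
    net = nn.Sequential(nn.Conv2d(3, 16, 3), nn.BatchNorm2d(16))
    net.apply(weights_init)  # applies the hook to every submodule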
@@ -295,7 +295,8 @@ def forward(self, conv_out, segSize=None):
         x = self.conv_last(x)

         if self.use_softmax: # is True during inference
-            x = nn.functional.upsample(x, size=segSize, mode='bilinear')
+            x = nn.functional.upsample(
+                x, size=segSize, mode='bilinear', align_corners=False)
             x = nn.functional.softmax(x, dim=1)
             return x

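The substantive edit in this and the following hunks is the explicit `align_corners=False`: PyTorch 0.4 flipped the default for bilinear upsampling from the old implicit `True` behaviour to `False` and warns whenever the flag is omitted, so pinning it silences the warning and fixes the sampling grid. A small sketch of the two behaviours (shown with `F.interpolate`, which takes the same flag):

    import torch
    import torch.nn.functional as F

    x = torch.arange(4, dtype=torch.float32).reshape(1, 1, 2, 2)  # [[0,1],[2,3]]

    # align_corners=False: pixels are treated as unit areas, so the
    # corner values of the input are not reproduced exactly.
    print(F.interpolate(x, size=(4, 4), mode='bilinear', align_corners=False))

    # align_corners=True: the pre-0.4 behaviour; input and output corner
    # pixels are aligned, so the corners keep their original values.
    print(F.interpolate(x, size=(4, 4), mode='bilinear', align_corners=True))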
@@ -327,7 +328,8 @@ def forward(self, conv_out, segSize=None):
         x = self.conv_last(x)

         if self.use_softmax: # is True during inference
-            x = nn.functional.upsample(x, size=segSize, mode='bilinear')
+            x = nn.functional.upsample(
+                x, size=segSize, mode='bilinear', align_corners=False)
             x = nn.functional.softmax(x, dim=1)
         else:
             x = nn.functional.log_softmax(x, dim=1)
@@ -370,13 +372,14 @@ def forward(self, conv_out, segSize=None):
             ppm_out.append(nn.functional.upsample(
                 pool_scale(conv5),
                 (input_size[2], input_size[3]),
-                mode='bilinear'))
+                mode='bilinear', align_corners=False))
         ppm_out = torch.cat(ppm_out, 1)

         x = self.conv_last(ppm_out)

         if self.use_softmax: # is True during inference
-            x = nn.functional.upsample(x, size=segSize, mode='bilinear')
+            x = nn.functional.upsample(
+                x, size=segSize, mode='bilinear', align_corners=False)
             x = nn.functional.softmax(x, dim=1)
         else:
             x = nn.functional.log_softmax(x, dim=1)
@@ -421,13 +424,14 @@ def forward(self, conv_out, segSize=None):
             ppm_out.append(nn.functional.upsample(
                 pool_scale(conv5),
                 (input_size[2], input_size[3]),
-                mode='bilinear'))
+                mode='bilinear', align_corners=False))
         ppm_out = torch.cat(ppm_out, 1)

         x = self.conv_last(ppm_out)

         if self.use_softmax: # is True during inference
-            x = nn.functional.upsample(x, size=segSize, mode='bilinear')
+            x = nn.functional.upsample(
+                x, size=segSize, mode='bilinear', align_corners=False)
             x = nn.functional.softmax(x, dim=1)
             return x

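For orientation, both PPM heads wrap the patched lines in the same pyramid-pooling loop: each level adaptively pools conv5 to a fixed grid, projects it, and upsamples it back to conv5's spatial size before the channel-wise concat. A stripped-down sketch assuming PSPNet's usual pool scales (1, 2, 3, 6) and hypothetical channel sizes:

    import torch
    import torch.nn as nn
    import torch.nn.functional as F

    conv5 = torch.randn(1, 2048, 16, 16)  # made-up backbone output
    ppm = nn.ModuleList([
        nn.Sequential(nn.AdaptiveAvgPool2d(scale),
                      nn.Conv2d(2048, 512, kernel_size=1, bias=False))
        for scale in (1, 2, 3, 6)])

    input_size = conv5.size()
    ppm_out = [conv5]
    for pool_scale in ppm:
        ppm_out.append(F.interpolate(
            pool_scale(conv5),
            (input_size[2], input_size[3]),     # back to conv5's H x W
            mode='bilinear', align_corners=False))
    ppm_out = torch.cat(ppm_out, 1)             # channels: 2048 + 4 * 512
    print(ppm_out.shape)                        # torch.Size([1, 4096, 16, 16])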
@@ -497,7 +501,7 @@ def forward(self, conv_out, segSize=None):
             ppm_out.append(pool_conv(nn.functional.upsample(
                 pool_scale(conv5),
                 (input_size[2], input_size[3]),
-                mode='bilinear')))
+                mode='bilinear', align_corners=False)))
         ppm_out = torch.cat(ppm_out, 1)
         f = self.ppm_last_conv(ppm_out)

@@ -506,7 +510,8 @@ def forward(self, conv_out, segSize=None):
             conv_x = conv_out[i]
             conv_x = self.fpn_in[i](conv_x) # lateral branch

-            f = nn.functional.upsample(f, size=conv_x.size()[2:], mode='bilinear') # top-down branch
+            f = nn.functional.upsample(
+                f, size=conv_x.size()[2:], mode='bilinear', align_corners=False) # top-down branch
             f = conv_x + f

             fpn_feature_list.append(self.fpn_out[i](f))
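This hunk sits in the top-down FPN pathway: the coarser map `f` is upsampled to the next lateral feature's size and summed in, level by level. A minimal sketch of that merge with made-up shapes (the 1x1 lateral convs are assumed to have been applied already):

    import torch
    import torch.nn.functional as F

    # hypothetical pyramid: finest to coarsest laterals, all 512 channels
    laterals = [torch.randn(1, 512, 64, 64),
                torch.randn(1, 512, 32, 32),
                torch.randn(1, 512, 16, 16)]

    f = laterals[-1]                  # start from the coarsest level
    fpn_features = [f]
    for i in reversed(range(len(laterals) - 1)):
        conv_x = laterals[i]          # lateral branch
        f = F.interpolate(            # top-down branch
            f, size=conv_x.size()[2:], mode='bilinear', align_corners=False)
        f = conv_x + f                # element-wise merge
        fpn_features.append(f)
    print([t.shape[-1] for t in fpn_features])  # [16, 32, 64]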
@@ -518,12 +523,13 @@ def forward(self, conv_out, segSize=None):
             fusion_list.append(nn.functional.upsample(
                 fpn_feature_list[i],
                 output_size,
-                mode='bilinear'))
+                mode='bilinear', align_corners=False))
         fusion_out = torch.cat(fusion_list, 1)
         x = self.conv_last(fusion_out)

         if self.use_softmax: # is True during inference
-            x = nn.functional.upsample(x, size=segSize, mode='bilinear')
+            x = nn.functional.upsample(
+                x, size=segSize, mode='bilinear', align_corners=False)
             x = nn.functional.softmax(x, dim=1)
             return x

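Worth flagging for a follow-up: `nn.functional.upsample` itself was deprecated in favour of `nn.functional.interpolate` in PyTorch 0.4.1, and the newer function accepts the same size/mode/align_corners arguments, so every patched call site could migrate mechanically. A sketch of the equivalent inference-time call (shapes are hypothetical; 150 classes as in ADE20K):

    import torch
    import torch.nn.functional as F

    x = torch.randn(1, 150, 64, 64)   # logits at feature resolution
    segSize = (256, 256)              # target output size

    # same call as the patched lines, via the non-deprecated entry point
    x = F.interpolate(x, size=segSize, mode='bilinear', align_corners=False)
    print(x.shape)                    # torch.Size([1, 150, 256, 256])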