Skip to content

Commit ad7e305

Browse files
committed
fix yolov7x bug: mp2 and mp3 max-pooling layers mistakenly called setStrideNd on mp1 (copy-paste error); set the stride on the correct layers
1 parent 816eef5 commit ad7e305

File tree

1 file changed

+25
-27
lines changed

1 file changed

+25
-27
lines changed

yolov7/src/model.cpp

Lines changed: 25 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -1295,7 +1295,6 @@ IHostMemory* build_engine_yolov7x(unsigned int maxBatchSize,IBuilder* builder, I
12951295
IElementWiseLayer* conv2 = convBnSilu(network, weightMap, *conv1->getOutput(0), 80, 3, 1, 1, "model.2");
12961296
IElementWiseLayer* conv3 = convBnSilu(network, weightMap, *conv2->getOutput(0), 160, 3, 2, 1, "model.3");
12971297

1298-
12991298
IElementWiseLayer* conv4 = convBnSilu(network, weightMap, *conv3->getOutput(0), 64, 1, 1, 0, "model.4");
13001299

13011300
IElementWiseLayer* conv5 = convBnSilu(network, weightMap, *conv3->getOutput(0), 64, 1, 1, 0, "model.5");
@@ -1306,13 +1305,11 @@ IHostMemory* build_engine_yolov7x(unsigned int maxBatchSize,IBuilder* builder, I
13061305
IElementWiseLayer* conv10 = convBnSilu(network, weightMap, *conv9->getOutput(0), 64, 3, 1, 1, "model.10");
13071306
IElementWiseLayer* conv11 = convBnSilu(network, weightMap, *conv10->getOutput(0), 64, 3, 1, 1, "model.11");
13081307

1309-
ITensor* input_tensor_12[] = { conv11->getOutput(0), conv9->getOutput(0), conv7->getOutput(0), conv5->getOutput(0),conv4->getOutput(0) };
1308+
ITensor* input_tensor_12[] = { conv11->getOutput(0), conv9->getOutput(0), conv7->getOutput(0), conv5->getOutput(0), conv4->getOutput(0) };
13101309
IConcatenationLayer* concat12 = network->addConcatenation(input_tensor_12, 5);
13111310
//concat9->setAxis(0);
13121311
IElementWiseLayer* conv13 = convBnSilu(network, weightMap, *concat12->getOutput(0), 320, 1, 1, 0, "model.13");
13131312

1314-
1315-
13161313
IPoolingLayer* mp1 = network->addPoolingNd(*conv13->getOutput(0), PoolingType::kMAX, DimsHW{ 2, 2 });
13171314
mp1->setStrideNd(DimsHW{ 2, 2 });
13181315
IElementWiseLayer* conv15 = convBnSilu(network, weightMap, *mp1->getOutput(0), 160, 1, 1, 0, "model.15");
@@ -1342,11 +1339,12 @@ IHostMemory* build_engine_yolov7x(unsigned int maxBatchSize,IBuilder* builder, I
13421339

13431340

13441341
IPoolingLayer* mp2 = network->addPoolingNd(*conv28->getOutput(0), PoolingType::kMAX, DimsHW{ 2, 2 });
1345-
mp1->setStrideNd(DimsHW{ 2, 2 });
1342+
mp2->setStrideNd(DimsHW{ 2, 2 });
13461343
IElementWiseLayer* conv30 = convBnSilu(network, weightMap, *mp2->getOutput(0), 320, 1, 1, 0, "model.30");
13471344

13481345
IElementWiseLayer* conv31 = convBnSilu(network, weightMap, *conv28->getOutput(0), 320, 1, 1, 0, "model.31");
13491346
IElementWiseLayer* conv32 = convBnSilu(network, weightMap, *conv31->getOutput(0), 320, 3, 2, 1, "model.32");
1347+
13501348
ITensor* input_tensor_33[] = { conv32->getOutput(0), conv30->getOutput(0) };
13511349
IConcatenationLayer* concat33 = network->addConcatenation(input_tensor_33, 2);
13521350
//IConcatenationLayer* mp2 = MPC3(network, weightMap, *conv28->getOutput(0), 320, "model.30", "model.31", "model.32");
@@ -1369,7 +1367,7 @@ IHostMemory* build_engine_yolov7x(unsigned int maxBatchSize,IBuilder* builder, I
13691367

13701368

13711369
IPoolingLayer* mp3 = network->addPoolingNd(*conv43->getOutput(0), PoolingType::kMAX, DimsHW{ 2, 2 });
1372-
mp1->setStrideNd(DimsHW{ 2, 2 });
1370+
mp3->setStrideNd(DimsHW{ 2, 2 });
13731371
IElementWiseLayer* conv45 = convBnSilu(network, weightMap, *mp3->getOutput(0), 640, 1, 1, 0, "model.45");
13741372

13751373
IElementWiseLayer* conv46 = convBnSilu(network, weightMap, *conv43->getOutput(0), 640, 1, 1, 0, "model.46");
@@ -1786,7 +1784,7 @@ IHostMemory* build_engine_yolov7_tiny(unsigned int maxBatchSize, IBuilder* build
17861784
auto conv0 = convBlockLeakRelu(network, weightMap, *data, 32, 3, 2, 1, "model.0");
17871785
assert(conv0);
17881786

1789-
// [-1, 1, Conv, [64, 3, 2, None, 1, nn.LeakyReLU(0.1)]], # 1-P2/4
1787+
// [-1, 1, Conv, [64, 3, 2, None, 1, nn.LeakyReLU(0.1)]], # 1-P2/4
17901788
auto conv1 = convBlockLeakRelu(network, weightMap, *conv0->getOutput(0), 64, 3, 2, 1, "model.1");
17911789
assert(conv1);
17921790

@@ -1907,7 +1905,7 @@ IHostMemory* build_engine_yolov7_tiny(unsigned int maxBatchSize, IBuilder* build
19071905
auto* pool31 = network->addPoolingNd(*conv30->getOutput(0), PoolingType::kMAX, DimsHW{ 5, 5 });
19081906
assert(pool31);
19091907
pool31->setStrideNd(DimsHW{ 1, 1 });
1910-
pool31->setPaddingNd(DimsHW{2,2});
1908+
pool31->setPaddingNd(DimsHW{ 2, 2 });
19111909
// [-2, 1, SP, [9]],
19121910
auto* pool32 = network->addPoolingNd(*conv30->getOutput(0), PoolingType::kMAX, DimsHW{ 9, 9 });
19131911
assert(pool32);
@@ -1985,56 +1983,56 @@ IHostMemory* build_engine_yolov7_tiny(unsigned int maxBatchSize, IBuilder* build
19851983
resize49->setResizeMode(ResizeMode::kNEAREST);
19861984
resize49->setScales(scale, 3);
19871985

1988-
// [14, 1, Conv, [64, 1, 1, None, 1, nn.LeakyReLU(0.1)]], # route backbone P3 conv11
1986+
// [14, 1, Conv, [64, 1, 1, None, 1, nn.LeakyReLU(0.1)]], # route backbone P3 conv11
19891987
auto conv50 = convBlockLeakRelu(network, weightMap, *conv14->getOutput(0), 64, 1, 1, 0, "model.50");
19901988
assert(conv50);
19911989

19921990
ITensor* input_tensor_51[] = { conv50->getOutput(0), resize49->getOutput(0) };
19931991
IConcatenationLayer* cat51 = network->addConcatenation(input_tensor_51, 2);
19941992
//cat51->setAxis(0);
19951993

1996-
// [-1, 1, Conv, [32, 1, 1, None, 1, nn.LeakyReLU(0.1)]],
1994+
// [-1, 1, Conv, [32, 1, 1, None, 1, nn.LeakyReLU(0.1)]],
19971995
auto conv52 = convBlockLeakRelu(network, weightMap, *cat51->getOutput(0), 32, 1, 1, 0, "model.52");
19981996
assert(conv52);
1999-
// [-2, 1, Conv, [32, 1, 1, None, 1, nn.LeakyReLU(0.1)]],
1997+
// [-2, 1, Conv, [32, 1, 1, None, 1, nn.LeakyReLU(0.1)]],
20001998
auto conv53 = convBlockLeakRelu(network, weightMap, *cat51->getOutput(0), 32, 1, 1, 0, "model.53");
20011999
assert(conv53);
20022000

2003-
// [-1, 1, Conv, [32, 3, 1, None, 1, nn.LeakyReLU(0.1)]],
2001+
// [-1, 1, Conv, [32, 3, 1, None, 1, nn.LeakyReLU(0.1)]],
20042002
auto conv54 = convBlockLeakRelu(network, weightMap, *conv53->getOutput(0), 32, 3, 1, 1, "model.54");
20052003
assert(conv54);
2006-
// [-1, 1, Conv, [32, 3, 1, None, 1, nn.LeakyReLU(0.1)]],
2004+
// [-1, 1, Conv, [32, 3, 1, None, 1, nn.LeakyReLU(0.1)]],
20072005
auto conv55 = convBlockLeakRelu(network, weightMap, *conv54->getOutput(0), 32, 3, 1, 1, "model.55");
20082006
assert(conv55);
20092007

20102008
ITensor* input_tensor_56[] = { conv55->getOutput(0), conv54->getOutput(0), conv53->getOutput(0),conv52->getOutput(0) };
20112009
IConcatenationLayer* cat56 = network->addConcatenation(input_tensor_56, 4);
20122010
//cat56->setAxis(0);
20132011

2014-
// [-1, 1, Conv, [64, 1, 1, None, 1, nn.LeakyReLU(0.1)]], # 57
2012+
// [-1, 1, Conv, [64, 1, 1, None, 1, nn.LeakyReLU(0.1)]], # 57
20152013
auto conv57 = convBlockLeakRelu(network, weightMap, *cat56->getOutput(0), 64, 1, 1, 0, "model.57");
20162014
assert(conv57);
20172015

2018-
// [-1, 1, Conv, [128, 3, 2, None, 1, nn.LeakyReLU(0.1)]],
2016+
// [-1, 1, Conv, [128, 3, 2, None, 1, nn.LeakyReLU(0.1)]],
20192017
auto conv58 = convBlockLeakRelu(network, weightMap, *conv57->getOutput(0), 128, 3, 2, 1, "model.58");
20202018
assert(conv58);
20212019

2022-
// conv32 [[-1, 47], 1, Concat, [1]],
2020+
// conv32 [[-1, 47], 1, Concat, [1]],
20232021
ITensor* input_tensor_59[] = { conv58->getOutput(0), conv47->getOutput(0) };
20242022
IConcatenationLayer* cat59 = network->addConcatenation(input_tensor_59, 2);
20252023
//cat59->setAxis(0);
20262024

2027-
// [-1, 1, Conv, [64, 1, 1, None, 1, nn.LeakyReLU(0.1)]],
2025+
// [-1, 1, Conv, [64, 1, 1, None, 1, nn.LeakyReLU(0.1)]],
20282026
auto conv60 = convBlockLeakRelu(network, weightMap, *cat59->getOutput(0), 64, 1, 1, 0, "model.60");
20292027
assert(conv60);
2030-
// [-2, 1, Conv, [64, 1, 1, None, 1, nn.LeakyReLU(0.1)]],
2028+
// [-2, 1, Conv, [64, 1, 1, None, 1, nn.LeakyReLU(0.1)]],
20312029
auto conv61 = convBlockLeakRelu(network, weightMap, *cat59->getOutput(0), 64, 1, 1, 0, "model.61");
20322030
assert(conv61);
20332031

2034-
// [-1, 1, Conv, [64, 3, 1, None, 1, nn.LeakyReLU(0.1)]],
2032+
// [-1, 1, Conv, [64, 3, 1, None, 1, nn.LeakyReLU(0.1)]],
20352033
auto conv62 = convBlockLeakRelu(network, weightMap, *conv61->getOutput(0), 64, 3, 1, 1, "model.62");
20362034
assert(conv62);
2037-
// [-1, 1, Conv, [64, 3, 1, None, 1, nn.LeakyReLU(0.1)]],
2035+
// [-1, 1, Conv, [64, 3, 1, None, 1, nn.LeakyReLU(0.1)]],
20382036
auto conv63 = convBlockLeakRelu(network, weightMap, *conv62->getOutput(0), 64, 3, 1, 1, "model.63");
20392037
assert(conv63);
20402038

@@ -2046,7 +2044,7 @@ IHostMemory* build_engine_yolov7_tiny(unsigned int maxBatchSize, IBuilder* build
20462044
auto conv65 = convBlockLeakRelu(network, weightMap, *cat64->getOutput(0), 128, 1, 1, 0, "model.65");
20472045
assert(conv65);
20482046

2049-
//[-1, 1, Conv, [256, 3, 2, None, 1, nn.LeakyReLU(0.1)]] ,
2047+
// [-1, 1, Conv, [256, 3, 2, None, 1, nn.LeakyReLU(0.1)]] ,
20502048
auto conv66 = convBlockLeakRelu(network, weightMap, *conv65->getOutput(0), 256, 3, 2, 1, "model.66");
20512049
assert(conv66);
20522050

@@ -2057,34 +2055,34 @@ IHostMemory* build_engine_yolov7_tiny(unsigned int maxBatchSize, IBuilder* build
20572055
// [-1, 1, Conv, [128, 1, 1, None, 1, nn.LeakyReLU(0.1)]],
20582056
auto conv68 = convBlockLeakRelu(network, weightMap, *cat67->getOutput(0), 128, 1, 1, 0, "model.68");
20592057
assert(conv68);
2060-
// [-2, 1, Conv, [128, 1, 1, None, 1, nn.LeakyReLU(0.1)]],
2058+
// [-2, 1, Conv, [128, 1, 1, None, 1, nn.LeakyReLU(0.1)]],
20612059
auto conv69 = convBlockLeakRelu(network, weightMap, *cat67->getOutput(0), 128, 1, 1, 0, "model.69");
20622060
assert(conv69);
20632061

2064-
// [-1, 1, Conv, [128, 3, 1, None, 1, nn.LeakyReLU(0.1)]],
2062+
// [-1, 1, Conv, [128, 3, 1, None, 1, nn.LeakyReLU(0.1)]],
20652063
auto conv70 = convBlockLeakRelu(network, weightMap, *conv69->getOutput(0), 128, 3, 1, 1, "model.70");
20662064
assert(conv70);
20672065

2068-
// [-1, 1, Conv, [128, 3, 1, None, 1, nn.LeakyReLU(0.1)]],
2066+
// [-1, 1, Conv, [128, 3, 1, None, 1, nn.LeakyReLU(0.1)]],
20692067
auto conv71 = convBlockLeakRelu(network, weightMap, *conv70->getOutput(0), 128, 3, 1, 1, "model.71");
20702068
assert(conv71);
20712069

20722070
ITensor* input_tensor_72[] = { conv71->getOutput(0), conv70->getOutput(0), conv69->getOutput(0), conv68->getOutput(0) };
20732071
IConcatenationLayer* cat72 = network->addConcatenation(input_tensor_72, 4);
20742072
//cat72->setAxis(0);
20752073

2076-
// [-1, 1, Conv, [256, 1, 1, None, 1, nn.LeakyReLU(0.1)]], # 73
2074+
// [-1, 1, Conv, [256, 1, 1, None, 1, nn.LeakyReLU(0.1)]], # 73
20772075
auto conv73 = convBlockLeakRelu(network, weightMap, *cat72->getOutput(0), 256, 1, 1, 0, "model.73");
20782076
assert(conv73);
20792077

20802078

20812079
// [57, 1, Conv, [128, 3, 1, None, 1, nn.LeakyReLU(0.1)]],
20822080
auto conv74 = convBlockLeakRelu(network, weightMap, *conv57->getOutput(0), 128, 3, 1, 1, "model.74");
20832081
assert(conv74);
2084-
// [65, 1, Conv, [256, 3, 1, None, 1, nn.LeakyReLU(0.1)]],
2082+
// [65, 1, Conv, [256, 3, 1, None, 1, nn.LeakyReLU(0.1)]],
20852083
auto conv75 = convBlockLeakRelu(network, weightMap, *conv65->getOutput(0), 256, 3, 1, 1, "model.75");
20862084
assert(conv75);
2087-
// [73, 1, Conv, [512, 3, 1, None, 1, nn.LeakyReLU(0.1)]],
2085+
// [73, 1, Conv, [512, 3, 1, None, 1, nn.LeakyReLU(0.1)]],
20882086
auto conv76 = convBlockLeakRelu(network, weightMap, *conv73->getOutput(0), 512, 3, 1, 1, "model.76");
20892087
assert(conv76);
20902088

0 commit comments

Comments
 (0)