diff --git a/.gitignore b/.gitignore
index 1aed94c..815a19d 100755
--- a/.gitignore
+++ b/.gitignore
@@ -17,3 +17,5 @@ emb_img/*.jpg
 kkk.jpg
 result0.jpg
 result1.jpg
+
+model_128_old/
diff --git a/src/facenet.cpp b/src/facenet.cpp
index 04c2e82..0c84211 100755
--- a/src/facenet.cpp
+++ b/src/facenet.cpp
@@ -51,27 +51,35 @@ void facenet::Stem(Mat &image, pBox *output) {
     struct BN *conv6_beta = new BN;
 
     long conv1 = ConvAndFcInit(conv1_wb, 32, 3, 3, 2, 0);
-    BatchNormInit(conv1_var, conv1_mean, conv1_beta, 32);
+    BatchNormInit(conv1_beta, conv1_mean, conv1_var, 32);
     long conv2 = ConvAndFcInit(conv2_wb, 32, 32, 3, 1, 0);
-    BatchNormInit(conv2_var, conv2_mean, conv2_beta, 32);
+    BatchNormInit(conv2_beta, conv2_mean, conv2_var, 32);
     long conv3 = ConvAndFcInit(conv3_wb, 64, 32, 3, 1, 1);
-    BatchNormInit(conv3_var, conv3_mean, conv3_beta, 64);
+    BatchNormInit(conv3_beta, conv3_mean, conv3_var, 64);
     long conv4 = ConvAndFcInit(conv4_wb, 80, 64, 1, 1, 0);
-    BatchNormInit(conv4_var, conv4_mean, conv4_beta, 80);
+    BatchNormInit(conv4_beta, conv4_mean, conv4_var, 80);
     long conv5 = ConvAndFcInit(conv5_wb, 192, 80, 3, 1, 0);
-    BatchNormInit(conv5_var, conv5_mean, conv5_beta, 192);
+    BatchNormInit(conv5_beta, conv5_mean, conv5_var, 192);
     long conv6 = ConvAndFcInit(conv6_wb, 256, 192, 3, 2, 0);
-    BatchNormInit(conv6_var, conv6_mean, conv6_beta, 256);
+    BatchNormInit(conv6_beta, conv6_mean, conv6_var, 256);
 
     long dataNumber[24] = {conv1, 32, 32, 32, conv2, 32, 32, 32, conv3, 64, 64, 64, conv4, 80, 80, 80, conv5, 192, 192, 192, conv6, 256, 256, 256};
-    mydataFmt *pointTeam[24] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
-                                conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
-                                conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
-                                conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
-                                conv5_wb->pdata, conv5_var->pdata, conv5_mean->pdata, conv5_beta->pdata, \
-                                conv6_wb->pdata, conv6_var->pdata, conv6_mean->pdata, conv6_beta->pdata};
+//    mydataFmt *pointTeam[24] = {
+//            conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
+//            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
+//            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
+//            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
+//            conv5_wb->pdata, conv5_var->pdata, conv5_mean->pdata, conv5_beta->pdata, \
+//            conv6_wb->pdata, conv6_var->pdata, conv6_mean->pdata, conv6_beta->pdata};
+    mydataFmt *pointTeam[24] = {
+            conv1_wb->pdata, conv1_beta->pdata, conv1_mean->pdata, conv1_var->pdata, \
+            conv2_wb->pdata, conv2_beta->pdata, conv2_mean->pdata, conv2_var->pdata, \
+            conv3_wb->pdata, conv3_beta->pdata, conv3_mean->pdata, conv3_var->pdata, \
+            conv4_wb->pdata, conv4_beta->pdata, conv4_mean->pdata, conv4_var->pdata, \
+            conv5_wb->pdata, conv5_beta->pdata, conv5_mean->pdata, conv5_var->pdata, \
+            conv6_wb->pdata, conv6_beta->pdata, conv6_mean->pdata, conv6_var->pdata};
 
     string filename = "../model_" + to_string(Num) + "/stem_list.txt";
     readData(filename, dataNumber, pointTeam, 24);
@@ -82,21 +90,19 @@ void facenet::Stem(Mat &image, pBox *output) {
     convolutionInit(conv1_wb, rgb, conv1_out);
     //conv1 149 x 149 x 32
     convolution(conv1_wb, rgb, conv1_out);
-//    printData(conv1_out);
-    BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta);
-//    printData(conv1_out);
+    BatchNorm(conv1_out, conv1_beta, conv1_mean, conv1_var);
     relu(conv1_out, conv1_wb->pbias);
 
     convolutionInit(conv2_wb, conv1_out, conv2_out);
     //conv2 147 x 147 x 32
     convolution(conv2_wb, conv1_out, conv2_out);
-    BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta);
+    BatchNorm(conv2_out, conv2_beta, conv2_mean, conv2_var);
     relu(conv2_out, conv2_wb->pbias);
 
     convolutionInit(conv3_wb, conv2_out, conv3_out);
     //conv3 147 x 147 x 64
     convolution(conv3_wb, conv2_out, conv3_out);
-    BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta);
+    BatchNorm(conv3_out, conv3_beta, conv3_mean, conv3_var);
     relu(conv3_out, conv3_wb->pbias);
 
     maxPoolingInit(conv3_out, pooling1_out, 3, 2);
@@ -106,20 +112,23 @@ void facenet::Stem(Mat &image, pBox *output) {
     convolutionInit(conv4_wb, pooling1_out, conv4_out);
     //conv4 73 x 73 x 80
     convolution(conv4_wb, pooling1_out, conv4_out);
-    BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta);
+    BatchNorm(conv4_out, conv4_beta, conv4_mean, conv4_var);
+//    BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta);
     relu(conv4_out, conv4_wb->pbias);
 
     convolutionInit(conv5_wb, conv4_out, conv5_out);
     //conv5 71 x 71 x 192
     convolution(conv5_wb, conv4_out, conv5_out);
-    BatchNorm(conv5_out, conv5_var, conv5_mean, conv5_beta);
+    BatchNorm(conv5_out, conv5_beta, conv5_mean, conv5_var);
+//    BatchNorm(conv5_out, conv5_var, conv5_mean, conv5_beta);
     relu(conv5_out, conv5_wb->pbias);
 
     convolutionInit(conv6_wb, conv5_out, output);
     //conv6 35 x 35 x 256
     convolution(conv6_wb, conv5_out, output);
-    BatchNorm(output, conv6_var, conv6_mean, conv6_beta);
+    BatchNorm(output, conv6_beta, conv6_mean, conv6_var);
+//    BatchNorm(output, conv6_var, conv6_mean, conv6_beta);
     relu(output, conv6_wb->pbias);
 
 //        firstFlag = false;
 //    }
@@ -217,19 +226,19 @@ void facenet::Inception_resnet_A(pBox *input, pBox *output, string filepath, flo
 
     long conv1 = ConvAndFcInit(conv1_wb, 32, 256, 1, 1, 0);
-    BatchNormInit(conv1_var, conv1_mean, conv1_beta, 32);
+    BatchNormInit(conv1_beta, conv1_mean, conv1_var, 32);
     long conv2 = ConvAndFcInit(conv2_wb, 32, 256, 1, 1, 0);
-    BatchNormInit(conv2_var, conv2_mean, conv2_beta, 32);
+    BatchNormInit(conv2_beta, conv2_mean, conv2_var, 32);
     long conv3 = ConvAndFcInit(conv3_wb, 32, 32, 3, 1, 1);
-    BatchNormInit(conv3_var, conv3_mean, conv3_beta, 32);
+    BatchNormInit(conv3_beta, conv3_mean, conv3_var, 32);
     long conv4 = ConvAndFcInit(conv4_wb, 32, 256, 1, 1, 0);
-    BatchNormInit(conv4_var, conv4_mean, conv4_beta, 32);
+    BatchNormInit(conv4_beta, conv4_mean, conv4_var, 32);
     long conv5 = ConvAndFcInit(conv5_wb, 32, 32, 3, 1, 1);
-    BatchNormInit(conv5_var, conv5_mean, conv5_beta, 32);
+    BatchNormInit(conv5_beta, conv5_mean, conv5_var, 32);
     long conv6 = ConvAndFcInit(conv6_wb, 32, 32, 3, 1, 1);
-    BatchNormInit(conv6_var, conv6_mean, conv6_beta, 32);
+    BatchNormInit(conv6_beta, conv6_mean, conv6_var, 32);
 
     long conv7 = ConvAndFcInit(conv7_wb, 256, 96, 1, 1, 0);
@@ -238,12 +247,22 @@ void facenet::Inception_resnet_A(pBox *input, pBox *output, string filepath, flo
     long dataNumber[28] = {conv1, 32, 32, 32, conv2, 32, 32, 32, conv3, 32, 32, 32, conv4, 32, 32, 32, conv5, 32, 32, 32, conv6, 32, 32, 32, conv7, 256, conv8, 0};
-    mydataFmt *pointTeam[28] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
-                                conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
-                                conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
-                                conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
-                                conv5_wb->pdata, conv5_var->pdata, conv5_mean->pdata, conv5_beta->pdata, \
-                                conv6_wb->pdata, conv6_var->pdata, conv6_mean->pdata, conv6_beta->pdata, \
+//    mydataFmt *pointTeam[28] = {
+//            conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
+//            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
+//            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
+//            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
+//            conv5_wb->pdata, conv5_var->pdata, conv5_mean->pdata, conv5_beta->pdata, \
+//            conv6_wb->pdata, conv6_var->pdata, conv6_mean->pdata, conv6_beta->pdata, \
+//            conv7_wb->pdata, conv7_wb->pbias, \
+//            conv8_wb->pdata, conv8_wb->pbias};
+    mydataFmt *pointTeam[28] = {
+            conv1_wb->pdata, conv1_beta->pdata, conv1_mean->pdata, conv1_var->pdata, \
+            conv2_wb->pdata, conv2_beta->pdata, conv2_mean->pdata, conv2_var->pdata, \
+            conv3_wb->pdata, conv3_beta->pdata, conv3_mean->pdata, conv3_var->pdata, \
+            conv4_wb->pdata, conv4_beta->pdata, conv4_mean->pdata, conv4_var->pdata, \
+            conv5_wb->pdata, conv5_beta->pdata, conv5_mean->pdata, conv5_var->pdata, \
+            conv6_wb->pdata, conv6_beta->pdata, conv6_mean->pdata, conv6_var->pdata, \
             conv7_wb->pdata, conv7_wb->pbias, \
             conv8_wb->pdata, conv8_wb->pbias};
@@ -252,34 +271,34 @@ void facenet::Inception_resnet_A(pBox *input, pBox *output, string filepath, flo
     convolutionInit(conv1_wb, input, conv1_out);
     //conv1 35 x 35 x 32
     convolution(conv1_wb, input, conv1_out);
-    BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta);
+    BatchNorm(conv1_out, conv1_beta, conv1_mean, conv1_var);
     relu(conv1_out, conv1_wb->pbias);
 
     convolutionInit(conv2_wb, input, conv2_out);
     //conv2 35 x 35 x 32
     convolution(conv2_wb, input, conv2_out);
-    BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta);
+    BatchNorm(conv2_out, conv2_beta, conv2_mean, conv2_var);
     relu(conv2_out, conv2_wb->pbias);
     convolutionInit(conv3_wb, conv2_out, conv3_out);
     //conv3 35 x 35 x 32
     convolution(conv3_wb, conv2_out, conv3_out);
-    BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta);
+    BatchNorm(conv3_out, conv3_beta, conv3_mean, conv3_var);
     relu(conv3_out, conv3_wb->pbias);
 
     convolutionInit(conv4_wb, input, conv4_out);
     //conv4 35 x 35 x 32
     convolution(conv4_wb, input, conv4_out);
-    BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta);
+    BatchNorm(conv4_out, conv4_beta, conv4_mean, conv4_var);
     relu(conv4_out, conv4_wb->pbias);
     convolutionInit(conv5_wb, conv4_out, conv5_out);
     //conv5 35 x 35 x 32
     convolution(conv5_wb, conv4_out, conv5_out);
-    BatchNorm(conv5_out, conv5_var, conv5_mean, conv5_beta);
+    BatchNorm(conv5_out, conv5_beta, conv5_mean, conv5_var);
     relu(conv5_out, conv5_wb->pbias);
     convolutionInit(conv6_wb, conv5_out, conv6_out);
     //conv6 35 x 35 x 32
     convolution(conv6_wb, conv5_out, conv6_out);
-    BatchNorm(conv6_out, conv6_var, conv6_mean, conv6_beta);
+    BatchNorm(conv6_out, conv6_beta, conv6_mean, conv6_var);
     relu(conv6_out, conv6_wb->pbias);
 
     conv_mergeInit(conv7_out, conv1_out, conv3_out, conv6_out);
@@ -371,20 +390,26 @@ void facenet::Reduction_A(pBox *input, pBox *output) {
 
     long conv1 = ConvAndFcInit(conv1_wb, 384, 256, 3, 2, 0);
-    BatchNormInit(conv1_var, conv1_mean, conv1_beta, 384);
+    BatchNormInit(conv1_beta, conv1_mean, conv1_var, 384);
     long conv2 = ConvAndFcInit(conv2_wb, 192, 256, 1, 1, 0);
-    BatchNormInit(conv2_var, conv2_mean, conv2_beta, 192);
+    BatchNormInit(conv2_beta, conv2_mean, conv2_var, 192);
     long conv3 = ConvAndFcInit(conv3_wb, 192, 192, 3, 1, 0);
-    BatchNormInit(conv3_var, conv3_mean, conv3_beta, 192);
+    BatchNormInit(conv3_beta, conv3_mean, conv3_var, 192);
     long conv4 = ConvAndFcInit(conv4_wb, 256, 192, 3, 2, 0);
-    BatchNormInit(conv4_var, conv4_mean, conv4_beta, 256);
+    BatchNormInit(conv4_beta, conv4_mean, conv4_var, 256);
 
     long dataNumber[16] = {conv1, 384, 384, 384, conv2, 192, 192, 192, conv3, 192, 192, 192, conv4, 256, 256, 256};
-    mydataFmt *pointTeam[16] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
-                                conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
-                                conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
-                                conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata};
+//    mydataFmt *pointTeam[16] = {
+//            conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
+//            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
+//            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
+//            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata};
+    mydataFmt *pointTeam[16] = {
+            conv1_wb->pdata, conv1_beta->pdata, conv1_mean->pdata, conv1_var->pdata, \
+            conv2_wb->pdata, conv2_beta->pdata, conv2_mean->pdata, conv2_var->pdata, \
+            conv3_wb->pdata, conv3_beta->pdata, conv3_mean->pdata, conv3_var->pdata, \
+            conv4_wb->pdata, conv4_beta->pdata, conv4_mean->pdata, conv4_var->pdata};
 
     string filename = "../model_" + to_string(Num) + "/Mixed_6a_list.txt";
     readData(filename, dataNumber, pointTeam, 16);
@@ -395,25 +420,25 @@ void facenet::Reduction_A(pBox *input, pBox *output) {
     convolutionInit(conv1_wb, input, conv1_out);
     //conv1 17 x 17 x 384
     convolution(conv1_wb, input, conv1_out);
-    BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta);
+    BatchNorm(conv1_out, conv1_beta, conv1_mean, conv1_var);
     relu(conv1_out, conv1_wb->pbias);
 
     convolutionInit(conv2_wb, input, conv2_out);
     //conv2 35 x 35 x 192
     convolution(conv2_wb, input, conv2_out);
-    BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta);
+    BatchNorm(conv2_out, conv2_beta, conv2_mean, conv2_var);
     relu(conv2_out, conv2_wb->pbias);
     convolutionInit(conv3_wb, conv2_out, conv3_out);
     //conv3 35 x 35 x 192
     convolution(conv3_wb, conv2_out, conv3_out);
-    BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta);
+    BatchNorm(conv3_out, conv3_beta, conv3_mean, conv3_var);
     relu(conv3_out, conv3_wb->pbias);
     convolutionInit(conv4_wb, conv3_out, conv4_out);
     //conv4 17 x 17 x 256
     convolution(conv4_wb, conv3_out, conv4_out);
-    BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta);
+    BatchNorm(conv4_out, conv4_beta, conv4_mean, conv4_var);
     relu(conv4_out, conv4_wb->pbias);
 
     conv_mergeInit(output, pooling1_out, conv1_out, conv4_out);
     //17×17×896
@@ -482,14 +507,14 @@ void facenet::Inception_resnet_B(pBox *input, pBox *output, string filepath, flo
 
     long conv1 = ConvAndFcInit(conv1_wb, 128, 896, 1, 1, 0);
-    BatchNormInit(conv1_var, conv1_mean, conv1_beta, 128);
+    BatchNormInit(conv1_beta, conv1_mean, conv1_var, 128);
     long conv2 = ConvAndFcInit(conv2_wb, 128, 896, 1, 1, 0);
-    BatchNormInit(conv2_var, conv2_mean, conv2_beta, 128);
+    BatchNormInit(conv2_beta, conv2_mean, conv2_var, 128);
     long conv3 = ConvAndFcInit(conv3_wb, 128, 128, 0, 1, -1, 7, 1, 3, 0);//[1,7]
-    BatchNormInit(conv3_var, conv3_mean, conv3_beta, 128);
+    BatchNormInit(conv3_beta, conv3_mean, conv3_var, 128);
     long conv4 = ConvAndFcInit(conv4_wb, 128, 128, 0, 1, -1, 1, 7, 0, 3);//[7,1]
-    BatchNormInit(conv4_var, conv4_mean, conv4_beta, 128);
+    BatchNormInit(conv4_beta, conv4_mean, conv4_var, 128);
 
     long conv5 = ConvAndFcInit(conv5_wb, 896, 256, 1, 1, 0);
@@ -498,10 +523,18 @@ void facenet::Inception_resnet_B(pBox *input, pBox *output, string filepath, flo
     long dataNumber[20] = {conv1, 128, 128, 128, conv2, 128, 128, 128, conv3, 128, 128, 128, conv4, 128, 128, 128, conv5, 896, conv6, 0};
-    mydataFmt *pointTeam[20] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
-                                conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
-                                conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
-                                conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
+//    mydataFmt *pointTeam[20] = {
+//            conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
+//            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
+//            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
+//            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
+//            conv5_wb->pdata, conv5_wb->pbias, \
+//            conv6_wb->pdata, conv6_wb->pbias};
+    mydataFmt *pointTeam[20] = {
+            conv1_wb->pdata, conv1_beta->pdata, conv1_mean->pdata, conv1_var->pdata, \
+            conv2_wb->pdata, conv2_beta->pdata, conv2_mean->pdata, conv2_var->pdata, \
+            conv3_wb->pdata, conv3_beta->pdata, conv3_mean->pdata, conv3_var->pdata, \
+            conv4_wb->pdata, conv4_beta->pdata, conv4_mean->pdata, conv4_var->pdata, \
             conv5_wb->pdata, conv5_wb->pbias, \
             conv6_wb->pdata, conv6_wb->pbias};
@@ -512,24 +545,24 @@ void facenet::Inception_resnet_B(pBox *input, pBox *output, string filepath, flo
     convolutionInit(conv1_wb, input, conv1_out);
     //conv1 17*17*128
     convolution(conv1_wb, input, conv1_out);
-    BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta);
+    BatchNorm(conv1_out, conv1_beta, conv1_mean, conv1_var);
     relu(conv1_out, conv1_wb->pbias);
 
     convolutionInit(conv2_wb, input, conv2_out);
     //conv2 17*17*128
     convolution(conv2_wb, input, conv2_out);
-    BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta);
+    BatchNorm(conv2_out, conv2_beta, conv2_mean, conv2_var);
     relu(conv2_out, conv2_wb->pbias);
     convolutionInit(conv3_wb, conv2_out, conv3_out);
     //conv3 17*17*128
     convolution(conv3_wb, conv2_out, conv3_out);
-    BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta);
+    BatchNorm(conv3_out, conv3_beta, conv3_mean, conv3_var);
     relu(conv3_out, conv3_wb->pbias);
     convolutionInit(conv4_wb, conv3_out, conv4_out);
     //conv4 17*17*128
     convolution(conv4_wb, conv3_out, conv4_out);
-    BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta);
+    BatchNorm(conv4_out, conv4_beta, conv4_mean, conv4_var);
     relu(conv4_out, conv4_wb->pbias);
 
     conv_mergeInit(conv5_out, conv1_out, conv4_out);
@@ -621,32 +654,41 @@ void facenet::Reduction_B(pBox *input, pBox *output) {
 
     long conv1 = ConvAndFcInit(conv1_wb, 256, 896, 1, 1, 0);
-    BatchNormInit(conv1_var, conv1_mean, conv1_beta, 256);
+    BatchNormInit(conv1_beta, conv1_mean, conv1_var, 256);
     long conv2 = ConvAndFcInit(conv2_wb, 384, 256, 3, 2, 0);
-    BatchNormInit(conv2_var, conv2_mean, conv2_beta, 384);
+    BatchNormInit(conv2_beta, conv2_mean, conv2_var, 384);
     long conv3 = ConvAndFcInit(conv3_wb, 256, 896, 1, 1, 0);
-    BatchNormInit(conv3_var, conv3_mean, conv3_beta, 256);
+    BatchNormInit(conv3_beta, conv3_mean, conv3_var, 256);
     long conv4 = ConvAndFcInit(conv4_wb, 256, 256, 3, 2, 0);
-    BatchNormInit(conv4_var, conv4_mean, conv4_beta, 256);
+    BatchNormInit(conv4_beta, conv4_mean, conv4_var, 256);
     long conv5 = ConvAndFcInit(conv5_wb, 256, 896, 1, 1, 0);
-    BatchNormInit(conv5_var, conv5_mean, conv5_beta, 256);
+    BatchNormInit(conv5_beta, conv5_mean, conv5_var, 256);
     long conv6 = ConvAndFcInit(conv6_wb, 256, 256, 3, 1, 1);
-    BatchNormInit(conv6_var, conv6_mean, conv6_beta, 256);
+    BatchNormInit(conv6_beta, conv6_mean, conv6_var, 256);
     long conv7 = ConvAndFcInit(conv7_wb, 256, 256, 3, 2, 0);
-    BatchNormInit(conv7_var, conv7_mean, conv7_beta, 256);
+    BatchNormInit(conv7_beta, conv7_mean, conv7_var, 256);
 
     long dataNumber[28] = {conv1, 256, 256, 256, conv2, 384, 384, 384, conv3, 256, 256, 256, conv4, 256, 256, 256, conv5, 256, 256, 256, conv6, 256, 256, 256, conv7, 256, 256, 256};
-    mydataFmt *pointTeam[28] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
-                                conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
-                                conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
-                                conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
-                                conv5_wb->pdata, conv5_var->pdata, conv5_mean->pdata, conv5_beta->pdata, \
-                                conv6_wb->pdata, conv6_var->pdata, conv6_mean->pdata, conv6_beta->pdata, \
-                                conv7_wb->pdata, conv7_var->pdata, conv7_mean->pdata, conv7_beta->pdata};
+//    mydataFmt *pointTeam[28] = {
+//            conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
+//            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
+//            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
+//            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
+//            conv5_wb->pdata, conv5_var->pdata, conv5_mean->pdata, conv5_beta->pdata, \
+//            conv6_wb->pdata, conv6_var->pdata, conv6_mean->pdata, conv6_beta->pdata, \
+//            conv7_wb->pdata, conv7_var->pdata, conv7_mean->pdata, conv7_beta->pdata};
+    mydataFmt *pointTeam[28] = {
+            conv1_wb->pdata, conv1_beta->pdata, conv1_mean->pdata, conv1_var->pdata, \
+            conv2_wb->pdata, conv2_beta->pdata, conv2_mean->pdata, conv2_var->pdata, \
+            conv3_wb->pdata, conv3_beta->pdata, conv3_mean->pdata, conv3_var->pdata, \
+            conv4_wb->pdata, conv4_beta->pdata, conv4_mean->pdata, conv4_var->pdata, \
+            conv5_wb->pdata, conv5_beta->pdata, conv5_mean->pdata, conv5_var->pdata, \
+            conv6_wb->pdata, conv6_beta->pdata, conv6_mean->pdata, conv6_var->pdata, \
+            conv7_wb->pdata, conv7_beta->pdata, conv7_mean->pdata, conv7_var->pdata};
 
     string filename = "../model_" + to_string(Num) + "/Mixed_7a_list.txt";
     readData(filename, dataNumber, pointTeam, 28);
@@ -658,43 +700,46 @@ void facenet::Reduction_B(pBox *input, pBox *output) {
     convolutionInit(conv1_wb, input, conv1_out);
     //conv1 17 x 17 x 256
     convolution(conv1_wb, input, conv1_out);
-    BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta);
+    BatchNorm(conv1_out, conv1_beta, conv1_mean, conv1_var);
+//    BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta);
     relu(conv1_out, conv1_wb->pbias);
     convolutionInit(conv2_wb, conv1_out, conv2_out);
     //conv2 8 x 8 x 384
     convolution(conv2_wb, conv1_out, conv2_out);
-    BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta);
+    BatchNorm(conv2_out, conv2_beta, conv2_mean, conv2_var);
+//    BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta);
     relu(conv2_out, conv2_wb->pbias);
 
     convolutionInit(conv3_wb, input, conv3_out);
     //conv3 17 x 17 x 256
     convolution(conv3_wb, input, conv3_out);
-    BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta);
+    BatchNorm(conv3_out, conv3_beta, conv3_mean, conv3_var);
+//    BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta);
     relu(conv3_out, conv3_wb->pbias);
     convolutionInit(conv4_wb, conv3_out, conv4_out);
     //conv4 8 x 8 x 256
     convolution(conv4_wb, conv3_out, conv4_out);
-    BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta);
+    BatchNorm(conv4_out, conv4_beta, conv4_mean, conv4_var);
     relu(conv4_out, conv4_wb->pbias);
 
     convolutionInit(conv5_wb, input, conv5_out);
     //conv5 17 x 17 x 256
     convolution(conv5_wb, input, conv5_out);
-    BatchNorm(conv5_out, conv5_var, conv5_mean, conv5_beta);
+    BatchNorm(conv5_out, conv5_beta, conv5_mean, conv5_var);
     relu(conv5_out, conv5_wb->pbias);
     convolutionInit(conv6_wb, conv5_out, conv6_out);
     //conv6 17 x 17 x 256
     convolution(conv6_wb, conv5_out, conv6_out);
-    BatchNorm(conv6_out, conv6_var, conv6_mean, conv6_beta);
+    BatchNorm(conv6_out, conv6_beta, conv6_mean, conv6_var);
     relu(conv6_out, conv6_wb->pbias);
     convolutionInit(conv7_wb, conv6_out, conv7_out);
     //conv6 8 x 8 x 256
     convolution(conv7_wb, conv6_out, conv7_out);
-    BatchNorm(conv7_out, conv7_var, conv7_mean, conv7_beta);
+    BatchNorm(conv7_out, conv7_beta, conv7_mean, conv7_var);
     relu(conv7_out, conv7_wb->pbias);
 
     conv_mergeInit(output, conv2_out, conv4_out, conv7_out, pooling1_out);
@@ -779,13 +824,13 @@ void facenet::Inception_resnet_C(pBox *input, pBox *output, string filepath, flo
 
     long conv1 = ConvAndFcInit(conv1_wb, 192, 1792, 1, 1, 0);
-    BatchNormInit(conv1_var, conv1_mean, conv1_beta, 192);
+    BatchNormInit(conv1_beta, conv1_mean, conv1_var, 192);
     long conv2 = ConvAndFcInit(conv2_wb, 192, 1792, 1, 1, 0);
-    BatchNormInit(conv2_var, conv2_mean, conv2_beta, 192);
+    BatchNormInit(conv2_beta, conv2_mean, conv2_var, 192);
     long conv3 = ConvAndFcInit(conv3_wb, 192, 192, 0, 1, -1, 3, 1, 1, 0);
-    BatchNormInit(conv3_var, conv3_mean, conv3_beta, 192);
+    BatchNormInit(conv3_beta, conv3_mean, conv3_var, 192);
     long conv4 = ConvAndFcInit(conv4_wb, 192, 192, 0, 1, -1, 1, 3, 0, 1);
-    BatchNormInit(conv4_var, conv4_mean, conv4_beta, 192);
+    BatchNormInit(conv4_beta, conv4_mean, conv4_var, 192);
 
     long conv5 = ConvAndFcInit(conv5_wb, 1792, 384, 1, 1, 0);
@@ -795,10 +840,18 @@ void facenet::Inception_resnet_C(pBox *input, pBox *output, string filepath, flo
                            conv5, 1792, conv6, 0};
-    mydataFmt *pointTeam[20] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
-                                conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
-                                conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
-                                conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
+//    mydataFmt *pointTeam[20] = {
+//            conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
+//            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
+//            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
+//            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
+//            conv5_wb->pdata, conv5_wb->pbias, \
+//            conv6_wb->pdata, conv6_wb->pbias};
+    mydataFmt *pointTeam[20] = {
+            conv1_wb->pdata, conv1_beta->pdata, conv1_mean->pdata, conv1_var->pdata, \
+            conv2_wb->pdata, conv2_beta->pdata, conv2_mean->pdata, conv2_var->pdata, \
+            conv3_wb->pdata, conv3_beta->pdata, conv3_mean->pdata, conv3_var->pdata, \
+            conv4_wb->pdata, conv4_beta->pdata, conv4_mean->pdata, conv4_var->pdata, \
             conv5_wb->pdata, conv5_wb->pbias, \
             conv6_wb->pdata, conv6_wb->pbias};
@@ -809,25 +862,25 @@ void facenet::Inception_resnet_C(pBox *input, pBox *output, string filepath, flo
     convolutionInit(conv1_wb, input, conv1_out);
     //conv1 8 x 8 x 192
     convolution(conv1_wb, input, conv1_out);
-    BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta);
+    BatchNorm(conv1_out, conv1_beta, conv1_mean, conv1_var);
     relu(conv1_out, conv1_wb->pbias);
 
     convolutionInit(conv2_wb, input, conv2_out);
     //conv2 8 x 8 x 192
     convolution(conv2_wb, input, conv2_out);
-    BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta);
+    BatchNorm(conv2_out, conv2_beta, conv2_mean, conv2_var);
     relu(conv2_out, conv2_wb->pbias);
     convolutionInit(conv3_wb, conv2_out, conv3_out);
     //conv3 8 x 8 x 192
     convolution(conv3_wb, conv2_out, conv3_out);
-    BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta);
+    BatchNorm(conv3_out, conv3_beta, conv3_mean, conv3_var);
     relu(conv3_out, conv3_wb->pbias);
     convolutionInit(conv4_wb, conv3_out, conv4_out);
     //conv4 8 x 8 x 192
     convolution(conv4_wb, conv3_out, conv4_out);
-    BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta);
+    BatchNorm(conv4_out, conv4_beta, conv4_mean, conv4_var);
     relu(conv4_out, conv4_wb->pbias);
 
     conv_mergeInit(conv5_out, conv1_out, conv4_out);
@@ -906,23 +959,30 @@ void facenet::Inception_resnet_C_None(pBox *input, pBox *output, string filepath
     struct BN *conv4_beta = new BN;
 
     long conv1 = ConvAndFcInit(conv1_wb, 192, 1792, 1, 1, 0);
-    BatchNormInit(conv1_var, conv1_mean, conv1_beta, 192);
+    BatchNormInit(conv1_beta, conv1_mean, conv1_var, 192);
     long conv2 = ConvAndFcInit(conv2_wb, 192, 1792, 1, 1, 0);
-    BatchNormInit(conv2_var, conv2_mean, conv2_beta, 192);
+    BatchNormInit(conv2_beta, conv2_mean, conv2_var, 192);
     long conv3 = ConvAndFcInit(conv3_wb, 192, 192, 0, 1, -1, 3, 1, 1, 0);
-    BatchNormInit(conv3_var, conv3_mean, conv3_beta, 192);
+    BatchNormInit(conv3_beta, conv3_mean, conv3_var, 192);
     long conv4 = ConvAndFcInit(conv4_wb, 192, 192, 0, 1, -1, 1, 3, 0, 1);
-    BatchNormInit(conv4_var, conv4_mean, conv4_beta, 192);
+    BatchNormInit(conv4_beta, conv4_mean, conv4_var, 192);
 
     long conv5 = ConvAndFcInit(conv5_wb, 1792, 384, 1, 1, 0);
 
     long dataNumber[18] = {conv1, 192, 192, 192, conv2, 192, 192, 192, conv3, 192, 192, 192, conv4, 192, 192, 192, conv5, 1792};
-    mydataFmt *pointTeam[18] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
-                                conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
-                                conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
-                                conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
+//    mydataFmt *pointTeam[18] = {
+//            conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
+//            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
+//            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
+//            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
+//            conv5_wb->pdata, conv5_wb->pbias};
+    mydataFmt *pointTeam[18] = {
+            conv1_wb->pdata, conv1_beta->pdata, conv1_mean->pdata, conv1_var->pdata, \
+            conv2_wb->pdata, conv2_beta->pdata, conv2_mean->pdata, conv2_var->pdata, \
+            conv3_wb->pdata, conv3_beta->pdata, conv3_mean->pdata, conv3_var->pdata, \
+            conv4_wb->pdata, conv4_beta->pdata, conv4_mean->pdata, conv4_var->pdata, \
             conv5_wb->pdata, conv5_wb->pbias};
 
 //    string filename = "../model_128/Repeat_2_list.txt";
@@ -932,25 +992,25 @@ void facenet::Inception_resnet_C_None(pBox *input, pBox *output, string filepath
     convolutionInit(conv1_wb, input, conv1_out);
     //conv1 8 x 8 x 192
     convolution(conv1_wb, input, conv1_out);
-    BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta);
+    BatchNorm(conv1_out, conv1_beta, conv1_mean, conv1_var);
     relu(conv1_out, conv1_wb->pbias);
 
     convolutionInit(conv2_wb, input, conv2_out);
     //conv2 8 x 8 x 192
     convolution(conv2_wb, input, conv2_out);
-    BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta);
+    BatchNorm(conv2_out, conv2_beta, conv2_mean, conv2_var);
     relu(conv2_out, conv2_wb->pbias);
     convolutionInit(conv3_wb, conv2_out, conv3_out);
     //conv3 8 x 8 x 192
     convolution(conv3_wb, conv2_out, conv3_out);
-    BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta);
+    BatchNorm(conv3_out, conv3_beta, conv3_mean, conv3_var);
     relu(conv3_out, conv3_wb->pbias);
     convolutionInit(conv4_wb, conv3_out, conv4_out);
     //conv4 8 x 8 x 192
     convolution(conv4_wb, conv3_out, conv4_out);
-    BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta);
+    BatchNorm(conv4_out, conv4_beta, conv4_mean, conv4_var);
     relu(conv4_out, conv4_wb->pbias);
 
     conv_mergeInit(conv5_out, conv1_out, conv4_out);
@@ -1026,15 +1086,16 @@ void facenet::Flatten(pBox *input, pBox *output) {
 // parameters not yet set
 void facenet::fully_connect(pBox *input, pBox *output, string filepath) {
     struct Weight *conv1_wb = new Weight;
-    struct BN *conv1_var = new BN;
-    struct BN *conv1_mean = new BN;
     struct BN *conv1_beta = new BN;
+    struct BN *conv1_mean = new BN;
+    struct BN *conv1_var = new BN;
 
     long conv1 = ConvAndFcInit(conv1_wb, Num, 1792, input->height, 1, 0);
-    BatchNormInit(conv1_var, conv1_mean, conv1_beta, Num);
+    BatchNormInit(conv1_beta, conv1_mean, conv1_var, Num);
     long dataNumber[4] = {conv1, Num, Num, Num};
 //    cout << to_string(sum) << endl;
-    mydataFmt *pointTeam[4] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata};
+//    mydataFmt *pointTeam[4] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata};
+    mydataFmt *pointTeam[4] = {conv1_wb->pdata, conv1_beta->pdata, conv1_mean->pdata, conv1_var->pdata};
 //    string filename = "../model_128/Bottleneck_list.txt";
 //    int length = sizeof(dataNumber) / sizeof(*dataNumber);
     readData(filepath, dataNumber, pointTeam, 4);
@@ -1043,7 +1104,7 @@ void facenet::fully_connect(pBox *input, pBox *output, string filepath) {
 
     //conv1 8 x 8 x 192
     fullconnect(conv1_wb, input, output);
-    BatchNorm(output, conv1_var, conv1_mean, conv1_beta);
+    BatchNorm(output, conv1_beta, conv1_mean, conv1_var);
 
     freeWeight(conv1_wb);
     freeBN(conv1_var);
diff --git a/src/network.cpp b/src/network.cpp
index fc3632e..c76f364 100755
--- a/src/network.cpp
+++ b/src/network.cpp
@@ -869,14 +869,15 @@ void mulandadd(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float sc
     }
 }
 
+
 /**
  * BN initialization
- * @param var   variance
- * @param mean  mean
- * @param beta  beta
+ * @param beta  beta
+ * @param mean  mean
+ * @param var   variance
  * @param width number of parameters
  */
-void BatchNormInit(struct BN *var, struct BN *mean, struct BN *beta, int width) {
+void BatchNormInit(struct BN *beta, struct BN *mean, struct BN *var, int width) {
     var->width = width;
     var->pdata = (mydataFmt *) malloc(width * sizeof(mydataFmt));
     if (var->pdata == NULL)cout << "prelu apply for memory failed!!!!";
@@ -896,11 +897,11 @@ void BatchNormInit(struct BN *var, struct BN *mean, struct BN *beta, int width)
 /**
  * BN implementation
  * @param pbox  input feature map
- * @param var   variance
- * @param mean  mean
- * @param beta  beta
+ * @param beta  beta
+ * @param mean  mean
+ * @param var   variance
 */
-void BatchNorm(struct pBox *pbox, struct BN *var, struct BN *mean, struct BN *beta) {
+void BatchNorm(struct pBox *pbox, struct BN *beta, struct BN *mean, struct BN *var) {
     if (pbox->pdata == NULL) {
         cout << "Relu feature is NULL!!" << endl;
         return;
diff --git a/src/network.h b/src/network.h
index dcfdc6b..c311b23 100755
--- a/src/network.h
+++ b/src/network.h
@@ -72,7 +72,8 @@ void mulandaddInit(const pBox *inpbox, const pBox *temppbox, pBox *outpBox);
 
 void mulandadd(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float scale = 1);
 
-void BatchNormInit(struct BN *var, struct BN *mean, struct BN *beta, int width);
+void BatchNormInit(struct BN *beta, struct BN *mean, struct BN *var, int width);
+
+void BatchNorm(struct pBox *pbox, struct BN *beta, struct BN *mean, struct BN *var);
 
-void BatchNorm(struct pBox *pbox, struct BN *var, struct BN *mean, struct BN *beta);
 #endif
\ No newline at end of file
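
Note appended by the editor, not part of the patch: the change is purely an argument-order fix. readData appears to fill the pointTeam pointer table sequentially from the per-layer *_list.txt dumps, so the table has to list each layer's parameters in the order the exporter wrote them, presumably weights, then beta, then mean, then variance; the old var/mean/beta order would load variance into the beta slot and vice versa. The sketch below is a minimal, self-contained illustration of the per-channel inference-time batch norm this ordering feeds, roughly y = (x - mean) / sqrt(var + eps) + beta with no separate learned scale. It uses hypothetical stand-in types and names (Box, BNParams, applyBatchNorm, the eps value); it is not the repo's pBox/BN structs or the body of its BatchNorm function.

#include <cmath>
#include <cstddef>
#include <vector>

// Hypothetical stand-ins for the repo's pBox / BN structures.
struct Box {
    std::vector<float> data;   // CHW layout: channel-major feature map
    int channel = 0;
    int height = 0;
    int width = 0;
};

struct BNParams {
    std::vector<float> beta;   // per-channel shift
    std::vector<float> mean;   // per-channel running mean
    std::vector<float> var;    // per-channel running variance
};

// Inference-time batch norm without a separate scale term (gamma == 1):
//   y = (x - mean[c]) / sqrt(var[c] + eps) + beta[c]
// The (beta, mean, var) argument order mirrors the new BatchNorm signature
// and the assumed order of the parameters in the model dump.
void applyBatchNorm(Box &box, const BNParams &p, float eps = 1e-3f) {
    const std::size_t plane = static_cast<std::size_t>(box.height) * box.width;
    for (int c = 0; c < box.channel; ++c) {
        const float shift = p.beta[c];
        const float mu = p.mean[c];
        const float inv_std = 1.0f / std::sqrt(p.var[c] + eps);
        float *x = box.data.data() + c * plane;
        for (std::size_t i = 0; i < plane; ++i) {
            x[i] = (x[i] - mu) * inv_std + shift;
        }
    }
}

With parameters laid out this way, a mismatched pointer order does not fail loudly: the beta, mean, and var arrays all have the same length, so the values simply swap roles during normalization and the embeddings silently degrade, which is presumably the kind of error the reordering in this patch corrects.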