diff --git a/result.jpg b/result.jpg index 5f3a47d..4f9b867 100644 Binary files a/result.jpg and b/result.jpg differ diff --git a/src/facenet.cpp b/src/facenet.cpp index 75ab964..4aee9c4 100644 --- a/src/facenet.cpp +++ b/src/facenet.cpp @@ -4,6 +4,995 @@ #include "facenet.h" + +void facenet::Stem(Mat &image, pBox *output) { + pBox *rgb = new pBox; + pBox *conv1_out = new pBox; + pBox *conv2_out = new pBox; + pBox *conv3_out = new pBox; + pBox *conv4_out = new pBox; + pBox *conv5_out = new pBox; + + struct Weight *conv1_wb = new Weight; + struct Weight *conv2_wb = new Weight; + struct Weight *conv3_wb = new Weight; + struct Weight *conv4_wb = new Weight; + struct Weight *conv5_wb = new Weight; + struct Weight *conv6_wb = new Weight; + + struct pBox *pooling1_out = new pBox; + + struct BN *conv1_var = new BN; + struct BN *conv1_mean = new BN; + struct BN *conv1_beta = new BN; + + struct BN *conv2_var = new BN; + struct BN *conv2_mean = new BN; + struct BN *conv2_beta = new BN; + + struct BN *conv3_var = new BN; + struct BN *conv3_mean = new BN; + struct BN *conv3_beta = new BN; + + struct BN *conv4_var = new BN; + struct BN *conv4_mean = new BN; + struct BN *conv4_beta = new BN; + + struct BN *conv5_var = new BN; + struct BN *conv5_mean = new BN; + struct BN *conv5_beta = new BN; + + struct BN *conv6_var = new BN; + struct BN *conv6_mean = new BN; + struct BN *conv6_beta = new BN; + + long conv1 = ConvAndFcInit(conv1_wb, 32, 3, 3, 2, 0); + BatchNormInit(conv1_var, conv1_mean, conv1_beta, 32); + long conv2 = ConvAndFcInit(conv2_wb, 32, 32, 3, 1, 0); + BatchNormInit(conv2_var, conv2_mean, conv2_beta, 32); + long conv3 = ConvAndFcInit(conv3_wb, 64, 32, 3, 1, 1); + BatchNormInit(conv3_var, conv3_mean, conv3_beta, 64); + long conv4 = ConvAndFcInit(conv4_wb, 80, 64, 1, 1, 0); + BatchNormInit(conv4_var, conv4_mean, conv4_beta, 80); + long conv5 = ConvAndFcInit(conv5_wb, 192, 80, 3, 1, 0); + BatchNormInit(conv5_var, conv5_mean, conv5_beta, 192); + long conv6 = ConvAndFcInit(conv6_wb, 256, 192, 3, 2, 0); + BatchNormInit(conv6_var, conv6_mean, conv6_beta, 256); + + long dataNumber[24] = {conv1, 32, 32, 32, conv2, 32, 32, 32, conv3, 64, 64, 64, conv4, 80, 80, 80, conv5, 192, 192, + 192, conv6, 256, 256, 256}; + + mydataFmt *pointTeam[24] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \ + conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \ + conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \ + conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \ + conv5_wb->pdata, conv5_var->pdata, conv5_mean->pdata, conv5_beta->pdata, \ + conv6_wb->pdata, conv6_var->pdata, conv6_mean->pdata, conv6_beta->pdata}; + string filename = "../model_" + to_string(Num) + "/stem_list.txt"; + readData(filename, dataNumber, pointTeam, 24); + +// if (firstFlag) { + image2MatrixInit(image, rgb); + image2Matrix(image, rgb, 1); + + convolutionInit(conv1_wb, rgb, conv1_out); + //conv1 149 x 149 x 32 + convolution(conv1_wb, rgb, conv1_out); +// printData(conv1_out); + BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta); +// printData(conv1_out); + relu(conv1_out, conv1_wb->pbias); + + convolutionInit(conv2_wb, conv1_out, conv2_out); + //conv2 147 x 147 x 32 + convolution(conv2_wb, conv1_out, conv2_out); + BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta); + relu(conv2_out, conv2_wb->pbias); + + convolutionInit(conv3_wb, conv2_out, conv3_out); + //conv3 147 x 147 x 64 + convolution(conv3_wb, conv2_out, conv3_out); + 
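// --- Editor's note (not part of the patch) ---------------------------------
// Each convolution in the rewritten Stem is now followed by
// BatchNorm(out, var, mean, beta) and relu(out, bias). BatchNorm itself lives in
// network.cpp and is outside this hunk, so the sketch below only illustrates the
// standard inference-time transform such a call presumably applies per channel of
// the CHW buffer: y = (x - mean) / sqrt(var + eps) + beta. Only var/mean/beta are
// loaded from the model files, so the scale (gamma) is presumably folded into the
// weights or fixed at 1; the helper name and the eps value are assumptions, not
// code from this repository.
static void BatchNormSketch(pBox *box, const BN *var, const BN *mean, const BN *beta,
                            mydataFmt eps = 0.001) {
    long plane = (long) box->width * box->height;       // elements per channel, CHW layout
    mydataFmt *p = box->pdata;
    for (int c = 0; c < box->channel; ++c) {
        mydataFmt inv = 1.0 / sqrt(var->pdata[c] + eps);
        for (long i = 0; i < plane; ++i, ++p) {
            *p = (*p - mean->pdata[c]) * inv + beta->pdata[c];
        }
    }
}
// ---------------------------------------------------------------------------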
BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta); + relu(conv3_out, conv3_wb->pbias); + + maxPoolingInit(conv3_out, pooling1_out, 3, 2); + //maxPooling 73 x 73 x 64 + maxPooling(conv3_out, pooling1_out, 3, 2); + + convolutionInit(conv4_wb, pooling1_out, conv4_out); + //conv4 73 x 73 x 80 + convolution(conv4_wb, pooling1_out, conv4_out); + BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta); + relu(conv4_out, conv4_wb->pbias); + + convolutionInit(conv5_wb, conv4_out, conv5_out); + //conv5 71 x 71 x 192 + convolution(conv5_wb, conv4_out, conv5_out); + BatchNorm(conv5_out, conv5_var, conv5_mean, conv5_beta); + relu(conv5_out, conv5_wb->pbias); + + + convolutionInit(conv6_wb, conv5_out, output); + //conv6 35 x 35 x 256 + convolution(conv6_wb, conv5_out, output); + BatchNorm(output, conv6_var, conv6_mean, conv6_beta); + relu(output, conv6_wb->pbias); +// firstFlag = false; +// } + + freepBox(conv1_out); + freepBox(conv2_out); + freepBox(conv3_out); + freepBox(conv4_out); + freepBox(conv5_out); + freepBox(pooling1_out); + + freepBox(rgb); + + freeWeight(conv1_wb); + freeWeight(conv2_wb); + freeWeight(conv3_wb); + freeWeight(conv4_wb); + freeWeight(conv5_wb); + freeWeight(conv6_wb); + + freeBN(conv1_var); + freeBN(conv1_mean); + freeBN(conv1_beta); + + freeBN(conv2_var); + freeBN(conv2_mean); + freeBN(conv2_beta); + + freeBN(conv3_var); + freeBN(conv3_mean); + freeBN(conv3_beta); + + freeBN(conv4_var); + freeBN(conv4_mean); + freeBN(conv4_beta); + + freeBN(conv5_var); + freeBN(conv5_mean); + freeBN(conv5_beta); + + freeBN(conv6_var); + freeBN(conv6_mean); + freeBN(conv6_beta); +} + +void facenet::Inception_resnet_A(pBox *input, pBox *output, string filepath, float scale) { + pBox *conv1_out = new pBox; + pBox *conv2_out = new pBox; + pBox *conv3_out = new pBox; + pBox *conv4_out = new pBox; + pBox *conv5_out = new pBox; + pBox *conv6_out = new pBox; + pBox *conv7_out = new pBox; + pBox *conv8_out = new pBox; + + struct Weight *conv1_wb = new Weight; + struct Weight *conv2_wb = new Weight; + struct Weight *conv3_wb = new Weight; + struct Weight *conv4_wb = new Weight; + struct Weight *conv5_wb = new Weight; + struct Weight *conv6_wb = new Weight; + struct Weight *conv7_wb = new Weight; + struct Weight *conv8_wb = new Weight; + + struct BN *conv1_var = new BN; + struct BN *conv1_mean = new BN; + struct BN *conv1_beta = new BN; + + struct BN *conv2_var = new BN; + struct BN *conv2_mean = new BN; + struct BN *conv2_beta = new BN; + + struct BN *conv3_var = new BN; + struct BN *conv3_mean = new BN; + struct BN *conv3_beta = new BN; + + struct BN *conv4_var = new BN; + struct BN *conv4_mean = new BN; + struct BN *conv4_beta = new BN; + + struct BN *conv5_var = new BN; + struct BN *conv5_mean = new BN; + struct BN *conv5_beta = new BN; + + struct BN *conv6_var = new BN; + struct BN *conv6_mean = new BN; + struct BN *conv6_beta = new BN; + + + long conv1 = ConvAndFcInit(conv1_wb, 32, 256, 1, 1, 0); + BatchNormInit(conv1_var, conv1_mean, conv1_beta, 32); + + long conv2 = ConvAndFcInit(conv2_wb, 32, 256, 1, 1, 0); + BatchNormInit(conv2_var, conv2_mean, conv2_beta, 32); + long conv3 = ConvAndFcInit(conv3_wb, 32, 32, 3, 1, 1); + BatchNormInit(conv3_var, conv3_mean, conv3_beta, 32); + + long conv4 = ConvAndFcInit(conv4_wb, 32, 256, 1, 1, 0); + BatchNormInit(conv4_var, conv4_mean, conv4_beta, 32); + long conv5 = ConvAndFcInit(conv5_wb, 32, 32, 3, 1, 1); + BatchNormInit(conv5_var, conv5_mean, conv5_beta, 32); + long conv6 = ConvAndFcInit(conv6_wb, 32, 32, 3, 1, 1); + BatchNormInit(conv6_var, 
conv6_mean, conv6_beta, 32); + + long conv7 = ConvAndFcInit(conv7_wb, 256, 96, 1, 1, 0); + + long conv8 = ConvAndFcInit(conv8_wb, 256, 0, 0, 0, 0); + + long dataNumber[28] = {conv1, 32, 32, 32, conv2, 32, 32, 32, conv3, 32, 32, 32, conv4, 32, 32, 32, + conv5, 32, 32, 32, conv6, 32, 32, 32, conv7, 256, conv8, 0}; + + mydataFmt *pointTeam[28] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \ + conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \ + conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \ + conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \ + conv5_wb->pdata, conv5_var->pdata, conv5_mean->pdata, conv5_beta->pdata, \ + conv6_wb->pdata, conv6_var->pdata, conv6_mean->pdata, conv6_beta->pdata, \ + conv7_wb->pdata, conv7_wb->pbias, \ + conv8_wb->pdata, conv8_wb->pbias}; + + readData(filepath, dataNumber, pointTeam, 28); + + convolutionInit(conv1_wb, input, conv1_out); + //conv1 35 x 35 x 32 + convolution(conv1_wb, input, conv1_out); + BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta); + relu(conv1_out, conv1_wb->pbias); + + convolutionInit(conv2_wb, input, conv2_out); + //conv2 35 x 35 x 32 + convolution(conv2_wb, input, conv2_out); + BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta); + relu(conv2_out, conv2_wb->pbias); + convolutionInit(conv3_wb, conv2_out, conv3_out); + //conv3 35 x 35 x 32 + convolution(conv3_wb, conv2_out, conv3_out); + BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta); + relu(conv3_out, conv3_wb->pbias); + + convolutionInit(conv4_wb, input, conv4_out); + //conv4 35 x 35 x 32 + convolution(conv4_wb, input, conv4_out); + BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta); + relu(conv4_out, conv4_wb->pbias); + convolutionInit(conv5_wb, conv4_out, conv5_out); + //conv5 35 x 35 x 32 + convolution(conv5_wb, conv4_out, conv5_out); + BatchNorm(conv5_out, conv5_var, conv5_mean, conv5_beta); + relu(conv5_out, conv5_wb->pbias); + convolutionInit(conv6_wb, conv5_out, conv6_out); + //conv6 35 x 35 x 32 + convolution(conv6_wb, conv5_out, conv6_out); + BatchNorm(conv6_out, conv6_var, conv6_mean, conv6_beta); + relu(conv6_out, conv6_wb->pbias); + + conv_mergeInit(conv7_out, conv1_out, conv3_out, conv6_out); + //35 × 35 × 96 + conv_merge(conv7_out, conv1_out, conv3_out, conv6_out); + + convolutionInit(conv7_wb, conv7_out, conv8_out); + //35*35*256 + convolution(conv7_wb, conv7_out, conv8_out); + addbias(conv8_out, conv7_wb->pbias); + + mulandaddInit(input, conv8_out, output, scale); + mulandadd(input, conv8_out, output, scale); + relu(output, conv8_wb->pbias); + + freepBox(conv1_out); + freepBox(conv2_out); + freepBox(conv3_out); + freepBox(conv4_out); + freepBox(conv5_out); + freepBox(conv6_out); + freepBox(conv7_out); + freepBox(conv8_out); + + freeWeight(conv1_wb); + freeWeight(conv2_wb); + freeWeight(conv3_wb); + freeWeight(conv4_wb); + freeWeight(conv5_wb); + freeWeight(conv6_wb); + freeWeight(conv7_wb); + freeWeight(conv8_wb); + + freeBN(conv1_var); + freeBN(conv1_mean); + freeBN(conv1_beta); + + freeBN(conv2_var); + freeBN(conv2_mean); + freeBN(conv2_beta); + + freeBN(conv3_var); + freeBN(conv3_mean); + freeBN(conv3_beta); + + freeBN(conv4_var); + freeBN(conv4_mean); + freeBN(conv4_beta); + + freeBN(conv5_var); + freeBN(conv5_mean); + freeBN(conv5_beta); + + freeBN(conv6_var); + freeBN(conv6_mean); + freeBN(conv6_beta); +} + +void facenet::Reduction_A(pBox *input, pBox *output) { + pBox *conv1_out = new pBox; + pBox *conv2_out = new pBox; + pBox *conv3_out 
= new pBox; + pBox *conv4_out = new pBox; + + struct Weight *conv1_wb = new Weight; + struct Weight *conv2_wb = new Weight; + struct Weight *conv3_wb = new Weight; + struct Weight *conv4_wb = new Weight; + + struct pBox *pooling1_out = new pBox; + + struct BN *conv1_var = new BN; + struct BN *conv1_mean = new BN; + struct BN *conv1_beta = new BN; + struct BN *conv2_var = new BN; + struct BN *conv2_mean = new BN; + struct BN *conv2_beta = new BN; + struct BN *conv3_var = new BN; + struct BN *conv3_mean = new BN; + struct BN *conv3_beta = new BN; + struct BN *conv4_var = new BN; + struct BN *conv4_mean = new BN; + struct BN *conv4_beta = new BN; + + + long conv1 = ConvAndFcInit(conv1_wb, 384, 256, 3, 2, 0); + BatchNormInit(conv1_var, conv1_mean, conv1_beta, 384); + + long conv2 = ConvAndFcInit(conv2_wb, 192, 256, 1, 1, 0); + BatchNormInit(conv2_var, conv2_mean, conv2_beta, 192); + long conv3 = ConvAndFcInit(conv3_wb, 192, 192, 3, 1, 0); + BatchNormInit(conv3_var, conv3_mean, conv3_beta, 192); + long conv4 = ConvAndFcInit(conv4_wb, 256, 192, 3, 2, 0); + BatchNormInit(conv4_var, conv4_mean, conv4_beta, 256); + long dataNumber[16] = {conv1, 384, 384, 384, conv2, 192, 192, 192, conv3, 192, 192, 192, conv4, 256, 256, 256}; + + mydataFmt *pointTeam[16] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \ + conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \ + conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \ + conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata}; + string filename = "../model_" + to_string(Num) + "/Mixed_6a_list.txt"; + readData(filename, dataNumber, pointTeam, 16); + + maxPoolingInit(input, pooling1_out, 3, 2); + // 17*17*256 + maxPooling(input, pooling1_out, 3, 2); + + convolutionInit(conv1_wb, input, conv1_out); + //conv1 17 x 17 x 384 + convolution(conv1_wb, input, conv1_out); + BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta); + relu(conv1_out, conv1_wb->pbias); + + convolutionInit(conv2_wb, input, conv2_out); + //conv2 35 x 35 x 192 + convolution(conv2_wb, input, conv2_out); + BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta); + relu(conv2_out, conv2_wb->pbias); + + convolutionInit(conv3_wb, conv2_out, conv3_out); + //conv3 35 x 35 x 192 + convolution(conv3_wb, conv2_out, conv3_out); + BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta); + relu(conv3_out, conv3_wb->pbias); + + convolutionInit(conv4_wb, conv3_out, conv4_out); + //conv4 17 x 17 x 256 + convolution(conv4_wb, conv3_out, conv4_out); + BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta); + relu(conv4_out, conv4_wb->pbias); + conv_mergeInit(output, pooling1_out, conv1_out, conv4_out); + //17×17×896 + conv_merge(output, pooling1_out, conv1_out, conv4_out); + + freepBox(conv1_out); + freepBox(conv2_out); + freepBox(conv3_out); + freepBox(conv4_out); + + freeWeight(conv1_wb); + freeWeight(conv2_wb); + freeWeight(conv3_wb); + freeWeight(conv4_wb); + + freepBox(pooling1_out); + + freeBN(conv1_var); + freeBN(conv1_mean); + freeBN(conv1_beta); + freeBN(conv2_var); + freeBN(conv2_mean); + freeBN(conv2_beta); + freeBN(conv3_var); + freeBN(conv3_mean); + freeBN(conv3_beta); + freeBN(conv4_var); + freeBN(conv4_mean); + freeBN(conv4_beta); +} + +void facenet::Inception_resnet_B(pBox *input, pBox *output, string filepath, float scale) { + pBox *conv1_out = new pBox; + pBox *conv2_out = new pBox; + pBox *conv3_out = new pBox; + pBox *conv4_out = new pBox; + pBox *conv5_out = new pBox; + pBox *conv6_out = new pBox; + 
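// --- Editor's note (not part of the patch) ---------------------------------
// Reduction_A above and the Inception-ResNet blocks join their branches with
// conv_mergeInit / conv_merge. Judging from the implementation removed from this
// file further down in the patch (and presumably relocated to network.cpp), the
// merge is a plain channel-wise concatenation in CHW layout: the output keeps the
// branches' spatial size, its channel count is the sum of the branch channels
// (e.g. 256 + 384 + 256 = 896 after Reduction_A), and each branch buffer is copied
// back to back. A minimal two-branch sketch; the helper name is hypothetical and
// the output buffer is assumed to be pre-allocated by the *Init counterpart.
static void ConcatChannelsSketch(pBox *out, const pBox *a, const pBox *b) {
    long na = (long) a->width * a->height * a->channel;   // branch A, CHW-contiguous
    long nb = (long) b->width * b->height * b->channel;   // branch B, same H and W
    memcpy(out->pdata, a->pdata, na * sizeof(mydataFmt));
    memcpy(out->pdata + na, b->pdata, nb * sizeof(mydataFmt));
    out->width = a->width;
    out->height = a->height;
    out->channel = a->channel + b->channel;
}
// ---------------------------------------------------------------------------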
+ struct Weight *conv1_wb = new Weight; + struct Weight *conv2_wb = new Weight; + struct Weight *conv3_wb = new Weight; + struct Weight *conv4_wb = new Weight; + struct Weight *conv5_wb = new Weight; + struct Weight *conv6_wb = new Weight; + + struct BN *conv1_var = new BN; + struct BN *conv1_mean = new BN; + struct BN *conv1_beta = new BN; + struct BN *conv2_var = new BN; + struct BN *conv2_mean = new BN; + struct BN *conv2_beta = new BN; + struct BN *conv3_var = new BN; + struct BN *conv3_mean = new BN; + struct BN *conv3_beta = new BN; + struct BN *conv4_var = new BN; + struct BN *conv4_mean = new BN; + struct BN *conv4_beta = new BN; + + + long conv1 = ConvAndFcInit(conv1_wb, 128, 896, 1, 1, 0); + BatchNormInit(conv1_var, conv1_mean, conv1_beta, 128); + + long conv2 = ConvAndFcInit(conv2_wb, 128, 896, 1, 1, 0); + BatchNormInit(conv2_var, conv2_mean, conv2_beta, 128); + long conv3 = ConvAndFcInit(conv3_wb, 128, 128, 0, 1, -1, 7, 1, 3, 0);//[1,7] + BatchNormInit(conv3_var, conv3_mean, conv3_beta, 128); + long conv4 = ConvAndFcInit(conv4_wb, 128, 128, 0, 1, -1, 1, 7, 0, 3);//[7,1] + BatchNormInit(conv4_var, conv4_mean, conv4_beta, 128); + + long conv5 = ConvAndFcInit(conv5_wb, 896, 256, 1, 1, 0); + + long conv6 = ConvAndFcInit(conv6_wb, 896, 0, 0, 0, 0); + + long dataNumber[20] = {conv1, 128, 128, 128, conv2, 128, 128, 128, conv3, 128, 128, 128, conv4, 128, 128, 128, + conv5, 896, conv6, 0}; + + mydataFmt *pointTeam[20] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \ + conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \ + conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \ + conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \ + conv5_wb->pdata, conv5_wb->pbias, \ + conv6_wb->pdata, conv6_wb->pbias}; + + + readData(filepath, dataNumber, pointTeam, 20); + + + convolutionInit(conv1_wb, input, conv1_out); + //conv1 17*17*128 + convolution(conv1_wb, input, conv1_out); + BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta); + relu(conv1_out, conv1_wb->pbias); + + convolutionInit(conv2_wb, input, conv2_out); + //conv2 17*17*128 + convolution(conv2_wb, input, conv2_out); + BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta); + relu(conv2_out, conv2_wb->pbias); + + convolutionInit(conv3_wb, conv2_out, conv3_out); + //conv3 17*17*128 + convolution(conv3_wb, conv2_out, conv3_out); + BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta); + relu(conv3_out, conv3_wb->pbias); + convolutionInit(conv4_wb, conv3_out, conv4_out); + //conv4 17*17*128 + convolution(conv4_wb, conv3_out, conv4_out); + BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta); + relu(conv4_out, conv4_wb->pbias); + + conv_mergeInit(conv5_out, conv1_out, conv4_out); + //17*17*256 + conv_merge(conv5_out, conv1_out, conv4_out); + + convolutionInit(conv5_wb, conv5_out, conv6_out); + //conv5 17*17*896 + convolution(conv5_wb, conv5_out, conv6_out); + addbias(conv6_out, conv5_wb->pbias); + + mulandaddInit(input, conv6_out, output, scale); + mulandadd(input, conv6_out, output, scale); + relu(output, conv6_wb->pbias); + + freepBox(conv1_out); + freepBox(conv2_out); + freepBox(conv3_out); + freepBox(conv4_out); + freepBox(conv5_out); + freepBox(conv6_out); + + freeWeight(conv1_wb); + freeWeight(conv2_wb); + freeWeight(conv3_wb); + freeWeight(conv4_wb); + freeWeight(conv5_wb); + freeWeight(conv6_wb); + + freeBN(conv1_var); + freeBN(conv1_mean); + freeBN(conv1_beta); + freeBN(conv2_var); + freeBN(conv2_mean); + freeBN(conv2_beta); + 
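// --- Editor's note (not part of the patch) ---------------------------------
// The mulandadd(input, branch, output, scale) call a few lines above is the scaled
// residual connection used by every Inception-ResNet block: output = input +
// scale * branch, applied element-wise before the final relu. This mirrors the
// implementation that this patch removes from facenet.cpp further down
// (op[i] = ip[i] + tp[i] * scale); the function itself presumably now lives in
// network.cpp. Sketch with a hypothetical name:
static void ResidualAddSketch(const pBox *in, const pBox *branch, pBox *out, float scale) {
    long n = (long) in->width * in->height * in->channel;
    for (long i = 0; i < n; ++i) {
        out->pdata[i] = in->pdata[i] + scale * branch->pdata[i];
    }
}
// ---------------------------------------------------------------------------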
freeBN(conv3_var); + freeBN(conv3_mean); + freeBN(conv3_beta); + freeBN(conv4_var); + freeBN(conv4_mean); + freeBN(conv4_beta); +} + +void facenet::Reduction_B(pBox *input, pBox *output) { + pBox *conv1_out = new pBox; + pBox *conv2_out = new pBox; + pBox *conv3_out = new pBox; + pBox *conv4_out = new pBox; + pBox *conv5_out = new pBox; + pBox *conv6_out = new pBox; + pBox *conv7_out = new pBox; + + struct Weight *conv1_wb = new Weight; + struct Weight *conv2_wb = new Weight; + struct Weight *conv3_wb = new Weight; + struct Weight *conv4_wb = new Weight; + struct Weight *conv5_wb = new Weight; + struct Weight *conv6_wb = new Weight; + struct Weight *conv7_wb = new Weight; + + struct pBox *pooling1_out = new pBox; + + struct BN *conv1_var = new BN; + struct BN *conv1_mean = new BN; + struct BN *conv1_beta = new BN; + struct BN *conv2_var = new BN; + struct BN *conv2_mean = new BN; + struct BN *conv2_beta = new BN; + struct BN *conv3_var = new BN; + struct BN *conv3_mean = new BN; + struct BN *conv3_beta = new BN; + struct BN *conv4_var = new BN; + struct BN *conv4_mean = new BN; + struct BN *conv4_beta = new BN; + struct BN *conv5_var = new BN; + struct BN *conv5_mean = new BN; + struct BN *conv5_beta = new BN; + struct BN *conv6_var = new BN; + struct BN *conv6_mean = new BN; + struct BN *conv6_beta = new BN; + struct BN *conv7_var = new BN; + struct BN *conv7_mean = new BN; + struct BN *conv7_beta = new BN; + + + long conv1 = ConvAndFcInit(conv1_wb, 256, 896, 1, 1, 0); + BatchNormInit(conv1_var, conv1_mean, conv1_beta, 256); + long conv2 = ConvAndFcInit(conv2_wb, 384, 256, 3, 2, 0); + BatchNormInit(conv2_var, conv2_mean, conv2_beta, 384); + + long conv3 = ConvAndFcInit(conv3_wb, 256, 896, 1, 1, 0); + BatchNormInit(conv3_var, conv3_mean, conv3_beta, 256); + long conv4 = ConvAndFcInit(conv4_wb, 256, 256, 3, 2, 0); + BatchNormInit(conv4_var, conv4_mean, conv4_beta, 256); + + long conv5 = ConvAndFcInit(conv5_wb, 256, 896, 1, 1, 0); + BatchNormInit(conv5_var, conv5_mean, conv5_beta, 256); + long conv6 = ConvAndFcInit(conv6_wb, 256, 256, 3, 1, 1); + BatchNormInit(conv6_var, conv6_mean, conv6_beta, 256); + long conv7 = ConvAndFcInit(conv7_wb, 256, 256, 3, 2, 0); + BatchNormInit(conv7_var, conv7_mean, conv7_beta, 256); + + long dataNumber[28] = {conv1, 256, 256, 256, conv2, 384, 384, 384, conv3, 256, 256, 256, conv4, 256, 256, 256, + conv5, 256, 256, 256, conv6, 256, 256, 256, conv7, 256, 256, 256}; + + mydataFmt *pointTeam[28] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \ + conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \ + conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \ + conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \ + conv5_wb->pdata, conv5_var->pdata, conv5_mean->pdata, conv5_beta->pdata, \ + conv6_wb->pdata, conv6_var->pdata, conv6_mean->pdata, conv6_beta->pdata, \ + conv7_wb->pdata, conv7_var->pdata, conv7_mean->pdata, conv7_beta->pdata}; + string filename = "../model_" + to_string(Num) + "/Mixed_7a_list.txt"; + readData(filename, dataNumber, pointTeam, 28); + + + maxPoolingInit(input, pooling1_out, 3, 2, 1); + // 8*8*896 + maxPooling(input, pooling1_out, 3, 2); + + convolutionInit(conv1_wb, input, conv1_out); + //conv1 17 x 17 x 256 + convolution(conv1_wb, input, conv1_out); + BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta); + relu(conv1_out, conv1_wb->pbias); + + convolutionInit(conv2_wb, conv1_out, conv2_out); + //conv2 8 x 8 x 384 + convolution(conv2_wb, conv1_out, 
conv2_out); + BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta); + relu(conv2_out, conv2_wb->pbias); + + convolutionInit(conv3_wb, input, conv3_out); + //conv3 17 x 17 x 256 + convolution(conv3_wb, input, conv3_out); + BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta); + relu(conv3_out, conv3_wb->pbias); + + convolutionInit(conv4_wb, conv3_out, conv4_out); + //conv4 8 x 8 x 256 + convolution(conv4_wb, conv3_out, conv4_out); + BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta); + relu(conv4_out, conv4_wb->pbias); + + convolutionInit(conv5_wb, input, conv5_out); + //conv5 17 x 17 x 256 + convolution(conv5_wb, input, conv5_out); + BatchNorm(conv5_out, conv5_var, conv5_mean, conv5_beta); + relu(conv5_out, conv5_wb->pbias); + + convolutionInit(conv6_wb, conv5_out, conv6_out); + //conv6 17 x 17 x 256 + convolution(conv6_wb, conv5_out, conv6_out); + BatchNorm(conv6_out, conv6_var, conv6_mean, conv6_beta); + relu(conv6_out, conv6_wb->pbias); + + convolutionInit(conv7_wb, conv6_out, conv7_out); + //conv6 8 x 8 x 256 + convolution(conv7_wb, conv6_out, conv7_out); + BatchNorm(conv7_out, conv7_var, conv7_mean, conv7_beta); + relu(conv7_out, conv7_wb->pbias); + + conv_mergeInit(output, conv2_out, conv4_out, conv7_out, pooling1_out); + //8*8*1792 + conv_merge(output, conv2_out, conv4_out, conv7_out, pooling1_out); + + freepBox(conv1_out); + freepBox(conv2_out); + freepBox(conv3_out); + freepBox(conv4_out); + freepBox(conv5_out); + freepBox(conv6_out); + freepBox(conv7_out); + + freeWeight(conv1_wb); + freeWeight(conv2_wb); + freeWeight(conv3_wb); + freeWeight(conv4_wb); + freeWeight(conv5_wb); + freeWeight(conv6_wb); + freeWeight(conv7_wb); + + freepBox(pooling1_out); + + freeBN(conv1_var); + freeBN(conv1_mean); + freeBN(conv1_beta); + freeBN(conv2_var); + freeBN(conv2_mean); + freeBN(conv2_beta); + freeBN(conv3_var); + freeBN(conv3_mean); + freeBN(conv3_beta); + freeBN(conv4_var); + freeBN(conv4_mean); + freeBN(conv4_beta); + freeBN(conv5_var); + freeBN(conv5_mean); + freeBN(conv5_beta); + freeBN(conv6_var); + freeBN(conv6_mean); + freeBN(conv6_beta); + freeBN(conv7_var); + freeBN(conv7_mean); + freeBN(conv7_beta); +} + +void facenet::Inception_resnet_C(pBox *input, pBox *output, string filepath, float scale) { + pBox *conv1_out = new pBox; + pBox *conv2_out = new pBox; + pBox *conv3_out = new pBox; + pBox *conv4_out = new pBox; + pBox *conv5_out = new pBox; + pBox *conv6_out = new pBox; + + struct Weight *conv1_wb = new Weight; + struct Weight *conv2_wb = new Weight; + struct Weight *conv3_wb = new Weight; + struct Weight *conv4_wb = new Weight; + struct Weight *conv5_wb = new Weight; + struct Weight *conv6_wb = new Weight; + + struct BN *conv1_var = new BN; + struct BN *conv1_mean = new BN; + struct BN *conv1_beta = new BN; + struct BN *conv2_var = new BN; + struct BN *conv2_mean = new BN; + struct BN *conv2_beta = new BN; + struct BN *conv3_var = new BN; + struct BN *conv3_mean = new BN; + struct BN *conv3_beta = new BN; + struct BN *conv4_var = new BN; + struct BN *conv4_mean = new BN; + struct BN *conv4_beta = new BN; + + + long conv1 = ConvAndFcInit(conv1_wb, 192, 1792, 1, 1, 0); + BatchNormInit(conv1_var, conv1_mean, conv1_beta, 192); + long conv2 = ConvAndFcInit(conv2_wb, 192, 1792, 1, 1, 0); + BatchNormInit(conv2_var, conv2_mean, conv2_beta, 192); + long conv3 = ConvAndFcInit(conv3_wb, 192, 192, 0, 1, -1, 3, 1, 1, 0); + BatchNormInit(conv3_var, conv3_mean, conv3_beta, 192); + long conv4 = ConvAndFcInit(conv4_wb, 192, 192, 0, 1, -1, 1, 3, 0, 1); + BatchNormInit(conv4_var, 
conv4_mean, conv4_beta, 192); + + long conv5 = ConvAndFcInit(conv5_wb, 1792, 384, 1, 1, 0); + + long conv6 = ConvAndFcInit(conv6_wb, 1792, 0, 0, 0, 0); + + long dataNumber[20] = {conv1, 192, 192, 192, conv2, 192, 192, 192, conv3, 192, 192, 192, conv4, 192, 192, 192, + conv5, 1792, conv6, 0}; + + + mydataFmt *pointTeam[20] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \ + conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \ + conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \ + conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \ + conv5_wb->pdata, conv5_wb->pbias, \ + conv6_wb->pdata, conv6_wb->pbias}; + +// string filename = "../model_128/Repeat_2_list.txt"; +// int length = sizeof(dataNumber) / sizeof(*dataNumber); + readData(filepath, dataNumber, pointTeam, 20); + + convolutionInit(conv1_wb, input, conv1_out); + //conv1 8 x 8 x 192 + convolution(conv1_wb, input, conv1_out); + BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta); + relu(conv1_out, conv1_wb->pbias); + + convolutionInit(conv2_wb, input, conv2_out); + //conv2 8 x 8 x 192 + convolution(conv2_wb, input, conv2_out); + BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta); + relu(conv2_out, conv2_wb->pbias); + + convolutionInit(conv3_wb, conv2_out, conv3_out); + //conv3 8 x 8 x 192 + convolution(conv3_wb, conv2_out, conv3_out); + BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta); + relu(conv3_out, conv3_wb->pbias); + + convolutionInit(conv4_wb, conv3_out, conv4_out); + //conv4 8 x 8 x 192 + convolution(conv4_wb, conv3_out, conv4_out); + BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta); + relu(conv4_out, conv4_wb->pbias); + + conv_mergeInit(conv5_out, conv1_out, conv4_out); + // 8*8*384 + conv_merge(conv5_out, conv1_out, conv4_out); + + convolutionInit(conv5_wb, conv5_out, conv6_out); + //conv5 8 x 8 x 1792 + convolution(conv5_wb, conv5_out, conv6_out); + addbias(conv6_out, conv5_wb->pbias); + + mulandaddInit(input, conv6_out, output, scale); + mulandadd(input, conv6_out, output, scale); + relu(output, conv6_wb->pbias); + + freepBox(conv1_out); + freepBox(conv2_out); + freepBox(conv3_out); + freepBox(conv4_out); + freepBox(conv5_out); + freepBox(conv6_out); + + freeWeight(conv1_wb); + freeWeight(conv2_wb); + freeWeight(conv3_wb); + freeWeight(conv4_wb); + freeWeight(conv5_wb); + freeWeight(conv6_wb); + + freeBN(conv1_var); + freeBN(conv1_mean); + freeBN(conv1_beta); + freeBN(conv2_var); + freeBN(conv2_mean); + freeBN(conv2_beta); + freeBN(conv3_var); + freeBN(conv3_mean); + freeBN(conv3_beta); + freeBN(conv4_var); + freeBN(conv4_mean); + freeBN(conv4_beta); +} + +void facenet::Inception_resnet_C_None(pBox *input, pBox *output, string filepath) { + pBox *conv1_out = new pBox; + pBox *conv2_out = new pBox; + pBox *conv3_out = new pBox; + pBox *conv4_out = new pBox; + pBox *conv5_out = new pBox; + pBox *conv6_out = new pBox; + + struct Weight *conv1_wb = new Weight; + struct Weight *conv2_wb = new Weight; + struct Weight *conv3_wb = new Weight; + struct Weight *conv4_wb = new Weight; + struct Weight *conv5_wb = new Weight; + + struct BN *conv1_var = new BN; + struct BN *conv1_mean = new BN; + struct BN *conv1_beta = new BN; + struct BN *conv2_var = new BN; + struct BN *conv2_mean = new BN; + struct BN *conv2_beta = new BN; + struct BN *conv3_var = new BN; + struct BN *conv3_mean = new BN; + struct BN *conv3_beta = new BN; + struct BN *conv4_var = new BN; + struct BN *conv4_mean = new BN; + struct BN *conv4_beta = new 
BN; + + long conv1 = ConvAndFcInit(conv1_wb, 192, 1792, 1, 1, 0); + BatchNormInit(conv1_var, conv1_mean, conv1_beta, 192); + long conv2 = ConvAndFcInit(conv2_wb, 192, 1792, 1, 1, 0); + BatchNormInit(conv2_var, conv2_mean, conv2_beta, 192); + long conv3 = ConvAndFcInit(conv3_wb, 192, 192, 0, 1, -1, 3, 1, 1, 0); + BatchNormInit(conv3_var, conv3_mean, conv3_beta, 192); + long conv4 = ConvAndFcInit(conv4_wb, 192, 192, 0, 1, -1, 1, 3, 0, 1); + BatchNormInit(conv4_var, conv4_mean, conv4_beta, 192); + long conv5 = ConvAndFcInit(conv5_wb, 1792, 384, 1, 1, 0); + + long dataNumber[18] = {conv1, 192, 192, 192, conv2, 192, 192, 192, conv3, 192, 192, 192, conv4, 192, 192, 192, + conv5, 1792}; + + + mydataFmt *pointTeam[18] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \ + conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \ + conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \ + conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \ + conv5_wb->pdata, conv5_wb->pbias}; + +// string filename = "../model_128/Repeat_2_list.txt"; +// int length = sizeof(dataNumber) / sizeof(*dataNumber); + readData(filepath, dataNumber, pointTeam, 18); + + convolutionInit(conv1_wb, input, conv1_out); + //conv1 8 x 8 x 192 + convolution(conv1_wb, input, conv1_out); + BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta); + relu(conv1_out, conv1_wb->pbias); + + convolutionInit(conv2_wb, input, conv2_out); + //conv2 8 x 8 x 192 + convolution(conv2_wb, input, conv2_out); + BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta); + relu(conv2_out, conv2_wb->pbias); + + convolutionInit(conv3_wb, conv2_out, conv3_out); + //conv3 8 x 8 x 192 + convolution(conv3_wb, conv2_out, conv3_out); + BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta); + relu(conv3_out, conv3_wb->pbias); + + convolutionInit(conv4_wb, conv3_out, conv4_out); + //conv4 8 x 8 x 192 + convolution(conv4_wb, conv3_out, conv4_out); + BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta); + relu(conv4_out, conv4_wb->pbias); + + conv_mergeInit(conv5_out, conv1_out, conv4_out); + // 8*8*384 + conv_merge(conv5_out, conv1_out, conv4_out); + + convolutionInit(conv5_wb, conv5_out, conv6_out); + //conv5 8 x 8 x 1792 + convolution(conv5_wb, conv5_out, conv6_out); + addbias(conv6_out, conv5_wb->pbias); + + mulandaddInit(input, conv6_out, output, 1); + mulandadd(input, conv6_out, output); + + freepBox(conv1_out); + freepBox(conv2_out); + freepBox(conv3_out); + freepBox(conv4_out); + freepBox(conv5_out); + freepBox(conv6_out); + + freeWeight(conv1_wb); + freeWeight(conv2_wb); + freeWeight(conv3_wb); + freeWeight(conv4_wb); + freeWeight(conv5_wb); + + freeBN(conv1_var); + freeBN(conv1_mean); + freeBN(conv1_beta); + freeBN(conv2_var); + freeBN(conv2_mean); + freeBN(conv2_beta); + freeBN(conv3_var); + freeBN(conv3_mean); + freeBN(conv3_beta); + freeBN(conv4_var); + freeBN(conv4_mean); + freeBN(conv4_beta); +} + +void facenet::AveragePooling(pBox *input, pBox *output) { +// cout << "size:" << input->height << endl; + avePoolingInit(input, output, input->height, 2); + avePooling(input, output, input->height, 2); +} + +void facenet::Flatten(pBox *input, pBox *output) { + output->width = input->channel; + output->height = 1; + output->channel = 1; + output->pdata = (mydataFmt *) malloc(output->channel * output->width * output->height * sizeof(mydataFmt)); + if (output->pdata == NULL)cout << "the maxPoolingInit is failed!!" 
<< endl; + memcpy(output->pdata, input->pdata, output->channel * output->width * output->height * sizeof(mydataFmt)); +} + +//参数还未设置 +void facenet::fully_connect(pBox *input, pBox *output, string filepath) { + struct Weight *conv1_wb = new Weight; + struct BN *conv1_var = new BN; + struct BN *conv1_mean = new BN; + struct BN *conv1_beta = new BN; + long conv1 = ConvAndFcInit(conv1_wb, Num, 1792, input->height, 1, 0); + BatchNormInit(conv1_var, conv1_mean, conv1_beta, Num); + long dataNumber[4] = {conv1, Num, Num, Num}; + +// cout << to_string(sum) << endl; + mydataFmt *pointTeam[4] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata}; +// string filename = "../model_128/Bottleneck_list.txt"; +// int length = sizeof(dataNumber) / sizeof(*dataNumber); + readData(filepath, dataNumber, pointTeam, 4); + + fullconnectInit(conv1_wb, output); + + //conv1 8 x 8 x 192 + fullconnect(conv1_wb, input, output); + BatchNorm(output, conv1_var, conv1_mean, conv1_beta); + + freeWeight(conv1_wb); + freeBN(conv1_var); + freeBN(conv1_mean); + freeBN(conv1_beta); +} + facenet::facenet() { } @@ -14,16 +1003,16 @@ facenet::~facenet() { void facenet::printData(pBox *in) { for (long i = 0; i < in->height * in->width * in->channel; ++i) { +// if (in->pdata[i] != 0) printf("%f\n", in->pdata[i]); } cout << "printData" << endl; } -void facenet::run(Mat &image, mydataFmt *o, int count) { +void facenet::run(Mat &image, vector &o, int count) { cout << "=====This is No." + to_string(count) + " Picture=====" << endl; pBox *output = new pBox; pBox *input; -// prewhiten(image); Stem(image, output); // printData(output); // return; @@ -74,11 +1063,10 @@ void facenet::run(Mat &image, mydataFmt *o, int count) { AveragePooling(input, output); cout << "AveragePooling Finally" << endl; input = output; -// freepBox(output); - output = new pBox; - Flatten(input, output); - cout << "Flatten Finally" << endl; - input = output; +// output = new pBox; +// Flatten(input, output); +// cout << "Flatten Finally" << endl; +// input = output; output = new pBox; fully_connect(input, output, "../model_" + to_string(Num) + "/Bottleneck_list.txt"); cout << "Fully_Connect Finally" << endl; @@ -94,872 +1082,9 @@ void facenet::run(Mat &image, mydataFmt *o, int count) { divisor = sqrt(sum); } for (int j = 0; j < Num; ++j) { - o[j] = output->pdata[j] / divisor; +// o[j] = output->pdata[j] / divisor; + o.push_back(output->pdata[j] / divisor); } // memcpy(o, output->pdata, Num * sizeof(mydataFmt)); freepBox(output); -} - -void facenet::Stem(Mat &image, pBox *output) { - pBox *rgb = new pBox; - pBox *conv1_out = new pBox; - pBox *conv2_out = new pBox; - pBox *conv3_out = new pBox; - pBox *conv4_out = new pBox; - pBox *conv5_out = new pBox; - - struct Weight *conv1_wb = new Weight; - struct Weight *conv2_wb = new Weight; - struct Weight *conv3_wb = new Weight; - struct Weight *conv4_wb = new Weight; - struct Weight *conv5_wb = new Weight; - struct Weight *conv6_wb = new Weight; - - struct pBox *pooling1_out = new pBox; - - struct pRelu *prelu_gmma1 = new pRelu; - struct pRelu *prelu_gmma2 = new pRelu; - struct pRelu *prelu_gmma3 = new pRelu; - struct pRelu *prelu_gmma4 = new pRelu; - struct pRelu *prelu_gmma5 = new pRelu; - struct pRelu *prelu_gmma6 = new pRelu; - - - long conv1 = initConvAndFc(conv1_wb, 32, 3, 3, 2, 0); - initpRelu(prelu_gmma1, 32); - long conv2 = initConvAndFc(conv2_wb, 32, 32, 3, 1, 0); - initpRelu(prelu_gmma2, 32); - long conv3 = initConvAndFc(conv3_wb, 64, 32, 3, 1, 1); - initpRelu(prelu_gmma3, 64); - long conv4 
= initConvAndFc(conv4_wb, 80, 64, 1, 1, 0); - initpRelu(prelu_gmma4, 80); - long conv5 = initConvAndFc(conv5_wb, 192, 80, 3, 1, 0); - initpRelu(prelu_gmma5, 192); - long conv6 = initConvAndFc(conv6_wb, 256, 192, 3, 2, 0); - initpRelu(prelu_gmma6, 256); - long dataNumber[18] = {conv1, 0, 0, conv2, 0, 0, conv3, 0, 0, conv4, 0, 0, conv5, 0, 0, conv6, 0, - 0}; - - mydataFmt *pointTeam[18] = {conv1_wb->pdata, conv1_wb->pbias, prelu_gmma1->pdata, \ - conv2_wb->pdata, conv2_wb->pbias, prelu_gmma2->pdata, \ - conv3_wb->pdata, conv3_wb->pbias, prelu_gmma3->pdata, \ - conv4_wb->pdata, conv4_wb->pbias, prelu_gmma4->pdata, \ - conv5_wb->pdata, conv5_wb->pbias, prelu_gmma5->pdata, \ - conv6_wb->pdata, conv6_wb->pbias, prelu_gmma6->pdata,}; - string filename = "../model_" + to_string(Num) + "/stem_list.txt"; - readData(filename, dataNumber, pointTeam); - - - -// if (firstFlag) { - image2MatrixInit(image, rgb); - image2Matrix(image, rgb, 1); - - convolutionInit(conv1_wb, rgb, conv1_out); - //conv1 149 x 149 x 32 - convolution(conv1_wb, rgb, conv1_out); - prelu(conv1_out, conv1_wb->pbias, prelu_gmma1->pdata); - convolutionInit(conv2_wb, conv1_out, conv2_out); - //conv2 147 x 147 x 32 - convolution(conv2_wb, conv1_out, conv2_out); - prelu(conv2_out, conv2_wb->pbias, prelu_gmma2->pdata); - - convolutionInit(conv3_wb, conv2_out, conv3_out); - //conv3 147 x 147 x 64 - convolution(conv3_wb, conv2_out, conv3_out); - prelu(conv3_out, conv3_wb->pbias, prelu_gmma3->pdata); - - maxPoolingInit(conv3_out, pooling1_out, 3, 2); - //maxPooling 73 x 73 x 64 - maxPooling(conv3_out, pooling1_out, 3, 2); - - convolutionInit(conv4_wb, pooling1_out, conv4_out); - //conv4 73 x 73 x 80 - convolution(conv4_wb, pooling1_out, conv4_out); - prelu(conv4_out, conv4_wb->pbias, prelu_gmma4->pdata); - - convolutionInit(conv5_wb, conv4_out, conv5_out); - //conv5 71 x 71 x 192 - convolution(conv5_wb, conv4_out, conv5_out); - prelu(conv5_out, conv5_wb->pbias, prelu_gmma5->pdata); - - - convolutionInit(conv6_wb, conv5_out, output); - //conv6 35 x 35 x 256 - convolution(conv6_wb, conv5_out, output); - prelu(output, conv6_wb->pbias, prelu_gmma6->pdata); -// firstFlag = false; -// } - - freepBox(conv1_out); - freepBox(conv2_out); - freepBox(conv3_out); - freepBox(conv4_out); - freepBox(conv5_out); - freepBox(pooling1_out); - - freepBox(rgb); - - freeWeight(conv1_wb); - freeWeight(conv2_wb); - freeWeight(conv3_wb); - freeWeight(conv4_wb); - freeWeight(conv5_wb); - freeWeight(conv6_wb); - - freepRelu(prelu_gmma1); - freepRelu(prelu_gmma2); - freepRelu(prelu_gmma3); - freepRelu(prelu_gmma4); - freepRelu(prelu_gmma5); - freepRelu(prelu_gmma6); -} - -void facenet::Inception_resnet_A(pBox *input, pBox *output, string filepath, float scale) { - pBox *conv1_out = new pBox; - pBox *conv2_out = new pBox; - pBox *conv3_out = new pBox; - pBox *conv4_out = new pBox; - pBox *conv5_out = new pBox; - pBox *conv6_out = new pBox; - pBox *conv7_out = new pBox; - pBox *conv8_out = new pBox; - - struct Weight *conv1_wb = new Weight; - struct Weight *conv2_wb = new Weight; - struct Weight *conv3_wb = new Weight; - struct Weight *conv4_wb = new Weight; - struct Weight *conv5_wb = new Weight; - struct Weight *conv6_wb = new Weight; - struct Weight *conv7_wb = new Weight; - struct Weight *conv8_wb = new Weight; - - struct pRelu *prelu_gmma1 = new pRelu; - struct pRelu *prelu_gmma2 = new pRelu; - struct pRelu *prelu_gmma3 = new pRelu; - struct pRelu *prelu_gmma4 = new pRelu; - struct pRelu *prelu_gmma5 = new pRelu; - struct pRelu *prelu_gmma6 = new pRelu; - struct 
pRelu *prelu_gmma8 = new pRelu; - - long conv1 = initConvAndFc(conv1_wb, 32, 256, 1, 1, 0); - initpRelu(prelu_gmma1, 32); - - long conv2 = initConvAndFc(conv2_wb, 32, 256, 1, 1, 0); - initpRelu(prelu_gmma2, 32); - long conv3 = initConvAndFc(conv3_wb, 32, 32, 3, 1, 1); - initpRelu(prelu_gmma3, 32); - - long conv4 = initConvAndFc(conv4_wb, 32, 256, 1, 1, 0); - initpRelu(prelu_gmma4, 32); - long conv5 = initConvAndFc(conv5_wb, 32, 32, 3, 1, 1); - initpRelu(prelu_gmma5, 32); - long conv6 = initConvAndFc(conv6_wb, 32, 32, 3, 1, 1); - initpRelu(prelu_gmma6, 32); - - long conv7 = initConvAndFc(conv7_wb, 256, 96, 1, 1, 0); - - long conv8 = initConvAndFc(conv8_wb, 256, 0, 0, 0, 0); - initpRelu(prelu_gmma8, 256); - - long dataNumber[23] = {conv1, 0, 0, conv2, 0, 0, conv3, 0, 0, conv4, 0, 0, conv5, 0, 0, conv6, 0, - 0, conv7, 256, conv8, 0, 0}; - - mydataFmt *pointTeam[23] = {conv1_wb->pdata, conv1_wb->pbias, prelu_gmma1->pdata, \ - conv2_wb->pdata, conv2_wb->pbias, prelu_gmma2->pdata, \ - conv3_wb->pdata, conv3_wb->pbias, prelu_gmma3->pdata, \ - conv4_wb->pdata, conv4_wb->pbias, prelu_gmma4->pdata, \ - conv5_wb->pdata, conv5_wb->pbias, prelu_gmma5->pdata, \ - conv6_wb->pdata, conv6_wb->pbias, prelu_gmma6->pdata, \ - conv7_wb->pdata, conv7_wb->pbias, \ - conv8_wb->pdata, conv8_wb->pbias, prelu_gmma8->pdata}; - - readData(filepath, dataNumber, pointTeam); - - - convolutionInit(conv1_wb, input, conv1_out); - //conv1 35 x 35 x 32 - convolution(conv1_wb, input, conv1_out); - prelu(conv1_out, conv1_wb->pbias, prelu_gmma1->pdata); - - convolutionInit(conv2_wb, input, conv2_out); - //conv2 35 x 35 x 32 - convolution(conv2_wb, input, conv2_out); - prelu(conv2_out, conv2_wb->pbias, prelu_gmma2->pdata); - convolutionInit(conv3_wb, conv2_out, conv3_out); - //conv3 35 x 35 x 32 - convolution(conv3_wb, conv2_out, conv3_out); - prelu(conv3_out, conv3_wb->pbias, prelu_gmma3->pdata); - - convolutionInit(conv4_wb, input, conv4_out); - //conv4 35 x 35 x 32 - convolution(conv4_wb, input, conv4_out); - prelu(conv4_out, conv4_wb->pbias, prelu_gmma4->pdata); - convolutionInit(conv5_wb, conv4_out, conv5_out); - //conv5 35 x 35 x 32 - convolution(conv5_wb, conv4_out, conv5_out); - prelu(conv5_out, conv5_wb->pbias, prelu_gmma5->pdata); - convolutionInit(conv6_wb, conv5_out, conv6_out); - //conv6 35 x 35 x 32 - convolution(conv6_wb, conv5_out, conv6_out); - prelu(conv6_out, conv6_wb->pbias, prelu_gmma6->pdata); - - conv_mergeInit(conv7_out, conv1_out, conv3_out, conv6_out); - //35 × 35 × 96 - conv_merge(conv7_out, conv1_out, conv3_out, conv6_out); - - convolutionInit(conv7_wb, conv7_out, conv8_out); - //35*35*256 - convolution(conv7_wb, conv7_out, conv8_out); - addbias(conv8_out, conv7_wb->pbias); - - mulandaddInit(input, conv8_out, output, scale); - mulandadd(input, conv8_out, output, scale); - prelu(output, conv8_wb->pbias, prelu_gmma8->pdata); - - freepBox(conv1_out); - freepBox(conv2_out); - freepBox(conv3_out); - freepBox(conv4_out); - freepBox(conv5_out); - freepBox(conv6_out); - freepBox(conv7_out); - freepBox(conv8_out); - - freeWeight(conv1_wb); - freeWeight(conv2_wb); - freeWeight(conv3_wb); - freeWeight(conv4_wb); - freeWeight(conv5_wb); - freeWeight(conv6_wb); - freeWeight(conv7_wb); - freeWeight(conv8_wb); - - freepRelu(prelu_gmma1); - freepRelu(prelu_gmma2); - freepRelu(prelu_gmma3); - freepRelu(prelu_gmma4); - freepRelu(prelu_gmma5); - freepRelu(prelu_gmma6); - freepRelu(prelu_gmma8); -} - -void facenet::Reduction_A(pBox *input, pBox *output) { - pBox *conv1_out = new pBox; - pBox *conv2_out = new pBox; - pBox 
*conv3_out = new pBox; - pBox *conv4_out = new pBox; - - struct Weight *conv1_wb = new Weight; - struct Weight *conv2_wb = new Weight; - struct Weight *conv3_wb = new Weight; - struct Weight *conv4_wb = new Weight; - - struct pBox *pooling1_out = new pBox; - - struct pRelu *prelu_gmma1 = new pRelu; - struct pRelu *prelu_gmma2 = new pRelu; - struct pRelu *prelu_gmma3 = new pRelu; - struct pRelu *prelu_gmma4 = new pRelu; - - long conv1 = initConvAndFc(conv1_wb, 384, 256, 3, 2, 0); - initpRelu(prelu_gmma1, 384); - long conv2 = initConvAndFc(conv2_wb, 192, 256, 1, 1, 0); - initpRelu(prelu_gmma2, 192); - long conv3 = initConvAndFc(conv3_wb, 192, 192, 3, 1, 0); - initpRelu(prelu_gmma3, 192); - long conv4 = initConvAndFc(conv4_wb, 256, 192, 3, 2, 0); - initpRelu(prelu_gmma4, 256); - long dataNumber[12] = {conv1, 0, 0, conv2, 0, 0, conv3, 0, 0, conv4, 0, 0}; - - mydataFmt *pointTeam[12] = {conv1_wb->pdata, conv1_wb->pbias, prelu_gmma1->pdata, \ - conv2_wb->pdata, conv2_wb->pbias, prelu_gmma2->pdata, \ - conv3_wb->pdata, conv3_wb->pbias, prelu_gmma3->pdata, \ - conv4_wb->pdata, conv4_wb->pbias, prelu_gmma4->pdata}; - string filename = "../model_" + to_string(Num) + "/Mixed_6a_list.txt"; - readData(filename, dataNumber, pointTeam); - - maxPoolingInit(input, pooling1_out, 3, 2); - // 17*17*256 - maxPooling(input, pooling1_out, 3, 2); - - convolutionInit(conv1_wb, input, conv1_out); - //conv1 17 x 17 x 384 - convolution(conv1_wb, input, conv1_out); - prelu(conv1_out, conv1_wb->pbias, prelu_gmma1->pdata); - - convolutionInit(conv2_wb, input, conv2_out); - //conv2 35 x 35 x 192 - convolution(conv2_wb, input, conv2_out); - prelu(conv2_out, conv2_wb->pbias, prelu_gmma2->pdata); - - convolutionInit(conv3_wb, conv2_out, conv3_out); - //conv3 35 x 35 x 192 - convolution(conv3_wb, conv2_out, conv3_out); - prelu(conv3_out, conv3_wb->pbias, prelu_gmma3->pdata); - - convolutionInit(conv4_wb, conv3_out, conv4_out); - //conv4 17 x 17 x 256 - convolution(conv4_wb, conv3_out, conv4_out); - prelu(conv4_out, conv4_wb->pbias, prelu_gmma4->pdata); - conv_mergeInit(output, pooling1_out, conv1_out, conv4_out); - //17×17×896 - conv_merge(output, pooling1_out, conv1_out, conv4_out); - - freepBox(conv1_out); - freepBox(conv2_out); - freepBox(conv3_out); - freepBox(conv4_out); - - freeWeight(conv1_wb); - freeWeight(conv2_wb); - freeWeight(conv3_wb); - freeWeight(conv4_wb); - - freepBox(pooling1_out); - - freepRelu(prelu_gmma1); - freepRelu(prelu_gmma2); - freepRelu(prelu_gmma3); - freepRelu(prelu_gmma4); -} - -void facenet::Inception_resnet_B(pBox *input, pBox *output, string filepath, float scale) { - pBox *conv1_out = new pBox; - pBox *conv2_out = new pBox; - pBox *conv3_out = new pBox; - pBox *conv4_out = new pBox; - pBox *conv5_out = new pBox; - pBox *conv6_out = new pBox; - - struct Weight *conv1_wb = new Weight; - struct Weight *conv2_wb = new Weight; - struct Weight *conv3_wb = new Weight; - struct Weight *conv4_wb = new Weight; - struct Weight *conv5_wb = new Weight; - struct Weight *conv6_wb = new Weight; - - struct pRelu *prelu_gmma1 = new pRelu; - struct pRelu *prelu_gmma2 = new pRelu; - struct pRelu *prelu_gmma3 = new pRelu; - struct pRelu *prelu_gmma4 = new pRelu; - struct pRelu *prelu_gmma6 = new pRelu; - - long conv1 = initConvAndFc(conv1_wb, 128, 896, 1, 1, 0); - initpRelu(prelu_gmma1, 128); - long conv2 = initConvAndFc(conv2_wb, 128, 896, 1, 1, 0); - initpRelu(prelu_gmma2, 128); - long conv3 = initConvAndFc(conv3_wb, 128, 128, 0, 1, -1, 1, 7, 0, 3);//[1,7] - initpRelu(prelu_gmma3, 128); - long conv4 = 
initConvAndFc(conv4_wb, 128, 128, 0, 1, -1, 7, 1, 3, 0);//[7,1] - initpRelu(prelu_gmma4, 128); - - long conv5 = initConvAndFc(conv5_wb, 896, 256, 1, 1, 0); - - long conv6 = initConvAndFc(conv6_wb, 896, 0, 0, 0, 0); - initpRelu(prelu_gmma6, 896); - - long dataNumber[17] = {conv1, 0, 0, conv2, 0, 0, conv3, 0, 0, conv4, 0, 0, conv5, 896, conv6, 0, - 0}; - - mydataFmt *pointTeam[17] = {conv1_wb->pdata, conv1_wb->pbias, prelu_gmma1->pdata, \ - conv2_wb->pdata, conv2_wb->pbias, prelu_gmma2->pdata, \ - conv3_wb->pdata, conv3_wb->pbias, prelu_gmma3->pdata, \ - conv4_wb->pdata, conv4_wb->pbias, prelu_gmma4->pdata, \ - conv5_wb->pdata, conv5_wb->pbias, \ - conv6_wb->pdata, conv6_wb->pbias, prelu_gmma6->pdata}; - - - readData(filepath, dataNumber, pointTeam); - - - convolutionInit(conv1_wb, input, conv1_out); - //conv1 17*17*128 - convolution(conv1_wb, input, conv1_out); - prelu(conv1_out, conv1_wb->pbias, prelu_gmma1->pdata); - - convolutionInit(conv2_wb, input, conv2_out); - //conv2 17*17*128 - convolution(conv2_wb, input, conv2_out); - prelu(conv2_out, conv2_wb->pbias, prelu_gmma2->pdata); - - convolutionInit(conv3_wb, conv2_out, conv3_out); - //conv3 17*17*128 - convolution(conv3_wb, conv2_out, conv3_out); - prelu(conv3_out, conv3_wb->pbias, prelu_gmma3->pdata); - - convolutionInit(conv4_wb, conv3_out, conv4_out); - //conv4 17*17*128 - convolution(conv4_wb, conv3_out, conv4_out); - prelu(conv4_out, conv4_wb->pbias, prelu_gmma4->pdata); - - conv_mergeInit(conv5_out, conv1_out, conv4_out); - //17*17*256 - conv_merge(conv5_out, conv1_out, conv4_out); - - convolutionInit(conv5_wb, conv5_out, conv6_out); - //conv5 17*17*896 - convolution(conv5_wb, conv5_out, conv6_out); - addbias(conv6_out, conv5_wb->pbias); - - mulandaddInit(input, conv6_out, output, scale); - mulandadd(input, conv6_out, output, scale); - prelu(output, conv6_wb->pbias, prelu_gmma6->pdata); - - freepBox(conv1_out); - freepBox(conv2_out); - freepBox(conv3_out); - freepBox(conv4_out); - freepBox(conv5_out); - freepBox(conv6_out); - - freeWeight(conv1_wb); - freeWeight(conv2_wb); - freeWeight(conv3_wb); - freeWeight(conv4_wb); - freeWeight(conv5_wb); - freeWeight(conv6_wb); - - freepRelu(prelu_gmma1); - freepRelu(prelu_gmma2); - freepRelu(prelu_gmma3); - freepRelu(prelu_gmma4); -// freepRelu(prelu_gmma5); - freepRelu(prelu_gmma6); -} - -void facenet::Reduction_B(pBox *input, pBox *output) { - pBox *conv1_out = new pBox; - pBox *conv2_out = new pBox; - pBox *conv3_out = new pBox; - pBox *conv4_out = new pBox; - pBox *conv5_out = new pBox; - pBox *conv6_out = new pBox; - pBox *conv7_out = new pBox; - - struct Weight *conv1_wb = new Weight; - struct Weight *conv2_wb = new Weight; - struct Weight *conv3_wb = new Weight; - struct Weight *conv4_wb = new Weight; - struct Weight *conv5_wb = new Weight; - struct Weight *conv6_wb = new Weight; - struct Weight *conv7_wb = new Weight; - - struct pBox *pooling1_out = new pBox; - - struct pRelu *prelu_gmma1 = new pRelu; - struct pRelu *prelu_gmma2 = new pRelu; - struct pRelu *prelu_gmma3 = new pRelu; - struct pRelu *prelu_gmma4 = new pRelu; - struct pRelu *prelu_gmma5 = new pRelu; - struct pRelu *prelu_gmma6 = new pRelu; - struct pRelu *prelu_gmma7 = new pRelu; - - long conv1 = initConvAndFc(conv1_wb, 256, 896, 1, 1, 0); - initpRelu(prelu_gmma1, 256); - long conv2 = initConvAndFc(conv2_wb, 384, 256, 3, 2, 0); - initpRelu(prelu_gmma2, 384); - - long conv3 = initConvAndFc(conv3_wb, 256, 896, 1, 1, 0); - initpRelu(prelu_gmma3, 256); - long conv4 = initConvAndFc(conv4_wb, 256, 256, 3, 2, 0); - 
initpRelu(prelu_gmma4, 256); - - long conv5 = initConvAndFc(conv5_wb, 256, 896, 1, 1, 0); - initpRelu(prelu_gmma5, 256); - long conv6 = initConvAndFc(conv6_wb, 256, 256, 3, 1, 1); - initpRelu(prelu_gmma6, 256); - long conv7 = initConvAndFc(conv7_wb, 256, 256, 3, 2, 0); - initpRelu(prelu_gmma7, 256); - - long dataNumber[21] = {conv1, 0, 0, conv2, 0, 0, conv3, 0, 0, conv4, 0, 0, conv5, 0, 0, conv6, - 0, 0, conv7, 0, 0}; - - mydataFmt *pointTeam[21] = {conv1_wb->pdata, conv1_wb->pbias, prelu_gmma1->pdata, \ - conv2_wb->pdata, conv2_wb->pbias, prelu_gmma2->pdata, \ - conv3_wb->pdata, conv3_wb->pbias, prelu_gmma3->pdata, \ - conv4_wb->pdata, conv4_wb->pbias, prelu_gmma4->pdata, \ - conv5_wb->pdata, conv5_wb->pbias, prelu_gmma5->pdata, \ - conv6_wb->pdata, conv6_wb->pbias, prelu_gmma6->pdata, \ - conv7_wb->pdata, conv7_wb->pbias, prelu_gmma7->pdata,}; - string filename = "../model_" + to_string(Num) + "/Mixed_7a_list.txt"; - readData(filename, dataNumber, pointTeam); - - - maxPoolingInit(input, pooling1_out, 3, 2, 1); - // 8*8*896 - maxPooling(input, pooling1_out, 3, 2); - - convolutionInit(conv1_wb, input, conv1_out); - //conv1 17 x 17 x 256 - convolution(conv1_wb, input, conv1_out); - prelu(conv1_out, conv1_wb->pbias, prelu_gmma1->pdata); - - convolutionInit(conv2_wb, conv1_out, conv2_out); - //conv2 8 x 8 x 384 - convolution(conv2_wb, conv1_out, conv2_out); - prelu(conv2_out, conv2_wb->pbias, prelu_gmma2->pdata); - - convolutionInit(conv3_wb, input, conv3_out); - //conv3 17 x 17 x 256 - convolution(conv3_wb, input, conv3_out); - prelu(conv3_out, conv3_wb->pbias, prelu_gmma3->pdata); - - convolutionInit(conv4_wb, conv3_out, conv4_out); - //conv4 8 x 8 x 256 - convolution(conv4_wb, conv3_out, conv4_out); - prelu(conv4_out, conv4_wb->pbias, prelu_gmma4->pdata); - - convolutionInit(conv5_wb, input, conv5_out); - //conv5 17 x 17 x 256 - convolution(conv5_wb, input, conv5_out); - prelu(conv5_out, conv5_wb->pbias, prelu_gmma5->pdata); - - convolutionInit(conv6_wb, conv5_out, conv6_out); - //conv6 17 x 17 x 256 - convolution(conv6_wb, conv5_out, conv6_out); - prelu(conv6_out, conv6_wb->pbias, prelu_gmma6->pdata); - - convolutionInit(conv7_wb, conv6_out, conv7_out); - //conv6 8 x 8 x 256 - convolution(conv7_wb, conv6_out, conv7_out); - prelu(conv7_out, conv7_wb->pbias, prelu_gmma7->pdata); - - conv_mergeInit(output, conv2_out, conv4_out, conv7_out, pooling1_out); - //8*8*1792 - conv_merge(output, conv2_out, conv4_out, conv7_out, pooling1_out); - - freepBox(conv1_out); - freepBox(conv2_out); - freepBox(conv3_out); - freepBox(conv4_out); - freepBox(conv5_out); - freepBox(conv6_out); - freepBox(conv7_out); - - freeWeight(conv1_wb); - freeWeight(conv2_wb); - freeWeight(conv3_wb); - freeWeight(conv4_wb); - freeWeight(conv5_wb); - freeWeight(conv6_wb); - freeWeight(conv7_wb); - - freepBox(pooling1_out); - - freepRelu(prelu_gmma1); - freepRelu(prelu_gmma2); - freepRelu(prelu_gmma3); - freepRelu(prelu_gmma4); - freepRelu(prelu_gmma5); - freepRelu(prelu_gmma6); - freepRelu(prelu_gmma7); -} - -void facenet::Inception_resnet_C(pBox *input, pBox *output, string filepath, float scale) { - pBox *conv1_out = new pBox; - pBox *conv2_out = new pBox; - pBox *conv3_out = new pBox; - pBox *conv4_out = new pBox; - pBox *conv5_out = new pBox; - pBox *conv6_out = new pBox; - - struct Weight *conv1_wb = new Weight; - struct Weight *conv2_wb = new Weight; - struct Weight *conv3_wb = new Weight; - struct Weight *conv4_wb = new Weight; - struct Weight *conv5_wb = new Weight; - struct Weight *conv6_wb = new Weight; - - struct 
pRelu *prelu_gmma1 = new pRelu; - struct pRelu *prelu_gmma2 = new pRelu; - struct pRelu *prelu_gmma3 = new pRelu; - struct pRelu *prelu_gmma4 = new pRelu; - struct pRelu *prelu_gmma6 = new pRelu; - - - long conv1 = initConvAndFc(conv1_wb, 192, 1792, 1, 1, 0); - initpRelu(prelu_gmma1, 192); - long conv2 = initConvAndFc(conv2_wb, 192, 1792, 1, 1, 0); - initpRelu(prelu_gmma2, 192); - long conv3 = initConvAndFc(conv3_wb, 192, 192, 0, 1, -1, 1, 3, 0, 1); - initpRelu(prelu_gmma3, 192); - long conv4 = initConvAndFc(conv4_wb, 192, 192, 0, 1, -1, 3, 1, 1, 0); - initpRelu(prelu_gmma4, 192); - long conv5 = initConvAndFc(conv5_wb, 1792, 384, 1, 1, 0); - - long conv6 = initConvAndFc(conv6_wb, 1792, 0, 0, 0, 0); - initpRelu(prelu_gmma6, 1792); - - long dataNumber[17] = {conv1, 0, 0, conv2, 0, 0, conv3, 0, 0, conv4, 0, 0, conv5, 1792, conv6, 0, - 0}; - - - mydataFmt *pointTeam[17] = {conv1_wb->pdata, conv1_wb->pbias, prelu_gmma1->pdata, \ - conv2_wb->pdata, conv2_wb->pbias, prelu_gmma2->pdata, \ - conv3_wb->pdata, conv3_wb->pbias, prelu_gmma3->pdata, \ - conv4_wb->pdata, conv4_wb->pbias, prelu_gmma4->pdata, \ - conv5_wb->pdata, conv5_wb->pbias, \ - conv6_wb->pdata, conv6_wb->pbias, prelu_gmma6->pdata}; - -// string filename = "../model_128/Repeat_2_list.txt"; -// int length = sizeof(dataNumber) / sizeof(*dataNumber); - readData(filepath, dataNumber, pointTeam); - - convolutionInit(conv1_wb, input, conv1_out); - //conv1 8 x 8 x 192 - convolution(conv1_wb, input, conv1_out); - prelu(conv1_out, conv1_wb->pbias, prelu_gmma1->pdata); - - convolutionInit(conv2_wb, input, conv2_out); - //conv2 8 x 8 x 192 - convolution(conv2_wb, input, conv2_out); - prelu(conv2_out, conv2_wb->pbias, prelu_gmma2->pdata); - - convolutionInit(conv3_wb, conv2_out, conv3_out); - //conv3 8 x 8 x 192 - convolution(conv3_wb, conv2_out, conv3_out); - prelu(conv3_out, conv3_wb->pbias, prelu_gmma3->pdata); - - convolutionInit(conv4_wb, conv3_out, conv4_out); - //conv4 8 x 8 x 192 - convolution(conv4_wb, conv3_out, conv4_out); - prelu(conv4_out, conv4_wb->pbias, prelu_gmma4->pdata); - - conv_mergeInit(conv5_out, conv1_out, conv4_out); - // 8*8*384 - conv_merge(conv5_out, conv1_out, conv4_out); - - convolutionInit(conv5_wb, conv5_out, conv6_out); - //conv5 8 x 8 x 1792 - convolution(conv5_wb, conv5_out, conv6_out); - addbias(conv6_out, conv5_wb->pbias); - - mulandaddInit(input, conv6_out, output, scale); - mulandadd(input, conv6_out, output, scale); - prelu(output, conv6_wb->pbias, prelu_gmma6->pdata); - - freepBox(conv1_out); - freepBox(conv2_out); - freepBox(conv3_out); - freepBox(conv4_out); - freepBox(conv5_out); - freepBox(conv6_out); - - freeWeight(conv1_wb); - freeWeight(conv2_wb); - freeWeight(conv3_wb); - freeWeight(conv4_wb); - freeWeight(conv5_wb); - freeWeight(conv6_wb); - - freepRelu(prelu_gmma1); - freepRelu(prelu_gmma2); - freepRelu(prelu_gmma3); - freepRelu(prelu_gmma4); -// freepRelu(prelu_gmma5); - freepRelu(prelu_gmma6); -} - -void facenet::Inception_resnet_C_None(pBox *input, pBox *output, string filepath) { - pBox *conv1_out = new pBox; - pBox *conv2_out = new pBox; - pBox *conv3_out = new pBox; - pBox *conv4_out = new pBox; - pBox *conv5_out = new pBox; - pBox *conv6_out = new pBox; - - struct Weight *conv1_wb = new Weight; - struct Weight *conv2_wb = new Weight; - struct Weight *conv3_wb = new Weight; - struct Weight *conv4_wb = new Weight; - struct Weight *conv5_wb = new Weight; - - struct pRelu *prelu_gmma1 = new pRelu; - struct pRelu *prelu_gmma2 = new pRelu; - struct pRelu *prelu_gmma3 = new pRelu; - struct pRelu 
*prelu_gmma4 = new pRelu; - - - long conv1 = initConvAndFc(conv1_wb, 192, 1792, 1, 1, 0); - initpRelu(prelu_gmma1, 192); - long conv2 = initConvAndFc(conv2_wb, 192, 1792, 1, 1, 0); - initpRelu(prelu_gmma2, 192); - long conv3 = initConvAndFc(conv3_wb, 192, 192, 0, 1, -1, 1, 3, 0, 1); - initpRelu(prelu_gmma3, 192); - long conv4 = initConvAndFc(conv4_wb, 192, 192, 0, 1, -1, 3, 1, 1, 0); - initpRelu(prelu_gmma4, 192); - long conv5 = initConvAndFc(conv5_wb, 1792, 384, 1, 1, 0); - - long dataNumber[14] = {conv1, 0, 0, conv2, 0, 0, conv3, 0, 0, conv4, 0, 0, conv5, 1792}; - - - mydataFmt *pointTeam[14] = {conv1_wb->pdata, conv1_wb->pbias, prelu_gmma1->pdata, \ - conv2_wb->pdata, conv2_wb->pbias, prelu_gmma2->pdata, \ - conv3_wb->pdata, conv3_wb->pbias, prelu_gmma3->pdata, \ - conv4_wb->pdata, conv4_wb->pbias, prelu_gmma4->pdata, \ - conv5_wb->pdata, conv5_wb->pbias}; - -// string filename = "../model_128/Repeat_2_list.txt"; -// int length = sizeof(dataNumber) / sizeof(*dataNumber); - readData(filepath, dataNumber, pointTeam); - - convolutionInit(conv1_wb, input, conv1_out); - //conv1 8 x 8 x 192 - convolution(conv1_wb, input, conv1_out); - prelu(conv1_out, conv1_wb->pbias, prelu_gmma1->pdata); - - convolutionInit(conv2_wb, input, conv2_out); - //conv2 8 x 8 x 192 - convolution(conv2_wb, input, conv2_out); - prelu(conv2_out, conv2_wb->pbias, prelu_gmma2->pdata); - - convolutionInit(conv3_wb, conv2_out, conv3_out); - //conv3 8 x 8 x 192 - convolution(conv3_wb, conv2_out, conv3_out); - prelu(conv3_out, conv3_wb->pbias, prelu_gmma3->pdata); - - convolutionInit(conv4_wb, conv3_out, conv4_out); - //conv4 8 x 8 x 192 - convolution(conv4_wb, conv3_out, conv4_out); - prelu(conv4_out, conv4_wb->pbias, prelu_gmma4->pdata); - - conv_mergeInit(conv5_out, conv1_out, conv4_out); - // 8*8*384 - conv_merge(conv5_out, conv1_out, conv4_out); - - convolutionInit(conv5_wb, conv5_out, conv6_out); - //conv5 8 x 8 x 1792 - convolution(conv5_wb, conv5_out, conv6_out); - addbias(conv6_out, conv5_wb->pbias); - - mulandaddInit(input, conv6_out, output, 1); - mulandadd(input, conv6_out, output); - - freepBox(conv1_out); - freepBox(conv2_out); - freepBox(conv3_out); - freepBox(conv4_out); - freepBox(conv5_out); - freepBox(conv6_out); - - freeWeight(conv1_wb); - freeWeight(conv2_wb); - freeWeight(conv3_wb); - freeWeight(conv4_wb); - freeWeight(conv5_wb); - - freepRelu(prelu_gmma1); - freepRelu(prelu_gmma2); - freepRelu(prelu_gmma3); - freepRelu(prelu_gmma4); -} - -void facenet::AveragePooling(pBox *input, pBox *output) { -// cout << "size:" << input->height << endl; - avePoolingInit(input, output, input->height, 2); - avePooling(input, output, input->height, 2); -} - -void facenet::Flatten(pBox *input, pBox *output) { - output->width = input->channel; - output->height = 1; - output->channel = 1; - output->pdata = (mydataFmt *) malloc(output->channel * output->width * output->height * sizeof(mydataFmt)); - if (output->pdata == NULL)cout << "the maxPoolingInit is failed!!" 
<< endl; - memcpy(output->pdata, input->pdata, output->channel * output->width * output->height * sizeof(mydataFmt)); -} - -//参数还未设置 -void facenet::fully_connect(pBox *input, pBox *output, string filepath) { - struct Weight *conv1_wb = new Weight; - struct pRelu *prelu_gmma1 = new pRelu; - long conv1 = initConvAndFc(conv1_wb, Num, 1792, input->height, 1, 0); - initpRelu(prelu_gmma1, Num); - long dataNumber[3] = {conv1, 0, 0}; - -// cout << to_string(sum) << endl; - mydataFmt *pointTeam[3] = {conv1_wb->pdata, conv1_wb->pbias, prelu_gmma1->pdata}; -// string filename = "../model_128/Bottleneck_list.txt"; -// int length = sizeof(dataNumber) / sizeof(*dataNumber); - readData(filepath, dataNumber, pointTeam); - - fullconnectInit(conv1_wb, output); - - //conv1 8 x 8 x 192 - fullconnect(conv1_wb, input, output); -// prelu(output, conv1_wb->pbias, prelu_gmma1->pdata); - - freeWeight(conv1_wb); - freepRelu(prelu_gmma1); -} - -void facenet::conv_mergeInit(pBox *output, pBox *c1, pBox *c2, pBox *c3, pBox *c4) { - output->channel = 0; - output->height = c1->height; - output->width = c1->width; - if (c1 != 0) { - output->channel = c1->channel; - if (c2 != 0) { - output->channel += c2->channel; - if (c3 != 0) { - output->channel += c3->channel; - if (c4 != 0) { - output->channel += c4->channel; - } - } - } - } else { cout << "conv_mergeInit" << endl; } - output->pdata = (mydataFmt *) malloc(output->width * output->height * output->channel * sizeof(mydataFmt)); - if (output->pdata == NULL)cout << "the conv_mergeInit is failed!!" << endl; - memset(output->pdata, 0, output->width * output->height * output->channel * sizeof(mydataFmt)); -} - -void facenet::conv_merge(pBox *output, pBox *c1, pBox *c2, pBox *c3, pBox *c4) { -// cout << "output->channel:" << output->channel << endl; - if (c1 != 0) { - long count1 = c1->height * c1->width * c1->channel; - //output->pdata = c1->pdata; - for (long i = 0; i < count1; i++) { - output->pdata[i] = c1->pdata[i]; - } - if (c2 != 0) { - long count2 = c2->height * c2->width * c2->channel; - for (long i = 0; i < count2; i++) { - output->pdata[count1 + i] = c2->pdata[i]; - } - if (c3 != 0) { - long count3 = c3->height * c3->width * c3->channel; - for (long i = 0; i < count3; i++) { - output->pdata[count1 + count2 + i] = c3->pdata[i]; - } - if (c4 != 0) { - long count4 = c4->height * c4->width * c4->channel; - for (long i = 0; i < count4; i++) { - output->pdata[count1 + count2 + count3 + i] = c4->pdata[i]; - } - } - } - } - } else { cout << "conv_mergeInit" << endl; } -// cout << "output->pdata:" << *(output->pdata) << endl; -} - -void facenet::mulandaddInit(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float scale) { - outpBox->channel = temppbox->channel; - outpBox->width = temppbox->width; - outpBox->height = temppbox->height; - outpBox->pdata = (mydataFmt *) malloc(outpBox->width * outpBox->height * outpBox->channel * sizeof(mydataFmt)); - if (outpBox->pdata == NULL)cout << "the mulandaddInit is failed!!" 
<< endl; - memset(outpBox->pdata, 0, outpBox->width * outpBox->height * outpBox->channel * sizeof(mydataFmt)); -} - -void facenet::mulandadd(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float scale) { - mydataFmt *ip = inpbox->pdata; - mydataFmt *tp = temppbox->pdata; - mydataFmt *op = outpBox->pdata; - long dis = inpbox->width * inpbox->height * inpbox->channel; - for (long i = 0; i < dis; i++) { - op[i] = ip[i] + tp[i] * scale; - } } \ No newline at end of file diff --git a/src/facenet.h b/src/facenet.h index 1145a66..dc8267f 100644 --- a/src/facenet.h +++ b/src/facenet.h @@ -14,7 +14,7 @@ public: ~facenet(); - void run(Mat &image, mydataFmt *o, int count = 1); + void run(Mat &image, vector &o, int count = 1); private: void Stem(Mat &image, pBox *output); @@ -35,20 +35,9 @@ private: void fully_connect(pBox *input, pBox *output, string filepath = ""); - void conv_merge(pBox *output, pBox *c1 = 0, pBox *c2 = 0, pBox *c3 = 0, pBox *c4 = 0); - - void conv_mergeInit(pBox *output, pBox *c1 = 0, pBox *c2 = 0, pBox *c3 = 0, pBox *c4 = 0); - - void mulandaddInit(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float scale); - - void mulandadd(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float scale = 1); - void Flatten(pBox *input, pBox *output); void printData(pBox *output); - - - }; #endif //MAIN_FACENET_H diff --git a/src/network.cpp b/src/network.cpp index 5373fc7..b21e1d6 100644 --- a/src/network.cpp +++ b/src/network.cpp @@ -45,44 +45,57 @@ void image2Matrix(const Mat &image, const struct pBox *pbox, int num) { return; } mydataFmt *p = pbox->pdata; + double sqr, stddev_adj; + int size; + mydataFmt mymean, mystddev; + // prewhiten + if (num != 0) { + MeanAndDev(image, mymean, mystddev); + cout << mymean << "----" << mystddev << endl; + size = image.cols * image.rows * image.channels(); + sqr = sqrt(double(size)); + if (mystddev >= 1.0 / sqr) { + stddev_adj = mystddev; + } else { + stddev_adj = 1.0 / sqr; + } + } for (int rowI = 0; rowI < image.rows; rowI++) { for (int colK = 0; colK < image.cols; colK++) { if (num == 0) { - *p = (image.at(rowI, colK)[0] - 127.5) * 0.0078125; + *p = (image.at(rowI, colK)[2] - 127.5) * 0.0078125; *(p + image.rows * image.cols) = (image.at(rowI, colK)[1] - 127.5) * 0.0078125; - *(p + 2 * image.rows * image.cols) = (image.at(rowI, colK)[2] - 127.5) * 0.0078125; + *(p + 2 * image.rows * image.cols) = (image.at(rowI, colK)[0] - 127.5) * 0.0078125; p++; } else { - double mean, stddev, sqr, stddev_adj; - int size; - Mat temp_m, temp_sd; - meanStdDev(image, temp_m, temp_sd); - mean = temp_m.at(0, 0); - stddev = temp_sd.at(0, 0); - size = image.cols * image.rows * image.channels(); - sqr = sqrt(double(size)); - - if (stddev >= 1.0 / sqr) { - stddev_adj = stddev; - } else { - stddev_adj = 1.0 / sqr; - } -// cout << mean << "|" << stddev << "|" << size << "|" << stddev_adj << "|" << endl; - for (int i = 0; i < image.rows; i++) { - for (int j = 0; j < image.cols; j++) { - image.at(i, j); - *p = (image.at(i, j)[0] - mean) / stddev_adj; - *(p + image.rows * image.cols) = (image.at(i, j)[1] - mean) / stddev_adj; - *(p + 2 * image.rows * image.cols) = (image.at(i, j)[2] - mean) / stddev_adj; -// cout << (image.at(i, j)[0] - mean) / stddev_adj << endl; -// return; - } - } + // brg2rgb + *(p + 0 * image.rows * image.cols) = (image.at(rowI, colK)[2] - mymean) / stddev_adj; + *(p + 1 * image.rows * image.cols) = (image.at(rowI, colK)[1] - mymean) / stddev_adj; + *(p + 2 * image.rows * image.cols) = (image.at(rowI, colK)[0] - mymean) / stddev_adj; 
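// Editor's note (illustrative, not part of this patch): the prewhitening applied above is
// y = (x - mean) / max(stddev, 1 / sqrt(N)), where mean and stddev are taken over all
// N = rows * cols * channels values of the crop, matching FaceNet-style preprocessing.
// A minimal standalone sketch of the same normalization using OpenCV directly; the function
// name prewhitenSketch and the use of cv::meanStdDev here are assumptions for illustration only.
#include <opencv2/opencv.hpp>
#include <algorithm>
#include <cmath>
static cv::Mat prewhitenSketch(const cv::Mat &bgr) {
    cv::Mat img;
    bgr.convertTo(img, CV_32FC3);                      // work in floating point
    cv::Scalar m, s;
    cv::meanStdDev(img, m, s);                         // per-channel mean / stddev
    double n = static_cast<double>(img.total()) * img.channels();
    double mean = (m[0] + m[1] + m[2]) / 3.0;          // collapse to one global mean
    double var = (s[0] * s[0] + s[1] * s[1] + s[2] * s[2]) / 3.0
               + ((m[0] - mean) * (m[0] - mean) + (m[1] - mean) * (m[1] - mean) + (m[2] - mean) * (m[2] - mean)) / 3.0;
    double stddevAdj = std::max(std::sqrt(var), 1.0 / std::sqrt(n));
    return (img - cv::Scalar::all(mean)) / stddevAdj;  // global whitening; channel order unchanged
}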
+ p++; } } } } +void MeanAndDev(const Mat &image, mydataFmt &p, mydataFmt &q) { + mydataFmt meansum = 0, stdsum = 0; + for (int rowI = 0; rowI < image.rows; rowI++) { + for (int colK = 0; colK < image.cols; colK++) { + meansum += image.at(rowI, colK)[0] + image.at(rowI, colK)[1] + image.at(rowI, colK)[2]; + } + } + p = meansum / (image.cols * image.rows * image.channels()); + for (int rowI = 0; rowI < image.rows; rowI++) { + for (int colK = 0; colK < image.cols; colK++) { + stdsum += pow((image.at(rowI, colK)[0] - p), 2) + + pow((image.at(rowI, colK)[1] - p), 2) + + pow((image.at(rowI, colK)[2] - p), 2); + } + } + q = sqrt(stdsum / (image.cols * image.rows * image.channels())); +} + void featurePadInit(const pBox *pbox, pBox *outpBox, const int pad, const int padw, const int padh) { if (pad < -1) { cout << "the data needn't to pad,please check you network!" << endl; @@ -156,12 +169,6 @@ void convolutionInit(const Weight *weight, pBox *pbox, pBox *outpBox) { } void convolution(const Weight *weight, const pBox *pbox, pBox *outpBox) { -// if (weight->pad != 0) { -// pBox *padpbox = new pBox; -// featurePadInit(outpBox, padpbox, weight->pad, weight->padw, weight->padh); -// featurePad(outpBox, padpbox, weight->pad, weight->padw, weight->padh); -// *outpBox = *padpbox; -// } int ckh, ckw, ckd, stride, cknum, ckpad, imginputh, imginputw, imginputd, Nh, Nw; mydataFmt *ck, *imginput; // float *output = outpBox->pdata; @@ -206,7 +213,6 @@ void convolution(const Weight *weight, const pBox *pbox, pBox *outpBox) { } } } -// cout << "output->pdata:" << (outpBox->pdata[10]) << endl; } void maxPoolingInit(const pBox *pbox, pBox *Matrix, int kernelSize, int stride, int flag) { @@ -315,8 +321,12 @@ void avePooling(const pBox *pbox, pBox *Matrix, int kernelSize, int stride) { } } - -void prelu(struct pBox *pbox, mydataFmt *pbias, mydataFmt *prelu_gmma) { +/** + * Activation function without learned slope coefficients (plain ReLU) + * @param pbox + * @param pbias + */ +void relu(struct pBox *pbox, mydataFmt *pbias) { if (pbox->pdata == NULL) { cout << "the Relu feature is NULL!!" << endl; return; @@ -327,17 +337,15 @@ void prelu(struct pBox *pbox, mydataFmt *pbias, mydataFmt *prelu_gmma) { } mydataFmt *op = pbox->pdata; mydataFmt *pb = pbias; - mydataFmt *pg = prelu_gmma; long dis = pbox->width * pbox->height; for (int channel = 0; channel < pbox->channel; channel++) { for (int col = 0; col < dis; col++) { *op = *op + *pb; - *op = (*op > 0) ? (*op) : ((*op) * (*pg)); + *op = (*op > 0) ? (*op) : ((*op) * 0); op++; } pb++; - pg++; } } @@ -364,36 +372,30 @@ void fullconnect(const Weight *weight, const pBox *pbox, pBox *outpBox) { // row no trans A's row A'col //cblas_sgemv(CblasRowMajor, CblasNoTrans, weight->selfChannel, weight->lastChannel, 1, weight->pdata, weight->lastChannel, pbox->pdata, 1, 0, outpBox->pdata, 1); vectorXmatrix(pbox->pdata, weight->pdata, - pbox->width * pbox->height * pbox->channel, weight->lastChannel, weight->selfChannel, outpBox->pdata); } -void vectorXmatrix(mydataFmt *matrix, mydataFmt *v, int size, int v_w, int v_h, mydataFmt *p) { +void vectorXmatrix(mydataFmt *matrix, mydataFmt *v, int v_w, int v_h, mydataFmt *p) { for (int i = 0; i < v_h; i++) { p[i] = 0; for (int j = 0; j < v_w; j++) { p[i] += matrix[j] * v[i * v_w + j]; -// cout << p[i] << endl; } -// cout << p[i] << endl; -// p[i] = -0.0735729; -// cout << "...." << endl; -// break; } -// cout << "...."
<< endl; } void readData(string filename, long dataNumber[], mydataFmt *pTeam[], int length) { ifstream in(filename.data()); string line; + long temp = dataNumber[0]; if (in) { int i = 0; int count = 0; int pos = 0; while (getline(in, line)) { try { - if (i < dataNumber[count]) { + if (i < temp) { line.erase(0, 1); pos = line.find(']'); line.erase(pos, 1); @@ -401,12 +403,14 @@ void readData(string filename, long dataNumber[], mydataFmt *pTeam[], int length if (pos != -1) { line.erase(pos, 1); } - *(pTeam[count])++ = atof(line.data()); + if (dataNumber[count] != 0) { + *(pTeam[count])++ = atof(line.data()); + } } else { count++; if ((length != 0) && (count == length)) break; - dataNumber[count] += dataNumber[count - 1]; + temp += dataNumber[count]; line.erase(0, 1); pos = line.find(']'); line.erase(pos, 1); @@ -414,7 +418,9 @@ void readData(string filename, long dataNumber[], mydataFmt *pTeam[], int length if (pos != -1) { line.erase(pos, 1); } - *(pTeam[count])++ = atof(line.data()); + if (dataNumber[count] != 0) { + *(pTeam[count])++ = atof(line.data()); + } } i++; } @@ -429,19 +435,15 @@ void readData(string filename, long dataNumber[], mydataFmt *pTeam[], int length } // w sc lc ks s p kw kh -long initConvAndFc(struct Weight *weight, int schannel, int lchannel, int kersize, +long ConvAndFcInit(struct Weight *weight, int schannel, int lchannel, int kersize, int stride, int pad, int w, int h, int padw, int padh) { weight->selfChannel = schannel; weight->lastChannel = lchannel; weight->kernelSize = kersize; -// if (kersize == 0) { weight->h = h; weight->w = w; -// } -// if (pad == -1) { weight->padh = padh; weight->padw = padw; -// } weight->stride = stride; weight->pad = pad; weight->pbias = (mydataFmt *) malloc(schannel * sizeof(mydataFmt)); @@ -459,9 +461,114 @@ long initConvAndFc(struct Weight *weight, int schannel, int lchannel, int kersiz return byteLenght; } -void initpRelu(struct pRelu *prelu, int width) { - prelu->width = width; - prelu->pdata = (mydataFmt *) malloc(width * sizeof(mydataFmt)); - if (prelu->pdata == NULL)cout << "prelu apply for memory failed!!!!"; - memset(prelu->pdata, 0, width * sizeof(mydataFmt)); +void conv_mergeInit(pBox *output, pBox *c1, pBox *c2, pBox *c3, pBox *c4) { + output->channel = 0; + output->height = c1->height; + output->width = c1->width; + if (c1 != 0) { + output->channel = c1->channel; + if (c2 != 0) { + output->channel += c2->channel; + if (c3 != 0) { + output->channel += c3->channel; + if (c4 != 0) { + output->channel += c4->channel; + } + } + } + } + output->pdata = (mydataFmt *) malloc(output->width * output->height * output->channel * sizeof(mydataFmt)); + if (output->pdata == NULL)cout << "the conv_mergeInit is failed!!" 
<< endl; + memset(output->pdata, 0, output->width * output->height * output->channel * sizeof(mydataFmt)); +} + +void conv_merge(pBox *output, pBox *c1, pBox *c2, pBox *c3, pBox *c4) { +// cout << "output->channel:" << output->channel << endl; + if (c1 != 0) { + long count1 = c1->height * c1->width * c1->channel; + //output->pdata = c1->pdata; + for (long i = 0; i < count1; i++) { + output->pdata[i] = c1->pdata[i]; + } + if (c2 != 0) { + long count2 = c2->height * c2->width * c2->channel; + for (long i = 0; i < count2; i++) { + output->pdata[count1 + i] = c2->pdata[i]; + } + if (c3 != 0) { + long count3 = c3->height * c3->width * c3->channel; + for (long i = 0; i < count3; i++) { + output->pdata[count1 + count2 + i] = c3->pdata[i]; + } + if (c4 != 0) { + long count4 = c4->height * c4->width * c4->channel; + for (long i = 0; i < count4; i++) { + output->pdata[count1 + count2 + count3 + i] = c4->pdata[i]; + } + } + } + } + } else { cout << "conv_merge: input c1 is NULL!" << endl; } +} + +void mulandaddInit(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float scale) { + outpBox->channel = temppbox->channel; + outpBox->width = temppbox->width; + outpBox->height = temppbox->height; + outpBox->pdata = (mydataFmt *) malloc(outpBox->width * outpBox->height * outpBox->channel * sizeof(mydataFmt)); + if (outpBox->pdata == NULL)cout << "the mulandaddInit is failed!!" << endl; + memset(outpBox->pdata, 0, outpBox->width * outpBox->height * outpBox->channel * sizeof(mydataFmt)); +} + +void mulandadd(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float scale) { + mydataFmt *ip = inpbox->pdata; + mydataFmt *tp = temppbox->pdata; + mydataFmt *op = outpBox->pdata; + long dis = inpbox->width * inpbox->height * inpbox->channel; + for (long i = 0; i < dis; i++) { + op[i] = ip[i] + tp[i] * scale; + } +} + +void BatchNormInit(struct BN *var, struct BN *mean, struct BN *beta, int width) { + var->width = width; + var->pdata = (mydataFmt *) malloc(width * sizeof(mydataFmt)); + if (var->pdata == NULL)cout << "BN apply for memory failed!!!!"; + memset(var->pdata, 0, width * sizeof(mydataFmt)); + + mean->width = width; + mean->pdata = (mydataFmt *) malloc(width * sizeof(mydataFmt)); + if (mean->pdata == NULL)cout << "BN apply for memory failed!!!!"; + memset(mean->pdata, 0, width * sizeof(mydataFmt)); + + beta->width = width; + beta->pdata = (mydataFmt *) malloc(width * sizeof(mydataFmt)); + if (beta->pdata == NULL)cout << "BN apply for memory failed!!!!"; + memset(beta->pdata, 0, width * sizeof(mydataFmt)); +} + +void BatchNorm(struct pBox *pbox, struct BN *var, struct BN *mean, struct BN *beta) { + if (pbox->pdata == NULL) { + cout << "the BatchNorm feature is NULL!!" << endl; + return; + } + if ((var->pdata == NULL) || (mean->pdata == NULL) || (beta->pdata == NULL)) { + cout << "the BatchNorm bias is NULL!!"
<< endl; + return; + } + mydataFmt *pp = pbox->pdata; + mydataFmt *vp = var->pdata; + mydataFmt *mp = mean->pdata; + mydataFmt *bp = beta->pdata; + int gamma = 1; + float epsilon = 0.001; + long dis = pbox->width * pbox->height; + mydataFmt temp = 0; + for (int channel = 0; channel < pbox->channel; channel++) { + temp = gamma / sqrt(((vp[channel]) + epsilon)); + for (int col = 0; col < dis; col++) { + *pp = temp * (*pp) + ((bp[channel]) - temp * (mp[channel])); + pp++; + } + } } \ No newline at end of file diff --git a/src/network.h b/src/network.h index 543fb32..d7432f7 100644 --- a/src/network.h +++ b/src/network.h @@ -11,7 +11,6 @@ #include #include #include "pBox.h" -//#include using namespace cv; @@ -25,17 +24,15 @@ void avePooling(const pBox *pbox, pBox *Matrix, int kernelSize, int stride); void featurePad(const pBox *pbox, pBox *outpBox, const int pad, const int padw = 0, const int padh = 0); -void prelu(struct pBox *pbox, mydataFmt *pbias, mydataFmt *prelu_gmma); +void relu(struct pBox *pbox, mydataFmt *pbias); void fullconnect(const Weight *weight, const pBox *pbox, pBox *outpBox); void readData(string filename, long dataNumber[], mydataFmt *pTeam[], int length = 0); -long initConvAndFc(struct Weight *weight, int schannel, int lchannel, int kersize, int stride, int pad, +long ConvAndFcInit(struct Weight *weight, int schannel, int lchannel, int kersize, int stride, int pad, int w = 0, int h = 0, int padw = 0, int padh = 0); -void initpRelu(struct pRelu *prelu, int width); - void image2MatrixInit(Mat &image, struct pBox *pbox); void featurePadInit(const pBox *pbox, pBox *outpBox, const int pad, const int padw = 0, const int padh = 0); @@ -48,8 +45,21 @@ void convolutionInit(const Weight *weight, pBox *pbox, pBox *outpBox); void fullconnectInit(const Weight *weight, pBox *outpBox); -void vectorXmatrix(mydataFmt *matrix, mydataFmt *v, int size, int v_w, int v_h, mydataFmt *p); +void vectorXmatrix(mydataFmt *matrix, mydataFmt *v, int v_w, int v_h, mydataFmt *p); void convolution(const Weight *weight, const pBox *pbox, pBox *outpBox); +void MeanAndDev(const Mat &image, mydataFmt &p, mydataFmt &q); + +void conv_merge(pBox *output, pBox *c1 = 0, pBox *c2 = 0, pBox *c3 = 0, pBox *c4 = 0); + +void conv_mergeInit(pBox *output, pBox *c1 = 0, pBox *c2 = 0, pBox *c3 = 0, pBox *c4 = 0); + +void mulandaddInit(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float scale); + +void mulandadd(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float scale = 1); + +void BatchNormInit(struct BN *var, struct BN *mean, struct BN *beta, int width); + +void BatchNorm(struct pBox *pbox, struct BN *var, struct BN *mean, struct BN *beta); #endif \ No newline at end of file diff --git a/src/pBox.cpp b/src/pBox.cpp index 32e821a..0969cf4 100644 --- a/src/pBox.cpp +++ b/src/pBox.cpp @@ -22,4 +22,12 @@ void freeWeight(struct Weight *weight) { free(weight->pdata); weight->pdata = NULL; delete weight; -} \ No newline at end of file +} + +void freeBN(struct BN *bn) { + if (bn->pdata == NULL)cout << "weight is NULL!" 
<< endl; + else + free(bn->pdata); + bn->pdata = NULL; + delete bn; +} diff --git a/src/pBox.h b/src/pBox.h index 41ea211..8703500 100644 --- a/src/pBox.h +++ b/src/pBox.h @@ -6,11 +6,14 @@ #include #include +/** + * Struct declarations + */ + using namespace std; //#define mydataFmt double #define Num 128 -typedef double mydataFmt; - +typedef float mydataFmt; struct pBox : public cv::String { mydataFmt *pdata; @@ -19,12 +22,15 @@ struct pBox : public cv::String { int channel; }; - struct pRelu { mydataFmt *pdata; int width; }; +struct BN { + mydataFmt *pdata; + int width; +}; struct Weight { mydataFmt *pdata; @@ -40,23 +46,21 @@ struct Weight { int padh; }; -class pBox1 { -public: - vector>> pdata; +struct Bbox { + float score; + int x1; + int y1; + int x2; + int y2; + float area; + bool exist; + mydataFmt ppoint[10]; + mydataFmt regreCoord[4]; }; -class pRelu1 { -public: - vector pdata; -}; - -class Weight1 { -public: - vector>>> pdata; - vector pbias; - int stride; - int padw; - int padh; +struct orderScore { + mydataFmt score; + int oriOrder; }; void freepBox(struct pBox *pbox); @@ -65,4 +69,6 @@ void freeWeight(struct Weight *weight); void freepRelu(struct pRelu *prelu); +void freeBN(struct BN *bn); + #endif \ No newline at end of file diff --git a/src/pikaqiu.cpp b/src/pikaqiu.cpp index bd75578..fe90636 100644 --- a/src/pikaqiu.cpp +++ b/src/pikaqiu.cpp @@ -3,52 +3,22 @@ #include int main() { - int b = 0; - if (b == 0) { - Mat image = imread("../1.jpg"); + Mat image = imread("../1.jpg"); // Mat image = imread("../2.png"); - Mat Image; - resize(image, Image, Size(299, 299), 0, 0, cv::INTER_LINEAR); - facenet ggg; - mydataFmt *o = new mydataFmt[Num]; - ggg.run(Image, o, 0); + Mat Image; + resize(image, Image, Size(160, 160), 0, 0, cv::INTER_LINEAR); + facenet ggg; + vector o; + ggg.run(Image, o, 0); // imshow("result", Image); - imwrite("../result.jpg", Image); + imwrite("../result.jpg", Image); - for (int i = 0; i < Num; ++i) { - cout << o[i] << endl; - } - - waitKey(0); - image.release(); - } else { - Mat image; - VideoCapture cap(0); - if (!cap.isOpened()) - cout << "fail to open!" << endl; - cap >> image; - if (!image.data) { - cout << "failed to read video frame" << endl; - return -1; - } - - clock_t start; - int stop = 1200; - //while (stop--) { - while (true) { - start = clock(); - cap >> image; - resize(image, image, Size(299, 299), 0, 0, cv::INTER_LINEAR); - facenet ggg; - mydataFmt *o = new mydataFmt[Num]; - ggg.run(image, o, 0); - imshow("result", image); - if (waitKey(1) >= 0) break; - start = clock() - start; - cout << "time is " << (double) start / CLOCKS_PER_SEC * 1000 << "ms" << endl; - } - waitKey(0); - image.release(); + for (int i = 0; i < Num; ++i) { + cout << o[i] << endl; } + + waitKey(0); + image.release(); + return 0; }
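Editor's note on the inference-time batch normalization this patch adds to network.cpp: with gamma fixed at 1, y = (x - mean) / sqrt(var + eps) + beta reduces to a per-channel affine map y = scale * x + shift with scale = 1 / sqrt(var + eps) and shift = beta - scale * mean, which is exactly what the loop in BatchNorm() computes in place over each H*W plane. A minimal standalone sketch of that fold follows; the function name and plain-array interface are illustrative assumptions, not part of the patch.

#include <cmath>
#include <cstddef>

// Fold running (var, mean, beta) into per-channel scale/shift and apply y = scale * x + shift.
// Assumes CHW layout (one H*W plane per channel) and gamma == 1, as in the patch above.
void batchNormInferenceSketch(float *data, std::size_t plane, int channels,
                              const float *var, const float *mean, const float *beta,
                              float eps = 0.001f) {
    for (int c = 0; c < channels; ++c) {
        float scale = 1.0f / std::sqrt(var[c] + eps);
        float shift = beta[c] - scale * mean[c];
        float *p = data + static_cast<std::size_t>(c) * plane;
        for (std::size_t i = 0; i < plane; ++i) {
            p[i] = scale * p[i] + shift;
        }
    }
}

Precomputing scale and shift once per channel keeps the inner loop to a single multiply-add, which is why the patch stores only var, mean, and beta per convolution rather than applying the normalization formula element by element.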