Change model parameter structure

2020-01-07 15:05:00 +08:00
parent 52d50c4544
commit 08fd79f3bb
4 changed files with 624 additions and 131 deletions

.gitignore vendored

@@ -35,4 +35,5 @@
 .idea/
 cmake-build-debug/
 cmake-build-release/
 model_128/
+model_512/

src/facenet.cpp Normal file → Executable file

@@ -4,7 +4,11 @@
 #include "facenet.h"

+/**
+ * Stem network
+ * @param image  input image
+ * @param output output feature map (returned via pointer)
+ */
 void facenet::Stem(Mat &image, pBox *output) {
     pBox *rgb = new pBox;
     pBox *conv1_out = new pBox;
@@ -47,27 +51,35 @@ void facenet::Stem(Mat &image, pBox *output) {
     struct BN *conv6_beta = new BN;
     long conv1 = ConvAndFcInit(conv1_wb, 32, 3, 3, 2, 0);
-    BatchNormInit(conv1_var, conv1_mean, conv1_beta, 32);
+    BatchNormInit(conv1_beta, conv1_mean, conv1_var, 32);
     long conv2 = ConvAndFcInit(conv2_wb, 32, 32, 3, 1, 0);
-    BatchNormInit(conv2_var, conv2_mean, conv2_beta, 32);
+    BatchNormInit(conv2_beta, conv2_mean, conv2_var, 32);
     long conv3 = ConvAndFcInit(conv3_wb, 64, 32, 3, 1, 1);
-    BatchNormInit(conv3_var, conv3_mean, conv3_beta, 64);
+    BatchNormInit(conv3_beta, conv3_mean, conv3_var, 64);
     long conv4 = ConvAndFcInit(conv4_wb, 80, 64, 1, 1, 0);
-    BatchNormInit(conv4_var, conv4_mean, conv4_beta, 80);
+    BatchNormInit(conv4_beta, conv4_mean, conv4_var, 80);
     long conv5 = ConvAndFcInit(conv5_wb, 192, 80, 3, 1, 0);
-    BatchNormInit(conv5_var, conv5_mean, conv5_beta, 192);
+    BatchNormInit(conv5_beta, conv5_mean, conv5_var, 192);
     long conv6 = ConvAndFcInit(conv6_wb, 256, 192, 3, 2, 0);
-    BatchNormInit(conv6_var, conv6_mean, conv6_beta, 256);
+    BatchNormInit(conv6_beta, conv6_mean, conv6_var, 256);
     long dataNumber[24] = {conv1, 32, 32, 32, conv2, 32, 32, 32, conv3, 64, 64, 64, conv4, 80, 80, 80, conv5, 192, 192,
                            192, conv6, 256, 256, 256};
-    mydataFmt *pointTeam[24] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
-            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
-            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
-            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
-            conv5_wb->pdata, conv5_var->pdata, conv5_mean->pdata, conv5_beta->pdata, \
-            conv6_wb->pdata, conv6_var->pdata, conv6_mean->pdata, conv6_beta->pdata};
+//    mydataFmt *pointTeam[24] = {
+//            conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
+//            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
+//            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
+//            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
+//            conv5_wb->pdata, conv5_var->pdata, conv5_mean->pdata, conv5_beta->pdata, \
+//            conv6_wb->pdata, conv6_var->pdata, conv6_mean->pdata, conv6_beta->pdata};
+    mydataFmt *pointTeam[24] = {
+            conv1_wb->pdata, conv1_beta->pdata, conv1_mean->pdata, conv1_var->pdata, \
+            conv2_wb->pdata, conv2_beta->pdata, conv2_mean->pdata, conv2_var->pdata, \
+            conv3_wb->pdata, conv3_beta->pdata, conv3_mean->pdata, conv3_var->pdata, \
+            conv4_wb->pdata, conv4_beta->pdata, conv4_mean->pdata, conv4_var->pdata, \
+            conv5_wb->pdata, conv5_beta->pdata, conv5_mean->pdata, conv5_var->pdata, \
+            conv6_wb->pdata, conv6_beta->pdata, conv6_mean->pdata, conv6_var->pdata};
     string filename = "../model_" + to_string(Num) + "/stem_list.txt";
     readData(filename, dataNumber, pointTeam, 24);
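This pattern repeats throughout the commit: each layer's pointer table is reordered from (weights, var, mean, beta) to (weights, beta, mean, var) so that readData fills the BN buffers in the order the regenerated model files store them. For orientation only (not code from this repo), an inference-time batch norm without a learned scale consumes exactly these three per-channel vectors; the names, CHW layout, and epsilon below are assumptions:

#include <cmath>

// Sketch: per-channel inference batch norm over a CHW buffer, consistent
// with BatchNorm(out, beta, mean, var) taking three BN vectors.
void batchNormSketch(float *data, const float *beta, const float *mean,
                     const float *var, int channels, long planeSize,
                     float eps = 1e-3f) {
    for (int c = 0; c < channels; ++c) {
        float inv = 1.0f / std::sqrt(var[c] + eps);   // 1 / sqrt(var + eps)
        for (long i = 0; i < planeSize; ++i) {
            long idx = (long) c * planeSize + i;
            data[idx] = (data[idx] - mean[c]) * inv + beta[c];
        }
    }
}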
@@ -78,21 +90,19 @@ void facenet::Stem(Mat &image, pBox *output) {
     convolutionInit(conv1_wb, rgb, conv1_out);
     //conv1 149 x 149 x 32
     convolution(conv1_wb, rgb, conv1_out);
-    // printData(conv1_out);
-    BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta);
-    // printData(conv1_out);
+    BatchNorm(conv1_out, conv1_beta, conv1_mean, conv1_var);
     relu(conv1_out, conv1_wb->pbias);
     convolutionInit(conv2_wb, conv1_out, conv2_out);
     //conv2 147 x 147 x 32
     convolution(conv2_wb, conv1_out, conv2_out);
-    BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta);
+    BatchNorm(conv2_out, conv2_beta, conv2_mean, conv2_var);
     relu(conv2_out, conv2_wb->pbias);
     convolutionInit(conv3_wb, conv2_out, conv3_out);
     //conv3 147 x 147 x 64
     convolution(conv3_wb, conv2_out, conv3_out);
-    BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta);
+    BatchNorm(conv3_out, conv3_beta, conv3_mean, conv3_var);
     relu(conv3_out, conv3_wb->pbias);
     maxPoolingInit(conv3_out, pooling1_out, 3, 2);
@@ -102,20 +112,23 @@ void facenet::Stem(Mat &image, pBox *output) {
     convolutionInit(conv4_wb, pooling1_out, conv4_out);
     //conv4 73 x 73 x 80
     convolution(conv4_wb, pooling1_out, conv4_out);
-    BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta);
+    BatchNorm(conv4_out, conv4_beta, conv4_mean, conv4_var);
+    // BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta);
     relu(conv4_out, conv4_wb->pbias);
     convolutionInit(conv5_wb, conv4_out, conv5_out);
     //conv5 71 x 71 x 192
     convolution(conv5_wb, conv4_out, conv5_out);
-    BatchNorm(conv5_out, conv5_var, conv5_mean, conv5_beta);
+    BatchNorm(conv5_out, conv5_beta, conv5_mean, conv5_var);
+    // BatchNorm(conv5_out, conv5_var, conv5_mean, conv5_beta);
     relu(conv5_out, conv5_wb->pbias);
     convolutionInit(conv6_wb, conv5_out, output);
     //conv6 35 x 35 x 256
     convolution(conv6_wb, conv5_out, output);
-    BatchNorm(output, conv6_var, conv6_mean, conv6_beta);
+    BatchNorm(output, conv6_beta, conv6_mean, conv6_var);
+    // BatchNorm(output, conv6_var, conv6_mean, conv6_beta);
     relu(output, conv6_wb->pbias);
     // firstFlag = false;
     // }
@@ -161,6 +174,13 @@ void facenet::Stem(Mat &image, pBox *output) {
     freeBN(conv6_beta);
 }

+/**
+ * Inception_resnet_A network
+ * @param input    input feature map
+ * @param output   output feature map
+ * @param filepath model file path
+ * @param scale    scaling factor
+ */
 void facenet::Inception_resnet_A(pBox *input, pBox *output, string filepath, float scale) {
     pBox *conv1_out = new pBox;
     pBox *conv2_out = new pBox;
@@ -206,19 +226,19 @@ void facenet::Inception_resnet_A(pBox *input, pBox *output, string filepath, flo
     long conv1 = ConvAndFcInit(conv1_wb, 32, 256, 1, 1, 0);
-    BatchNormInit(conv1_var, conv1_mean, conv1_beta, 32);
+    BatchNormInit(conv1_beta, conv1_mean, conv1_var, 32);
     long conv2 = ConvAndFcInit(conv2_wb, 32, 256, 1, 1, 0);
-    BatchNormInit(conv2_var, conv2_mean, conv2_beta, 32);
+    BatchNormInit(conv2_beta, conv2_mean, conv2_var, 32);
     long conv3 = ConvAndFcInit(conv3_wb, 32, 32, 3, 1, 1);
-    BatchNormInit(conv3_var, conv3_mean, conv3_beta, 32);
+    BatchNormInit(conv3_beta, conv3_mean, conv3_var, 32);
     long conv4 = ConvAndFcInit(conv4_wb, 32, 256, 1, 1, 0);
-    BatchNormInit(conv4_var, conv4_mean, conv4_beta, 32);
+    BatchNormInit(conv4_beta, conv4_mean, conv4_var, 32);
     long conv5 = ConvAndFcInit(conv5_wb, 32, 32, 3, 1, 1);
-    BatchNormInit(conv5_var, conv5_mean, conv5_beta, 32);
+    BatchNormInit(conv5_beta, conv5_mean, conv5_var, 32);
     long conv6 = ConvAndFcInit(conv6_wb, 32, 32, 3, 1, 1);
-    BatchNormInit(conv6_var, conv6_mean, conv6_beta, 32);
+    BatchNormInit(conv6_beta, conv6_mean, conv6_var, 32);
     long conv7 = ConvAndFcInit(conv7_wb, 256, 96, 1, 1, 0);
@@ -227,12 +247,22 @@ void facenet::Inception_resnet_A(pBox *input, pBox *output, string filepath, flo
     long dataNumber[28] = {conv1, 32, 32, 32, conv2, 32, 32, 32, conv3, 32, 32, 32, conv4, 32, 32, 32,
                            conv5, 32, 32, 32, conv6, 32, 32, 32, conv7, 256, conv8, 0};
-    mydataFmt *pointTeam[28] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
-            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
-            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
-            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
-            conv5_wb->pdata, conv5_var->pdata, conv5_mean->pdata, conv5_beta->pdata, \
-            conv6_wb->pdata, conv6_var->pdata, conv6_mean->pdata, conv6_beta->pdata, \
+//    mydataFmt *pointTeam[28] = {
+//            conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
+//            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
+//            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
+//            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
+//            conv5_wb->pdata, conv5_var->pdata, conv5_mean->pdata, conv5_beta->pdata, \
+//            conv6_wb->pdata, conv6_var->pdata, conv6_mean->pdata, conv6_beta->pdata, \
+//            conv7_wb->pdata, conv7_wb->pbias, \
+//            conv8_wb->pdata, conv8_wb->pbias};
+    mydataFmt *pointTeam[28] = {
+            conv1_wb->pdata, conv1_beta->pdata, conv1_mean->pdata, conv1_var->pdata, \
+            conv2_wb->pdata, conv2_beta->pdata, conv2_mean->pdata, conv2_var->pdata, \
+            conv3_wb->pdata, conv3_beta->pdata, conv3_mean->pdata, conv3_var->pdata, \
+            conv4_wb->pdata, conv4_beta->pdata, conv4_mean->pdata, conv4_var->pdata, \
+            conv5_wb->pdata, conv5_beta->pdata, conv5_mean->pdata, conv5_var->pdata, \
+            conv6_wb->pdata, conv6_beta->pdata, conv6_mean->pdata, conv6_var->pdata, \
             conv7_wb->pdata, conv7_wb->pbias, \
             conv8_wb->pdata, conv8_wb->pbias};
@@ -241,34 +271,34 @@ void facenet::Inception_resnet_A(pBox *input, pBox *output, string filepath, flo
     convolutionInit(conv1_wb, input, conv1_out);
     //conv1 35 x 35 x 32
     convolution(conv1_wb, input, conv1_out);
-    BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta);
+    BatchNorm(conv1_out, conv1_beta, conv1_mean, conv1_var);
     relu(conv1_out, conv1_wb->pbias);
     convolutionInit(conv2_wb, input, conv2_out);
     //conv2 35 x 35 x 32
     convolution(conv2_wb, input, conv2_out);
-    BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta);
+    BatchNorm(conv2_out, conv2_beta, conv2_mean, conv2_var);
     relu(conv2_out, conv2_wb->pbias);
     convolutionInit(conv3_wb, conv2_out, conv3_out);
     //conv3 35 x 35 x 32
     convolution(conv3_wb, conv2_out, conv3_out);
-    BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta);
+    BatchNorm(conv3_out, conv3_beta, conv3_mean, conv3_var);
     relu(conv3_out, conv3_wb->pbias);
     convolutionInit(conv4_wb, input, conv4_out);
     //conv4 35 x 35 x 32
     convolution(conv4_wb, input, conv4_out);
-    BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta);
+    BatchNorm(conv4_out, conv4_beta, conv4_mean, conv4_var);
     relu(conv4_out, conv4_wb->pbias);
     convolutionInit(conv5_wb, conv4_out, conv5_out);
     //conv5 35 x 35 x 32
     convolution(conv5_wb, conv4_out, conv5_out);
-    BatchNorm(conv5_out, conv5_var, conv5_mean, conv5_beta);
+    BatchNorm(conv5_out, conv5_beta, conv5_mean, conv5_var);
     relu(conv5_out, conv5_wb->pbias);
     convolutionInit(conv6_wb, conv5_out, conv6_out);
     //conv6 35 x 35 x 32
     convolution(conv6_wb, conv5_out, conv6_out);
-    BatchNorm(conv6_out, conv6_var, conv6_mean, conv6_beta);
+    BatchNorm(conv6_out, conv6_beta, conv6_mean, conv6_var);
     relu(conv6_out, conv6_wb->pbias);
     conv_mergeInit(conv7_out, conv1_out, conv3_out, conv6_out);
@@ -280,7 +310,7 @@ void facenet::Inception_resnet_A(pBox *input, pBox *output, string filepath, flo
     convolution(conv7_wb, conv7_out, conv8_out);
     addbias(conv8_out, conv7_wb->pbias);
-    mulandaddInit(input, conv8_out, output, scale);
+    mulandaddInit(input, conv8_out, output);
     mulandadd(input, conv8_out, output, scale);
     relu(output, conv8_wb->pbias);
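mulandaddInit drops its unused scale argument here (and again in the B and C blocks below); only the compute call mulandadd still needs it. The merge these blocks perform is the scaled residual connection of Inception-ResNet; the sketch below is illustrative, using assumed flat-buffer names rather than the repo's pBox type:

// Sketch: output = input + scale * branch, i.e. what
// mulandadd(input, branch, output, scale) appears to compute element-wise.
void mulandaddSketch(const float *input, const float *branch, float *output,
                     long n, float scale) {
    for (long i = 0; i < n; ++i)
        output[i] = input[i] + scale * branch[i];   // scaled residual add
}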
@@ -327,6 +357,11 @@ void facenet::Inception_resnet_A(pBox *input, pBox *output, string filepath, flo
     freeBN(conv6_beta);
 }

+/**
+ * Reduction_A
+ * @param input  input feature map
+ * @param output output feature map
+ */
 void facenet::Reduction_A(pBox *input, pBox *output) {
     pBox *conv1_out = new pBox;
     pBox *conv2_out = new pBox;
@@ -355,20 +390,26 @@ void facenet::Reduction_A(pBox *input, pBox *output) {
     long conv1 = ConvAndFcInit(conv1_wb, 384, 256, 3, 2, 0);
-    BatchNormInit(conv1_var, conv1_mean, conv1_beta, 384);
+    BatchNormInit(conv1_beta, conv1_mean, conv1_var, 384);
     long conv2 = ConvAndFcInit(conv2_wb, 192, 256, 1, 1, 0);
-    BatchNormInit(conv2_var, conv2_mean, conv2_beta, 192);
+    BatchNormInit(conv2_beta, conv2_mean, conv2_var, 192);
     long conv3 = ConvAndFcInit(conv3_wb, 192, 192, 3, 1, 0);
-    BatchNormInit(conv3_var, conv3_mean, conv3_beta, 192);
+    BatchNormInit(conv3_beta, conv3_mean, conv3_var, 192);
     long conv4 = ConvAndFcInit(conv4_wb, 256, 192, 3, 2, 0);
-    BatchNormInit(conv4_var, conv4_mean, conv4_beta, 256);
+    BatchNormInit(conv4_beta, conv4_mean, conv4_var, 256);
     long dataNumber[16] = {conv1, 384, 384, 384, conv2, 192, 192, 192, conv3, 192, 192, 192, conv4, 256, 256, 256};
-    mydataFmt *pointTeam[16] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
-            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
-            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
-            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata};
+//    mydataFmt *pointTeam[16] = {
+//            conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
+//            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
+//            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
+//            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata};
+    mydataFmt *pointTeam[16] = {
+            conv1_wb->pdata, conv1_beta->pdata, conv1_mean->pdata, conv1_var->pdata, \
+            conv2_wb->pdata, conv2_beta->pdata, conv2_mean->pdata, conv2_var->pdata, \
+            conv3_wb->pdata, conv3_beta->pdata, conv3_mean->pdata, conv3_var->pdata, \
+            conv4_wb->pdata, conv4_beta->pdata, conv4_mean->pdata, conv4_var->pdata};
     string filename = "../model_" + to_string(Num) + "/Mixed_6a_list.txt";
     readData(filename, dataNumber, pointTeam, 16);
@@ -379,25 +420,25 @@ void facenet::Reduction_A(pBox *input, pBox *output) {
     convolutionInit(conv1_wb, input, conv1_out);
     //conv1 17 x 17 x 384
     convolution(conv1_wb, input, conv1_out);
-    BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta);
+    BatchNorm(conv1_out, conv1_beta, conv1_mean, conv1_var);
     relu(conv1_out, conv1_wb->pbias);
     convolutionInit(conv2_wb, input, conv2_out);
     //conv2 35 x 35 x 192
     convolution(conv2_wb, input, conv2_out);
-    BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta);
+    BatchNorm(conv2_out, conv2_beta, conv2_mean, conv2_var);
     relu(conv2_out, conv2_wb->pbias);
     convolutionInit(conv3_wb, conv2_out, conv3_out);
     //conv3 35 x 35 x 192
     convolution(conv3_wb, conv2_out, conv3_out);
-    BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta);
+    BatchNorm(conv3_out, conv3_beta, conv3_mean, conv3_var);
     relu(conv3_out, conv3_wb->pbias);
     convolutionInit(conv4_wb, conv3_out, conv4_out);
     //conv4 17 x 17 x 256
     convolution(conv4_wb, conv3_out, conv4_out);
-    BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta);
+    BatchNorm(conv4_out, conv4_beta, conv4_mean, conv4_var);
     relu(conv4_out, conv4_wb->pbias);
     conv_mergeInit(output, pooling1_out, conv1_out, conv4_out);
     //17×17×896
@@ -429,6 +470,13 @@ void facenet::Reduction_A(pBox *input, pBox *output) {
     freeBN(conv4_beta);
 }

+/**
+ * Inception_resnet_B network
+ * @param input    input feature map
+ * @param output   output feature map
+ * @param filepath model file path
+ * @param scale    scaling factor
+ */
 void facenet::Inception_resnet_B(pBox *input, pBox *output, string filepath, float scale) {
     pBox *conv1_out = new pBox;
     pBox *conv2_out = new pBox;
@@ -459,14 +507,14 @@ void facenet::Inception_resnet_B(pBox *input, pBox *output, string filepath, flo
     long conv1 = ConvAndFcInit(conv1_wb, 128, 896, 1, 1, 0);
-    BatchNormInit(conv1_var, conv1_mean, conv1_beta, 128);
+    BatchNormInit(conv1_beta, conv1_mean, conv1_var, 128);
     long conv2 = ConvAndFcInit(conv2_wb, 128, 896, 1, 1, 0);
-    BatchNormInit(conv2_var, conv2_mean, conv2_beta, 128);
+    BatchNormInit(conv2_beta, conv2_mean, conv2_var, 128);
     long conv3 = ConvAndFcInit(conv3_wb, 128, 128, 0, 1, -1, 7, 1, 3, 0);//[1,7]
-    BatchNormInit(conv3_var, conv3_mean, conv3_beta, 128);
+    BatchNormInit(conv3_beta, conv3_mean, conv3_var, 128);
     long conv4 = ConvAndFcInit(conv4_wb, 128, 128, 0, 1, -1, 1, 7, 0, 3);//[7,1]
-    BatchNormInit(conv4_var, conv4_mean, conv4_beta, 128);
+    BatchNormInit(conv4_beta, conv4_mean, conv4_var, 128);
     long conv5 = ConvAndFcInit(conv5_wb, 896, 256, 1, 1, 0);
@@ -475,10 +523,18 @@ void facenet::Inception_resnet_B(pBox *input, pBox *output, string filepath, flo
     long dataNumber[20] = {conv1, 128, 128, 128, conv2, 128, 128, 128, conv3, 128, 128, 128, conv4, 128, 128, 128,
                            conv5, 896, conv6, 0};
-    mydataFmt *pointTeam[20] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
-            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
-            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
-            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
+//    mydataFmt *pointTeam[20] = {
+//            conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
+//            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
+//            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
+//            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
+//            conv5_wb->pdata, conv5_wb->pbias, \
+//            conv6_wb->pdata, conv6_wb->pbias};
+    mydataFmt *pointTeam[20] = {
+            conv1_wb->pdata, conv1_beta->pdata, conv1_mean->pdata, conv1_var->pdata, \
+            conv2_wb->pdata, conv2_beta->pdata, conv2_mean->pdata, conv2_var->pdata, \
+            conv3_wb->pdata, conv3_beta->pdata, conv3_mean->pdata, conv3_var->pdata, \
+            conv4_wb->pdata, conv4_beta->pdata, conv4_mean->pdata, conv4_var->pdata, \
             conv5_wb->pdata, conv5_wb->pbias, \
             conv6_wb->pdata, conv6_wb->pbias};
@@ -489,24 +545,24 @@ void facenet::Inception_resnet_B(pBox *input, pBox *output, string filepath, flo
     convolutionInit(conv1_wb, input, conv1_out);
     //conv1 17*17*128
     convolution(conv1_wb, input, conv1_out);
-    BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta);
+    BatchNorm(conv1_out, conv1_beta, conv1_mean, conv1_var);
     relu(conv1_out, conv1_wb->pbias);
     convolutionInit(conv2_wb, input, conv2_out);
     //conv2 17*17*128
     convolution(conv2_wb, input, conv2_out);
-    BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta);
+    BatchNorm(conv2_out, conv2_beta, conv2_mean, conv2_var);
     relu(conv2_out, conv2_wb->pbias);
     convolutionInit(conv3_wb, conv2_out, conv3_out);
     //conv3 17*17*128
     convolution(conv3_wb, conv2_out, conv3_out);
-    BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta);
+    BatchNorm(conv3_out, conv3_beta, conv3_mean, conv3_var);
     relu(conv3_out, conv3_wb->pbias);
     convolutionInit(conv4_wb, conv3_out, conv4_out);
     //conv4 17*17*128
     convolution(conv4_wb, conv3_out, conv4_out);
-    BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta);
+    BatchNorm(conv4_out, conv4_beta, conv4_mean, conv4_var);
     relu(conv4_out, conv4_wb->pbias);
     conv_mergeInit(conv5_out, conv1_out, conv4_out);
@@ -518,7 +574,7 @@ void facenet::Inception_resnet_B(pBox *input, pBox *output, string filepath, flo
     convolution(conv5_wb, conv5_out, conv6_out);
     addbias(conv6_out, conv5_wb->pbias);
-    mulandaddInit(input, conv6_out, output, scale);
+    mulandaddInit(input, conv6_out, output);
     mulandadd(input, conv6_out, output, scale);
     relu(output, conv6_wb->pbias);
@@ -550,6 +606,11 @@ void facenet::Inception_resnet_B(pBox *input, pBox *output, string filepath, flo
     freeBN(conv4_beta);
 }

+/**
+ * Reduction_B
+ * @param input  input feature map
+ * @param output output feature map
+ */
 void facenet::Reduction_B(pBox *input, pBox *output) {
     pBox *conv1_out = new pBox;
     pBox *conv2_out = new pBox;
@@ -593,32 +654,41 @@ void facenet::Reduction_B(pBox *input, pBox *output) {
     long conv1 = ConvAndFcInit(conv1_wb, 256, 896, 1, 1, 0);
-    BatchNormInit(conv1_var, conv1_mean, conv1_beta, 256);
+    BatchNormInit(conv1_beta, conv1_mean, conv1_var, 256);
     long conv2 = ConvAndFcInit(conv2_wb, 384, 256, 3, 2, 0);
-    BatchNormInit(conv2_var, conv2_mean, conv2_beta, 384);
+    BatchNormInit(conv2_beta, conv2_mean, conv2_var, 384);
     long conv3 = ConvAndFcInit(conv3_wb, 256, 896, 1, 1, 0);
-    BatchNormInit(conv3_var, conv3_mean, conv3_beta, 256);
+    BatchNormInit(conv3_beta, conv3_mean, conv3_var, 256);
     long conv4 = ConvAndFcInit(conv4_wb, 256, 256, 3, 2, 0);
-    BatchNormInit(conv4_var, conv4_mean, conv4_beta, 256);
+    BatchNormInit(conv4_beta, conv4_mean, conv4_var, 256);
     long conv5 = ConvAndFcInit(conv5_wb, 256, 896, 1, 1, 0);
-    BatchNormInit(conv5_var, conv5_mean, conv5_beta, 256);
+    BatchNormInit(conv5_beta, conv5_mean, conv5_var, 256);
     long conv6 = ConvAndFcInit(conv6_wb, 256, 256, 3, 1, 1);
-    BatchNormInit(conv6_var, conv6_mean, conv6_beta, 256);
+    BatchNormInit(conv6_beta, conv6_mean, conv6_var, 256);
     long conv7 = ConvAndFcInit(conv7_wb, 256, 256, 3, 2, 0);
-    BatchNormInit(conv7_var, conv7_mean, conv7_beta, 256);
+    BatchNormInit(conv7_beta, conv7_mean, conv7_var, 256);
     long dataNumber[28] = {conv1, 256, 256, 256, conv2, 384, 384, 384, conv3, 256, 256, 256, conv4, 256, 256, 256,
                            conv5, 256, 256, 256, conv6, 256, 256, 256, conv7, 256, 256, 256};
-    mydataFmt *pointTeam[28] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
-            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
-            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
-            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
-            conv5_wb->pdata, conv5_var->pdata, conv5_mean->pdata, conv5_beta->pdata, \
-            conv6_wb->pdata, conv6_var->pdata, conv6_mean->pdata, conv6_beta->pdata, \
-            conv7_wb->pdata, conv7_var->pdata, conv7_mean->pdata, conv7_beta->pdata};
+//    mydataFmt *pointTeam[28] = {
+//            conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
+//            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
+//            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
+//            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
+//            conv5_wb->pdata, conv5_var->pdata, conv5_mean->pdata, conv5_beta->pdata, \
+//            conv6_wb->pdata, conv6_var->pdata, conv6_mean->pdata, conv6_beta->pdata, \
+//            conv7_wb->pdata, conv7_var->pdata, conv7_mean->pdata, conv7_beta->pdata};
+    mydataFmt *pointTeam[28] = {
+            conv1_wb->pdata, conv1_beta->pdata, conv1_mean->pdata, conv1_var->pdata, \
+            conv2_wb->pdata, conv2_beta->pdata, conv2_mean->pdata, conv2_var->pdata, \
+            conv3_wb->pdata, conv3_beta->pdata, conv3_mean->pdata, conv3_var->pdata, \
+            conv4_wb->pdata, conv4_beta->pdata, conv4_mean->pdata, conv4_var->pdata, \
+            conv5_wb->pdata, conv5_beta->pdata, conv5_mean->pdata, conv5_var->pdata, \
+            conv6_wb->pdata, conv6_beta->pdata, conv6_mean->pdata, conv6_var->pdata, \
+            conv7_wb->pdata, conv7_beta->pdata, conv7_mean->pdata, conv7_var->pdata};
     string filename = "../model_" + to_string(Num) + "/Mixed_7a_list.txt";
     readData(filename, dataNumber, pointTeam, 28);
@@ -630,43 +700,46 @@ void facenet::Reduction_B(pBox *input, pBox *output) {
     convolutionInit(conv1_wb, input, conv1_out);
     //conv1 17 x 17 x 256
     convolution(conv1_wb, input, conv1_out);
-    BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta);
+    BatchNorm(conv1_out, conv1_beta, conv1_mean, conv1_var);
+    // BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta);
     relu(conv1_out, conv1_wb->pbias);
     convolutionInit(conv2_wb, conv1_out, conv2_out);
     //conv2 8 x 8 x 384
     convolution(conv2_wb, conv1_out, conv2_out);
-    BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta);
+    BatchNorm(conv2_out, conv2_beta, conv2_mean, conv2_var);
+    // BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta);
     relu(conv2_out, conv2_wb->pbias);
     convolutionInit(conv3_wb, input, conv3_out);
     //conv3 17 x 17 x 256
     convolution(conv3_wb, input, conv3_out);
-    BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta);
+    BatchNorm(conv3_out, conv3_beta, conv3_mean, conv3_var);
+    // BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta);
     relu(conv3_out, conv3_wb->pbias);
     convolutionInit(conv4_wb, conv3_out, conv4_out);
     //conv4 8 x 8 x 256
     convolution(conv4_wb, conv3_out, conv4_out);
-    BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta);
+    BatchNorm(conv4_out, conv4_beta, conv4_mean, conv4_var);
     relu(conv4_out, conv4_wb->pbias);
     convolutionInit(conv5_wb, input, conv5_out);
     //conv5 17 x 17 x 256
     convolution(conv5_wb, input, conv5_out);
-    BatchNorm(conv5_out, conv5_var, conv5_mean, conv5_beta);
+    BatchNorm(conv5_out, conv5_beta, conv5_mean, conv5_var);
     relu(conv5_out, conv5_wb->pbias);
     convolutionInit(conv6_wb, conv5_out, conv6_out);
     //conv6 17 x 17 x 256
     convolution(conv6_wb, conv5_out, conv6_out);
-    BatchNorm(conv6_out, conv6_var, conv6_mean, conv6_beta);
+    BatchNorm(conv6_out, conv6_beta, conv6_mean, conv6_var);
     relu(conv6_out, conv6_wb->pbias);
     convolutionInit(conv7_wb, conv6_out, conv7_out);
     //conv6 8 x 8 x 256
     convolution(conv7_wb, conv6_out, conv7_out);
-    BatchNorm(conv7_out, conv7_var, conv7_mean, conv7_beta);
+    BatchNorm(conv7_out, conv7_beta, conv7_mean, conv7_var);
     relu(conv7_out, conv7_wb->pbias);
     conv_mergeInit(output, conv2_out, conv4_out, conv7_out, pooling1_out);
@@ -714,6 +787,13 @@ void facenet::Reduction_B(pBox *input, pBox *output) {
     freeBN(conv7_beta);
 }

+/**
+ * Inception_resnet_C network
+ * @param input    input feature map
+ * @param output   output feature map
+ * @param filepath model file path
+ * @param scale    scaling factor
+ */
 void facenet::Inception_resnet_C(pBox *input, pBox *output, string filepath, float scale) {
     pBox *conv1_out = new pBox;
     pBox *conv2_out = new pBox;
@@ -744,13 +824,13 @@ void facenet::Inception_resnet_C(pBox *input, pBox *output, string filepath, flo
     long conv1 = ConvAndFcInit(conv1_wb, 192, 1792, 1, 1, 0);
-    BatchNormInit(conv1_var, conv1_mean, conv1_beta, 192);
+    BatchNormInit(conv1_beta, conv1_mean, conv1_var, 192);
     long conv2 = ConvAndFcInit(conv2_wb, 192, 1792, 1, 1, 0);
-    BatchNormInit(conv2_var, conv2_mean, conv2_beta, 192);
+    BatchNormInit(conv2_beta, conv2_mean, conv2_var, 192);
     long conv3 = ConvAndFcInit(conv3_wb, 192, 192, 0, 1, -1, 3, 1, 1, 0);
-    BatchNormInit(conv3_var, conv3_mean, conv3_beta, 192);
+    BatchNormInit(conv3_beta, conv3_mean, conv3_var, 192);
     long conv4 = ConvAndFcInit(conv4_wb, 192, 192, 0, 1, -1, 1, 3, 0, 1);
-    BatchNormInit(conv4_var, conv4_mean, conv4_beta, 192);
+    BatchNormInit(conv4_beta, conv4_mean, conv4_var, 192);
     long conv5 = ConvAndFcInit(conv5_wb, 1792, 384, 1, 1, 0);
@@ -760,10 +840,18 @@ void facenet::Inception_resnet_C(pBox *input, pBox *output, string filepath, flo
                            conv5, 1792, conv6, 0};
-    mydataFmt *pointTeam[20] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
-            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
-            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
-            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
+//    mydataFmt *pointTeam[20] = {
+//            conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
+//            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
+//            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
+//            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
+//            conv5_wb->pdata, conv5_wb->pbias, \
+//            conv6_wb->pdata, conv6_wb->pbias};
+    mydataFmt *pointTeam[20] = {
+            conv1_wb->pdata, conv1_beta->pdata, conv1_mean->pdata, conv1_var->pdata, \
+            conv2_wb->pdata, conv2_beta->pdata, conv2_mean->pdata, conv2_var->pdata, \
+            conv3_wb->pdata, conv3_beta->pdata, conv3_mean->pdata, conv3_var->pdata, \
+            conv4_wb->pdata, conv4_beta->pdata, conv4_mean->pdata, conv4_var->pdata, \
             conv5_wb->pdata, conv5_wb->pbias, \
             conv6_wb->pdata, conv6_wb->pbias};
@@ -774,25 +862,25 @@ void facenet::Inception_resnet_C(pBox *input, pBox *output, string filepath, flo
     convolutionInit(conv1_wb, input, conv1_out);
     //conv1 8 x 8 x 192
     convolution(conv1_wb, input, conv1_out);
-    BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta);
+    BatchNorm(conv1_out, conv1_beta, conv1_mean, conv1_var);
     relu(conv1_out, conv1_wb->pbias);
     convolutionInit(conv2_wb, input, conv2_out);
     //conv2 8 x 8 x 192
     convolution(conv2_wb, input, conv2_out);
-    BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta);
+    BatchNorm(conv2_out, conv2_beta, conv2_mean, conv2_var);
     relu(conv2_out, conv2_wb->pbias);
     convolutionInit(conv3_wb, conv2_out, conv3_out);
     //conv3 8 x 8 x 192
     convolution(conv3_wb, conv2_out, conv3_out);
-    BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta);
+    BatchNorm(conv3_out, conv3_beta, conv3_mean, conv3_var);
     relu(conv3_out, conv3_wb->pbias);
     convolutionInit(conv4_wb, conv3_out, conv4_out);
     //conv4 8 x 8 x 192
     convolution(conv4_wb, conv3_out, conv4_out);
-    BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta);
+    BatchNorm(conv4_out, conv4_beta, conv4_mean, conv4_var);
     relu(conv4_out, conv4_wb->pbias);
     conv_mergeInit(conv5_out, conv1_out, conv4_out);
@@ -804,7 +892,7 @@ void facenet::Inception_resnet_C(pBox *input, pBox *output, string filepath, flo
     convolution(conv5_wb, conv5_out, conv6_out);
     addbias(conv6_out, conv5_wb->pbias);
-    mulandaddInit(input, conv6_out, output, scale);
+    mulandaddInit(input, conv6_out, output);
     mulandadd(input, conv6_out, output, scale);
     relu(output, conv6_wb->pbias);
@@ -836,6 +924,13 @@ void facenet::Inception_resnet_C(pBox *input, pBox *output, string filepath, flo
     freeBN(conv4_beta);
 }

+/**
+ * Inception_resnet_C network without the final activation
+ * @param input    input feature map
+ * @param output   output feature map
+ * @param filepath model file path
+ */
 void facenet::Inception_resnet_C_None(pBox *input, pBox *output, string filepath) {
     pBox *conv1_out = new pBox;
     pBox *conv2_out = new pBox;
@@ -864,23 +959,30 @@ void facenet::Inception_resnet_C_None(pBox *input, pBox *output, string filepath
     struct BN *conv4_beta = new BN;
     long conv1 = ConvAndFcInit(conv1_wb, 192, 1792, 1, 1, 0);
-    BatchNormInit(conv1_var, conv1_mean, conv1_beta, 192);
+    BatchNormInit(conv1_beta, conv1_mean, conv1_var, 192);
     long conv2 = ConvAndFcInit(conv2_wb, 192, 1792, 1, 1, 0);
-    BatchNormInit(conv2_var, conv2_mean, conv2_beta, 192);
+    BatchNormInit(conv2_beta, conv2_mean, conv2_var, 192);
     long conv3 = ConvAndFcInit(conv3_wb, 192, 192, 0, 1, -1, 3, 1, 1, 0);
-    BatchNormInit(conv3_var, conv3_mean, conv3_beta, 192);
+    BatchNormInit(conv3_beta, conv3_mean, conv3_var, 192);
     long conv4 = ConvAndFcInit(conv4_wb, 192, 192, 0, 1, -1, 1, 3, 0, 1);
-    BatchNormInit(conv4_var, conv4_mean, conv4_beta, 192);
+    BatchNormInit(conv4_beta, conv4_mean, conv4_var, 192);
     long conv5 = ConvAndFcInit(conv5_wb, 1792, 384, 1, 1, 0);
     long dataNumber[18] = {conv1, 192, 192, 192, conv2, 192, 192, 192, conv3, 192, 192, 192, conv4, 192, 192, 192,
                            conv5, 1792};
-    mydataFmt *pointTeam[18] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
-            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
-            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
-            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
+//    mydataFmt *pointTeam[18] = {
+//            conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata, \
+//            conv2_wb->pdata, conv2_var->pdata, conv2_mean->pdata, conv2_beta->pdata, \
+//            conv3_wb->pdata, conv3_var->pdata, conv3_mean->pdata, conv3_beta->pdata, \
+//            conv4_wb->pdata, conv4_var->pdata, conv4_mean->pdata, conv4_beta->pdata, \
+//            conv5_wb->pdata, conv5_wb->pbias};
+    mydataFmt *pointTeam[18] = {
+            conv1_wb->pdata, conv1_beta->pdata, conv1_mean->pdata, conv1_var->pdata, \
+            conv2_wb->pdata, conv2_beta->pdata, conv2_mean->pdata, conv2_var->pdata, \
+            conv3_wb->pdata, conv3_beta->pdata, conv3_mean->pdata, conv3_var->pdata, \
+            conv4_wb->pdata, conv4_beta->pdata, conv4_mean->pdata, conv4_var->pdata, \
             conv5_wb->pdata, conv5_wb->pbias};
     // string filename = "../model_128/Repeat_2_list.txt";
@@ -890,25 +992,25 @@ void facenet::Inception_resnet_C_None(pBox *input, pBox *output, string filepath
     convolutionInit(conv1_wb, input, conv1_out);
     //conv1 8 x 8 x 192
     convolution(conv1_wb, input, conv1_out);
-    BatchNorm(conv1_out, conv1_var, conv1_mean, conv1_beta);
+    BatchNorm(conv1_out, conv1_beta, conv1_mean, conv1_var);
     relu(conv1_out, conv1_wb->pbias);
     convolutionInit(conv2_wb, input, conv2_out);
     //conv2 8 x 8 x 192
     convolution(conv2_wb, input, conv2_out);
-    BatchNorm(conv2_out, conv2_var, conv2_mean, conv2_beta);
+    BatchNorm(conv2_out, conv2_beta, conv2_mean, conv2_var);
     relu(conv2_out, conv2_wb->pbias);
     convolutionInit(conv3_wb, conv2_out, conv3_out);
     //conv3 8 x 8 x 192
     convolution(conv3_wb, conv2_out, conv3_out);
-    BatchNorm(conv3_out, conv3_var, conv3_mean, conv3_beta);
+    BatchNorm(conv3_out, conv3_beta, conv3_mean, conv3_var);
     relu(conv3_out, conv3_wb->pbias);
     convolutionInit(conv4_wb, conv3_out, conv4_out);
     //conv4 8 x 8 x 192
     convolution(conv4_wb, conv3_out, conv4_out);
-    BatchNorm(conv4_out, conv4_var, conv4_mean, conv4_beta);
+    BatchNorm(conv4_out, conv4_beta, conv4_mean, conv4_var);
     relu(conv4_out, conv4_wb->pbias);
     conv_mergeInit(conv5_out, conv1_out, conv4_out);
@@ -920,7 +1022,7 @@ void facenet::Inception_resnet_C_None(pBox *input, pBox *output, string filepath
     convolution(conv5_wb, conv5_out, conv6_out);
     addbias(conv6_out, conv5_wb->pbias);
-    mulandaddInit(input, conv6_out, output, 1);
+    mulandaddInit(input, conv6_out, output);
     mulandadd(input, conv6_out, output);
     freepBox(conv1_out);
@@ -950,12 +1052,22 @@ void facenet::Inception_resnet_C_None(pBox *input, pBox *output, string filepath
     freeBN(conv4_beta);
 }

+/**
+ * Average pooling
+ * @param input  input feature map
+ * @param output output feature map
+ */
 void facenet::AveragePooling(pBox *input, pBox *output) {
     // cout << "size:" << input->height << endl;
     avePoolingInit(input, output, input->height, 2);
     avePooling(input, output, input->height, 2);
 }
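Since the kernel size passed here is input->height, this amounts to global average pooling: one value per channel. A standalone sketch, assuming a flat CHW buffer rather than the repo's pBox API:

// Sketch: global average pooling over a CHW buffer, one mean per channel.
void globalAvgPoolSketch(const float *in, float *out, int channels, long plane) {
    for (int c = 0; c < channels; ++c) {
        double sum = 0;
        for (long i = 0; i < plane; ++i)
            sum += in[(long) c * plane + i];
        out[c] = (float) (sum / plane);   // channel mean
    }
}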
+/**
+ * Flatten: reshape a multi-dimensional feature map to one dimension
+ * @param input
+ * @param output
+ */
 void facenet::Flatten(pBox *input, pBox *output) {
     output->width = input->channel;
     output->height = 1;
@@ -965,18 +1077,25 @@ void facenet::Flatten(pBox *input, pBox *output) {
     memcpy(output->pdata, input->pdata, output->channel * output->width * output->height * sizeof(mydataFmt));
 }

+/**
+ * Fully connected layer
+ * @param input    input feature map
+ * @param output   output feature map
+ * @param filepath path to the model parameter file
+ */
 // parameters not yet set
 void facenet::fully_connect(pBox *input, pBox *output, string filepath) {
     struct Weight *conv1_wb = new Weight;
-    struct BN *conv1_var = new BN;
-    struct BN *conv1_mean = new BN;
     struct BN *conv1_beta = new BN;
+    struct BN *conv1_mean = new BN;
+    struct BN *conv1_var = new BN;
     long conv1 = ConvAndFcInit(conv1_wb, Num, 1792, input->height, 1, 0);
-    BatchNormInit(conv1_var, conv1_mean, conv1_beta, Num);
+    BatchNormInit(conv1_beta, conv1_mean, conv1_var, Num);
     long dataNumber[4] = {conv1, Num, Num, Num};
     // cout << to_string(sum) << endl;
-    mydataFmt *pointTeam[4] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata};
+    // mydataFmt *pointTeam[4] = {conv1_wb->pdata, conv1_var->pdata, conv1_mean->pdata, conv1_beta->pdata};
+    mydataFmt *pointTeam[4] = {conv1_wb->pdata, conv1_beta->pdata, conv1_mean->pdata, conv1_var->pdata};
     // string filename = "../model_128/Bottleneck_list.txt";
     // int length = sizeof(dataNumber) / sizeof(*dataNumber);
     readData(filepath, dataNumber, pointTeam, 4);
@@ -985,7 +1104,7 @@ void facenet::fully_connect(pBox *input, pBox *output, string filepath) {
     //conv1 8 x 8 x 192
     fullconnect(conv1_wb, input, output);
-    BatchNorm(output, conv1_var, conv1_mean, conv1_beta);
+    BatchNorm(output, conv1_beta, conv1_mean, conv1_var);
     freeWeight(conv1_wb);
     freeBN(conv1_var);
@@ -1009,13 +1128,17 @@ void facenet::printData(pBox *in) {
     cout << "printData" << endl;
 }

+/**
+ * Entry point for running the facenet network
+ * @param image
+ * @param o
+ * @param count
+ */
 void facenet::run(Mat &image, vector<mydataFmt> &o, int count) {
     cout << "=====This is No." + to_string(count) + " Picture=====" << endl;
     pBox *output = new pBox;
     pBox *input;
     Stem(image, output);
-    // printData(output);
-    // return;
     cout << "Stem Finally" << endl;
     input = output;
     output = new pBox;
@@ -1030,7 +1153,6 @@ void facenet::run(Mat &image, vector<mydataFmt> &o, int count) {
     Reduction_A(input, output);
     cout << "Reduction_A Finally" << endl;
     input = output;
-    // freepBox(output);
     output = new pBox;
     for (int j = 0; j < 10; ++j) {
         // model_128/block17_1_list.txt
@@ -1048,10 +1170,8 @@ void facenet::run(Mat &image, vector<mydataFmt> &o, int count) {
     for (int k = 0; k < 5; ++k) {
         // model_128/block8_1_list.txt
         string filepath = "../model_" + to_string(Num) + "/block8_" + to_string((k + 1)) + "_list.txt";
-        // cout << filepath << endl;
         Inception_resnet_C(input, output, filepath, 0.2);
         input = output;
-        // freepBox(output);
         output = new pBox;
     }
     cout << "Inception_resnet_C Finally" << endl;
@@ -1070,6 +1190,10 @@ void facenet::run(Mat &image, vector<mydataFmt> &o, int count) {
     output = new pBox;
     fully_connect(input, output, "../model_" + to_string(Num) + "/Bottleneck_list.txt");
     cout << "Fully_Connect Finally" << endl;
+    /**
+     * L2 normalization
+     */
     mydataFmt sq = 0, sum = 0;
     for (int i = 0; i < Num; ++i) {
         sq = pow(output->pdata[i], 2);
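The loop above begins accumulating squared components of the embedding; the hunk is cut off here, but the standard FaceNet post-processing it suggests is unit L2 normalization. A self-contained sketch, where the names and the epsilon guard are assumptions rather than repo code:

#include <cmath>
#include <vector>

// Sketch: scale an embedding to unit L2 norm, x <- x / sqrt(sum(x_i^2) + eps).
void l2NormalizeSketch(std::vector<float> &embedding, float eps = 1e-10f) {
    float sum = 0;
    for (float v : embedding)
        sum += v * v;                         // accumulate squared magnitude
    float inv = 1.0f / std::sqrt(sum + eps);  // eps guards against divide-by-zero
    for (float &v : embedding)
        v *= inv;
}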

src/network.cpp Normal file → Executable file

@@ -1,5 +1,10 @@
 #include "network.h"

+/**
+ * Add the bias after a convolution
+ * @param pbox  feature map
+ * @param pbias bias values
+ */
 void addbias(struct pBox *pbox, mydataFmt *pbias) {
     if (pbox->pdata == NULL) {
         cout << "Relu feature is NULL!!" << endl;
@@ -22,6 +27,11 @@ void addbias(struct pBox *pbox, mydataFmt *pbias) {
     }
 }

+/**
+ * Initialization for converting a Mat image into a pBox struct
+ * @param image image in Mat format
+ * @param pbox  the pBox struct
+ */
 void image2MatrixInit(Mat &image, struct pBox *pbox) {
     if ((image.data == NULL) || (image.type() != CV_8UC3)) {
         cout << "image's type is wrong!!Please set CV_8UC3" << endl;
@@ -36,6 +46,12 @@ void image2MatrixInit(Mat &image, struct pBox *pbox) {
     memset(pbox->pdata, 0, pbox->channel * pbox->height * pbox->width * sizeof(mydataFmt));
 }

+/**
+ * Convert a Mat image into a pBox struct
+ * @param image image in Mat format
+ * @param pbox  the pBox struct
+ * @param num   selects mtcnn or facenet: 0 = mtcnn, non-zero = facenet (default 0)
+ */
 void image2Matrix(const Mat &image, const struct pBox *pbox, int num) {
     if ((image.data == NULL) || (image.type() != CV_8UC3)) {
         cout << "image's type is wrong!!Please set CV_8UC3" << endl;
@@ -78,6 +94,12 @@ void image2Matrix(const Mat &image, const struct pBox *pbox, int num) {
     }
 }

+/**
+ * Compute the mean and standard deviation of the image pixels
+ * @param image the image
+ * @param p     mean
+ * @param q     standard deviation
+ */
 void MeanAndDev(const Mat &image, mydataFmt &p, mydataFmt &q) {
     mydataFmt meansum = 0, stdsum = 0;
     for (int rowI = 0; rowI < image.rows; rowI++) {
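MeanAndDev's outputs are the two statistics needed for FaceNet-style per-image prewhitening, x <- (x - mean) / std over every pixel and channel. A rough OpenCV equivalent for comparison; this is an illustration, not this repo's code path:

#include <opencv2/opencv.hpp>

// Sketch: per-image prewhitening using global mean/std across all channels.
void prewhitenSketch(const cv::Mat &src, cv::Mat &dst) {
    cv::Mat flat = src.reshape(1);            // view the 3 channels as one plane
    cv::Scalar mean, stddev;
    cv::meanStdDev(flat, mean, stddev);       // global statistics
    src.convertTo(dst, CV_32FC3, 1.0 / stddev[0], -mean[0] / stddev[0]);
}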
@@ -96,6 +118,14 @@ void MeanAndDev(const Mat &image, mydataFmt &p, mydataFmt &q) {
     q = sqrt(stdsum / (image.cols * image.rows * image.channels()));
 }

+/**
+ * Padding initialization for convolution
+ * @param pbox    input feature map
+ * @param outpBox output feature map
+ * @param pad     padding for a square kernel (-1 = irregular padding, 0 = no padding)
+ * @param padw    padding width for an irregular kernel
+ * @param padh    padding height for an irregular kernel
+ */
 void featurePadInit(const pBox *pbox, pBox *outpBox, const int pad, const int padw, const int padh) {
     if (pad < -1) {
         cout << "the data needn't to pad,please check you network!" << endl;
@@ -115,6 +145,14 @@ void featurePadInit(const pBox *pbox, pBox *outpBox, const int pad, const int pa
     memset(outpBox->pdata, 0, outpBox->channel * outpBox->height * RowByteNum);
 }

+/**
+ * Apply padding for convolution
+ * @param pbox    input feature map
+ * @param outpBox output feature map
+ * @param pad     padding for a square kernel (-1 = irregular padding, 0 = no padding)
+ * @param padw    padding width for an irregular kernel
+ * @param padh    padding height for an irregular kernel
+ */
 void featurePad(const pBox *pbox, pBox *outpBox, const int pad, const int padw, const int padh) {
     mydataFmt *p = outpBox->pdata;
     mydataFmt *pIn = pbox->pdata;
@@ -143,6 +181,12 @@ void featurePad(const pBox *pbox, pBox *outpBox, const int pad, const int padw,
     }
 }

+/**
+ * Convolution initialization
+ * @param weight  convolution weights
+ * @param pbox    input feature map
+ * @param outpBox output feature map
+ */
 void convolutionInit(const Weight *weight, pBox *pbox, pBox *outpBox) {
     outpBox->channel = weight->selfChannel;
     // ((imginputh - ckh + 2 * ckpad) / stride) + 1;
@@ -168,6 +212,12 @@ void convolutionInit(const Weight *weight, pBox *pbox, pBox *outpBox) {
     }
 }

+/**
+ * Convolution
+ * @param weight  convolution weights
+ * @param pbox    input feature map
+ * @param outpBox output feature map
+ */
 void convolution(const Weight *weight, const pBox *pbox, pBox *outpBox) {
     int ckh, ckw, ckd, stride, cknum, ckpad, imginputh, imginputw, imginputd, Nh, Nw;
     mydataFmt *ck, *imginput;
@@ -215,6 +265,14 @@ void convolution(const Weight *weight, const pBox *pbox, pBox *outpBox) {
     }
 }

+/**
+ * Max pooling initialization
+ * @param pbox       input feature map
+ * @param Matrix     output feature map
+ * @param kernelSize pooling kernel size
+ * @param stride     stride
+ * @param flag       mode flag
+ */
 void maxPoolingInit(const pBox *pbox, pBox *Matrix, int kernelSize, int stride, int flag) {
     if (flag == 1) {
         Matrix->width = floor((float) (pbox->width - kernelSize) / stride + 1);
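The branch visible here chooses the output-size rounding mode: flag == 1 takes the floor of (in - kernel) / stride + 1 (VALID-style), and the other branch presumably rounds up, as avePoolingInit below does with ceil. A one-liner capturing that rule; this helper is assumed, not repo code:

#include <cmath>

// Sketch: pooled output size under floor (flag == 1) or ceil rounding.
int pooledSizeSketch(int in, int kernel, int stride, bool useFloor) {
    float n = (float) (in - kernel) / stride + 1;
    return useFloor ? (int) std::floor(n) : (int) std::ceil(n);
}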
@@ -229,6 +287,13 @@ void maxPoolingInit(const pBox *pbox, pBox *Matrix, int kernelSize, int stride,
     memset(Matrix->pdata, 0, Matrix->channel * Matrix->width * Matrix->height * sizeof(mydataFmt));
 }

+/**
+ * Max pooling
+ * @param pbox       input feature map
+ * @param Matrix     output feature map
+ * @param kernelSize pooling kernel size
+ * @param stride     stride
+ */
 void maxPooling(const pBox *pbox, pBox *Matrix, int kernelSize, int stride) {
     if (pbox->pdata == NULL) {
         cout << "the feature2Matrix pbox is NULL!!" << endl;
@@ -281,6 +346,13 @@ void maxPooling(const pBox *pbox, pBox *Matrix, int kernelSize, int stride) {
     }
 }

+/**
+ * Average pooling initialization
+ * @param pbox       input feature map
+ * @param Matrix     output feature map
+ * @param kernelSize pooling kernel size
+ * @param stride     stride
+ */
 void avePoolingInit(const pBox *pbox, pBox *Matrix, int kernelSize, int stride) {
     Matrix->width = ceil((float) (pbox->width - kernelSize) / stride + 1);
     Matrix->height = ceil((float) (pbox->height - kernelSize) / stride + 1);
@@ -290,6 +362,13 @@ void avePoolingInit(const pBox *pbox, pBox *Matrix, int kernelSize, int stride)
     memset(Matrix->pdata, 0, Matrix->channel * Matrix->width * Matrix->height * sizeof(mydataFmt));
 }

+/**
+ * Average pooling
+ * @param pbox       input feature map
+ * @param Matrix     output feature map
+ * @param kernelSize pooling kernel size
+ * @param stride     stride
+ */
 void avePooling(const pBox *pbox, pBox *Matrix, int kernelSize, int stride) {
     if (pbox->pdata == NULL) {
         cout << "the feature2Matrix pbox is NULL!!" << endl;
@@ -321,10 +400,53 @@ void avePooling(const pBox *pbox, pBox *Matrix, int kernelSize, int stride) {
} }
} }
/**
* 激活函数 有系数 初始化
* @param prelu 激活函数权重
* @param width 长度
*/
void pReluInit(struct pRelu *prelu, int width) {
prelu->width = width;
prelu->pdata = (mydataFmt *) malloc(width * sizeof(mydataFmt));
if (prelu->pdata == NULL)cout << "prelu apply for memory failed!!!!";
memset(prelu->pdata, 0, width * sizeof(mydataFmt));
}
/**
 * Parametric activation (PReLU)
 * @param pbox input feature map
 * @param pbias bias
 * @param prelu_gmma per-channel activation coefficients
 */
void prelu(struct pBox *pbox, mydataFmt *pbias, mydataFmt *prelu_gmma) {
    if (pbox->pdata == NULL) {
        cout << "the pRelu feature is NULL!!" << endl;
        return;
    }
    if (pbias == NULL) {
        cout << "the pRelu bias is NULL!!" << endl;
        return;
    }
    mydataFmt *op = pbox->pdata;
    mydataFmt *pb = pbias;
    mydataFmt *pg = prelu_gmma;
    long dis = pbox->width * pbox->height;
    for (int channel = 0; channel < pbox->channel; channel++) {
        for (int col = 0; col < dis; col++) {
            *op = *op + *pb;
            *op = (*op > 0) ? (*op) : ((*op) * (*pg));
            op++;
        }
        pb++;
        pg++;
    }
}
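// Minimal usage sketch of the transform above (sizes and values are
// illustrative): each channel first gets its bias added, then
// y = x > 0 ? x : gamma[c] * x.
static void preluDemo() {
    mydataFmt data[4] = {1.0f, -2.0f, 3.0f, -4.0f}; // one 2x2 channel
    pBox box;
    box.pdata = data;
    box.width = 2;
    box.height = 2;
    box.channel = 1;
    mydataFmt bias[1] = {0.5f};
    mydataFmt gamma[1] = {0.25f};
    prelu(&box, bias, gamma);
    // data is now {1.5f, -0.375f, 3.5f, -0.875f}
}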
/**
 * Activation without learned coefficients (ReLU)
 * @param pbox input feature map
 * @param pbias bias
 */
void relu(struct pBox *pbox, mydataFmt *pbias) {
    if (pbox->pdata == NULL) {
@@ -349,6 +471,11 @@ void relu(struct pBox *pbox, mydataFmt *pbias) {
    }
}
/**
 * Fully connected layer initialization
 * @param weight weight parameters
 * @param outpBox output feature map
 */
void fullconnectInit(const Weight *weight, pBox *outpBox) {
    outpBox->channel = weight->selfChannel;
    outpBox->width = 1;
@@ -358,6 +485,12 @@ void fullconnectInit(const Weight *weight, pBox *outpBox) {
    memset(outpBox->pdata, 0, weight->selfChannel * sizeof(mydataFmt));
}
/**
 * Fully connected layer
 * @param weight weight parameters
 * @param pbox input feature map
 * @param outpBox output feature map
 */
void fullconnect(const Weight *weight, const pBox *pbox, pBox *outpBox) {
    if (pbox->pdata == NULL) {
        cout << "the fc feature is NULL!!" << endl;
@@ -376,6 +509,14 @@ void fullconnect(const Weight *weight, const pBox *pbox, pBox *outpBox) {
            outpBox->pdata);
}
/**
 * Multiply a 1-D array by a 2-D matrix
 * @param matrix input feature map (flattened)
 * @param v weight matrix
 * @param v_w width of the weight matrix
 * @param v_h height of the weight matrix
 * @param p output feature map
 */
void vectorXmatrix(mydataFmt *matrix, mydataFmt *v, int v_w, int v_h, mydataFmt *p) {
    for (int i = 0; i < v_h; i++) {
        p[i] = 0;
@@ -385,6 +526,13 @@ void vectorXmatrix(mydataFmt *matrix, mydataFmt *v, int v_w, int v_h, mydataFmt
    }
}
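// Dot-product sketch of fullconnect/vectorXmatrix (a row-major v_h x v_w
// weight layout is assumed; values are illustrative): every output p[i]
// is the inner product of the flattened input with one weight row.
static void fcDemo() {
    mydataFmt x[3] = {1.0f, 2.0f, 3.0f};  // flattened input, v_w = 3
    mydataFmt v[6] = {1.0f, 0.0f, 0.0f,   // v_h = 2 weight rows
                      0.0f, 1.0f, 1.0f};
    mydataFmt p[2];
    vectorXmatrix(x, v, 3, 2, p);         // under that layout, p == {1.0f, 5.0f}
}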
/**
 * Read a model file
 * @param filename file path
 * @param dataNumber parameter count for each block
 * @param pTeam destination array for each block
 * @param length number of blocks in dataNumber/pTeam
 */
void readData(string filename, long dataNumber[], mydataFmt *pTeam[], int length) {
    ifstream in(filename.data());
    string line;
@@ -434,6 +582,20 @@ void readData(string filename, long dataNumber[], mydataFmt *pTeam[], int length
    }
}
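// File-format note (inferred from the usage in Stem, not a specification):
// the weight file is plain text, and readData streams dataNumber[i]
// consecutive values into pTeam[i], so each conv's kernel block is
// followed by its three BN tensors in the order the tables list them.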
/**
 * Convolution and fully connected layer initialization
 * @param weight weights
 * @param schannel number of kernels (output channels)
 * @param lchannel number of feature maps in the previous layer (input channels)
 * @param kersize kernel size
 * @param stride convolution stride
 * @param pad whether the convolution pads its input
 * @param w kernel width
 * @param h kernel height
 * @param padw padding width
 * @param padh padding height
 * @return number of parameters
 */
// w sc lc ks s p kw kh
long ConvAndFcInit(struct Weight *weight, int schannel, int lchannel, int kersize,
                   int stride, int pad, int w, int h, int padw, int padh) {
@@ -461,6 +623,159 @@ long ConvAndFcInit(struct Weight *weight, int schannel, int lchannel, int kersiz
    return byteLenght;
}
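// Parameter-count sketch: for a square kernel the returned length is
// presumably schannel * lchannel * kersize * kersize, which for Stem's
// conv1 (32 kernels, 3 input channels, 3x3) matches readData's first
// block of 32 * 3 * 3 * 3 floats.
static_assert(32 * 3 * 3 * 3 == 864, "Stem conv1 weight count");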
/**
 * Channel-wise softmax
 * @param pbox feature map, normalized in place
 */
void softmax(const struct pBox *pbox) {
    if (pbox->pdata == NULL) {
        cout << "the softmax's pdata is NULL, please check!" << endl;
        return;
    }
    mydataFmt *p2D = pbox->pdata;
    mydataFmt *p3D = NULL;
    long mapSize = pbox->width * pbox->height;
    mydataFmt eleSum = 0;
    for (int row = 0; row < pbox->height; row++) {
        for (int col = 0; col < pbox->width; col++) {
            eleSum = 0;
            for (int channel = 0; channel < pbox->channel; channel++) {
                p3D = p2D + channel * mapSize;
                *p3D = exp(*p3D);
                eleSum += *p3D;
            }
            for (int channel = 0; channel < pbox->channel; channel++) {
                p3D = p2D + channel * mapSize;
                *p3D = (*p3D) / eleSum;
            }
            p2D++;
        }
    }
}
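// Per-position sketch of the channel softmax above (values illustrative):
// a 1x1 map with two channel activations {0, ln 3} becomes
// {1, 3} / (1 + 3) = {0.25, 0.75}.
static void softmaxDemo() {
    mydataFmt data[2] = {0.0f, logf(3.0f)};
    pBox box;
    box.pdata = data;
    box.width = 1;
    box.height = 1;
    box.channel = 2;
    softmax(&box);
    // data is now approximately {0.25f, 0.75f}
}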
bool cmpScore(struct orderScore lsh, struct orderScore rsh) {
    return lsh.score < rsh.score;
}
/**
 * Non-maximum suppression
 * @param boundingBox_ candidate boxes
 * @param bboxScore_ box scores with their original indices
 * @param overlap_threshold IoU above which a box is suppressed
 * @param modelname IoU variant, "Union" or "Min"
 */
void nms(vector<struct Bbox> &boundingBox_, vector<struct orderScore> &bboxScore_, const mydataFmt overlap_threshold,
         string modelname) {
    if (boundingBox_.empty()) {
        return;
    }
    std::vector<int> heros;
    // sort ascending by score
    sort(bboxScore_.begin(), bboxScore_.end(), cmpScore);
    int order = 0;
    float IOU = 0;
    float maxX = 0;
    float maxY = 0;
    float minX = 0;
    float minY = 0;
    while (bboxScore_.size() > 0) {
        order = bboxScore_.back().oriOrder;
        bboxScore_.pop_back();
        if (order < 0) continue;
        heros.push_back(order);
        boundingBox_.at(order).exist = false; // mark the kept box so it is skipped below
        for (int num = 0; num < boundingBox_.size(); num++) {
            if (boundingBox_.at(num).exist) {
                // intersection rectangle
                maxX = (boundingBox_.at(num).x1 > boundingBox_.at(order).x1) ? boundingBox_.at(num).x1
                                                                             : boundingBox_.at(order).x1;
                maxY = (boundingBox_.at(num).y1 > boundingBox_.at(order).y1) ? boundingBox_.at(num).y1
                                                                             : boundingBox_.at(order).y1;
                minX = (boundingBox_.at(num).x2 < boundingBox_.at(order).x2) ? boundingBox_.at(num).x2
                                                                             : boundingBox_.at(order).x2;
                minY = (boundingBox_.at(num).y2 < boundingBox_.at(order).y2) ? boundingBox_.at(num).y2
                                                                             : boundingBox_.at(order).y2;
                // maxX and maxY reused as the intersection width and height
                maxX = ((minX - maxX + 1) > 0) ? (minX - maxX + 1) : 0;
                maxY = ((minY - maxY + 1) > 0) ? (minY - maxY + 1) : 0;
                // IOU reused for the intersection area of the two boxes
                IOU = maxX * maxY;
                if (!modelname.compare("Union"))
                    IOU = IOU / (boundingBox_.at(num).area + boundingBox_.at(order).area - IOU);
                else if (!modelname.compare("Min")) {
                    IOU = IOU / ((boundingBox_.at(num).area < boundingBox_.at(order).area)
                                     ? boundingBox_.at(num).area
                                     : boundingBox_.at(order).area);
                }
                if (IOU > overlap_threshold) {
                    boundingBox_.at(num).exist = false;
                    for (vector<orderScore>::iterator it = bboxScore_.begin(); it != bboxScore_.end(); it++) {
                        if ((*it).oriOrder == num) {
                            (*it).oriOrder = -1;
                            break;
                        }
                    }
                }
            }
        }
    }
    for (int i = 0; i < heros.size(); i++)
        boundingBox_.at(heros.at(i)).exist = true;
}
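// Usage sketch (coordinates and scores are illustrative): two heavily
// overlapping boxes and one disjoint box; with the "Union" IoU and a 0.5
// threshold, only the weaker of the overlapping pair is suppressed.
static void nmsDemo() {
    vector<struct Bbox> boxes(3);
    vector<struct orderScore> scores(3);
    float coords[3][4] = {{0, 0, 10, 10}, {1, 1, 11, 11}, {50, 50, 60, 60}};
    float conf[3] = {0.9f, 0.8f, 0.7f};
    for (int i = 0; i < 3; i++) {
        boxes[i].x1 = coords[i][0];
        boxes[i].y1 = coords[i][1];
        boxes[i].x2 = coords[i][2];
        boxes[i].y2 = coords[i][3];
        boxes[i].exist = true;
        boxes[i].area = (boxes[i].x2 - boxes[i].x1) * (boxes[i].y2 - boxes[i].y1);
        scores[i].score = conf[i];
        scores[i].oriOrder = i;
    }
    nms(boxes, scores, 0.5f, "Union");
    // boxes[0].exist and boxes[2].exist are true, boxes[1].exist is false
}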
void refineAndSquareBbox(vector<struct Bbox> &vecBbox, const int &height, const int &width) {
    if (vecBbox.empty()) {
        cout << "Bbox is empty!!" << endl;
        return;
    }
    float bbw = 0, bbh = 0, maxSide = 0;
    float h = 0, w = 0;
    float x1 = 0, y1 = 0, x2 = 0, y2 = 0;
    for (vector<struct Bbox>::iterator it = vecBbox.begin(); it != vecBbox.end(); it++) {
        if ((*it).exist) {
            bbh = (*it).x2 - (*it).x1 + 1;
            bbw = (*it).y2 - (*it).y1 + 1;
            x1 = (*it).x1 + (*it).regreCoord[1] * bbh;
            y1 = (*it).y1 + (*it).regreCoord[0] * bbw;
            x2 = (*it).x2 + (*it).regreCoord[3] * bbh;
            y2 = (*it).y2 + (*it).regreCoord[2] * bbw;
            h = x2 - x1 + 1;
            w = y2 - y1 + 1;
            maxSide = (h > w) ? h : w;
            x1 = x1 + h * 0.5 - maxSide * 0.5;
            y1 = y1 + w * 0.5 - maxSide * 0.5;
            (*it).x2 = round(x1 + maxSide - 1);
            (*it).y2 = round(y1 + maxSide - 1);
            (*it).x1 = round(x1);
            (*it).y1 = round(y1);
            // boundary check
            if ((*it).x1 < 0) (*it).x1 = 0;
            if ((*it).y1 < 0) (*it).y1 = 0;
            if ((*it).x2 > height) (*it).x2 = height - 1;
            if ((*it).y2 > width) (*it).y2 = width - 1;
            it->area = (it->x2 - it->x1) * (it->y2 - it->y1);
        }
    }
}
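// Squaring sketch (values illustrative): after regression a 20x40 box is
// widened to 40x40 by moving the short side's origin back by
// (40 - 20) / 2 = 10, so the centre stays fixed before the boundary
// clamp; note the x/y roles follow the MTCNN row/column convention used
// above (bbh comes from x1/x2, bbw from y1/y2).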
/**
 * Branch fusion (channel concatenation) initialization
 * @param output output feature map
 * @param c1 input feature map
 * @param c2 input feature map
 * @param c3 input feature map
 * @param c4 input feature map
 */
void conv_mergeInit(pBox *output, pBox *c1, pBox *c2, pBox *c3, pBox *c4) {
    output->channel = 0;
    output->height = c1->height;
@@ -482,6 +797,14 @@ void conv_mergeInit(pBox *output, pBox *c1, pBox *c2, pBox *c3, pBox *c4) {
    memset(output->pdata, 0, output->width * output->height * output->channel * sizeof(mydataFmt));
}
/**
 * Branch fusion (channel concatenation)
 * @param output output feature map
 * @param c1 input feature map
 * @param c2 input feature map
 * @param c3 input feature map
 * @param c4 input feature map
 */
void conv_merge(pBox *output, pBox *c1, pBox *c2, pBox *c3, pBox *c4) {
    // cout << "output->channel:" << output->channel << endl;
    if (c1 != 0) {
@@ -511,7 +834,16 @@ void conv_merge(pBox *output, pBox *c1, pBox *c2, pBox *c3, pBox *c4) {
    } else { cout << "conv_mergeInit" << endl; }
}
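// Concatenation sketch (sizes illustrative, using the c3/c4 defaults from
// network.h): merging a 32-channel and a 64-channel map of equal spatial
// size yields one 96-channel map, copied branch by branch.
static void mergeDemo() {
    pBox a, b, out;
    a.width = b.width = 17;
    a.height = b.height = 17;
    a.channel = 32;
    b.channel = 64;
    a.pdata = (mydataFmt *) calloc(17 * 17 * 32, sizeof(mydataFmt));
    b.pdata = (mydataFmt *) calloc(17 * 17 * 64, sizeof(mydataFmt));
    conv_mergeInit(&out, &a, &b);
    conv_merge(&out, &a, &b); // out.channel == 96
    free(a.pdata);
    free(b.pdata);
    free(out.pdata);
}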
/**
 * Scaled residual addition initialization
 * @param inpbox input feature map
 * @param temppbox input feature map
 * @param outpBox output feature map
 */
void mulandaddInit(const pBox *inpbox, const pBox *temppbox, pBox *outpBox) {
    if (!((inpbox->width == temppbox->width) && (inpbox->height == temppbox->height) &&
          (inpbox->channel == temppbox->channel)))
        cout << "mulandaddInit failed: input shapes differ!!" << endl;
    outpBox->channel = temppbox->channel;
    outpBox->width = temppbox->width;
    outpBox->height = temppbox->height;
@@ -520,6 +852,13 @@ void mulandaddInit(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, floa
    memset(outpBox->pdata, 0, outpBox->width * outpBox->height * outpBox->channel * sizeof(mydataFmt));
}
/**
 * Scaled residual addition
 * @param inpbox input feature map
 * @param temppbox input feature map
 * @param outpBox output feature map
 * @param scale scaling coefficient
 */
void mulandadd(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float scale) {
    mydataFmt *ip = inpbox->pdata;
    mydataFmt *tp = temppbox->pdata;
@@ -530,7 +869,15 @@ void mulandadd(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float sc
    }
}
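// The loop above applies an element-wise update of the form
// out = one input + scale * the other (the exact operand roles are
// inferred from the Inception-ResNet residual scaling this implements,
// where fractional scales damp the branch before the sum; with the
// network.h default scale = 1 it reduces to a plain element-wise add).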
/**
 * Batch normalization initialization
 * @param beta beta (shift)
 * @param mean running mean
 * @param var running variance
 * @param width number of parameters per tensor
 */
void BatchNormInit(struct BN *beta, struct BN *mean, struct BN *var, int width) {
    var->width = width;
    var->pdata = (mydataFmt *) malloc(width * sizeof(mydataFmt));
    if (var->pdata == NULL) cout << "BatchNorm memory allocation failed!!!!";
@@ -547,7 +894,14 @@ void BatchNormInit(struct BN *var, struct BN *mean, struct BN *beta, int width)
    memset(beta->pdata, 0, width * sizeof(mydataFmt));
}
/**
 * Batch normalization
 * @param pbox input feature map
 * @param beta beta (shift)
 * @param mean running mean
 * @param var running variance
 */
void BatchNorm(struct pBox *pbox, struct BN *beta, struct BN *mean, struct BN *var) {
    if (pbox->pdata == NULL) {
        cout << "the BatchNorm feature is NULL!!" << endl;
        return;
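// Per-channel inference sketch of the normalization that follows: each
// value is rewritten as y = (x - mean[c]) / sqrt(var[c] + eps) + beta[c];
// the epsilon and the absence of a separate gamma tensor are assumptions
// consistent with the three-tensor (beta, mean, var) layout loaded in Stem.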

20
src/network.h Normal file → Executable file

@@ -24,6 +24,8 @@ void avePooling(const pBox *pbox, pBox *Matrix, int kernelSize, int stride);
void featurePad(const pBox *pbox, pBox *outpBox, const int pad, const int padw = 0, const int padh = 0);
void prelu(struct pBox *pbox, mydataFmt *pbias, mydataFmt *prelu_gmma);
void relu(struct pBox *pbox, mydataFmt *pbias);
void fullconnect(const Weight *weight, const pBox *pbox, pBox *outpBox);
@@ -33,6 +35,10 @@ void readData(string filename, long dataNumber[], mydataFmt *pTeam[], int length
long ConvAndFcInit(struct Weight *weight, int schannel, int lchannel, int kersize, int stride, int pad,
                   int w = 0, int h = 0, int padw = 0, int padh = 0);
void pReluInit(struct pRelu *prelu, int width);
void softmax(const struct pBox *pbox);
void image2MatrixInit(Mat &image, struct pBox *pbox);
void featurePadInit(const pBox *pbox, pBox *outpBox, const int pad, const int padw = 0, const int padh = 0);
@@ -45,6 +51,13 @@ void convolutionInit(const Weight *weight, pBox *pbox, pBox *outpBox);
void fullconnectInit(const Weight *weight, pBox *outpBox);
bool cmpScore(struct orderScore lsh, struct orderScore rsh);
void nms(vector<struct Bbox> &boundingBox_, vector<struct orderScore> &bboxScore_, const mydataFmt overlap_threshold,
string modelname = "Union");
void refineAndSquareBbox(vector<struct Bbox> &vecBbox, const int &height, const int &width);
void vectorXmatrix(mydataFmt *matrix, mydataFmt *v, int v_w, int v_h, mydataFmt *p);
void convolution(const Weight *weight, const pBox *pbox, pBox *outpBox);
@@ -55,11 +68,12 @@ void conv_merge(pBox *output, pBox *c1 = 0, pBox *c2 = 0, pBox *c3 = 0, pBox *c4
void conv_mergeInit(pBox *output, pBox *c1 = 0, pBox *c2 = 0, pBox *c3 = 0, pBox *c4 = 0);
void mulandaddInit(const pBox *inpbox, const pBox *temppbox, pBox *outpBox);
void mulandadd(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float scale = 1);
void BatchNormInit(struct BN *beta, struct BN *mean, struct BN *var, int width);
void BatchNorm(struct pBox *pbox, struct BN *beta, struct BN *mean, struct BN *var);
#endif