Add comments

Co-Authored-By: Chris Kong <609027949@qq.com>
2020-01-03 17:39:55 +08:00
parent 52d50c4544
commit 528cc0d788
4 changed files with 243 additions and 15 deletions

View File

@@ -4,7 +4,11 @@
#include "facenet.h"
/**
* Stem network
* @param image input image
* @param output output feature map (passed as a pointer)
*/
void facenet::Stem(Mat &image, pBox *output) {
pBox *rgb = new pBox;
pBox *conv1_out = new pBox;
@@ -161,6 +165,13 @@ void facenet::Stem(Mat &image, pBox *output) {
freeBN(conv6_beta);
}
/**
* Inception-ResNet-A block
* @param input input feature map
* @param output output feature map
* @param filepath path to the model weight file
* @param scale residual scaling factor
*/
void facenet::Inception_resnet_A(pBox *input, pBox *output, string filepath, float scale) {
pBox *conv1_out = new pBox;
pBox *conv2_out = new pBox;
@@ -280,7 +291,7 @@ void facenet::Inception_resnet_A(pBox *input, pBox *output, string filepath, flo
convolution(conv7_wb, conv7_out, conv8_out);
addbias(conv8_out, conv7_wb->pbias);
mulandaddInit(input, conv8_out, output, scale);
mulandaddInit(input, conv8_out, output);
mulandadd(input, conv8_out, output, scale);
relu(output, conv8_wb->pbias);
@@ -327,6 +338,11 @@ void facenet::Inception_resnet_A(pBox *input, pBox *output, string filepath, flo
freeBN(conv6_beta);
}
/**
* Reduction-A block
* @param input input feature map
* @param output output feature map
*/
void facenet::Reduction_A(pBox *input, pBox *output) {
pBox *conv1_out = new pBox;
pBox *conv2_out = new pBox;
@@ -429,6 +445,13 @@ void facenet::Reduction_A(pBox *input, pBox *output) {
freeBN(conv4_beta);
}
/**
* Inception-ResNet-B block
* @param input input feature map
* @param output output feature map
* @param filepath path to the model weight file
* @param scale residual scaling factor
*/
void facenet::Inception_resnet_B(pBox *input, pBox *output, string filepath, float scale) {
pBox *conv1_out = new pBox;
pBox *conv2_out = new pBox;
@@ -518,7 +541,7 @@ void facenet::Inception_resnet_B(pBox *input, pBox *output, string filepath, flo
convolution(conv5_wb, conv5_out, conv6_out);
addbias(conv6_out, conv5_wb->pbias);
mulandaddInit(input, conv6_out, output, scale);
mulandaddInit(input, conv6_out, output);
mulandadd(input, conv6_out, output, scale);
relu(output, conv6_wb->pbias);
@@ -550,6 +573,11 @@ void facenet::Inception_resnet_B(pBox *input, pBox *output, string filepath, flo
freeBN(conv4_beta);
}
/**
* Reduction-B block
* @param input input feature map
* @param output output feature map
*/
void facenet::Reduction_B(pBox *input, pBox *output) {
pBox *conv1_out = new pBox;
pBox *conv2_out = new pBox;
@@ -714,6 +742,13 @@ void facenet::Reduction_B(pBox *input, pBox *output) {
freeBN(conv7_beta);
}
/**
* Inception-ResNet-C block
* @param input input feature map
* @param output output feature map
* @param filepath path to the model weight file
* @param scale residual scaling factor
*/
void facenet::Inception_resnet_C(pBox *input, pBox *output, string filepath, float scale) {
pBox *conv1_out = new pBox;
pBox *conv2_out = new pBox;
@@ -804,7 +839,7 @@ void facenet::Inception_resnet_C(pBox *input, pBox *output, string filepath, flo
convolution(conv5_wb, conv5_out, conv6_out);
addbias(conv6_out, conv5_wb->pbias);
mulandaddInit(input, conv6_out, output, scale);
mulandaddInit(input, conv6_out, output);
mulandadd(input, conv6_out, output, scale);
relu(output, conv6_wb->pbias);
@@ -836,6 +871,13 @@ void facenet::Inception_resnet_C(pBox *input, pBox *output, string filepath, flo
freeBN(conv4_beta);
}
/**
* Inception-ResNet-C block without the final activation
* @param input input feature map
* @param output output feature map
* @param filepath path to the model weight file
*/
void facenet::Inception_resnet_C_None(pBox *input, pBox *output, string filepath) {
pBox *conv1_out = new pBox;
pBox *conv2_out = new pBox;
@@ -920,7 +962,7 @@ void facenet::Inception_resnet_C_None(pBox *input, pBox *output, string filepath
convolution(conv5_wb, conv5_out, conv6_out);
addbias(conv6_out, conv5_wb->pbias);
mulandaddInit(input, conv6_out, output, 1);
mulandaddInit(input, conv6_out, output);
mulandadd(input, conv6_out, output);
freepBox(conv1_out);
@@ -950,12 +992,22 @@ void facenet::Inception_resnet_C_None(pBox *input, pBox *output, string filepath
freeBN(conv4_beta);
}
/**
* Average pooling
* @param input input feature map
* @param output output feature map
*/
void facenet::AveragePooling(pBox *input, pBox *output) {
// cout << "size:" << input->height << endl;
avePoolingInit(input, output, input->height, 2);
avePooling(input, output, input->height, 2);
}
/**
* Flatten: convert a multi-dimensional feature map into one dimension
* @param input input feature map
* @param output flattened output feature map
*/
void facenet::Flatten(pBox *input, pBox *output) {
output->width = input->channel;
output->height = 1;
@@ -965,6 +1017,12 @@ void facenet::Flatten(pBox *input, pBox *output) {
memcpy(output->pdata, input->pdata, output->channel * output->width * output->height * sizeof(mydataFmt));
}
/**
* Fully connected layer
* @param input input feature map
* @param output output feature map
* @param filepath path to the model parameter file
*/
// parameters not yet configured
void facenet::fully_connect(pBox *input, pBox *output, string filepath) {
struct Weight *conv1_wb = new Weight;
@@ -1009,13 +1067,17 @@ void facenet::printData(pBox *in) {
cout << "printData" << endl;
}
/**
* Entry point for running the facenet network
* @param image input image
* @param o output embedding vector
* @param count index of the current picture (used for logging)
*/
void facenet::run(Mat &image, vector<mydataFmt> &o, int count) {
cout << "=====This is No." + to_string(count) + " Picture=====" << endl;
pBox *output = new pBox;
pBox *input;
Stem(image, output);
// printData(output);
// return;
cout << "Stem Finally" << endl;
input = output;
output = new pBox;
@@ -1030,7 +1092,6 @@ void facenet::run(Mat &image, vector<mydataFmt> &o, int count) {
Reduction_A(input, output);
cout << "Reduction_A Finally" << endl;
input = output;
// freepBox(output);
output = new pBox;
for (int j = 0; j < 10; ++j) {
// model_128/block17_1_list.txt
@@ -1048,10 +1109,8 @@ void facenet::run(Mat &image, vector<mydataFmt> &o, int count) {
for (int k = 0; k < 5; ++k) {
// model_128/block8_1_list.txt
string filepath = "../model_" + to_string(Num) + "/block8_" + to_string((k + 1)) + "_list.txt";
// cout << filepath << endl;
Inception_resnet_C(input, output, filepath, 0.2);
input = output;
// freepBox(output);
output = new pBox;
}
cout << "Inception_resnet_C Finally" << endl;
@@ -1070,6 +1129,10 @@ void facenet::run(Mat &image, vector<mydataFmt> &o, int count) {
output = new pBox;
fully_connect(input, output, "../model_" + to_string(Num) + "/Bottleneck_list.txt");
cout << "Fully_Connect Finally" << endl;
/**
* L2 normalization
*/
mydataFmt sq = 0, sum = 0;
for (int i = 0; i < Num; ++i) {
sq = pow(output->pdata[i], 2);

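Note on the last hunk above: the loop that starts there accumulates the squared components for L2-normalizing the Num-dimensional embedding, but it is cut off in this view. Below is a minimal, self-contained sketch of the normalization it presumably performs, assuming the normalized values are what run() ultimately writes into the output vector o; the helper name and the epsilon are illustrative, not taken from the repository.

#include <cmath>
#include <vector>

typedef float mydataFmt;

// L2-normalize the raw embedding: o[i] = x[i] / sqrt(sum_j x[j]^2)
void l2Normalize(const mydataFmt *x, int num, std::vector<mydataFmt> &o) {
    mydataFmt sum = 0;
    for (int i = 0; i < num; ++i)
        sum += x[i] * x[i];                     // same accumulation as pow(output->pdata[i], 2) above
    mydataFmt norm = std::sqrt(sum) + 1e-10f;   // assumed small epsilon to avoid division by zero
    o.resize(num);
    for (int i = 0; i < num; ++i)
        o[i] = x[i] / norm;
}

With Num = 128 (the model_128 parameter files used in run()), this yields the unit-length face embedding the network outputs.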
View File

@@ -1,5 +1,10 @@
#include "network.h"
/**
* Add the bias after a convolution
* @param pbox feature map
* @param pbias bias values
*/
void addbias(struct pBox *pbox, mydataFmt *pbias) {
if (pbox->pdata == NULL) {
cout << "Relu feature is NULL!!" << endl;
@@ -22,6 +27,11 @@ void addbias(struct pBox *pbox, mydataFmt *pbias) {
}
}
/**
* Initialization for converting a Mat image into a pBox struct
* @param image image in Mat format
* @param pbox the pBox struct
*/
void image2MatrixInit(Mat &image, struct pBox *pbox) {
if ((image.data == NULL) || (image.type() != CV_8UC3)) {
cout << "image's type is wrong!!Please set CV_8UC3" << endl;
@@ -36,6 +46,12 @@ void image2MatrixInit(Mat &image, struct pBox *pbox) {
memset(pbox->pdata, 0, pbox->channel * pbox->height * pbox->width * sizeof(mydataFmt));
}
/**
* Convert a Mat image into a pBox struct
* @param image image in Mat format
* @param pbox the pBox struct
* @param num selects mtcnn or facenet preprocessing: 0 = mtcnn, non-zero = facenet (default 0)
*/
void image2Matrix(const Mat &image, const struct pBox *pbox, int num) {
if ((image.data == NULL) || (image.type() != CV_8UC3)) {
cout << "image's type is wrong!!Please set CV_8UC3" << endl;
@@ -78,6 +94,12 @@ void image2Matrix(const Mat &image, const struct pBox *pbox, int num) {
}
}
/**
* Compute the mean and standard deviation of the image pixels
* @param image input image
* @param p mean
* @param q standard deviation
*/
void MeanAndDev(const Mat &image, mydataFmt &p, mydataFmt &q) {
mydataFmt meansum = 0, stdsum = 0;
for (int rowI = 0; rowI < image.rows; rowI++) {
@@ -96,6 +118,14 @@ void MeanAndDev(const Mat &image, mydataFmt &p, mydataFmt &q) {
q = sqrt(stdsum / (image.cols * image.rows * image.channels()));
}
/**
* Initialize padding before convolution
* @param pbox input feature map
* @param outpBox output feature map
* @param pad padding for a square kernel (-1 = irregular padding, 0 = no padding)
* @param padw padding width for an irregular kernel
* @param padh padding height for an irregular kernel
*/
void featurePadInit(const pBox *pbox, pBox *outpBox, const int pad, const int padw, const int padh) {
if (pad < -1) {
cout << "the data needn't to pad,please check you network!" << endl;
@@ -115,6 +145,14 @@ void featurePadInit(const pBox *pbox, pBox *outpBox, const int pad, const int pa
memset(outpBox->pdata, 0, outpBox->channel * outpBox->height * RowByteNum);
}
/**
* Pad the feature map before convolution
* @param pbox input feature map
* @param outpBox output feature map
* @param pad padding for a square kernel (-1 = irregular padding, 0 = no padding)
* @param padw padding width for an irregular kernel
* @param padh padding height for an irregular kernel
*/
void featurePad(const pBox *pbox, pBox *outpBox, const int pad, const int padw, const int padh) {
mydataFmt *p = outpBox->pdata;
mydataFmt *pIn = pbox->pdata;
@@ -143,6 +181,12 @@ void featurePad(const pBox *pbox, pBox *outpBox, const int pad, const int padw,
}
}
/**
* Initialize the convolution output
* @param weight convolution weights
* @param pbox input feature map
* @param outpBox output feature map
*/
void convolutionInit(const Weight *weight, pBox *pbox, pBox *outpBox) {
outpBox->channel = weight->selfChannel;
// ((imginputh - ckh + 2 * ckpad) / stride) + 1;
@@ -168,6 +212,12 @@ void convolutionInit(const Weight *weight, pBox *pbox, pBox *outpBox) {
}
}
/**
* Convolution
* @param weight convolution weights
* @param pbox input feature map
* @param outpBox output feature map
*/
void convolution(const Weight *weight, const pBox *pbox, pBox *outpBox) {
int ckh, ckw, ckd, stride, cknum, ckpad, imginputh, imginputw, imginputd, Nh, Nw;
mydataFmt *ck, *imginput;
@@ -215,6 +265,14 @@ void convolution(const Weight *weight, const pBox *pbox, pBox *outpBox) {
}
}
/**
* Initialize max pooling
* @param pbox input feature map
* @param Matrix output feature map
* @param kernelSize pooling kernel size
* @param stride stride
* @param flag output-size rounding flag (1 = floor)
*/
void maxPoolingInit(const pBox *pbox, pBox *Matrix, int kernelSize, int stride, int flag) {
if (flag == 1) {
Matrix->width = floor((float) (pbox->width - kernelSize) / stride + 1);
@@ -229,6 +287,13 @@ void maxPoolingInit(const pBox *pbox, pBox *Matrix, int kernelSize, int stride,
memset(Matrix->pdata, 0, Matrix->channel * Matrix->width * Matrix->height * sizeof(mydataFmt));
}
/**
* Max pooling
* @param pbox input feature map
* @param Matrix output feature map
* @param kernelSize pooling kernel size
* @param stride stride
*/
void maxPooling(const pBox *pbox, pBox *Matrix, int kernelSize, int stride) {
if (pbox->pdata == NULL) {
cout << "the feature2Matrix pbox is NULL!!" << endl;
@@ -281,6 +346,13 @@ void maxPooling(const pBox *pbox, pBox *Matrix, int kernelSize, int stride) {
}
}
/**
* Initialize average pooling
* @param pbox input feature map
* @param Matrix output feature map
* @param kernelSize pooling kernel size
* @param stride stride
*/
void avePoolingInit(const pBox *pbox, pBox *Matrix, int kernelSize, int stride) {
Matrix->width = ceil((float) (pbox->width - kernelSize) / stride + 1);
Matrix->height = ceil((float) (pbox->height - kernelSize) / stride + 1);
@@ -290,6 +362,13 @@ void avePoolingInit(const pBox *pbox, pBox *Matrix, int kernelSize, int stride)
memset(Matrix->pdata, 0, Matrix->channel * Matrix->width * Matrix->height * sizeof(mydataFmt));
}
/**
* Average pooling
* @param pbox input feature map
* @param Matrix output feature map
* @param kernelSize pooling kernel size
* @param stride stride
*/
void avePooling(const pBox *pbox, pBox *Matrix, int kernelSize, int stride) {
if (pbox->pdata == NULL) {
cout << "the feature2Matrix pbox is NULL!!" << endl;
@@ -323,8 +402,8 @@ void avePooling(const pBox *pbox, pBox *Matrix, int kernelSize, int stride) {
/**
* Activation function (no coefficient)
* @param pbox
* @param pbias
* @param pbox input feature map
* @param pbias bias
*/
void relu(struct pBox *pbox, mydataFmt *pbias) {
if (pbox->pdata == NULL) {
@@ -349,6 +428,11 @@ void relu(struct pBox *pbox, mydataFmt *pbias) {
}
}
/**
* Initialize the fully connected layer
* @param weight weight parameters
* @param outpBox output feature map
*/
void fullconnectInit(const Weight *weight, pBox *outpBox) {
outpBox->channel = weight->selfChannel;
outpBox->width = 1;
@@ -358,6 +442,12 @@ void fullconnectInit(const Weight *weight, pBox *outpBox) {
memset(outpBox->pdata, 0, weight->selfChannel * sizeof(mydataFmt));
}
/**
* Fully connected layer
* @param weight weight parameters
* @param pbox input feature map
* @param outpBox output feature map
*/
void fullconnect(const Weight *weight, const pBox *pbox, pBox *outpBox) {
if (pbox->pdata == NULL) {
cout << "the fc feature is NULL!!" << endl;
@@ -376,6 +466,14 @@ void fullconnect(const Weight *weight, const pBox *pbox, pBox *outpBox) {
outpBox->pdata);
}
/**
* Multiply a 1-D vector by a 2-D matrix
* @param matrix input feature map
* @param v weights
* @param v_w width of the weight matrix
* @param v_h height of the weight matrix
* @param p output feature map
*/
void vectorXmatrix(mydataFmt *matrix, mydataFmt *v, int v_w, int v_h, mydataFmt *p) {
for (int i = 0; i < v_h; i++) {
p[i] = 0;
@@ -385,6 +483,13 @@ void vectorXmatrix(mydataFmt *matrix, mydataFmt *v, int v_w, int v_h, mydataFmt
}
}
/**
* Read a model parameter file
* @param filename file path
* @param dataNumber array of parameter counts
* @param pTeam array of parameter buffers
* @param length number of entries in the arrays
*/
void readData(string filename, long dataNumber[], mydataFmt *pTeam[], int length) {
ifstream in(filename.data());
string line;
@@ -434,6 +539,20 @@ void readData(string filename, long dataNumber[], mydataFmt *pTeam[], int length
}
}
/**
* Initialize convolution and fully connected weights
* @param weight weights
* @param schannel number of convolution kernels
* @param lchannel number of feature maps in the previous layer
* @param kersize convolution kernel size
* @param stride convolution stride
* @param pad whether the convolution is padded
* @param w convolution kernel width
* @param h convolution kernel height
* @param padw padding width
* @param padh padding height
* @return parameter length
*/
// w sc lc ks s p kw kh
long ConvAndFcInit(struct Weight *weight, int schannel, int lchannel, int kersize,
int stride, int pad, int w, int h, int padw, int padh) {
@@ -461,6 +580,14 @@ long ConvAndFcInit(struct Weight *weight, int schannel, int lchannel, int kersiz
return byteLenght;
}
/**
* Initialize the merging of residual branches
* @param output output feature map
* @param c1 input feature map
* @param c2 input feature map
* @param c3 input feature map
* @param c4 input feature map
*/
void conv_mergeInit(pBox *output, pBox *c1, pBox *c2, pBox *c3, pBox *c4) {
output->channel = 0;
output->height = c1->height;
@@ -482,6 +609,14 @@ void conv_mergeInit(pBox *output, pBox *c1, pBox *c2, pBox *c3, pBox *c4) {
memset(output->pdata, 0, output->width * output->height * output->channel * sizeof(mydataFmt));
}
/**
* Merge the residual branches
* @param output output feature map
* @param c1 input feature map
* @param c2 input feature map
* @param c3 input feature map
* @param c4 input feature map
*/
void conv_merge(pBox *output, pBox *c1, pBox *c2, pBox *c3, pBox *c4) {
// cout << "output->channel:" << output->channel << endl;
if (c1 != 0) {
@@ -511,7 +646,16 @@ void conv_merge(pBox *output, pBox *c1, pBox *c2, pBox *c3, pBox *c4) {
} else { cout << "conv_mergeInit" << endl; }
}
void mulandaddInit(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float scale) {
/**
* Initialize the scaled addition used by the residual blocks
* @param inpbox input feature map
* @param temppbox input feature map
* @param outpBox output feature map
*/
void mulandaddInit(const pBox *inpbox, const pBox *temppbox, pBox *outpBox) {
if (!((inpbox->width == temppbox->width) && (inpbox->height == temppbox->height) &&
(inpbox->channel == temppbox->channel)))
cout << "the mulandaddInit is failed!!" << endl;
outpBox->channel = temppbox->channel;
outpBox->width = temppbox->width;
outpBox->height = temppbox->height;
@@ -520,6 +664,13 @@ void mulandaddInit(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, floa
memset(outpBox->pdata, 0, outpBox->width * outpBox->height * outpBox->channel * sizeof(mydataFmt));
}
/**
* Scaled addition used by the residual blocks
* @param inpbox input feature map
* @param temppbox input feature map
* @param outpBox output feature map
* @param scale scaling factor
*/
void mulandadd(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float scale) {
mydataFmt *ip = inpbox->pdata;
mydataFmt *tp = temppbox->pdata;
@@ -530,6 +681,13 @@ void mulandadd(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float sc
}
}
/**
* Initialize batch normalization
* @param var variance
* @param mean mean
* @param beta beta
* @param width number of parameters
*/
void BatchNormInit(struct BN *var, struct BN *mean, struct BN *beta, int width) {
var->width = width;
var->pdata = (mydataFmt *) malloc(width * sizeof(mydataFmt));
@@ -547,6 +705,13 @@ void BatchNormInit(struct BN *var, struct BN *mean, struct BN *beta, int width)
memset(beta->pdata, 0, width * sizeof(mydataFmt));
}
/**
* Batch normalization
* @param pbox input feature map
* @param var variance
* @param mean mean
* @param beta beta
*/
void BatchNorm(struct pBox *pbox, struct BN *var, struct BN *mean, struct BN *beta) {
if (pbox->pdata == NULL) {
cout << "Relu feature is NULL!!" << endl;

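Note on the batch-norm hunks above: only var, mean, and beta are stored, so gamma is presumably fixed at 1 or already folded into the preceding convolution weights. Below is a minimal sketch of the per-channel inference-time transform this implies; the helper name and the epsilon value are assumptions, not taken from the repository.

#include <cmath>

typedef float mydataFmt;

// Inference-time batch normalization of a single value x belonging to channel c:
//   y = (x - mean[c]) / sqrt(var[c] + eps) + beta[c]
// gamma is assumed to be 1 (or folded into the convolution); eps is an assumed constant.
mydataFmt batchNormOne(mydataFmt x, const mydataFmt *var, const mydataFmt *mean,
                       const mydataFmt *beta, int c, mydataFmt eps = 1e-3f) {
    return (x - mean[c]) / std::sqrt(var[c] + eps) + beta[c];
}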
View File

@@ -55,7 +55,7 @@ void conv_merge(pBox *output, pBox *c1 = 0, pBox *c2 = 0, pBox *c3 = 0, pBox *c4
void conv_mergeInit(pBox *output, pBox *c1 = 0, pBox *c2 = 0, pBox *c3 = 0, pBox *c4 = 0);
void mulandaddInit(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float scale);
void mulandaddInit(const pBox *inpbox, const pBox *temppbox, pBox *outpBox);
void mulandadd(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float scale = 1);

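Note on these declarations: the commit drops the unused scale argument from mulandaddInit and leaves the scaled element-wise addition entirely to mulandadd. Below is a self-contained sketch of what the pair is assumed to compute, pieced together from the declarations above and the call sites in facenet.cpp; the pBox stand-in is illustrative only, the real definition lives in network.h.

#include <cstdlib>   // malloc
#include <cstring>   // memset

typedef float mydataFmt;

// Minimal stand-in for the project's pBox.
struct pBox {
    mydataFmt *pdata;
    int width, height, channel;
};

// Allocate the output with the same shape as the residual-branch output.
void mulandaddInit(const pBox *inpbox, const pBox *temppbox, pBox *outpBox) {
    (void)inpbox;  // only used for the shape check in the real code
    outpBox->channel = temppbox->channel;
    outpBox->width   = temppbox->width;
    outpBox->height  = temppbox->height;
    long len = (long)outpBox->channel * outpBox->width * outpBox->height;
    outpBox->pdata = (mydataFmt *)malloc(len * sizeof(mydataFmt));
    memset(outpBox->pdata, 0, len * sizeof(mydataFmt));
}

// output = input + scale * branch, element-wise (the Inception-ResNet residual update).
void mulandadd(const pBox *inpbox, const pBox *temppbox, pBox *outpBox, float scale = 1) {
    long len = (long)inpbox->channel * inpbox->width * inpbox->height;
    for (long i = 0; i < len; ++i)
        outpBox->pdata[i] = inpbox->pdata[i] + scale * temppbox->pdata[i];
}

In the Inception-ResNet blocks the pair is followed by relu(output, ...), except in Inception_resnet_C_None, which omits both the scale factor and the final activation.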
View File

@@ -10,7 +10,7 @@ int main() {
facenet ggg;
vector<mydataFmt> o;
ggg.run(Image, o, 0);
// imshow("result", Image);
imshow("result", Image);
imwrite("../result.jpg", Image);
for (int i = 0; i < Num; ++i) {