// bpnnet.cpp
// BPNNET.cpp: implementation of the CBPNNET class.
//
//////////////////////////////////////////////////////////////////////
#define STRICT
#include "stdafx.h"
//#include "FaceDetRec.h"
#include <math.h>
#include <stdlib.h>
#include <stdio.h>
#include "BPNNET.h"
#ifdef _DEBUG
#undef THIS_FILE
static char THIS_FILE[]=__FILE__;
#define new DEBUG_NEW
#endif
//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////
// Default constructor: nothing to set up here.  Data pointers arrive later
// via SetPara() and the Alpha/Eta/Gain members are read by
// InitializeApplication() before training starts.
CBPNNET::CBPNNET()
{
}
// Destructor: intentionally empty.
// NOTE(review): the memory calloc'd/malloc'd in GenerateNetwork() is never
// released anywhere in this file -- confirm ownership with the caller.
CBPNNET::~CBPNNET()
{
}
void CBPNNET::NormalizeTargets()
{
// Mean = 0;
INT Sample,j;
REAL Min, Max;
Min = MAX_REAL;
Max = MIN_REAL;
for (Sample=0; Sample<NUM_SAMPLES; Sample++)
{
for (j=0;j<MM;j++)
{
Min = MIN(Min, TargetVectors[Sample][j]);
Max = MAX(Max, TargetVectors[Sample][j]);
}
}
for (Sample=0; Sample<NUM_SAMPLES; Sample++)
{
for (j=0;j<MM;j++)
{
TargetVectors[Sample][j] = ((TargetVectors[Sample][j]-Min) / (Max-Min)) * (HI-LO) + LO;
Mean[Sample] += TargetVectors[Sample][j];
}
Mean[Sample]/=NUM_SAMPLES;
}
}
// Prepare for training: load the learning parameters into the net,
// normalize the targets, compute the baseline error of always predicting
// each sample's mean (TestNet divides by these to report NMSE), and open
// the result log file.
void CBPNNET::InitializeApplication(NET* Net)
{
INT Sample, i;
REAL Out, Err;
Net->Alpha =Alpha;// momentum (typical value 0.5)
Net->Eta = Eta;// learning rate (typical value 0.05)
Net->Gain = Gain;// sigmoid steepness (typical value 1)
NormalizeTargets();
TrainErrorPredictingMean = 0;
// Baseline sum-squared error on the training range when the "model"
// always outputs the per-sample mean.
for (Sample=TRAIN_LWB; Sample<=TRAIN_UPB; Sample++)
{
for (i=0; i<MM; i++)
{
Out = TargetVectors[Sample][i];
Err = Mean[Sample] - Out;
TrainErrorPredictingMean += 0.5 * sqr(Err);
}
}
TestErrorPredictingMean = 0;
// Same baseline over the test range.
for (Sample=TEST_LWB; Sample<=TEST_UPB; Sample++)
{
for (i=0; i<MM; i++)
{
Out = TargetVectors[Sample][i];
Err = Mean[Sample] - Out;
TestErrorPredictingMean += 0.5 * sqr(Err);
}
}
// NOTE(review): the fopen result is not checked; later fprintf(f, ...)
// calls would crash if the file cannot be created.
f = fopen("BPN.txt", "w");
}
// Shut the application down: close the log file opened by
// InitializeApplication().
void CBPNNET::FinalizeApplication(NET* Net)
{
	// BUG FIX: fclose(NULL) is undefined behavior; guard in case the
	// file never opened, and null the handle to prevent a double close.
	if (f != NULL)
	{
		fclose(f);
		f = NULL;
	}
}
/******************************************************************************
I N I T I A L I Z A T I O N
******************************************************************************/
// Build the network: allocate every layer plus its output/error vectors
// and (for non-input layers) the weight, saved-weight and weight-delta
// matrices.  Index 0 of each layer holds the constant BIAS unit, so all
// per-unit arrays are sized Units[l]+1 and real units are indexed 1..Units.
void CBPNNET::GenerateNetwork(NET* Net)
{
INT l,i;
// Allocate the array of layer pointers, then each layer in turn.
Net->Layer = (LAYER**) calloc(NUM_LAYERS, sizeof(LAYER*));
for (l=0; l<NUM_LAYERS; l++)
{
Net->Layer[l] = (LAYER*) malloc(sizeof(LAYER));
Net->Layer[l]->Units = Units[l];
Net->Layer[l]->Output = (REAL*) calloc(Units[l]+1, sizeof(REAL));
Net->Layer[l]->Error = (REAL*) calloc(Units[l]+1, sizeof(REAL));
Net->Layer[l]->Weight = (REAL**) calloc(Units[l]+1, sizeof(REAL*));
Net->Layer[l]->WeightSave = (REAL**) calloc(Units[l]+1, sizeof(REAL*));
Net->Layer[l]->dWeight = (REAL**) calloc(Units[l]+1, sizeof(REAL*));
Net->Layer[l]->Output[0] = BIAS;
if (l != 0)
{
// Weight rows exist only for layers with incoming connections; row i
// spans units 0..Units[l-1] of the layer below (column 0 = bias).
for (i=1; i<=Units[l]; i++)
{
Net->Layer[l]->Weight[i] = (REAL*) calloc(Units[l-1]+1, sizeof(REAL));
Net->Layer[l]->WeightSave[i] = (REAL*) calloc(Units[l-1]+1, sizeof(REAL));
Net->Layer[l]->dWeight[i] = (REAL*) calloc(Units[l-1]+1, sizeof(REAL));
}
}
}
Net->InputLayer = Net->Layer[0];
Net->OutputLayer = Net->Layer[NUM_LAYERS - 1];
// Hard-coded defaults; InitializeApplication() overwrites them from the
// Alpha/Eta/Gain members.
Net->Alpha = 0.9;
Net->Eta = 0.25;
Net->Gain = 1;
// NOTE(review): allocation results are never checked, and nothing in this
// file ever frees these blocks.
}
// Seed every connection weight (including the bias weight in column 0)
// with a uniform random value in [-0.5, 0.5].
void CBPNNET::RandomWeights(NET* Net)
{
	for (INT layer = 1; layer < NUM_LAYERS; layer++)
	{
		LAYER* upper = Net->Layer[layer];
		INT fanIn = Net->Layer[layer-1]->Units;
		for (INT unit = 1; unit <= upper->Units; unit++)
			for (INT src = 0; src <= fanIn; src++)
				upper->Weight[unit][src] = RandomEqualREAL(-0.5, 0.5);
	}
}
// Copy the caller's 0-based input vector into the input layer's outputs,
// which are 1-based (slot 0 holds the constant bias).
void CBPNNET::SetInput(NET* Net, REAL* Input)
{
	LAYER* in = Net->InputLayer;
	for (INT unit = 1; unit <= in->Units; unit++)
		in->Output[unit] = Input[unit - 1];
}
// Copy the output layer's activations (1-based) into the caller's
// 0-based Output array.
void CBPNNET::GetOutput(NET* Net, REAL* Output)
{
	LAYER* out = Net->OutputLayer;
	for (INT unit = 1; unit <= out->Units; unit++)
		Output[unit - 1] = out->Output[unit];
}
// Snapshot every weight into WeightSave so the best configuration seen
// so far can be restored later with RestoreWeights().
void CBPNNET::SaveWeights(NET* Net)
{
	for (INT layer = 1; layer < NUM_LAYERS; layer++)
	{
		LAYER* cur = Net->Layer[layer];
		INT cols = Net->Layer[layer-1]->Units;
		for (INT row = 1; row <= cur->Units; row++)
			for (INT col = 0; col <= cols; col++)
				cur->WeightSave[row][col] = cur->Weight[row][col];
	}
}
// Restore every weight from the WeightSave snapshot taken by
// SaveWeights().
void CBPNNET::RestoreWeights(NET* Net)
{
	for (INT layer = 1; layer < NUM_LAYERS; layer++)
	{
		LAYER* cur = Net->Layer[layer];
		INT cols = Net->Layer[layer-1]->Units;
		for (INT row = 1; row <= cur->Units; row++)
			for (INT col = 0; col <= cols; col++)
				cur->Weight[row][col] = cur->WeightSave[row][col];
	}
}
/******************************************************************************
P R O P A G A T I N G S I G N A L S
******************************************************************************/
// Forward-propagate one layer pair: each upper unit takes the weighted
// sum of the lower layer's outputs (index 0 = bias) through a logistic
// sigmoid whose steepness is Net->Gain.
void CBPNNET::PropagateLayer(NET* Net, LAYER* Lower, LAYER* Upper)
{
	for (INT unit = 1; unit <= Upper->Units; unit++)
	{
		REAL sum = 0;
		for (INT src = 0; src <= Lower->Units; src++)
			sum += Upper->Weight[unit][src] * Lower->Output[src];
		Upper->Output[unit] = 1 / (1 + exp(-Net->Gain * sum));
	}
}
// Run one full forward pass, layer by layer from input to output.
void CBPNNET::PropagateNet(NET* Net)
{
	for (INT layer = 1; layer < NUM_LAYERS; layer++)
		PropagateLayer(Net, Net->Layer[layer-1], Net->Layer[layer]);
}
/******************************************************************************
B A C K P R O P A G A T I N G E R R O R S
******************************************************************************/
// Compare the output layer with the 0-based Target vector: accumulate
// the sum-squared error into Net->Error and store each output unit's
// delta (sigmoid derivative times raw error) for backpropagation.
void CBPNNET::ComputeOutputError(NET* Net, REAL* Target)
{
	LAYER* out = Net->OutputLayer;
	Net->Error = 0;
	for (INT unit = 1; unit <= out->Units; unit++)
	{
		REAL y = out->Output[unit];
		REAL diff = Target[unit-1] - y;
		out->Error[unit] = Net->Gain * y * (1 - y) * diff;
		Net->Error += 0.5 * sqr(diff);
	}
}
// Backpropagate error one layer down: each lower unit's delta is the
// weighted sum of the upper deltas times the sigmoid derivative of its
// own output.
void CBPNNET::BackpropagateLayer(NET* Net, LAYER* Upper, LAYER* Lower)
{
	for (INT unit = 1; unit <= Lower->Units; unit++)
	{
		REAL y = Lower->Output[unit];
		REAL backErr = 0;
		for (INT up = 1; up <= Upper->Units; up++)
			backErr += Upper->Weight[up][unit] * Upper->Error[up];
		Lower->Error[unit] = Net->Gain * y * (1 - y) * backErr;
	}
}
// Backpropagate error from the output layer down to the first hidden
// layer; the input layer (index 0) needs no error term.
void CBPNNET::BackpropagateNet(NET* Net)
{
	for (INT layer = NUM_LAYERS - 1; layer >= 2; layer--)
		BackpropagateLayer(Net, Net->Layer[layer], Net->Layer[layer-1]);
}
// Gradient-descent weight update with momentum:
//   w += Eta * delta * input + Alpha * previous_step
// The freshly applied Eta*delta*input term is stored in dWeight so it
// serves as the momentum term on the next update.
void CBPNNET::AdjustWeights(NET* Net)
{
	for (INT layer = 1; layer < NUM_LAYERS; layer++)
	{
		LAYER* upper = Net->Layer[layer];
		LAYER* lower = Net->Layer[layer-1];
		for (INT unit = 1; unit <= upper->Units; unit++)
		{
			for (INT src = 0; src <= lower->Units; src++)
			{
				// activation feeding this connection (src 0 = bias)
				REAL in = lower->Output[src];
				// current learning step for this connection
				REAL step = Net->Eta * upper->Error[unit] * in;
				upper->Weight[unit][src] += step + Net->Alpha * upper->dWeight[unit][src];
				upper->dWeight[unit][src] = step;
			}
		}
	}
}
/******************************************************************************
S I M U L A T I N G T H E N E T
******************************************************************************/
// One full pass over a single pattern: forward-propagate Input, report
// the result through Output, measure the error against Target, and --
// only when Training is TRUE -- backpropagate and update the weights.
void CBPNNET::SimulateNet(NET* Net, REAL* Input, REAL* Output, REAL* Target, BOOL Training)
{
	SetInput(Net, Input);
	PropagateNet(Net);
	GetOutput(Net, Output);
	ComputeOutputError(Net, Target);
	if (!Training)
		return;	// evaluation only: leave the weights untouched
	BackpropagateNet(Net);
	AdjustWeights(Net);
}
// Train the network: make Epochs passes over the training set, drawing
// training patterns uniformly at random (online / stochastic updates).
void CBPNNET::TrainNet(NET* Net, INT Epochs)
{
	INT Sample, n;
	REAL *Output = new REAL[MM];	// scratch buffer for the network outputs
	for (n=0; n<Epochs*TRAIN_SAMPLES; n++)
	{
		// Pick a random training sample and do one forward+backward pass.
		Sample = RandomEqualINT(TRAIN_LWB, TRAIN_UPB);
		SimulateNet(Net, InputVectors[Sample], Output, TargetVectors[Sample], TRUE);
	}
	delete [] Output;	// BUG FIX: buffer was leaked on every call
}
// Measure current performance: accumulate the net's sum-squared error
// over the training and test ranges (no learning) and log both as NMSE,
// i.e. relative to the "always predict the mean" baselines computed in
// InitializeApplication().
void CBPNNET::TestNet(NET* Net)
{
	INT Sample;
	REAL *Output = new REAL[MM];	// scratch buffer for the network outputs
	TrainError = 0;
	for (Sample=TRAIN_LWB; Sample<=TRAIN_UPB; Sample++)
	{
		SimulateNet(Net, InputVectors[Sample], Output, TargetVectors[Sample], FALSE);
		TrainError += Net->Error;
	}
	TestError = 0;
	for (Sample=TEST_LWB; Sample<=TEST_UPB; Sample++)
	{
		SimulateNet(Net, InputVectors[Sample], Output, TargetVectors[Sample], FALSE);
		TestError += Net->Error;
	}
	fprintf(f, "\nNMSE is %0.8f on Training Set and %0.8f on Test Set",
		TrainError / TrainErrorPredictingMean,
		TestError / TestErrorPredictingMean);
	delete [] Output;	// BUG FIX: buffer was leaked on every call
}
// Evaluate the trained net over the evaluation range and append a
// timestamped Output-vs-Target table to the log file.
void CBPNNET::EvaluateNet(NET* Net)
{
INT Sample,i;
REAL *Output=(REAL*)new REAL[MM];// scratch buffer for the network outputs
CTime t;
t=t.GetCurrentTime();// current wall-clock time (MFC CTime)
fprintf(f, "\n%d-%d-%d(%d:%d:%d)",t.GetYear(),t.GetMonth(),t.GetDay(),t.GetHour(),t.GetMinute(),t.GetSecond());
fprintf(f, "\n\nOutput\tTarget\n");
fprintf(f, "\n");
for (Sample=EVAL_LWB; Sample<=EVAL_UPB; Sample++)
{
// Forward pass only -- no weight updates during evaluation.
SimulateNet(Net, /*&*/(InputVectors[Sample]), Output, /*&*/(TargetVectors[Sample]), FALSE);
fprintf(f, "\n");
// One "output <tab> target" row per output component.
for (i=0;i<MM;i++)
fprintf(f,"%4.3f\t%4.3f\n",Output[i],TargetVectors[Sample][i]);
}
delete [] Output;
}
// Seed the C runtime PRNG with a fixed constant so every run draws the
// same sequence of random weights and training samples (reproducible
// experiments).
void CBPNNET::InitializeRandoms()
{
srand(4711);
}
// Uniform random integer in the inclusive range [Low, High].
INT CBPNNET::RandomEqualINT(INT Low, INT High)
{
	INT span = High - Low + 1;
	return Low + rand() % span;
}
// Uniform random real in the range [Low, High].
REAL CBPNNET::RandomEqualREAL(REAL Low, REAL High)
{
	REAL t = (REAL) rand() / RAND_MAX;	// t in [0, 1]
	return t * (High - Low) + Low;
}
// Wire the externally owned data arrays into this object.  No copies are
// made: the caller keeps ownership of all four buffers.
void CBPNNET::SetPara(INT *m_Units, REAL **m_InputVectors, REAL **m_TargetVectors,REAL *m_Mean)
{
	Units         = m_Units;
	InputVectors  = m_InputVectors;
	TargetVectors = m_TargetVectors;
	Mean          = m_Mean;
}
// Run a trained network open-loop: load Input, forward-propagate, and
// read the result into Output.  No error computation, no learning.
void CBPNNET::RunNet(NET *Net, REAL *Input, REAL *Output)
{
	SetInput(Net, Input);	// load the input layer
	PropagateNet(Net);	// forward pass
	GetOutput(Net, Output);	// read the output layer
}
// EOF