// bpnn.h
/**
 * Title: AIR_BPNN - C++ Class
 * Description: Neural Network Class
 * Copyright: Copyleft (c) 2002 (See gpl.txt for details)
 * Company: www.air-robot.net
 * Author: M. T. Li (mtli0913@yahoo.com)
 * Version 1.0 ; 2000.10.24 ; M. T. Li ; Rewriting from AIR_BPNN_Learn-C
 * Version 1.1 ; 2000.11.24 ; M. T. Li ; Debug
 * Version 1.2 ; 2000.11.25 ; M. T. Li ; Parameter output
 * Version 1.3 ; 2000.11.25 ; M. T. Li ; Optimization
 * Version 1.4 ; 2001.04.06 ; M. T. Li ; Input array
 * Version 2.0 ; 2001.04.07 ; M. T. Li ; Show the sample
 * Version 2.1 ; 2001.04.08 ; M. T. Li ; Sample editor
 * Version 2.2 ; 2002.07.28 ; M. T. Li ; Distribute by GPL
 * Version 2.3 ; 2002.11.24 ; M. T. Li ; English version
 */
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <time.h>
#include <float.h>
#include <string.h>
#include <fstream.h>
#include "Array.h" //Custom array class
//Neural node
class NN
{
public:
int inLayer; //Index of the layer this node belongs to (0 = input layer)
int inId; //Index of this node within its layer
double dnOutput; //Current activation output of the node
double dnTarget; //Target value slot (presumably set for output-layer nodes during training -- training code not visible here)
double dnDelta; //Error term slot (presumably the back-propagated delta -- training code not visible here)
double dnTheta; //Bias (threshold); randomized in [-1,1) for hidden/output layers, 0 for input layer
double dndTheta; //Previous bias change (pairs with AIR_BPNN::dnInertia momentum term)
double *dnWeight; //Incoming weights, one per node of the previous layer; allocated in AIR_BPNN::CreateNeuralNetwork (input-layer nodes leave this uninitialized). NOTE(review): no destructor frees these arrays in the visible code.
double *dndWeight; //Previous weight changes; same size and lifetime as dnWeight
private:
};
//Back-propagation neural network
//Back-propagation neural network
//Layer layout: layer 0 = input, layers 1..inHidLayerNum = hidden,
//layer inHidLayerNum+1 = output. Uses Borland VCL string types
//(AnsiString/String); the class definition continues past this chunk.
class AIR_BPNN
{
private:
int i,j,k; //Shared loop indices (NOTE(review): class members, so nested method use is fragile and the class is not reentrant)
int inHidLayerNum; //The number of hidden layers
int *inLayerNNNum; //The neural node count of each layer (size inHidLayerNum+2)
int inTrainSampleNum; //The number of training samples
int inTrainSampleSetNum; //The number of training sample sets
long int lnStartTime; //time(NULL) captured when the network is created
long int lnEndTime; //End timestamp (not set in the visible code)
AnsiString ASnTemp; //Scratch string (Borland VCL type)
double **dnTrainInputSample; //Training input samples [set][input node]
double **dnTrainOutputSample; //Training output samples [set][output node]
double *dnTrainInputSampleMax; //The max value of each input node across all sets
double *dnTrainInputSampleMin; //The min value of each input node across all sets
double *dnTrainOutputSampleMax; //The max value of each output node across all sets
double *dnTrainOutputSampleMin; //The min value of each output node across all sets
String snDumpNNFileName; //Filename last passed to DumpNeuralNetwork
NN **NNO; //The neural node matrix: NNO[layer][node]
public:
int inLearnedTimes; //Learned times (reset to 0 by CreateNeuralNetwork)
double dnLearnRate; //Learning rate
double dnInertia; //Inertia (momentum) parameter
double dnMse; //Mean squared error (not initialized in the visible code)
//Set the number of hidden layers and allocate the per-layer bookkeeping
//arrays (inHidLayerNum+2 entries: input + hidden layers + output).
//Must be called before InputLayerNNNum / CreateNeuralNetwork.
//NOTE(review): calling this twice leaks the previous allocations.
void InputHidLayerNum(int iHidLayerNum)
{
inHidLayerNum=iHidLayerNum;
inLayerNNNum=new int[inHidLayerNum+2];
NNO=new NN*[iHidLayerNum+2];
}
//Set the node count of one layer. iLayerID: 0 = input layer,
//1..inHidLayerNum = hidden layers, inHidLayerNum+1 = output layer.
//No bounds check: iLayerID must be within [0, inHidLayerNum+1].
void InputLayerNNNum(int iLayerID,int iLayerNNNum)
{
inLayerNNNum[iLayerID]=iLayerNNNum;
}
//Return the node count of the input layer (layer 0).
int OutputInputLayerNNNum(void)
{
return inLayerNNNum[0];
}
//Return the node count of the output layer (layer inHidLayerNum+1).
int OutputOutputLayerNNNum(void)
{
return inLayerNNNum[inHidLayerNum+1];
}
//Build the network: allocate all neural nodes, randomize weights and
//biases in [-1,1), then min/max-normalize the training samples to [0,1].
//Requires InputHidLayerNum/InputLayerNNNum and the training samples to
//have been supplied first.
//BUGFIX: the normalization formerly divided by (max-min) unguarded, so
//any constant sample column caused a division by zero that filled the
//samples with NaN/inf; such columns are now normalized to 0.0.
void CreateNeuralNetwork(void)
{
lnStartTime=time(NULL); /*Start to count time*/
inLearnedTimes=0;
int inIdCounter=0;
time_t t;
srand((unsigned)time(&t)); //seed the weight/bias randomizer
//Create the neural nodes of the input layer
NNO[0]=new NN[inLayerNNNum[0]];
//Reset each input node (input nodes have no bias and no weights)
for(i=0;i<inLayerNNNum[0];i++)
{
NNO[0][i].inLayer=0;
NNO[0][i].inId=i;
NNO[0][i].dnOutput=0.0;
NNO[0][i].dnTarget=0.0;
NNO[0][i].dnDelta=0.0;
NNO[0][i].dnTheta=0.0;
NNO[0][i].dndTheta=0.0;
}
for(k=1;k<inHidLayerNum+2;k++)
{
//Create the neural nodes of the hidden and output layers
NNO[k]=new NN[inLayerNNNum[k]];
//Reset each node; bias is random in [-1,1) with 20000 steps
for(i=0;i<inLayerNNNum[k];i++)
{
NNO[k][i].inLayer=k;
NNO[k][i].inId=i;
NNO[k][i].dnOutput=0.0;
NNO[k][i].dnTarget=0.0;
NNO[k][i].dnDelta=0.0;
NNO[k][i].dnTheta=(rand()%20000-10000)/10000.0; //random in [-1,1) (20000 steps)
NNO[k][i].dndTheta=0.0;
}
//Create the dendrites: one weight per node of the previous layer
for(j=0;j<inLayerNNNum[k];j++)
{
NNO[k][j].dnWeight=new double[inLayerNNNum[k-1]];
NNO[k][j].dndWeight=new double[inLayerNNNum[k-1]];
for(i=0;i<inLayerNNNum[k-1];i++)
{
NNO[k][j].dnWeight[i]=(rand()%20000-10000)/10000.0; //random in [-1,1) (20000 steps)
NNO[k][j].dndWeight[i]=0.0;
}
}
}
dnTrainInputSampleMax=new double[inLayerNNNum[0]]; //The max value array of each input node
dnTrainInputSampleMin=new double[inLayerNNNum[0]]; //The min value array of each input node
dnTrainOutputSampleMax=new double[inLayerNNNum[inHidLayerNum+1]]; //The max value array of each output node
dnTrainOutputSampleMin=new double[inLayerNNNum[inHidLayerNum+1]]; //The min value array of each output node
//To find the max and min values of each input column
for(i=0;i<inLayerNNNum[0];i++)
{
dnTrainInputSampleMax[i]=-9999999;
dnTrainInputSampleMin[i]=9999999;
}
for(j=0;j<inTrainSampleSetNum;j++)
{
for(i=0;i<(inLayerNNNum[0]);i++)
{
if(dnTrainInputSampleMax[i]<=dnTrainInputSample[j][i])
dnTrainInputSampleMax[i]=dnTrainInputSample[j][i];
if(dnTrainInputSampleMin[i]>=dnTrainInputSample[j][i])
dnTrainInputSampleMin[i]=dnTrainInputSample[j][i];
}
}
//To normalize the input samples to [0,1]
for(j=0;j<inTrainSampleSetNum;j++)
{
for(i=0;i<inLayerNNNum[0];i++)
{
double dnInRange=dnTrainInputSampleMax[i]-dnTrainInputSampleMin[i];
if(dnInRange!=0.0) //guard: constant column would divide by zero
dnTrainInputSample[j][i]=(dnTrainInputSample[j][i]-dnTrainInputSampleMin[i])/dnInRange;
else
dnTrainInputSample[j][i]=0.0;
}
}
//To find the max and min values of each output column
for(i=0;i<inLayerNNNum[inHidLayerNum+1];i++)
{
dnTrainOutputSampleMax[i]=-9999999;
dnTrainOutputSampleMin[i]=9999999;
}
for(j=0;j<inTrainSampleSetNum;j++)
{
for(i=0;i<(inLayerNNNum[inHidLayerNum+1]);i++)
{
if(dnTrainOutputSampleMax[i]<=dnTrainOutputSample[j][i])
dnTrainOutputSampleMax[i]=dnTrainOutputSample[j][i];
if(dnTrainOutputSampleMin[i]>=dnTrainOutputSample[j][i])
dnTrainOutputSampleMin[i]=dnTrainOutputSample[j][i];
}
}
//To normalize the output samples to [0,1]
for(j=0;j<inTrainSampleSetNum;j++)
{
for(i=0;i<(inLayerNNNum[inHidLayerNum+1]);i++)
{
double dnOutRange=dnTrainOutputSampleMax[i]-dnTrainOutputSampleMin[i];
if(dnOutRange!=0.0) //guard: constant column would divide by zero
dnTrainOutputSample[j][i]=(dnTrainOutputSample[j][i]-dnTrainOutputSampleMin[i])/dnOutRange;
else
dnTrainOutputSample[j][i]=0.0;
}
}
}
//Write the whole network to a text file: topology, every node's state,
//the incoming weights of hidden/output nodes, and the min/max bounds
//used for sample normalization. Also remembers the filename in
//snDumpNNFileName for later use by DumpParameter.
void DumpNeuralNetwork(String sDumpNNFileName)
{
ofstream fout; //File output stream
fout.open(sDumpNNFileName.c_str());
snDumpNNFileName=sDumpNNFileName;
//Topology header
fout<<"HiddenLayerNumber=\n"<<inHidLayerNum<<"\n";
fout<<"InputLayerNodeNumber=\n"<<inLayerNNNum[0]<<"\n";
fout<<"OutputLayerNodeNumber=\n"<<inLayerNNNum[inHidLayerNum+1]<<"\n";
fout<<"TheNodeNumberOfEveryHiddenLayer:\n";
for(i=1;i<inHidLayerNum+1;i++)
fout<<inLayerNNNum[i]<<"\n";
fout<<"TheContentOfNeuralNode:\n";
//Input-layer nodes (layer 0) carry no weights
for(i=0;i<inLayerNNNum[0];i++)
{
fout<<"layer["<<NNO[0][i].inLayer<<"],id["<<NNO[0][i].inId<<"]Node:Output=\n"<<NNO[0][i].dnOutput<<"\n";
fout<<"layer["<<NNO[0][i].inLayer<<"],id["<<NNO[0][i].inId<<"]Node:Target=\n"<<NNO[0][i].dnTarget<<"\n";
fout<<"layer["<<NNO[0][i].inLayer<<"],id["<<NNO[0][i].inId<<"]Node:Delta=\n"<<NNO[0][i].dnDelta<<"\n";
fout<<"layer["<<NNO[0][i].inLayer<<"],id["<<NNO[0][i].inId<<"]Node:Theta=\n"<<NNO[0][i].dnTheta<<"\n";
fout<<"layer["<<NNO[0][i].inLayer<<"],id["<<NNO[0][i].inId<<"]Node:dTheta=\n"<<NNO[0][i].dndTheta<<"\n";
}
//Hidden and output layers, including each node's weights and deltas
for(k=1;k<inHidLayerNum+2;k++)
{
for(j=0;j<inLayerNNNum[k];j++)
{
fout<<"layer["<<NNO[k][j].inLayer<<"],id["<<NNO[k][j].inId<<"]Node:Output=\n"<<NNO[k][j].dnOutput<<"\n";
fout<<"layer["<<NNO[k][j].inLayer<<"],id["<<NNO[k][j].inId<<"]Node:Target=\n"<<NNO[k][j].dnTarget<<"\n";
fout<<"layer["<<NNO[k][j].inLayer<<"],id["<<NNO[k][j].inId<<"]Node:Delta=\n"<<NNO[k][j].dnDelta<<"\n";
fout<<"layer["<<NNO[k][j].inLayer<<"],id["<<NNO[k][j].inId<<"]Node:Theta=\n"<<NNO[k][j].dnTheta<<"\n";
fout<<"layer["<<NNO[k][j].inLayer<<"],id["<<NNO[k][j].inId<<"]Node:dTheta=\n"<<NNO[k][j].dndTheta<<"\n";
for(i=0;i<inLayerNNNum[k-1];i++)
fout<<"layer["<<NNO[k][j].inLayer<<"],id["<<NNO[k][j].inId<<"]Node:Weight["<<i<<"]=\n"<<NNO[k][j].dnWeight[i]<<"\n";
for(i=0;i<inLayerNNNum[k-1];i++)
fout<<"layer["<<NNO[k][j].inLayer<<"],id["<<NNO[k][j].inId<<"]Node:dWeight["<<i<<"]=\n"<<NNO[k][j].dndWeight[i]<<"\n";
}
}
//Normalization bounds ("Maximun" spelling kept for file-format compatibility)
for(i=0;i<inLayerNNNum[0];i++)
{
fout<<"InputNode["<<i<<"]Maximun=\n"<<dnTrainInputSampleMax[i]<<"\n";
fout<<"InputNode["<<i<<"]Minimum=\n"<<dnTrainInputSampleMin[i]<<"\n";
}
for(i=0;i<inLayerNNNum[inHidLayerNum+1];i++)
{
fout<<"OutputNode["<<i<<"]Maximun=\n"<<dnTrainOutputSampleMax[i]<<"\n";
fout<<"OutputNode["<<i<<"]Minimum=\n"<<dnTrainOutputSampleMin[i]<<"\n";
}
fout.close();
}
//Dump the stored (normalized) input training samples to a text file:
//one row per input node, one tab-separated column per training set.
void DumpStoragedInputLayerSample(String sDumpInputLayerSample)
{
int iNode,iSet; //locals shadow the class-member loop indices on purpose
ofstream fout;
fout.open(sDumpInputLayerSample.c_str());
for(iNode=0;iNode<inLayerNNNum[0];iNode++)
{
for(iSet=0;iSet<inTrainSampleSetNum;iSet++)
fout<<dnTrainInputSample[iSet][iNode]<<"\t";
fout<<"\n";
}
fout.close();
}
void DumpParameter(String sDumpParameterFileName)
{
int i,j;
ofstream fout; //File output stream
fout.open(sDumpParameterFileName.c_str());
fout<<"NeuralNetworkDataFile=";
fout<<"\n";
fout<<snDumpNNFileName.c_str();
fout<<"\n";
fout<<"TrainingSetNumber=";
fout<<"\n";
fout<<inTrainSampleSetNum;
fout<<"\n";
fout<<"LearnedTimes=";
fout<<"\n";
fout<<inLearnedTimes;
fout<<"\n";
fout<<"LearningRate=";
fout<<"\n";
fout<<dnLearnRate;
fout<<"\n";
fout<<"InertiaParameter=";
// NOTE(review): the original source was truncated here by a web-scrape
// artifact (viewer UI text replaced the file tail). The remainder of
// DumpParameter() and the closing of class AIR_BPNN are missing and
// must be restored from the original bpnn.h before this header compiles.