📄 bpnn.h
fout<<"\n";
fout<<dnInertia;
fout<<"\n";
lnEndTime=time(NULL); /* Stop timing */
fout<<"CPUTime=";
fout<<"\n";
fout<<(lnEndTime-lnStartTime)/3600;
fout<<"hour";
fout<<((lnEndTime-lnStartTime)%3600)/60;
fout<<"min";
fout<<(lnEndTime-lnStartTime)%60;
fout<<"sec";
fout<<"\n";
fout<<"InputDataOfTraining:";
fout<<"\n";
for(i=0;i<inTrainSampleSetNum;i++)
{
for(j=0;j<inLayerNNNum[0];j++)
{
fout<<dnTrainInputSample[i][j]*(dnTrainInputSampleMax[j]-dnTrainInputSampleMin[j])+dnTrainInputSampleMin[j];
fout<<"\t";
}
fout<<"\n";
}
fout<<"OutputDataOfTraining:";
fout<<"\n";
for(i=0;i<inTrainSampleSetNum;i++)
{
for(j=0;j<inLayerNNNum[inHidLayerNum+1];j++)
{
fout<<dnTrainOutputSample[i][j]*(dnTrainOutputSampleMax[j]-dnTrainOutputSampleMin[j])+dnTrainOutputSampleMin[j];
fout<<"\t";
}
fout<<"\n";
}
fout.close();
}
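//Writes the stored (normalized) training output samples, one output node per row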
void DumpStoragedOutputLayerSample(String sDumpOutputLayerSample)
{
int i,j;
ofstream fout;
fout.open(sDumpOutputLayerSample.c_str());
for(j=0;j<inLayerNNNum[inHidLayerNum+1];j++)
{
for(i=0;i<inTrainSampleSetNum;i++)
{
fout<<dnTrainOutputSample[i][j];
fout<<"\t";
}
fout<<"\n";
}
fout.close();
}
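//Writes the training input samples scaled back to their original range, one input node per row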
void DumpUnNormalizedInputLayerSample(String sDumpInputLayerSample)
{
int i,j;
ofstream fout;
fout.open(sDumpInputLayerSample.c_str());
for(j=0;j<inLayerNNNum[0];j++)
{
for(i=0;i<inTrainSampleSetNum;i++)
{
fout<<dnTrainInputSample[i][j]*(dnTrainInputSampleMax[j]-dnTrainInputSampleMin[j])+dnTrainInputSampleMin[j];
fout<<"\t";
}
fout<<"\n";
}
fout.close();
}
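//Writes the training output samples scaled back to their original range, one output node per row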
void DumpUnNormalizedOutputLayerSample(String sDumpOutputLayerSample)
{
int i,j;
ofstream fout;
fout.open(sDumpOutputLayerSample.c_str());
for(j=0;j<inLayerNNNum[inHidLayerNum+1];j++)
{
for(i=0;i<inTrainSampleSetNum;i++)
{
fout<<dnTrainOutputSample[i][j]*(dnTrainOutputSampleMax[j]-dnTrainOutputSampleMin[j])+dnTrainOutputSampleMin[j];
fout<<"\t";
}
fout<<"\n";
}
fout.close();
}
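//Sets the number of training samples and allocates the input and output sample arrays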
void InputTrainSampleSetNum(int iTrainSampleSetNum)
{
int i;
inTrainSampleSetNum=iTrainSampleSetNum;
dnTrainInputSample=new double*[inTrainSampleSetNum];
dnTrainOutputSample=new double*[inTrainSampleSetNum];
for(i=0;i<inTrainSampleSetNum;i++)
dnTrainInputSample[i]=new double[inLayerNNNum[0]];
for(i=0;i<inTrainSampleSetNum;i++)
dnTrainOutputSample[i]=new double[inLayerNNNum[inHidLayerNum+1]];
}
int OutputTrainSampleSetNum(void)
{
return inTrainSampleSetNum;
}
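//Copies the training input samples from a Matrix2D object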
void InputTrainInputSample(Matrix2D M2DTrainInputSample)
{
for(int i=0;i<inTrainSampleSetNum;i++)
for(int j=0;j<inLayerNNNum[0];j++)
dnTrainInputSample[i][j]=M2DTrainInputSample.dValue[i][j];
}
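//Note: despite its name, this is a setter that stores the training output samples, mirroring InputTrainInputSample above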
void OutputTrainInputSample(Matrix2D M2DTrainOutputSample)
{
for(int i=0;i<inTrainSampleSetNum;i++)
for(int j=0;j<inLayerNNNum[inHidLayerNum+1];j++)
dnTrainOutputSample[i][j]=M2DTrainOutputSample.dValue[i][j];
}
double OutputStoragedTrainInputSample(int iTrainInputSampleSetOrder,int iInputLayerNNOrder)
{
return dnTrainInputSample[iTrainInputSampleSetOrder][iInputLayerNNOrder];
}
double OutputStoragedTrainOutputSample(int iTrainOutputSampleSetOrder,int iOutputLayerNNOrder)
{
return dnTrainOutputSample[iTrainOutputSampleSetOrder][iOutputLayerNNOrder];
}
double OutputUnNormalizedTrainInputSample(int iTrainInputSampleSetOrder,int iInputLayerNNOrder)
{
double dTemp=0.0;
dTemp=dnTrainInputSample[iTrainInputSampleSetOrder][iInputLayerNNOrder]*(dnTrainInputSampleMax[iInputLayerNNOrder]-dnTrainInputSampleMin[iInputLayerNNOrder])+dnTrainInputSampleMin[iInputLayerNNOrder];
return dTemp;
}
double OutputUnNormalizedTrainOutputSample(int iTrainOutputSampleSetOrder,int iOutputLayerNNOrder)
{
double dTemp=0.0;
dTemp=dnTrainOutputSample[iTrainOutputSampleSetOrder][iOutputLayerNNOrder]*(dnTrainOutputSampleMax[iOutputLayerNNOrder]-dnTrainOutputSampleMin[iOutputLayerNNOrder])+dnTrainOutputSampleMin[iOutputLayerNNOrder];
return dTemp;
}
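//One pass of per-sample (on-line) back-propagation over the whole training set; updates dnMse with the RMSE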
void Learn(void)
{
inLearnedTimes++;
double dSum=0.0;
dnMse=0.0;
int iTotalLayerNum=inHidLayerNum+2;
int iOutPutLayerID=inHidLayerNum+1;
int iITrainSampleSetNum;
for(iITrainSampleSetNum=0;iITrainSampleSetNum<inTrainSampleSetNum;iITrainSampleSetNum++)
{
for(i=0;i<inLayerNNNum[0];i++)
{
NNO[0][i].dnOutput=dnTrainInputSample[iITrainSampleSetNum][i];
}
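//Forward pass: weighted sum minus threshold, passed through the sigmoid, layer by layer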
for(k=1;k<iTotalLayerNum;k++)
{
for(j=0;j<inLayerNNNum[k];j++)
{
dSum=0.0;
for(i=0;i<inLayerNNNum[k-1];i++)
{
dSum+=NNO[k][j].dnWeight[i]*NNO[k-1][i].dnOutput;
}
NNO[k][j].dnOutput=1.0/(1.0+exp(-(dSum-NNO[k][j].dnTheta)));
}
}
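//Output-layer deltas: sigmoid derivative times the output error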
for(i=0;i<inLayerNNNum[iOutPutLayerID];i++)
{
NNO[iOutPutLayerID][i].dnTarget=dnTrainOutputSample[iITrainSampleSetNum][i];
NNO[iOutPutLayerID][i].dnDelta=NNO[iOutPutLayerID][i].dnOutput*(1.0-NNO[iOutPutLayerID][i].dnOutput)*(NNO[iOutPutLayerID][i].dnTarget-NNO[iOutPutLayerID][i].dnOutput);
}
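//Hidden-layer deltas: back-propagate the weighted deltas of the following layer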
for(k=inHidLayerNum;k>0;k--)
{
for(j=0;j<inLayerNNNum[k];j++)
{
dSum=0.0;
for(i=0;i<inLayerNNNum[k+1];i++)
{
dSum+=NNO[k+1][i].dnWeight[j]*NNO[k+1][i].dnDelta;
}
NNO[k][j].dnDelta=NNO[k][j].dnOutput*(1.0-NNO[k][j].dnOutput)*dSum;
}
}
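//Update thresholds and weights using the learning rate and momentum (inertia) terms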
for(k=1;k<iTotalLayerNum;k++)
{
for(j=0;j<inLayerNNNum[k];j++)
{
NNO[k][j].dndTheta=-dnLearnRate*NNO[k][j].dnDelta+dnInertia*NNO[k][j].dndTheta;
NNO[k][j].dnTheta+=NNO[k][j].dndTheta;
for(i=0;i<inLayerNNNum[k-1];i++)
{
NNO[k][j].dndWeight[i]=dnLearnRate*NNO[k][j].dnDelta*NNO[k-1][i].dnOutput+dnInertia*NNO[k][j].dndWeight[i];
NNO[k][j].dnWeight[i]+=NNO[k][j].dndWeight[i];
}
}
}
//Accumulate the squared output error (the RMSE is computed after the sample loop)
for(i=0;i<inLayerNNNum[iOutPutLayerID];i++)
dnMse+=(NNO[iOutPutLayerID][i].dnTarget-NNO[iOutPutLayerID][i].dnOutput)*(NNO[iOutPutLayerID][i].dnTarget-NNO[iOutPutLayerID][i].dnOutput);
}
dnMse=dnMse/inTrainSampleSetNum/inLayerNNNum[iOutPutLayerID];
dnMse=sqrt(dnMse);
}
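//Normalizes one raw recall input with the stored min/max range and writes it to the input layer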
void InputRecallInputSample(int iInputRecallInputSampleOrder,double dRecallInputSample)
{
NNO[0][iInputRecallInputSampleOrder].dnOutput=(dRecallInputSample-dnTrainInputSampleMin[iInputRecallInputSampleOrder])/(dnTrainInputSampleMax[iInputRecallInputSampleOrder]-dnTrainInputSampleMin[iInputRecallInputSampleOrder]);
}
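//Rebuilds the network from a saved file: topology, weights, thresholds, and the min/max ranges used for normalization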
void LoadNeuralNetwork(String sLoadNNFileName)
{
ifstream fin; //File input stream
fin.open(sLoadNNFileName.c_str());
char cTemp[256];
int iTemp;
double dTemp;
fin>>cTemp;
fin>>iTemp;
InputHidLayerNum(iTemp);
fin>>cTemp;
fin>>inLayerNNNum[0];
fin>>cTemp;
fin>>inLayerNNNum[inHidLayerNum+1];
fin>>cTemp;
for(i=1;i<inHidLayerNum+1;i++)
{
fin>>inLayerNNNum[i];
}
//CreateNeuralNetwork
NNO[0]=new NN[inLayerNNNum[0]];
//Reset Neural Node Id
for(i=0;i<inLayerNNNum[0];i++)
{
NNO[0][i].inLayer=0;
NNO[0][i].inId=i;
NNO[0][i].dnOutput=0.0;
NNO[0][i].dnTarget=0.0;
NNO[0][i].dnDelta=0.0;
NNO[0][i].dnTheta=0.0;
NNO[0][i].dndTheta=0.0;
}
for(k=1;k<inHidLayerNum+2;k++)
{
NNO[k]=new NN[inLayerNNNum[k]];
//Reset Neural Node Id
for(i=0;i<inLayerNNNum[k];i++)
{
NNO[k][i].inLayer=k;
NNO[k][i].inId=i;
NNO[k][i].dnOutput=0.0;
NNO[k][i].dnTarget=0.0;
NNO[k][i].dnDelta=0.0;
NNO[k][i].dnTheta=(rand()%20000-10000)/10000.0;// -1 to 1 (20000 steps)
NNO[k][i].dndTheta=0.0;
}
//Create the dendrites (weights to the previous layer)
for(j=0;j<inLayerNNNum[k];j++)
{
NNO[k][j].dnWeight=new double[inLayerNNNum[k-1]];
NNO[k][j].dndWeight=new double[inLayerNNNum[k-1]];
for(i=0;i<inLayerNNNum[k-1];i++)
{
NNO[k][j].dnWeight[i]=(rand()%20000-10000)/10000.0;// -1 to 1 (20000 steps)
NNO[k][j].dndWeight[i]=0.0;
}
}
}
fin>>cTemp;
for(i=0;i<inLayerNNNum[0];i++)
{
fin>>cTemp;
fin>>NNO[0][i].dnOutput;
fin>>cTemp;
fin>>NNO[0][i].dnTarget;
fin>>cTemp;
fin>>NNO[0][i].dnDelta;
fin>>cTemp;
fin>>NNO[0][i].dnTheta;
fin>>cTemp;
fin>>NNO[0][i].dndTheta;
}
for(k=1;k<inHidLayerNum+2;k++)
{
for(j=0;j<inLayerNNNum[k];j++)
{
fin>>cTemp;
fin>>NNO[k][j].dnOutput;
fin>>cTemp;
fin>>NNO[k][j].dnTarget;
fin>>cTemp;
fin>>NNO[k][j].dnDelta;
fin>>cTemp;
fin>>NNO[k][j].dnTheta;
fin>>cTemp;
fin>>NNO[k][j].dndTheta;
for(i=0;i<inLayerNNNum[k-1];i++)
{
fin>>cTemp;
fin>>NNO[k][j].dnWeight[i];
}
for(i=0;i<inLayerNNNum[k-1];i++)
{
fin>>cTemp;
fin>>NNO[k][j].dndWeight[i];
}
}
}
dnTrainInputSampleMax=new double[inLayerNNNum[0]]; //The max value array of each input node
dnTrainInputSampleMin=new double[inLayerNNNum[0]]; //The min value array of each input node
dnTrainOutputSampleMax=new double[inLayerNNNum[inHidLayerNum+1]]; //The max value array of each output node
dnTrainOutputSampleMin=new double[inLayerNNNum[inHidLayerNum+1]]; //The min value array of each output node
for(i=0;i<inLayerNNNum[0];i++)
{
fin>>cTemp;
fin>>dnTrainInputSampleMax[i];
fin>>cTemp;
fin>>dnTrainInputSampleMin[i];
}
for(i=0;i<inLayerNNNum[inHidLayerNum+1];i++)
{
fin>>cTemp;
fin>>dnTrainOutputSampleMax[i];
fin>>cTemp;
fin>>dnTrainOutputSampleMin[i];
}
fin.close();
}
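//Forward pass only: computes all layer outputs from the inputs already set with InputRecallInputSample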
void Recall(void)
{
double dSum=0.0;
dnMse=0.0;
for(k=1;k<inHidLayerNum+2;k++)
{
for(j=0;j<inLayerNNNum[k];j++)
{
dSum=0.0;
for(i=0;i<inLayerNNNum[k-1];i++)
{
dSum+=NNO[k][j].dnWeight[i]*NNO[k-1][i].dnOutput;
}
NNO[k][j].dnOutput=1.0/(1.0+exp(-(dSum-NNO[k][j].dnTheta)));
}
}
}
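//De-normalizes and returns the output of the given output-layer node after Recall()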
double OutputRecallResult(int iTrainOutputSampleOrder)
{
double dTemp=0.0;
dTemp=NNO[inHidLayerNum+1][iTrainOutputSampleOrder].dnOutput*(dnTrainOutputSampleMax[iTrainOutputSampleOrder]-dnTrainOutputSampleMin[iTrainOutputSampleOrder])+dnTrainOutputSampleMin[iTrainOutputSampleOrder];
return dTemp;
}
};
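/*
  Usage sketch (illustrative only): restore a previously saved network and run a
  forward pass on one new input vector. The class name "BPNN" and the file name
  "trained_net.txt" are assumptions and do not appear in this listing; only the
  member functions shown above are confirmed.

      #include "bpnn.h"

      int main()
      {
          BPNN net;                                   // assumed class name
          net.LoadNeuralNetwork("trained_net.txt");   // topology, weights, thresholds, min/max ranges
          net.InputRecallInputSample(0,3.2);          // raw value for input node 0 (normalized internally)
          net.InputRecallInputSample(1,0.7);          // raw value for input node 1
          net.Recall();                               // forward pass
          double y0=net.OutputRecallResult(0);        // de-normalized output of node 0
          return 0;
      }
*/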