
📄 bpnet.cpp

📁 A neural-network program that runs without any modification; it includes a matrix class, a neural-network class, and a main function
💻 CPP
#include "BpNet.h"
#include <time.h>
#include <memory.h>
////////////////////////////////////////////////////////////////////////////
// CBpNet message handlers
//Create and initialize a new network
FILE* pFile;
void CBpNet::Create(CMatrix mInputData, CMatrix mTarget, int numSample, int iInput, int iHidden, int iOutput)
{
  int i,j,k;
//  mSampleInput=zeros(mInputData.rows(),mInputData.cols());
//  mSampleTarget=zeros(mTarget.rows(),mTarget.cols());  
  mSampleInput=mInputData;
  mSampleTarget=mTarget;
  this->iInput=iInput;
  this->iHidden=iHidden;
  this->iOutput=iOutput;
  this->numSample=numSample;
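  // One set of activations, weights, deltas, and learning rates is kept per
  // "sample" slot: numSample candidate networks are trained in parallel and
  // learn() stops at the first one that meets the error tolerance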
  this->mHidden = new CMatrix[this->numSample];
  this->mOutput = new CMatrix[this->numSample];
  for(i=0;i<this->numSample;i++)
  {
      mHidden[i]=zeros(1,this->iHidden);
      mOutput[i]=zeros(1,this->iOutput);
  }
  //Create the single-sample input matrix used during computation
  mInput=zeros(1,this->iInput);
  //Create the weight matrices and initialize them
  this->mWeighti = new CMatrix[this->numSample];
  this->mWeighto = new CMatrix[this->numSample];
  for(i=0;i<this->numSample;i++)
  {
	  mWeighti[i]=zeros(this->iInput,this->iHidden);
	  mWeighto[i]=zeros(this->iHidden,this->iOutput);
  }
  //Assign initial values: network k draws its weights from the k-th
  //sub-interval of [-1,1], so each candidate network starts in a different region
  for(k=1;k<=this->numSample;k++)
  {
	  for(i=1;i<=this->iInput;i++)
		  for(j=1;j<=this->iHidden;j++)
		  {
			  mWeighti[k-1].r(i,j)=randab(-1.0+2.0*(k-1)/this->numSample,-1.0+2.0*k/this->numSample);
		  }
	  for(i=1;i<=this->iHidden;i++)
		  for(j=1;j<=this->iOutput;j++)
		  {
			  mWeighto[k-1].r(i,j)=randab(-1.0+2.0*(k-1)/this->numSample,-1.0+2.0*k/this->numSample);
		  }
  }
  
  //Create the threshold (bias) matrices and initialize them
  mThresholdi=zeros(1,this->iHidden);
  for(i=1;i<=this->iHidden;i++)
	  mThresholdi.r(i)=randab(-1.0,1.0);
  mThresholdo=zeros(1,this->iOutput);
  for(i=1;i<=this->iOutput;i++)
	  mThresholdo.r(i)=randab(-1.0,1.0);
  //Create the weight-change (momentum) matrices, learning rates, and delta matrices
  this->mChangei = new CMatrix[this->numSample];
  this->mChangeo = new CMatrix[this->numSample];
  this->dblLearnRate1 = new double[this->numSample];
  this->dblLearnRate2 = new double[this->numSample];
  this->mOutputDeltas = new CMatrix[this->numSample];
  this->mHiddenDeltas = new CMatrix[this->numSample];
  for(i=0;i<this->numSample;i++)
  {
      mChangei[i]=zeros(this->iInput,this->iHidden);
      mChangeo[i]=zeros(this->iHidden,this->iOutput);
      //initial learning rates
      dblLearnRate1[i]=0.5;
      dblLearnRate2[i]=0.5;
      //delta (error) matrices
      mOutputDeltas[i]=zeros(1,iOutput);
      mHiddenDeltas[i]=zeros(1,iHidden);
  }
  //per-column min/max factors, filled in by normalize()
  mInputNormFactor=zeros(iInput,2);
  mTargetNormFactor=zeros(iOutput,2);


  dblMomentumFactor=0.95;
  
  m_IsStop=false;
  dblMse=1.0e-6;  //error tolerance (stopping criterion)
  this->dblError = new double[this->numSample];
  for(i=0;i<this->numSample;i++)
      dblError[i]=1.0;  //previous-epoch total error, initialized to 1.0
  this->dblErr = new double[this->numSample];
  memset(dblErr,0,this->numSample*sizeof(double));
  lEpochs=0;
  generation = 0;
  optimalIndex = 0;  //fall back to the first network if training never meets the tolerance

}
//Predict with the trained (optimal) network
CMatrix CBpNet::simulate(CMatrix mData)
{
 int i,j;
 CMatrix mResult;
 CMatrix data=zeros(mData.rows(),mData.cols());
 data=mData;
 if(mData.cols()!=iInput)
 {
  ::MessageBox(NULL,"Wrong number of input variables!","Input error",MB_OK);
  return mResult;
 }
 mResult=zeros(data.rows(),iOutput); 
 //normalize the inputs with the min/max factors recorded during training
 for(i=1;i<=data.rows();i++)
	 for(j=1;j<=data.cols();j++) 
         data.r(i,j)=(data.r(i,j)-mInputNormFactor.r(j,1))/(mInputNormFactor.r(j,2)-mInputNormFactor.r(j,1));
 //forward pass through the optimal network
 int iSample;
 CMatrix mInputdata,mHiddendata,mOutputdata;
 mInputdata=zeros(1,iInput);
 mHiddendata=zeros(1,iHidden);
 mOutputdata=zeros(1,iOutput);
 double sum=0.0;
   for(iSample=1;iSample<=data.rows();iSample++){ 
	 //input layer
	    for(i=1;i<=iInput;i++)
		  mInputdata.r(i)=data.r(iSample,i);
	 //hidden layer
		for(j=1;j<=iHidden;j++){
		  sum=0.0;
		 for(i=1;i<=iInput;i++)
			sum+=mInputdata.r(i)*mWeighti[optimalIndex].r(i,j);
		 sum-=mThresholdi.r(j); 
		 mHiddendata.r(j)=1.0/(1.0+exp(-sum));
		}
    
  //output layer
	for(j=1;j<=iOutput;j++){
		sum=0.0;
		for(i=1;i<=iHidden;i++)
			sum+=mHiddendata.r(i)*mWeighto[optimalIndex].r(i,j);
		sum-=mThresholdo.r(j); 
		mOutputdata.r(j)=1.0/(1.0+exp(-sum));
	}
	
	//map outputs back to the original target range
	for(j=1;j<=iOutput;j++)
        mResult.r(iSample,j)=mOutputdata.r(j)*(mTargetNormFactor.r(j,2)-mTargetNormFactor.r(j,1))+mTargetNormFactor.r(j,1);
 }
  
 return (mResult);
}


//Train the candidate networks
void CBpNet::learn()
{
  int iSample=1,sample=0;
  double* dblTotal;
  dblTotal = new double[this->numSample];
  if(m_IsStop)
	  m_IsStop=false;
  //normalize the training data into (0,1)
  normalize();
  
  while(!m_IsStop&&generation<10000){
   ZeroMemory(dblTotal,this->numSample*sizeof(double));
   generation++;
   fprintf(pFile,"%d     ",generation);
   for(iSample=1;iSample<=mSampleInput.rows();iSample++){
	   for(sample=0;sample<this->numSample;sample++)
	   {
			forward(iSample,sample);
			backward(iSample,sample);
			dblTotal[sample]+=dblErr[sample];  //accumulate this epoch's total error
	   }
	
  }
   //Adaptive learning rates: if a network's total error grew by more than 4%
   //this epoch, shrink its rates; otherwise grow them slightly
   for(sample=0;sample<this->numSample;sample++)
   {
       if(dblTotal[sample]/dblError[sample]>1.04){
           dblLearnRate1[sample]*=0.7;
           dblLearnRate2[sample]*=0.7;
       }
       else{
           dblLearnRate1[sample]*=1.05;
           dblLearnRate2[sample]*=1.05;
       }
   }
   lEpochs++;
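   //record this epoch's error per network, log it, and stop as soon as one
   //network meets the error tolerance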
   for(sample=0;sample<this->numSample;sample++)
   dblError[sample]=dblTotal[sample];
   for(sample=0;sample<this->numSample;sample++)
   {
	   if(sample<this->numSample-1)
		   fprintf(pFile,"%f   ",dblError[sample]);
	   else
		   fprintf(pFile,"%f\n",dblError[sample]);
	   if(dblError[sample]<dblMse)
	   {
		   m_IsStop = true;
		   optimalIndex = sample;
	   }
   }
  }
  delete[] dblTotal;
}

void CBpNet::stop()
{
 m_IsStop=true;
}

//Uniform random number in [a, b)
double CBpNet::randab(double a, double b)
{
	//assumes rand() has been seeded once by the caller (see main)
	double num = rand()%1000/1000.0;
	return((b-a)*num+a);
}

//Map the data into the (0,1) interval (per-column min-max normalization)
void CBpNet::normalize()
{
 
 int i,j;
  //range (min, max) of each input column
  mInputNormFactor=scope(mSampleInput);
  //range (min, max) of each target column
  mTargetNormFactor=scope(mSampleTarget);

 for(i=1;i<=mSampleInput.rows();i++)
	 for(j=1;j<=mSampleInput.cols();j++)
		 mSampleInput.r(i,j)=(mSampleInput.r(i,j)-mInputNormFactor.r(j,1))/(mInputNormFactor.r(j,2)-mInputNormFactor.r(j,1));
 
 for(i=1;i<=mSampleTarget.rows();i++)
	 for(j=1;j<=mSampleTarget.cols();j++)
		 mSampleTarget.r(i,j)=(mSampleTarget.r(i,j)-mTargetNormFactor.r(j,1))/(mTargetNormFactor.r(j,2)-mTargetNormFactor.r(j,1));
 
}

//Forward pass
void CBpNet::forward(int iSample,int sample)
{
	//propagate training sample iSample through candidate network `sample`
	if(iSample<1||iSample>mSampleInput.rows()){
		MessageBox(NULL,"无此样本数据:索引出界!","无此样本数据:索引出界!",MB_OK);
		return;
	}
	int i,j;
	double sum=0.0;
	
	//input layer
	for(i=1;i<=iInput;i++)
		mInput.r(i)=mSampleInput.r(iSample,i);
	
	//hidden layer (sigmoid activation)
    
	for(j=1;j<=iHidden;j++){
		sum=0.0;
		for(i=1;i<=iInput;i++)			
			sum+=mInput.r(i)*mWeighti[sample].r(i,j);
		
		sum-=mThresholdi.r(j); 
		mHidden[sample].r(j)=1.0/(1.0+exp(-sum));
	}
    	
	//output layer (sigmoid activation)
	for(j=1;j<=iOutput;j++){
		sum=0.0;
		for(i=1;i<=iHidden;i++)
			sum+=mHidden[sample].r(i)*mWeighto[sample].r(i,j);
		sum-=mThresholdo.r(j); 
		mOutput[sample].r(j)=1.0/(1.0+exp(-sum));
	}
   	
}

//Backward pass: backpropagate the error and update weights and thresholds
void CBpNet::backward(int iSample,int sample)
{
	if(iSample<1||iSample>mSampleInput.rows()){
		MessageBox(NULL,"无此样本数据:索引出界!","无此样本数据:索引出界!",MB_OK);
		return;
	}
    int i,j;
	
    //output-layer deltas: sigmoid derivative times (target - output)
	for(i=1;i<=iOutput;i++)
	{
		double output = mOutput[sample].r(i);
		mOutputDeltas[sample].r(i)=	output*(1-output)*(mSampleTarget.r(iSample,i)-output);
	}
	
	//hidden-layer deltas
	double sum=0.0;
	for(j=1;j<=iHidden;j++){
		sum=0.0;
		for(i=1;i<=iOutput;i++)
			sum+=mOutputDeltas[sample].r(i)*mWeighto[sample].r(j,i);
		mHiddenDeltas[sample].r(j)=mHidden[sample].r(j)*(1-mHidden[sample].r(j))*sum;
	}
	//update hidden-to-output weights: gradient step plus momentum on the previous change
	
	double dblChange;
	for(j=1;j<=iHidden;j++)
		for(i=1;i<=iOutput;i++){
			dblChange=mOutputDeltas[sample].r(i)*mHidden[sample].r(j);
			mWeighto[sample].r(j,i)=mWeighto[sample].r(j,i)+dblLearnRate2[sample]*dblChange+dblMomentumFactor*mChangeo[sample].r(j,i);
			mChangeo[sample].r(j,i)=dblChange;
		}
    
	//update input-to-hidden weights
    for(i=1;i<=iInput;i++)
		for(j=1;j<=iHidden;j++){
			dblChange=mHiddenDeltas[sample].r(j)*mInput.r(i);
            mWeighti[sample].r(i,j)=mWeighti[sample].r(i,j)+dblLearnRate1[sample]*dblChange+dblMomentumFactor*mChangei[sample].r(i,j); 
			mChangei[sample].r(i,j)=dblChange;
		}
	//update the thresholds (biases)
	for(j=1;j<=iOutput;j++)
	    mThresholdo.r(j)-=dblLearnRate2[sample]*mOutputDeltas[sample].r(j);  
	for(i=1;i<=iHidden;i++) 
	    mThresholdi.r(i)-=dblLearnRate1[sample]*mHiddenDeltas[sample].r(i); 
   	//sample error: half the sum of squared output errors
	dblErr[sample]=0.0;
	for(i=1;i<=iOutput;i++)
		dblErr[sample]+=0.5*(mSampleTarget.r(iSample,i)-mOutput[sample].r(i))*(mSampleTarget.r(iSample,i)-mOutput[sample].r(i));
	
   
}

//Min and max of each column of mData
CMatrix CBpNet::scope(CMatrix mData)
{
 CMatrix mScope=zeros(mData.cols(),2);
 double  min,max;
 for(int i=1;i<=mData.cols();i++){
	 min=max=mData.r(1,i); 
	 for(int j=1;j<=mData.rows();j++){
		 if(mData.r(j,i)>=max)
			 max=mData.r(j,i);
		 if(mData.r(j,i)<=min)
			 min=mData.r(j,i);
	 }
	 if(min==max)   //a constant column would make max-min zero; reset min as a crude guard
		 min=0.0;
	 mScope.r(i,1)=min;
	 mScope.r(i,2)=max;
 }
 return(mScope);
}


int main()
{
	pFile = fopen("dumpData.txt","w");
	char s[] = "generation     sample1   sample2   sample3   sample4   sample5   sample6   sample7   sample8   sample9   sample10";

	fprintf(pFile,"%s\n",s);

	//random training data: 5 rows with 3 inputs and 3 targets each, all in [0,1)
	CMatrix matrix1=zeros(5,3);
	CMatrix matrix2=zeros(5,3);

	srand(unsigned(time(NULL)));
	for(int i=1;i<=matrix1.rows();i++)
		for(int j=1;j<=matrix1.cols();j++)
		{
			matrix1.r(i,j) = rand()%1000/1000.0;
			matrix2.r(i,j) = rand()%1000/1000.0;
		}
	CBpNet  net;
	//10 candidate networks (matching the log header), 3 inputs and 3 outputs
	//(matching the matrices); 5 hidden units is an arbitrary choice
	net.Create(matrix1,matrix2,10,3,5,3);
	net.learn();
	fclose(pFile);
	return 0;
}
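
The listing relies on a "BpNet.h" header that is not shown on this page. The declarations below are a minimal sketch inferred purely from how CMatrix and CBpNet are used in bpnet.cpp; the real header's member types, access specifiers, and helper functions may differ.

// BpNet.h -- interface sketch inferred from bpnet.cpp; not the original header
#ifndef BPNET_H
#define BPNET_H

#include <windows.h>

class CMatrix {
public:
	int rows() const;
	int cols() const;
	double& r(int i);          // 1-based access into a row vector
	double& r(int i, int j);   // 1-based element access
	// value-semantics copy construction and assignment assumed
};

CMatrix zeros(int rows, int cols);   // rows x cols matrix filled with 0.0

class CBpNet {
public:
	void Create(CMatrix mInputData, CMatrix mTarget, int numSample,
	            int iInput, int iHidden, int iOutput);
	CMatrix simulate(CMatrix mData);
	void learn();
	void stop();
	// a destructor releasing the arrays allocated in Create() is presumably declared too

private:
	void normalize();
	void forward(int iSample, int sample);
	void backward(int iSample, int sample);
	double randab(double a, double b);
	CMatrix scope(CMatrix mData);

	CMatrix  mSampleInput, mSampleTarget, mInput;
	CMatrix *mHidden, *mOutput;              // per-network activations
	CMatrix *mWeighti, *mWeighto;            // input-hidden / hidden-output weights
	CMatrix *mChangei, *mChangeo;            // previous weight changes (momentum)
	CMatrix *mOutputDeltas, *mHiddenDeltas;  // backpropagated deltas
	CMatrix  mThresholdi, mThresholdo;       // hidden / output thresholds (biases)
	CMatrix  mInputNormFactor, mTargetNormFactor;  // per-column min/max
	double  *dblLearnRate1, *dblLearnRate2, *dblError, *dblErr;
	double   dblMomentumFactor, dblMse;
	bool     m_IsStop;
	long     lEpochs;
	int      iInput, iHidden, iOutput, numSample, generation, optimalIndex;
};

#endif

After learn() finishes, predictions on new input rows would go through net.simulate(newData), which reuses the normalization factors recorded during training and the weights of the network recorded in optimalIndex.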
