
bpnet.cpp

Package: BP Neural Network C Program Package
Language: C++
//Header:       BPNet.hpp
//Language:     Borland C++ 3.1
//Version:      1.0
//Environ:      Any
//Author:       Liu Kang
//Date:         3/1996
//Purpose:      Provide a class for BP neural networks

#include "bpnet.hpp"
  #include <math.h>
  #include <string.h>

//constructor
BPNet::BPNet()
{
  ErrSet(NT_NOERR);
  StepLrn=0.1;        //learning rate
  RushLrn=0.9;        //momentum coefficient
  ErrAllow=1e-2;      //per-output error tolerance for convergence
  ErrFlAllow=1e-8;    //smallest weight change still counted as progress
  LrnCom=NULL;        //no learning-progress callback by default
  LrnNumAllow=10000;  //maximum number of learning epochs
  DifFun = SigFun1;   //derivative of the node transfer function
  strcpy(NetType,"BP Network");
}
BPNet::BPNet(char *name)
{
  ErrSet(NT_NOERR);
  strcpy(NtName,name);
  if(!CFGLoad())
    ErrSet(NT_INITERR);
  else
    WtFlLoad();
  SetNdFun(SigFun);
  DifFun = SigFun1;
  LrnCom = NULL;
  LrnNumAllow = 10000;  //not stored in the .CFG file, so use the default epoch limit
}
BPNet::BPNet(int ln, int nn[], char *nname)
:FrdNrl(ln,nn,nname)
{
  ErrSet(NT_NOERR);
  StepLrn = 0.1;        //learning rate
  RushLrn = 0.9;        //momentum coefficient
  ErrAllow = 1e-2;      //per-output error tolerance for convergence
  ErrFlAllow = 1e-8;    //smallest weight change still counted as progress
  LrnCom = NULL;
  LrnNumAllow = 10000;  //epoch limit (same default as the no-argument constructor)
  DifFun = SigFun1;
  strcpy(NetType,"BP Network");
  SetNdFun(SigFun);
}
//destructor
BPNet::~BPNet()
{
  if(!strcmp(NetType,"BP Network")) //save the configuration only for a plain BP network
    CFGWrite();
}
//other methods
void BPNet::SetNdFun(double (*f)(double))
{
  FrdNrl::SetNdFun(f);
  for(int i=0; i<LyNodeNum[0]; i++)
    Node[i].SetFun(InputFun);  //input-layer nodes always keep the identity function
}
void BPNet::ThdInitial()
{
  FrdNrl::ThdInitial();
  for(int i=0; i<LyNodeNum[0]; i++)
    Node[i].SetThred(0);  //input-layer nodes get a zero threshold
}
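// ---------------------------------------------------------------------------
// Learn() below implements batch back-propagation: each epoch it accumulates
// the error-gradient contribution of every training example in DertWeight,
// then applies one update per weight,
//     w -= StepLrn*DertWeight + RushLrn*DertWtStore,
// where DertWtStore holds the previous epoch's accumulated changes, so
// RushLrn acts as a momentum-like coefficient. Threshold changes are carried
// in the extra last row of each DertWeight matrix and applied the same way.
// ---------------------------------------------------------------------------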
long BPNet::Learn(double **input, double **output,int examplenum)
{
  if(LayerNum <=0 ) return -1;
  #define MemErr        { ErrSet(NT_MEM); return -1; }
  //allocate the working buffers for learning
  double *outtmp=new double[LyNodeNum[LayerNum-1]]; //actual-output buffer
  if(outtmp == NULL) MemErr
  DblArray Derta(0,NodeNum-1);    //delta of every node
  DblArray DertaThd(0,LayerNum-1);//delta of every layer's thresholds
  Matrix **DertWeight=new Matrix*[LayerNum-1]; //accumulated weight changes, one matrix per layer-to-layer connection
  if(DertWeight == NULL) MemErr
  Matrix **DertWtStore=new Matrix*[LayerNum-1];//previous epoch's changes, used as the momentum term
  if(DertWtStore == NULL) MemErr
  for(int count=0; count<LayerNum-1; count++)
  {
    //weight-change matrix; the extra last row carries the threshold changes
    DertWeight[count] = new Matrix(LyNodeNum[count]+1,LyNodeNum[count+1],0);
    if(DertWeight[count] == NULL) MemErr
    //same shape, for storing the previous epoch's changes
    DertWtStore[count] = new Matrix(LyNodeNum[count]+1,LyNodeNum[count+1]);
    if(DertWtStore[count] == NULL) MemErr
  }
  //initialize weights and thresholds, unless resuming a previously loaded net
  if(LdType != OLD)
  {
    WtInitial();
    ThdInitial();
    for(int i=1; i<=NodeNum; i++)
      for(int j=1; j<=NodeNum; j++)
	(*Weight)(i,j) *= (*Conect)(i,j); //zero the weights of absent connections
  }
  //learn begin
  int LrnOk;
  int LrnFail;
  long LrnCount=0;
  do {
    //one epoch: accumulate the changes over every training example
    LrnOk = 1;
    for(count=0; count<LayerNum-1; count++)
    {
      *DertWtStore[count] = *DertWeight[count];    //keep last epoch's changes for momentum
      *DertWeight[count] = (*DertWeight[count])*0; //reset the accumulators
    }
    for(int sample=0; sample<examplenum; sample++)
    {
      //forward pass: compute the actual output for this example
      Run(input[sample],outtmp);
      //output layer: delta = (actual-desired)*f'(net input); flag errors above tolerance
      DertaThd[LayerNum-1] = 0;
      for(int lynum=0; lynum<LyNodeNum[LayerNum-1]; lynum++)
      {
	int NDNum = NumTrans(LayerNum,lynum+1)-1;
	Derta[NDNum]  = (outtmp[lynum]-output[sample][lynum])
			*DifFun(NodeIn(LayerNum,lynum+1));
	DertaThd[LayerNum-1] += Derta[NDNum]*Node[NDNum].GetThred();
	if(fabs(output[sample][lynum]-outtmp[lynum]) >= ErrAllow)
	  LrnOk = 0;
      }
      DertaThd[LayerNum-1] *= DifFun(1);
      //back-propagate the deltas through the hidden layers
      for(int lyno=LayerNum-1; lyno>=1; lyno--) //all hidden layers
      {
	DertaThd[lyno-1] = 0;
	for(lynum=0; lynum<LyNodeNum[lyno-1]; lynum++)//each node in the layer
	{
	  int NDNum = NumTrans(lyno,lynum+1)-1;
	  Derta[NDNum] = 0;
	  for(int bklynum=0; bklynum<LyNodeNum[lyno]; bklynum++)
	  {
	    int bkNDNum = NumTrans(lyno+1,bklynum+1)-1;
	    Derta[NDNum] += Derta[bkNDNum]*GetWeight(lyno,lynum+1, lyno+1,bklynum+1);
	  }
	  Derta[NDNum] *= DifFun(NodeIn(lyno,lynum+1));
	  DertaThd[lyno-1] += Derta[NDNum]*Node[NDNum].GetThred();
	}
	DertaThd[lyno-1] *= DifFun(1);
      }
      //accumulate this example's weight changes into the epoch totals
      for(lyno=1; lyno<LayerNum; lyno++)
      {
	for(lynum=1; lynum<=LyNodeNum[lyno-1]; lynum++)
	{
	  for(int bklynum=1; bklynum<=LyNodeNum[lyno]; bklynum++)
	  {
	    int NDNum = NumTrans(lyno+1,bklynum)-1;
	    (*DertWeight[lyno-1])(lynum,bklynum) += Derta[NDNum]*NodeOut(lyno,lynum);
	  }
	}
	//accumulate this example's threshold changes in the extra matrix row
	for(int bklynum=1; bklynum<=LyNodeNum[lyno]; bklynum++)
	  (*DertWeight[lyno-1])(LyNodeNum[lyno-1]+1,bklynum) += DertaThd[lyno]*Node[0].Fun(1);
      }
    }
    //after all examples, apply the accumulated weight and threshold changes
    LrnFail = 1;
    for(int lyno=1; lyno<LayerNum; lyno++)
    {
      //change the weights from this layer to the next
      for(int lynum=1; lynum<=LyNodeNum[lyno-1]; lynum++)
      {
	for(int bklynum=1; bklynum<=LyNodeNum[lyno]; bklynum++)
	{
	  double tmp;
	  tmp = StepLrn*(*DertWeight[lyno-1])(lynum,bklynum) +  //gradient step
		RushLrn*(*DertWtStore[lyno-1])(lynum,bklynum);  //momentum term
	  if(fabs(tmp) > ErrFlAllow) LrnFail = 0; //a weight is still moving, so not stalled
	  SetWeight(GetWeight(lyno,lynum,lyno+1,bklynum)-tmp,lyno,lynum, lyno+1,bklynum);
	}
      }
      //change the threshold of each node in the next layer
      for(int bklynum=1; bklynum<=LyNodeNum[lyno]; bklynum++)
      {
	int NDNum = NumTrans(lyno+1,bklynum)-1;
	double tmp=StepLrn*(*DertWeight[lyno-1])(LyNodeNum[lyno-1]+1,bklynum)
		  +RushLrn*(*DertWtStore[lyno-1])(LyNodeNum[lyno-1]+1,bklynum);
	if(fabs(tmp) > ErrFlAllow) LrnFail = 0;
	Node[NDNum].SetThred(Node[NDNum].GetThred()-tmp);
      }
    }
    LrnCount++;
    if(LrnCom != NULL)
      if(LrnCom(LrnCount,*this,*Weight,input,output,examplenum))
	LrnFail = 1; //a nonzero return from the callback aborts learning
  }while(!LrnOk && LrnCount<LrnNumAllow && !LrnFail); //stop on convergence, epoch limit, or stall
  WtFlWrite();  //persist the trained weights
  ThdFlWrite(); //and the thresholds
  LdType = OLD; //mark the net as trained
  for(count=0; count<LayerNum-1; count++)
  {
    delete DertWtStore[count];
    delete DertWeight[count];
  }
  delete[] DertWeight;  //new[] allocations need delete[]
  delete[] DertWtStore;
  delete[] outtmp;
  return LrnCount;
}
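// ---------------------------------------------------------------------------
// The <NtName>.CFG file handled below is plain text: the FrdNrl base class
// writes its own block first, then BPNet appends StepLrn, RushLrn, ErrAllow
// and ErrFlAllow, one value per line; CFGLoad() reads them back in the same
// order.
// ---------------------------------------------------------------------------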
//read and write config file
int BPNet::CFGWrite()
{
  if(!strcmp(NtName,"")) return 0;
  char flname[20];
  strcpy(flname,NtName);
  strcat(flname,".CFG");
  ofstream outfile(flname);
  if(!outfile)
  {
    ErrSet(NT_FILEERR);
    return 0;
  }
  return CFGWriteIn(outfile);
}
int BPNet::CFGWriteIn(ostream& outfile)
{
  if(!FrdNrl::CFGWriteIn(outfile))
    return 0;
  outfile<<"\n"<<StepLrn<<"\n"<<RushLrn<<"\n"<<ErrAllow<<"\n"<<ErrFlAllow;
  if(!outfile)
    return 0;
  else
    return 1;
}
int BPNet::CFGLoad()
{
  if(!strcmp(NtName,"")) return 0;
  char flname[20];
  strcpy(flname,NtName);
  strcat(flname,".CFG");
  ifstream infile(flname);
  if(!infile)
  {
    ErrSet(NT_FILEERR);  //mirror CFGWrite(): report a missing or unreadable file
    return 0;
  }
  if(!CFGLoadIn(infile))
    return 0;
  else
    return 1;
}
int BPNet::CFGLoadIn(istream& infile)
{
  if(!FrdNrl::CFGLoadIn(infile))
    return 0;
  infile>>StepLrn>>RushLrn>>ErrAllow>>ErrFlAllow;
  if(!infile)
    return 0;
  else
    return 1;
}
//sigmoid transfer function for BP network nodes
double SigFun(double x)
{
  return 1./(1+exp(-x));
}
//its derivative: s'(x) = s(x)*(1-s(x))
double SigFun1(double x)
{
  return SigFun(x)*(1-SigFun(x));
}
//identity function used by the input-layer nodes
double InputFun(double x)
{
  return x;
}
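
// ---------------------------------------------------------------------------
// A minimal usage sketch (hypothetical, not part of the package): trains the
// XOR mapping, assuming bpnet.hpp declares BPNet(int,int[],char*),
// Learn(double**,double**,int) and Run(double*,double*) with the signatures
// used above. Compile with -DBPNET_DEMO to include it.
// ---------------------------------------------------------------------------
#ifdef BPNET_DEMO
#include <iostream.h>
int main()
{
  int nodes[3] = {2, 3, 1};       //2 inputs, 3 hidden nodes, 1 output
  BPNet net(3, nodes, "XORNET");  //three-layer network named XORNET

  double in0[2]={0,0}, in1[2]={0,1}, in2[2]={1,0}, in3[2]={1,1};
  double out0[1]={0}, out1[1]={1}, out2[1]={1}, out3[1]={0};
  double *input[4] ={in0,in1,in2,in3};
  double *output[4]={out0,out1,out2,out3};

  long epochs = net.Learn(input, output, 4); //batch learning loop defined above
  cout << "epochs used: " << epochs << "\n";

  double result[1];
  net.Run(in1, result);                      //forward pass on one pattern
  cout << "0 XOR 1 -> " << result[0] << "\n";
  return 0;
}
#endif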
