⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 bp.cpp

📁 神经网络bp自学习算法的c++实现
💻 CPP
字号:
#include <iostream>
#include <math.h>
#include <stdlib.h>
#include <time.h>
#include "bp.h"

using namespace std;

// XOR truth table used as the training set: {input A, input B, desired output}.
const int TrainingSet[4][3]={
	{0,0,0},
	{0,1,1},
	{1,0,1},
	{1,1,0},
};


// Default constructor: put the layer in a safe, empty state.
// The original only nulled the layer links and left every buffer pointer
// indeterminate; CleanUp() tests `Weights != NULL`, so calling it on a
// layer that was never Initialize()d would read (and free) garbage.
NeuralNetworkLayer::NeuralNetworkLayer()
{
     ParentLayer = NULL;
     ChildLayer = NULL;
     NeuronValues = NULL;
     DesiredValues = NULL;
     Errors = NULL;
     Weights = NULL;
     WeightChanges = NULL;
     BiasValues = NULL;
     BiasWeights = NULL;
}


// Allocate and zero this layer's buffers and (if it feeds a child layer)
// its weight matrix and bias vectors, and wire up the parent/child links.
//
// NumNodes  - number of neurons in this layer.
// parent    - preceding layer, or NULL for the input layer.
// child     - following layer, or NULL for the output layer.
//
// NOTE: NumberOfChildNodes must be set by the caller BEFORE this call
// (NeuralNetwork::Initialize does so); it sizes the weight matrix.
void NeuralNetworkLayer::Initialize(int NumNodes,NeuralNetworkLayer* parent,NeuralNetworkLayer* child)
{
     // Fix: the original ignored NumNodes entirely and silently relied on
     // the caller having assigned NumberOfNodes beforehand.  Assigning it
     // here removes that hidden coupling; existing callers pass the same
     // value they already stored, so behavior is unchanged.
     NumberOfNodes = NumNodes;

     NeuronValues  = (double*) malloc(sizeof(double) * NumberOfNodes);
     DesiredValues = (double*) malloc(sizeof(double) * NumberOfNodes);
     Errors        = (double*) malloc(sizeof(double) * NumberOfNodes);

     if(parent != NULL)
     {
          ParentLayer = parent;
     }
     if(child != NULL)
     {
          ChildLayer = child;
          // Weight matrix is NumberOfNodes x NumberOfChildNodes, with one
          // bias value/weight per child node.
          Weights       = (double**) malloc(sizeof(double*) * NumberOfNodes);
          WeightChanges = (double**) malloc(sizeof(double*) * NumberOfNodes);
          for(int i = 0; i < NumberOfNodes; i++)
          {
               Weights[i]       = (double*) malloc(sizeof(double) * NumberOfChildNodes);
               WeightChanges[i] = (double*) malloc(sizeof(double) * NumberOfChildNodes);
          }
          BiasValues  = (double*) malloc(sizeof(double) * NumberOfChildNodes);
          BiasWeights = (double*) malloc(sizeof(double) * NumberOfChildNodes);
     } else {
          // Output layer: no outgoing connections.
          Weights = NULL;
          WeightChanges = NULL;
          BiasValues = NULL;
          BiasWeights = NULL;
     }

     // Zero all per-node state and all outgoing weights.
     for(int i = 0; i < NumberOfNodes; i++)
     {
          NeuronValues[i] = 0;
          DesiredValues[i] = 0;
          Errors[i] = 0;
          if(ChildLayer != NULL)
               for(int j = 0; j < NumberOfChildNodes; j++)
               {
                    Weights[i][j] = 0;
                    WeightChanges[i][j] = 0;
               }
     }

     // NOTE(review): BiasValues is zeroed here and never set elsewhere, so
     // the bias terms contribute nothing in CalculateNeuronValues() and
     // BiasWeights never change in AdjustWeights().  Reference versions of
     // this code set BiasValues to a non-zero constant — confirm intent.
     if(ChildLayer != NULL)
          for(int j = 0; j < NumberOfChildNodes; j++)
          {
               BiasValues[j] = 0;
               BiasWeights[j] = 0;
          }
}


// Release every buffer allocated by Initialize().
// Fix: null each pointer after freeing it so a second CleanUp() call (or a
// CleanUp() on a never-initialized layer, given the fixed constructor) is a
// harmless no-op instead of a double-free.  free(NULL) is defined as a no-op,
// so the explicit null checks are unnecessary.
void NeuralNetworkLayer::CleanUp(void)
{
     free(NeuronValues);   NeuronValues = NULL;
     free(DesiredValues);  DesiredValues = NULL;
     free(Errors);         Errors = NULL;
     if(Weights != NULL)
     {
          // Free each row before the row-pointer arrays themselves.
          for(int i = 0; i < NumberOfNodes; i++)
          {
               free(Weights[i]);
               free(WeightChanges[i]);
          }
          free(Weights);        Weights = NULL;
          free(WeightChanges);  WeightChanges = NULL;
     }
     free(BiasValues);   BiasValues = NULL;
     free(BiasWeights);  BiasWeights = NULL;
}


// Initialize every outgoing weight and bias weight to a uniform random
// value in [-1, 1] (granularity 0.01).
//
// Fix: the original called srand((unsigned)time(NULL)) on EVERY invocation.
// NeuralNetwork::Initialize() randomizes the input and hidden layers within
// the same second, so both layers were re-seeded with the same value and
// received identical random streams — a symmetric starting point that
// hampers learning.  Seed exactly once per process instead.
// Also removed dead code: rand() is already non-negative (abs was a no-op)
// and the modulo arithmetic already confines `number` to [min, max], so the
// clamping branches could never fire.
void NeuralNetworkLayer::RandomizeWeights(void)
{
     const int min = 0;
     const int max = 200;

     static bool seeded = false;
     if(!seeded)
     {
          srand( (unsigned)time( NULL ) );
          seeded = true;
     }

     for(int i = 0; i < NumberOfNodes; i++)
     {
          for(int j = 0; j < NumberOfChildNodes; j++)
          {
               int number = (rand() % (max - min + 1)) + min;     // 0..200
               Weights[i][j] = number / 100.0f - 1;               // -1.0 .. 1.0
          }
     }
     for(int j = 0; j < NumberOfChildNodes; j++)
     {
          int number = (rand() % (max - min + 1)) + min;
          BiasWeights[j] = number / 100.0f - 1;
     }
}

// Forward pass for this layer: weighted sum of the parent layer's outputs
// (plus the parent's bias term) pushed through a logistic sigmoid.
// A no-op on the input layer (ParentLayer == NULL) — its values are set
// externally via NeuralNetwork::SetInput().
//
// Fix: the original stored the raw sum (NeuronValues[j] = x), i.e. a linear
// activation.  That is inconsistent with CalculateErrors(), which multiplies
// by NeuronValues * (1 - NeuronValues) — the derivative of the logistic
// sigmoid — and a purely linear network cannot learn the XOR training set
// that main() trains on.  Applying the sigmoid restores the standard BP
// formulation this code's error terms assume.
void NeuralNetworkLayer::CalculateNeuronValues(void)
{
     if (ParentLayer == NULL)
          return;

     for(int j = 0; j < NumberOfNodes; j++)
     {
          double x = 0;
          for(int i = 0; i < NumberOfParentNodes; i++)
          {
               x += ParentLayer->NeuronValues[i] * ParentLayer->Weights[i][j];
          }
          x += ParentLayer->BiasValues[j] * ParentLayer->BiasWeights[j];

          NeuronValues[j] = 1.0 / (1.0 + exp(-x));   // logistic sigmoid
     }
}

void NeuralNetworkLayer::CalculateErrors(void)
{
     int i, j;
     double sum;
     if(ChildLayer == NULL) 
     {
          for(i=0; i<NumberOfNodes; i++)
          {
                 Errors[i] = (DesiredValues[i] - NeuronValues[i]) *
                               NeuronValues[i] *
                               (1.0f - NeuronValues[i]);
          }
     } else if(ParentLayer == NULL) { 
          for(i=0; i<NumberOfNodes; i++)
          {
               Errors[i] = 0.0f;
          }
     } else { 
          for(i=0; i<NumberOfNodes; i++)
          {
               sum = 0;
               for(j=0; j<NumberOfChildNodes; j++)
               {
                    sum += ChildLayer->Errors[j] * Weights[i][j];
               }
               Errors[i] = sum * NeuronValues[i] *
                               (1.0f - NeuronValues[i]);
          }
     }
}

// Gradient-descent weight update toward the child layer:
//   w[i][j] += rate * childError[j] * activation[i]
// and likewise for the bias weights.  Layers without a child (the output
// layer) have no outgoing weights and are left untouched.
void NeuralNetworkLayer::AdjustWeights(void)
{
     if(ChildLayer == NULL)
          return;

     for(int i = 0; i < NumberOfNodes; i++)
     {
          for(int j = 0; j < NumberOfChildNodes; j++)
          {
               double delta = LearningRate * ChildLayer->Errors[j] * NeuronValues[i];
               Weights[i][j] += delta;
          }
     }

     for(int j = 0; j < NumberOfChildNodes; j++)
     {
          BiasWeights[j] += LearningRate * ChildLayer->Errors[j] * BiasValues[j];
     }
}

// Intentionally empty stub: per-layer printing is not implemented here;
// NeuralNetwork::OutData() prints all layer weights instead.
void NeuralNetworkLayer::OutData()
{
  
}
    

// Build a fully-connected three-layer network (input -> hidden -> output).
// Each layer's node counts must be set before its Initialize() call, since
// that call sizes the weight matrix from NumberOfChildNodes.
void NeuralNetwork::Initialize(int nNodesInput, int nNodesHidden,int nNodesOutput)
{
     // Input layer: feeds the hidden layer, has no parent.
     InputLayer.NumberOfNodes       = nNodesInput;
     InputLayer.NumberOfChildNodes  = nNodesHidden;
     InputLayer.NumberOfParentNodes = 0;
     InputLayer.Initialize(nNodesInput, NULL, &HiddenLayer);
     InputLayer.RandomizeWeights();

     // Hidden layer: between input and output.
     HiddenLayer.NumberOfNodes       = nNodesHidden;
     HiddenLayer.NumberOfChildNodes  = nNodesOutput;
     HiddenLayer.NumberOfParentNodes = nNodesInput;
     HiddenLayer.Initialize(nNodesHidden, &InputLayer, &OutputLayer);
     HiddenLayer.RandomizeWeights();

     // Output layer: no child, so no outgoing weights to randomize.
     OutputLayer.NumberOfNodes       = nNodesOutput;
     OutputLayer.NumberOfChildNodes  = 0;
     OutputLayer.NumberOfParentNodes = nNodesHidden;
     OutputLayer.Initialize(nNodesOutput, &HiddenLayer, NULL);
}

// Release all memory owned by the three layers.
void NeuralNetwork::CleanUp()
{
     InputLayer.CleanUp();
     HiddenLayer.CleanUp();
     OutputLayer.CleanUp();
}

// Set input neuron i to the given value; out-of-range indices are ignored.
void     NeuralNetwork::SetInput(int i, double value)
{
     if(i < 0 || i >= InputLayer.NumberOfNodes)
          return;
     InputLayer.NeuronValues[i] = value;
}

/*double     NeuralNetwork::GetOutput(int i)
{
    
     
          return OutputLayer.NeuronValues[i];
     
     
}
*/
// Set the desired (target) value for output neuron i; out-of-range
// indices are ignored.
void NeuralNetwork::SetDesiredOutput(int i, double value)
{
     if(i < 0 || i >= OutputLayer.NumberOfNodes)
          return;
     OutputLayer.DesiredValues[i] = value;
}


// Forward pass through the whole network.  The input layer's call is a
// no-op (it has no parent layer) but is kept for symmetry with the others.
void NeuralNetwork::FeedForward(void)
{
     InputLayer.CalculateNeuronValues();
     HiddenLayer.CalculateNeuronValues();
     OutputLayer.CalculateNeuronValues();
}


// Backward pass: compute error terms from output back to hidden, then
// apply the weight updates on the layers that own outgoing weights.
void NeuralNetwork::BackPropagate(void)
{
     OutputLayer.CalculateErrors();
     HiddenLayer.CalculateErrors();

     HiddenLayer.AdjustWeights();
     InputLayer.AdjustWeights();
}


// Sum-of-squares error over the output layer for the current pattern:
//   E = 1/2 * sum_i (actual_i - desired_i)^2
// Returns the scalar error used by TrainBPNetWork() as the stopping signal.
double NeuralNetwork::CalculateError(void)
{
     double error = 0;
     for(int i = 0; i < OutputLayer.NumberOfNodes; i++)
     {
          // Fix: square with a plain multiply instead of pow(x, 2) —
          // pow is a general transcendental and far slower for integer
          // exponents, with no benefit here.
          double diff = OutputLayer.NeuronValues[i] - OutputLayer.DesiredValues[i];
          error += diff * diff;
     }
     return error / 2;
}

// Apply one learning rate to every layer of the network.
void NeuralNetwork::SetLearningRate(double rate)
{
     InputLayer.LearningRate  = rate;
     HiddenLayer.LearningRate = rate;
     OutputLayer.LearningRate = rate;
}

// Print the converged weights: all input->hidden weights and bias weights,
// then all hidden->output weights and bias weights.
void NeuralNetwork::OutData(void)
{
	const char* separator = "---------------------------------------------";

	cout<<"收敛时权值为:"<<endl;
	cout<<separator<<endl;
	cout<<"输入层:"<<endl;
	cout<<separator<<endl;

	// Input -> hidden weight matrix, row by row.
	for(int i = 0; i < InputLayer.NumberOfNodes; i++)
		for(int j = 0; j < InputLayer.NumberOfChildNodes; j++)
			cout<<InputLayer.Weights[i][j]<<endl;

	cout<<"Bias Weights:"<<endl;
	for(int j = 0; j < InputLayer.NumberOfChildNodes; j++)
		cout<<InputLayer.BiasWeights[j]<<endl;

	cout<<separator<<endl;
	cout<<"隐含层:"<<endl;
	cout<<separator<<endl;

	// Hidden -> output weight matrix, row by row.
	for(int i = 0; i < HiddenLayer.NumberOfNodes; i++)
		for(int j = 0; j < HiddenLayer.NumberOfChildNodes; j++)
			cout<<HiddenLayer.Weights[i][j]<<endl;

	cout<<"Bias Weights:"<<endl;
	for(int j = 0; j < HiddenLayer.NumberOfChildNodes; j++)
		cout<<HiddenLayer.BiasWeights[j]<<endl;
}

NeuralNetwork BPNetWork;


void NeuralNetwork::TrainBPNetWork()
 {
  int           i;
  double       error = 1;
  int           c = 0;
 
  while((error > 0.05) && (c<50000))
   {
          error = 0;
          c++;
          for(i=0; i<4; i++)
          {
			   
               BPNetWork.SetInput(0, TrainingSet[i][0]);
               BPNetWork.SetInput(1, TrainingSet[i][1]);
               
               BPNetWork.SetDesiredOutput(0, TrainingSet[i][2]);
               
               BPNetWork.FeedForward();

			   error += BPNetWork.CalculateError();
               BPNetWork.BackPropagate();
          }
          error = error / 2.0f;

     }
        
 }
 

// Entry point: build a 2-4-1 network, train it on XOR, print the learned
// weights, and release all memory.
int main()
{
	BPNetWork.Initialize(2, 4, 1);     // 2 inputs, 4 hidden nodes, 1 output
	BPNetWork.SetLearningRate(0.2);
	BPNetWork.TrainBPNetWork();
	BPNetWork.OutData();
	BPNetWork.CleanUp();
	return 0;
}

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -