
bpnet.cpp

A C++ implementation of a BP (backpropagation) neural network. The package includes the source code and a compiled executable; hopefully it is useful to anyone studying the subject.

void NeuralB::establish_test_battery_size(void)
{
  clrscr();
  cout << "Please enter the number of tests you wish to run on the BP neural net: ";
  cin >> number_of_tests;  cout << "\n";
  if(number_of_tests > 0)
  {
    Test_Data = new Testing[number_of_tests];
    for(int i = 0; i < number_of_tests; i++)
    {Test_Data[i].acquire_net_info(Net_Design.signal_dimensions, Net_Design.nodes_in_output_layer);}
  }
}



// define the establish_backprop_network function
void NeuralB::establish_backprop_network(void)
{
  clrscr();
  cout << " **** Feedforward network using backpropagation **** " << "\n\n\n";
  Net_Design.construct_and_initialize_backprop_network();
} // end establish_backprop_network function

// set the activation functions of the nodes of the network

// define train_net_with_backpropagation function
void NeuralB::train_net_with_backpropagation(void)
{
  char savefile;
  float output_error, sum_of_error, real_error_difference, target_minimum_average_squared_error;
  int sig, layers, sigdim, epoch, hidnode, hidnode2, outnode;
  int loopexit = 0;
  float *maxdifference;
  float *meandifference;

  ofstream savefile_ptr;

  clrscr();
  cout << "please enter the number of epochs you wish to use for training: ";
  cin >> Training_Data.number_of_epochs; cout<< "\n";
  cout << "please enter the learning rate constant for backpropagation (0-1): ";
  cin >> Training_Data.rate_of_learning; cout << "\n";
  cout << "please enter the minimum average squared error you wish to target" << "\n";
  cin >> target_minimum_average_squared_error; cout << "\n";
  do
  {
   cout << "do you wish to save the mean error, maximum error" << "\n";
   cout << "and average squared error for each epoch to a file? (Y or N): "; cin >> savefile;
   savefile = toupper(savefile);
   if((savefile == 'Y') || (savefile == 'N')) {loopexit = 2;}
   cout << "\n";
  } while(loopexit <= 1);

  if(savefile == 'Y')
  {
      cout << "please enter the name of the file which will hold the results of training:" << "\n";
      cin >> Training_Data.resultsname; cout <<"\n";
      savefile_ptr.open(Training_Data.resultsname, ios::out);
  }

   cout << "Do you want signal presentation in random or fixed order(R or F): ";
   cin >> Training_Data.presentation_order;  cout << "\n";
   Training_Data.presentation_order = toupper(Training_Data.presentation_order); cout << "\n";

   maxdifference = new float[Net_Design.nodes_in_output_layer];
   meandifference = new float[Net_Design.nodes_in_output_layer];
   // zero both arrays: meandifference accumulates with += during the first
   // epoch, so it must not start from uninitialized memory
   for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
   { maxdifference[outnode] = 0.0; meandifference[outnode] = 0.0;}

   // initiate backpropagation for the appropriate number of epochs
   epoch = 0;
   do
   {
    sum_of_error = 0;

    for(sig = 0; sig < Training_Data.sample_number; sig++)
    {
      output_error = 0;
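      // forward pass: present the current training signal to the first
      // processing layer (the output layer when there are no hidden layers)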
      for(sigdim = 0; sigdim < Training_Data.signal_dimensions; sigdim++)
      {

       if(Net_Design.number_of_hidden_layers == 0) // no hidden layers present
       {
	for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
	{Net_Design.node_in_output_layer[outnode].processing_unit_input[sigdim] = Training_Data.number_of_samples[Training_Data.signalpoint[sig].signal_value].data_in_sample[sigdim];}
       }
       else // 1 or 2 hidden layers present
       {
	for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[0].nodes_in_hidden_layer; hidnode++)
	{Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].processing_unit_input[sigdim] = Training_Data.number_of_samples[Training_Data.signalpoint[sig].signal_value].data_in_sample[sigdim];}
       }
      }

      if(Net_Design.number_of_hidden_layers == 2) // two layers are present
      {
	for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[0].nodes_in_hidden_layer; hidnode++)
	{
	  Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].calculate_output_signal(Net_Design.activation_function_for_hidden_layer);
	  for(hidnode2 = 0; hidnode2 < Net_Design.hidden_layer_number[1].nodes_in_hidden_layer; hidnode2++)
	  {Net_Design.hidden_layer_number[1].node_in_hidden_layer[hidnode2].processing_unit_input[hidnode] = Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].output_signal;}
	}
      }

      if(Net_Design.number_of_hidden_layers > 0)
      {
	for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers -1].nodes_in_hidden_layer; hidnode++)
	{
	  Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers -1].node_in_hidden_layer[hidnode].calculate_output_signal(Net_Design.activation_function_for_hidden_layer);
	  for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
	  {Net_Design.node_in_output_layer[outnode].processing_unit_input[hidnode] = Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers -1].node_in_hidden_layer[hidnode].output_signal;}
	}
      }
      for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
      {
	Net_Design.node_in_output_layer[outnode].calculate_output_signal(Net_Design.activation_function_for_output_layer);
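	// the error information term is the standard backprop output delta:
	// delta_k = (d_k - y_k) * f'(net_k), with target d_k and output y_k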
	Net_Design.node_in_output_layer[outnode].calculate_output_error_information_term(Training_Data.number_of_samples[Training_Data.signalpoint[sig].signal_value].data_in_sample[Training_Data.signal_dimensions + outnode], Net_Design.activation_function_for_output_layer);
	// calculate the instantaneous sum of squared errors (Haykin, 1994)
	real_error_difference = (pow(Net_Design.node_in_output_layer[outnode].error_difference_squared, 0.5)) * (Training_Data.max_output_value[outnode] - Training_Data.min_output_value[outnode]);
	output_error += 0.5 * pow(real_error_difference, 2.0);
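	// this accumulates Haykin's instantaneous sum of squared errors,
	// E(n) = 0.5 * sum_k e_k(n)^2, with e_k rescaled to real (denormalized) units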

	// calculate maximum and mean absolute error difference for each node
	real_error_difference = Net_Design.node_in_output_layer[outnode].absolute_error_difference * (Training_Data.max_output_value[outnode] - Training_Data.min_output_value[outnode]);
	meandifference[outnode] += real_error_difference / float(Training_Data.sample_number);
	if(sig == 0) {maxdifference[outnode] = real_error_difference;}
	else
	{
	  if(real_error_difference > maxdifference[outnode])
	  {maxdifference[outnode] = real_error_difference;}
	}
      }

      // average squared error for each signal is saved
      sum_of_error += output_error / float (Training_Data.sample_number);

      // backpropagation of error will depend on the number of hidden layers
      if(Net_Design.number_of_hidden_layers > 0)
      { // backpropagate from output node to adjacent hidden layer
	for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
	{
	  for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers - 1].nodes_in_hidden_layer; hidnode++)
	  {Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers - 1].node_in_hidden_layer[hidnode].error_information_term += Net_Design.node_in_output_layer[outnode].error_information_term * Net_Design.node_in_output_layer[outnode].weight_of_inputs[hidnode];}
	}
	// calculate the error information term for each node in the hidden layer
	for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers - 1].nodes_in_hidden_layer; hidnode++)
	{Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers - 1].node_in_hidden_layer[hidnode].calculate_hidden_error_information_term(Net_Design.activation_function_for_hidden_layer);}
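	// standard backprop hidden delta: delta_j = f'(net_j) * sum_k(delta_k * w_kj),
	// using the error terms accumulated from the layer above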


	  if(Net_Design.number_of_hidden_layers > 1)
	  { // backpropagate error from hidden layer 2 to hidden layer 1
	    for(hidnode2 = 0; hidnode2 < Net_Design.hidden_layer_number[1].nodes_in_hidden_layer; hidnode2++)
	    {
	      for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[0].nodes_in_hidden_layer; hidnode++)
	      {Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].error_information_term += Net_Design.hidden_layer_number[1].node_in_hidden_layer[hidnode2].error_information_term * Net_Design.hidden_layer_number[1].node_in_hidden_layer[hidnode2].weight_of_inputs[hidnode];}
	    }
	    for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[0].nodes_in_hidden_layer; hidnode++)
	    {Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].calculate_hidden_error_information_term(Net_Design.activation_function_for_hidden_layer);}
	  }
      }

      // update the network's output nodes
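      // the correction terms implement the delta rule: delta_w_jk = eta * delta_k * x_j
      // and delta_b_k = eta * delta_k, where eta is rate_of_learning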
      for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
      {Net_Design.node_in_output_layer[outnode].calculate_weight_and_bias_correction_terms(Training_Data.rate_of_learning);}

      // update the network's hidden nodes (if they exist)
      if(Net_Design.number_of_hidden_layers > 0)
      {
	for(layers = 0; layers < Net_Design.number_of_hidden_layers; layers++)
	{
	  for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[layers].nodes_in_hidden_layer; hidnode++)
	  {Net_Design.hidden_layer_number[layers].node_in_hidden_layer[hidnode].calculate_weight_and_bias_correction_terms(Training_Data.rate_of_learning);}
	}
      }
    } // end sig loop

   // save error information (if required)
     if(savefile == 'Y')
     {
	savefile_ptr << epoch + 1 << " ";
	savefile_ptr << sum_of_error << "  ";
	for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
	{savefile_ptr << maxdifference[outnode] << " " << meandifference[outnode] << "    ";}
	savefile_ptr << endl;
	cout.width(6);
	clrscr();
	cout << "Epoch #"<< epoch + 1 <<" is completed " << endl;
     }

     if(epoch == 0)
     {Training_Data.minimum_average_squared_error = sum_of_error;}
     else
     {
       if(sum_of_error < Training_Data.minimum_average_squared_error)
       {Training_Data.minimum_average_squared_error = sum_of_error;}
     }

     // scramble the order of signal presentation (if required)
     if(Training_Data.presentation_order == 'R')
     {Training_Data.scramble_data_in_array();}

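     // reset the per-epoch maximum and mean error statistics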
     for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
     { maxdifference[outnode] = 0.0; meandifference[outnode] = 0.0;}

     if(Training_Data.minimum_average_squared_error <= target_minimum_average_squared_error)
     {break;}

     epoch = epoch + 1;

   } while(epoch < Training_Data.number_of_epochs);

   if(savefile == 'Y') {savefile_ptr.close();}

   // delete arrays holding the training data
   Training_Data.delete_signal_data_array();
   delete [] maxdifference;
   delete [] meandifference;
} // end of backpropagation function


// define the function that tests the neural network
void NeuralB::test_neural_network(int BNET)
{
  float output_error, sum_of_error, real_output;
  int sig, sigdim, hidnode, hidnode2, outnode;

  int bnet = BNET;
  for(int t = 0; t < number_of_tests; t++)
  {
    Test_Data[t].request_testing_data(bnet, t);

    sum_of_error = 0;

      cout << "please enter the name of the file wich will hold the results of test: "<< t+1 << "\n";
      cin >> Test_Data[t].resultsname; cout <<"\n";
      ofstream savefile_ptr(Test_Data[t].resultsname);

    for(sig = 0; sig < Test_Data[t].sample_number; sig++)
    {
      output_error = 0;
      savefile_ptr << sig + 1 << " ";
      // feed each component of the input signal to the first processing layer
      for(sigdim = 0; sigdim < Test_Data[t].signal_dimensions; sigdim++)
      {
       // assign each unit's input the normalized sample value, (data - min)/(max - min),
       // produced earlier by the data-normalization routine
       if(Net_Design.number_of_hidden_layers == 0) // no hidden layers present
       {
	for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
	{Net_Design.node_in_output_layer[outnode].processing_unit_input[sigdim] = Test_Data[t].number_of_samples[sig].data_in_sample[sigdim];}
       }
       else // 1 or 2 hidden layers present
       {
	for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[0].nodes_in_hidden_layer; hidnode++)
	{Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].processing_unit_input[sigdim] = Test_Data[t].number_of_samples[sig].data_in_sample[sigdim];}
       }
      } // finished assigning the input values to each first-layer node
      // the second hidden layer takes its inputs from the first hidden layer's outputs
      if(Net_Design.number_of_hidden_layers == 2) // two layers are present
      {
	for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[0].nodes_in_hidden_layer; hidnode++)
	{
	  Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].calculate_output_signal(Net_Design.activation_function_for_hidden_layer);
	  for(hidnode2 = 0; hidnode2 < Net_Design.hidden_layer_number[1].nodes_in_hidden_layer; hidnode2++)
	  {Net_Design.hidden_layer_number[1].node_in_hidden_layer[hidnode2].processing_unit_input[hidnode] = Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].output_signal;}
	}
      }
      // feed the outputs of the last hidden layer to each output node's inputs
      if(Net_Design.number_of_hidden_layers > 0)
      {
	for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers -1].nodes_in_hidden_layer; hidnode++)
	{
	  Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers -1].node_in_hidden_layer[hidnode].calculate_output_signal(Net_Design.activation_function_for_hidden_layer);
	  for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
	  {Net_Design.node_in_output_layer[outnode].processing_unit_input[hidnode] = Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers -1].node_in_hidden_layer[hidnode].output_signal;}
	}
      }
      for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
      { // compute the output signal of each output node
	Net_Design.node_in_output_layer[outnode].calculate_output_signal(Net_Design.activation_function_for_output_layer);
	Net_Design.node_in_output_layer[outnode].calculate_output_error_information_term(Test_Data[t].number_of_samples[sig].data_in_sample[Test_Data[t].signal_dimensions + outnode], Net_Design.activation_function_for_output_layer); // for each output node this computes: the derivative of the output signal, the absolute error between target and output, error times derivative, and the squared error
        savefile_ptr<<Net_Design.node_in_output_layer[outnode].output_signal<<" ";
      }
       // convert normalized target output data and send to file
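      // inverse of the min-max normalization: x_real = x_min + x_norm * (x_max - x_min)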
      for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
      {
	     real_output = Test_Data[t].min_output_value[outnode] + (Test_Data[t].number_of_samples[sig].data_in_sample[outnode + Test_Data[t].signal_dimensions] * (Test_Data[t].max_output_value[outnode] - Test_Data[t].min_output_value[outnode]));
	     savefile_ptr << real_output << " ";
      }

        savefile_ptr << " ";

      // convert normalized output data and send to file
      for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
      {
	     real_output = Test_Data[t].min_output_value[outnode] + (Net_Design.node_in_output_layer[outnode].output_signal * (Test_Data[t].max_output_value[outnode] - Test_Data[t].min_output_value[outnode]));
	     savefile_ptr << real_output << " ";
      }

      // send absolute differences between each node and its output to a file
      for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
      {
	real_output = (pow(Net_Design.node_in_output_layer[outnode].error_difference_squared, 0.5)) * (Test_Data[t].max_output_value[outnode] - Test_Data[t].min_output_value[outnode]);
	savefile_ptr << real_output << " ";
	real_output = pow(real_output, 2.0);
	output_error += 0.5 * real_output;
      }
	// sum square of error
	savefile_ptr << output_error << "\n";
	if(sig == Test_Data[t].sample_number - 1)
	{savefile_ptr.close();}

	sum_of_error += output_error;
    }
	Test_Data[t].average_squared_error = sum_of_error / Test_Data[t].sample_number;
	Test_Data[t].delete_signal_array();
  }
} // end test neural network function

void NeuralB::network_training_testing(int TT)
{
  int tt = TT;
  int menu_choice;

  clrscr();
  cout << "\n\n\n\n";
  cout << "**************** Operations Menu ****************" << "\n\n";
  cout << "  Please select one of the following options:" <<"\n\n";
  cout << "      1. Train Backprop network only " <<"\n\n";
  cout << "      2. Test Backprop network only " <<"\n\n";
  cout << "      3. Train and Test Backprop network" <<"\n\n";
  cout << "*************************************************" << "\n\n";
  cout << "         Your choice?: "; cin >> menu_choice;
  cout << "\n\n";
     switch(menu_choice)
     {
       case 1:
       initialize_training_storage_array(tt);
       train_net_with_backpropagation();
       break;

       case 2:
       establish_test_battery_size();
       if(number_of_tests > 0)
       {test_neural_network(tt);}
       break;

       case 3:
       initialize_training_storage_array(tt);
       train_net_with_backpropagation();
       establish_test_battery_size();
       if(number_of_tests > 0)
       {test_neural_network(tt);}
       break;

       default:network_training_testing(tt);
     }
}
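
// A minimal usage sketch (hypothetical: main() and the class definition for
// NeuralB live elsewhere in this program):
//
//   NeuralB bp;
//   bp.establish_backprop_network();   // build and initialize the network
//   bp.network_training_testing(0);    // menu-driven training and/or testing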
// This concludes the backpropagation section of the program


ART_units::~ART_units()
{
  delete [] input_value;
  delete [] output_value;
  delete [] input_weight_vector;
}

void ART_units::establish_input_output_arrays(void)
{
  input_value = new float[number_of_inputs];
  output_value = new float[number_of_outputs];
}

void ART_units::establish_input_weight_vector_array(void)
{input_weight_vector = new float[number_of_inputs - 1];}
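// note: one weight fewer than the number of inputs; presumably the remaining
// input slot is treated separately by a convention set where ART_units is defined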

void ART_units::initialize_inputs_and_weights(void)
{
  for(int w = 0; w < number_of_inputs - 1; w++)
  {input_weight_vector[w] = 1.0;}
