⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 bpnet.cpp

📁 BP神经网络的C++程序,里面有源程序和生成的可执行文件,希望对正在学习的朋友有用!
💻 CPP
📖 第 1 页 / 共 5 页
字号:
 // pittnet.CPP  // Backpropagation / ART1 / Kohonen / Radial Basis

// The purpose of this prototype is to allow the user to construct and
// initialize a series of neural nets. Using the concept of inheritance and
// derived classes from C++ object oriented programming, the necessity to
// declare multiple large structures that duplicate attributes is eliminated
// Utilizing pointers and the "new" function, dynamic arrays are established
// The user can then specify the storage array size for the number of hidden
// units and output units for the neural network while the program is running.
// This strategy eliminates the need to establish extremely large arrays
// while still maintaining the flexibility required to design nets of various
// shapes and sizes.  The "Neural" classes allow the attributes of the newly
// constructed networks to be stored for further processing.
#include <vcl.h>

#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <time.h>
#include <iostream.h>
#include <string.h>
#include <conio.h>
#include <float.h>
#include <fstream.h>
#include <ctype.h>

#include "bpnet.h"

#pragma hdrstop
USERES("bpnet.res");

// Constants for the random number generator bedlam() below: a Park-Miller
// ("minimal standard") linear congruential generator combined with a
// Bays-Durham shuffle table (the classic "ran1" layout).
#define IA   16807        // LCG multiplier (7^5)
#define IM   2147483647   // LCG modulus (2^31 - 1, a Mersenne prime)
#define AM   (1.0 / IM)   // scales an integer state in [1, IM-1] into (0, 1)
#define IQ   127773       // IM / IA, used by Schrage's overflow-free multiply
#define IR   2836         // IM % IA, used by Schrage's overflow-free multiply
#define NTAB 32           // number of entries in the shuffle table
#define NDIV (1+(IM-1) / NTAB)  // divisor mapping an LCG state to a table index
#define EPS  1.2e-7
#define RNMX (1.0 - EPS)  // largest value returned (keeps results strictly < 1.0)
/*
WINAPI WinMain(HINSTANCE, HINSTANCE, LPSTR, int)
{
        try
        {
                 Application->Initialize();
                _control87(MCW_EM, MCW_EM); // will mask floating point overflows,
			      // underflows, or divisions by 0

                int number_of_nets;
                Neural_Window User_net;
                clrscr();
                cout << " ******* Welcome to Pitt-Networks!! ******** " << "\n\n\n\a";
                cout << "Please enter the number of networks you wish to develop: "; cin >> number_of_nets;


                for(int NWnet = 1; NWnet < number_of_nets + 1; NWnet++)
                {
                        User_net.display_menu_for_net_selection(NWnet);
                        if(User_net.neural_network_type == 'E')
                        {break;}
                }

        }
        catch (Exception &exception)
        {
                 Application->ShowException(&exception);
        }
        return 0;
}
*/
// The following function is a random number generator
float bedlam(long *idum);
int gaset = -2500;  // shared seed passed (by address) to bedlam(); a negative
                    // value makes bedlam() (re)initialize itself on first use

// Generate a uniform random float in (0, RNMX].
// Park-Miller minimal-standard LCG with a Bays-Durham shuffle; the structure
// matches "ran1" from Numerical Recipes in C. The seed *idum is advanced in
// place; pass a negative seed (or rely on iy == 0) to trigger initialization.
float bedlam(long *idum)
{
  int xj;
  long xk;
  static long iy=0;        // last table entry handed out; 0 forces init
  static long iv[NTAB];    // shuffle table of recent LCG states
  float temp;

  if(*idum <= 0 || !iy)    // (re)initialize on non-positive seed or first call
  {
    if(-(*idum) < 1)
    {
      *idum = 1 + *idum;   // avoid a seed of 0, which would lock the LCG at 0
    }
    else
    {
      *idum = -(*idum);    // use the magnitude of the negative seed
    }
    for(xj = NTAB+7; xj >= 0; xj--)  // 8 warm-up steps, then fill the table
    {
      xk = (*idum) / IQ;
      // Schrage's method: compute (IA * idum) mod IM without 32-bit overflow
      *idum = IA * (*idum - xk * IQ) - IR * xk;
      if(*idum < 0)
      {
	*idum += IM;
      }
      if(xj < NTAB)
      {
	iv[xj] = *idum;
      }
    }
      iy = iv[0];
  }

  xk = (*idum) / IQ;
  *idum = IA * (*idum - xk * IQ) - IR * xk;  // advance the LCG (Schrage again)
  if(*idum < 0)
  {
    *idum += IM;
  }
  xj = iy / NDIV;          // previous output picks the table slot (0..NTAB-1)
  iy = iv[xj];             // hand out the shuffled value ...
  iv[xj] = *idum;          // ... and refill the slot with the fresh state

  if((temp=AM*iy) > RNMX)  // clamp so callers never see exactly 1.0
  {
    return(RNMX);
  }
  else
  {
    return(temp);
  }
} // end of bedlam function

// Initialize a processing unit (neuron): zero all scalar state and null the
// dynamic-array pointers.
// Fix: the pointer members and number_of_input_units were previously left
// uninitialized; the destructor unconditionally delete[]s those pointers, so
// destroying a unit before establish_array_of_processing_unit_inputs() ran
// was undefined behavior. delete[] on a null pointer is a safe no-op.
Processing_units::Processing_units()
{
  bias = 0.0;                           // additive bias term
  output_signal = 0.0;                  // last computed activation
  error_information_term = 0.0;         // accumulated delta for training
  bias_correction_term = 0.0;           // pending bias update
  sum_of_weighted_inputs = 0.0;         // net input before activation
  number_of_input_units = 0;            // no connections until wired up
  processing_unit_input = 0;            // arrays allocated later by
  weight_of_inputs = 0;                 // establish_array_of_processing_unit_inputs()
  weight_correction_term = 0;
}

// Release the per-unit dynamic arrays.
// NOTE(review): the constructor does not null these pointers, so destroying a
// unit before establish_array_of_processing_unit_inputs() has run would
// delete[] uninitialized pointers — confirm every instance allocates first.
Processing_units::~Processing_units()
{
  delete [] processing_unit_input;
  delete [] weight_of_inputs;
  delete [] weight_correction_term;
}

// Define base class member functions

// Allocate the per-connection storage, sized by number_of_input_units (the
// caller must set that member first). Called once per unit during network
// construction; calling it twice would leak the previous arrays.
void Processing_units::establish_array_of_processing_unit_inputs(void)
{
  processing_unit_input = new float[number_of_input_units];     // input signal vector
  weight_of_inputs = new float[number_of_input_units];          // connection weights
  weight_correction_term = new float[number_of_input_units];    // pending weight updates
}

// Randomize every connection weight to a uniform value in [-1, 1].
void Processing_units::establish_weight_vector_for_processing_units(void)
{
  int idx = 0;
  while(idx < number_of_input_units)
  {
    // map bedlam's (0, 1) output onto the range 1 .. -1
    weight_of_inputs[idx] = 1.0 - (2.0 * bedlam((long*)(&gaset)));
    idx++;
  }
}

// Compute this unit's activation: the weighted sum of its inputs plus the
// bias, squashed by the selected activation function.
//   activation_function == 1 -> binary sigmoid,  output in (0, 1)
//   activation_function == 2 -> bipolar sigmoid, output in (-1, 1)
// Any other code leaves output_signal unchanged.
void Processing_units::calculate_output_signal(int activation_function)
{
  sum_of_weighted_inputs = 0.0;
  for(int unit = 0; unit < number_of_input_units; unit++)
  {
    if(unit != number_of_input_units - 1)
    {
      sum_of_weighted_inputs += processing_unit_input[unit] * weight_of_inputs[unit];
    }
    else
    {
      // fold the bias in together with the final weighted term
      sum_of_weighted_inputs += (processing_unit_input[unit] * weight_of_inputs[unit]) + bias;
    }
  }

  if(activation_function == 1)       // binary sigmoid function
  {
    output_signal = 1.0 / (1.0 + exp(-1.0 * sum_of_weighted_inputs));
  }
  else if(activation_function == 2)  // bipolar sigmoid function
  {
    output_signal = (2.0 / (1.0 + exp(-1.0 * sum_of_weighted_inputs))) - 1;
  }
}

// Return f'(net) for the selected activation function, expressed in terms of
// the unit's current output_signal:
//   afun == 1 (binary sigmoid):  f' = f * (1 - f)
//   afun == 2 (bipolar sigmoid): f' = 0.5 * (1 + f) * (1 - f)
// Fix: `derivative` was uninitialized, so an unrecognized afun code returned
// an indeterminate value; it now falls back to 0.0 (i.e. no weight update).
float Processing_units::calculate_output_signal_derivative(int afun)
{
  float derivative = 0.0;  // safe default for unrecognized activation codes
  switch(afun)
  {
    case 1: // binary sigmoid function
      derivative = output_signal * (1.0 - output_signal);
      break;

    case 2: // bipolar sigmoid function
      derivative = 0.5 * (1.0 + output_signal) * (1.0 - output_signal);
      break;
  }
  return derivative;
}

// Turn the stored error_information_term into concrete weight and bias
// correction terms (delta-rule: lr * delta * input), then apply them via
// update_weights_and_biases(). The error term is cleared afterwards so the
// next training pattern starts from zero.
void Processing_units::calculate_weight_and_bias_correction_terms(float learning_rate)
{
  bias_correction_term = learning_rate * error_information_term;
  int w = 0;
  while(w < number_of_input_units)
  {
    weight_correction_term[w] = learning_rate * error_information_term * processing_unit_input[w];
    w++;
  }
  error_information_term = 0.0;  // consumed; reset for the next pattern
  update_weights_and_biases();
}

// Apply the previously computed correction terms to every connection weight
// and to the bias.
void Processing_units::update_weights_and_biases(void)
{
  for(int w = 0; w < number_of_input_units; w++)
  {
    weight_of_inputs[w] += weight_correction_term[w];
  }
  bias += bias_correction_term;
}

// Define member functions for derived class "Hidden_units"
//
// Finish computing this hidden unit's error term: the caller has already
// accumulated sum_k(delta_k * w_jk) into error_information_term; scale it by
// f'(net) of the selected activation function.
// Fix: the activation code was needlessly round-tripped through a float
// (int -> float -> int) before reaching the int parameter of
// calculate_output_signal_derivative(); it is now passed directly.
void Hidden_units::calculate_hidden_error_information_term(int afun)
{
  float output_signal_derivative = calculate_output_signal_derivative(afun);
  error_information_term = error_information_term * output_signal_derivative;
}


// Define member functions for derived class "Output_units"
void Output_units::calculate_output_error_information_term(float target_value, int af)
{
  float afun = af;
  float output_signal_derivative = calculate_output_signal_derivative(afun);   //计算输出信号的派生信号
  absolute_error_difference = fabs(target_value - output_signal);       //期望输出值和实际输出值差的绝对值
  error_information_term = (target_value - output_signal) * output_signal_derivative;
  error_difference_squared = pow((target_value - output_signal), 2.0);
}

// Release the dynamic array of hidden units owned by this layer.
Hidden_layer::~Hidden_layer()
{delete [] node_in_hidden_layer;}


// Interactively build a backpropagation network, or load a saved one.
// Prompts for: create vs. upload; the input-vector dimension; the number of
// hidden layers (0-2) and nodes per layer; and the output-layer size. Then
// allocates each layer's unit array, wires the layer-to-layer connections,
// and randomizes every weight and bias in [-1, 1] via bedlam().
// Fix: the hidden-layer prompt loop only rejected values greater than 2, so
// a negative entry slipped through validation; it now re-prompts until the
// value is within 0..2.
void Back_Topology::construct_and_initialize_backprop_network(void)
{
  int nodes, inputs_to_output_node;
  char netcreate;
  int looploc = 0;

  // Ask whether to build a new net or upload one; loop until C or U is given.
  do
  {
    cout <<"\n";
    cout << "Do you wish to" << "\n\n";
    cout << "C.  Create your own Backprop Network " << "\n";
    cout << "U.  Upload an existing Backprop Network " << "\n\n";
    cout << "Your choice?:  "; cin >> netcreate;
    netcreate = toupper(netcreate);
    cout << "\n";
    if((netcreate == 'C') || (netcreate == 'U')) {looploc = 1;}
  } while(looploc <= 0);

  if(netcreate == 'U')
  {upload_network();}
  else
  {
    cout << "Please enter the dimensions of the input vector: ";
    cin >> signal_dimensions;
    cout << "\n\n";
    // Re-prompt until the layer count is in range (0, 1, or 2); the original
    // test accepted negative values.
    do
    {
      cout << "please enter the number of hidden layers (0 - 2):  ";
      cin >> number_of_hidden_layers;
      cout << "\n\n";
    } while(number_of_hidden_layers < 0 || number_of_hidden_layers > 2);

    if(number_of_hidden_layers > 0)
    {
      hidden_layer_number = new Hidden_layer[number_of_hidden_layers];
      for(int layer = 0; layer < number_of_hidden_layers; layer++)
      {
        cout << "please enter the number of nodes in hidden layer " << layer + 1 << ": ";
        cin >> hidden_layer_number[layer].nodes_in_hidden_layer;
        cout << "\n\n";
      }
    }
    cout << "\n";
    cout << "please enter the number of nodes in the output layer: ";
    cin >> nodes_in_output_layer;
    cout << "\n\n";

    // Allocate the dynamic unit arrays for each hidden layer and the output layer.
    if(number_of_hidden_layers > 0)
    {
      for(int layer = 0; layer < number_of_hidden_layers; layer++)
      {
        nodes = hidden_layer_number[layer].nodes_in_hidden_layer;
        hidden_layer_number[layer].node_in_hidden_layer = new Hidden_units[nodes];
      }
    }
    node_in_output_layer = new Output_units[nodes_in_output_layer];

    if(number_of_hidden_layers > 0)
    {
      // Wire the input signal into the first hidden layer and randomize it.
      for(nodes = 0; nodes < hidden_layer_number[0].nodes_in_hidden_layer; nodes++)
      {
        hidden_layer_number[0].node_in_hidden_layer[nodes].number_of_input_units = signal_dimensions;
        hidden_layer_number[0].node_in_hidden_layer[nodes].establish_array_of_processing_unit_inputs();
        hidden_layer_number[0].node_in_hidden_layer[nodes].establish_weight_vector_for_processing_units();
        hidden_layer_number[0].node_in_hidden_layer[nodes].bias = 1.0 - (2.0 * bedlam((long*)(&gaset)));
      }
      if(number_of_hidden_layers > 1)
      {
        // Wire the first hidden layer's outputs into the second hidden layer.
        for(nodes = 0; nodes < hidden_layer_number[1].nodes_in_hidden_layer; nodes++)
        {
          hidden_layer_number[1].node_in_hidden_layer[nodes].number_of_input_units = hidden_layer_number[0].nodes_in_hidden_layer;
          hidden_layer_number[1].node_in_hidden_layer[nodes].establish_array_of_processing_unit_inputs();
          hidden_layer_number[1].node_in_hidden_layer[nodes].establish_weight_vector_for_processing_units();
          hidden_layer_number[1].node_in_hidden_layer[nodes].bias = 1.0 - (2.0 * bedlam((long*)(&gaset)));
        }
      }
    }

    // The output layer is fed by the last hidden layer, or directly by the
    // input signal when there are no hidden layers.
    if(number_of_hidden_layers > 0)
    {inputs_to_output_node = hidden_layer_number[number_of_hidden_layers - 1].nodes_in_hidden_layer;}
    else
    {inputs_to_output_node = signal_dimensions;}

    // Wire and randomize the output layer.
    for(nodes = 0; nodes < nodes_in_output_layer; nodes++)
    {
      node_in_output_layer[nodes].number_of_input_units = inputs_to_output_node;
      node_in_output_layer[nodes].establish_array_of_processing_unit_inputs();
      node_in_output_layer[nodes].establish_weight_vector_for_processing_units();
      node_in_output_layer[nodes].bias = 1.0 - (2.0 * bedlam((long*)(&gaset)));
    }
    establish_activation_functions(); // for hidden and output nodes
  }
} // end construct and initialize neural network function

void Back_Topology::upload_network(void)
{
  char getname[13];
  ifstream get_ptr;
  int netid, nodes, dim, inputs_to_output_node, hid, inputs;
  int dolock = 0;

  do
  {
    cout << "\n\n";
    cout << "Please enter the name of the file which holds the Backpropagation network" << "\n";
    cin >> getname; cout << "\n";
    get_ptr.open(getname, ios::in);
    get_ptr >> netid;
    if(netid == 1) {dolock = 1;}
    else
    {
      cout << "Error** file contents do not match Backprop specifications" << "\n";
      cout << "try again" << "\n";
      get_ptr.close();
    }
  } while(dolock <= 0);

  get_ptr >> signal_dimensions;
  get_ptr >> activation_function_for_output_layer;
  get_ptr >> nodes_in_output_layer;
  get_ptr >> inputs_to_output_node;

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -