// pittnet.CPP // Backpropagation / ART1 / Kohonen / Radial Basis
// The purpose of this prototype is to allow the user to construct and
// initialize a series of neural nets. Using the concepts of inheritance and
// derived classes from C++ object-oriented programming, the necessity to
// declare multiple large structures with duplicated attributes is eliminated.
// Utilizing pointers and the "new" operator, dynamic arrays are established:
// the user can specify the storage array size for the number of hidden
// units and output units of the neural network while the program is running.
// This strategy eliminates the need to establish extremely large fixed arrays
// while still maintaining the flexibility required to design nets of various
// shapes and sizes. The network classes allow the attributes of the newly
// constructed networks to be stored for further processing.
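// For example, rather than declaring a fixed worst-case buffer such as
// "float weights[1000];", each unit allocates exactly what it needs at run
// time (illustrative line only; the real allocation appears in
// establish_array_of_processing_unit_inputs() below):
//   weight_of_inputs = new float[number_of_input_units];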
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <time.h>
#include <iostream>
#include <string.h>
#include <conio.h>   // non-standard; console I/O on DOS/Windows compilers
#include <float.h>
#include <fstream>
#include <ctype.h>

using namespace std; // the file uses cout/cin unqualified throughout
#define IA 16807
#define IM 2147483647
#define AM (1.0 / IM)
#define IQ 127773
#define IR 2836
#define NTAB 32
#define NDIV (1+(IM-1) / NTAB)
#define EPS 1.2e-7
#define RNMX (1.0 - EPS)
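// These constants match Park and Miller's "minimal standard" linear
// congruential generator (seed' = IA * seed mod IM, with IM = 2^31 - 1),
// implemented with Schrage's factorization IM = IA * IQ + IR to avoid
// 32-bit overflow, plus an NTAB-entry Bays-Durham shuffle table -- the
// scheme published as ran1() in Numerical Recipes in C.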
// The following function is a portable uniform random number generator
// returning a float in (0.0, 1.0); pass a negative seed to (re)initialize.
float bedlam(long *idum);
long gaset = -2500; // global seed; passed by address to bedlam() below
float bedlam(long *idum)
{
    int xj;
    long xk;
    static long iy = 0;
    static long iv[NTAB];
    float temp;

    // Initialize the shuffle table on the first call, or whenever the
    // caller reseeds with a non-positive *idum.
    if(*idum <= 0 || !iy)
    {
        if(-(*idum) < 1)
        {
            *idum = 1 + *idum;
        }
        else
        {
            *idum = -(*idum);
        }
        // Warm up the generator and load the shuffle table.
        for(xj = NTAB+7; xj >= 0; xj--)
        {
            xk = (*idum) / IQ;
            *idum = IA * (*idum - xk * IQ) - IR * xk; // Schrage's method
            if(*idum < 0)
            {
                *idum += IM;
            }
            if(xj < NTAB)
            {
                iv[xj] = *idum;
            }
        }
        iy = iv[0];
    }
    // Advance the sequence one step, again without 32-bit overflow.
    xk = (*idum) / IQ;
    *idum = IA * (*idum - xk * IQ) - IR * xk;
    if(*idum < 0)
    {
        *idum += IM;
    }
    // Exchange the new value with a randomly chosen shuffle-table entry.
    xj = iy / NDIV;
    iy = iv[xj];
    iv[xj] = *idum;
    if((temp = AM * iy) > RNMX)
    {
        return(RNMX); // exclude the endpoint 1.0
    }
    else
    {
        return(temp);
    }
} // end of bedlam function
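// Illustrative usage of bedlam() (a sketch, not part of the original flow):
//   long seed = -2500;                     // negative value forces initialization
//   float u = bedlam(&seed);               // uniform deviate in (0.0, 1.0)
//   float w = 1.0 - 2.0 * bedlam(&seed);   // remapped to (-1.0, 1.0), as the
//                                          // weight initializers below do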
//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
// (Fully connected network using backpropagation)
// In this base class, all nodes in the network have the following attributes
class Processing_units
{
public:
    float *processing_unit_input;   // input vector presented to this unit
    int number_of_input_units;
    void establish_array_of_processing_unit_inputs(void);
    float *weight_of_inputs;        // one weight per input connection
    void establish_weight_vector_for_processing_units(void);
    float bias;
    float output_signal;
    void calculate_output_signal(int activation_function);
    float calculate_output_signal_derivative(int afun);
    float error_information_term;   // delta term used during backpropagation
    void calculate_weight_and_bias_correction_terms(float learning_rate);
    float *weight_correction_term;
    float bias_correction_term;
    float sum_of_weighted_inputs;   // net input before the activation function
    void update_weights_and_biases(void);
    Processing_units();
    ~Processing_units();
};
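// Typical lifecycle of a unit, shown here only as an illustrative sketch
// (the real calls are made inside construct_and_initialize_backprop_network):
//   Processing_units u;
//   u.number_of_input_units = 3;                      // must be set first
//   u.establish_array_of_processing_unit_inputs();    // allocate the arrays
//   u.establish_weight_vector_for_processing_units(); // random weights in (-1, 1)
//   u.calculate_output_signal(1);                     // 1 = binary sigmoid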
Processing_units::Processing_units()
{
    // Null the array pointers so the destructor is safe even if the
    // establish_... routines were never called for this unit.
    processing_unit_input = 0;
    weight_of_inputs = 0;
    weight_correction_term = 0;
    bias = 0.0;
    output_signal = 0.0;
    error_information_term = 0.0;
    bias_correction_term = 0.0;
    sum_of_weighted_inputs = 0.0;
}
Processing_units::~Processing_units()
{
    delete [] processing_unit_input;
    delete [] weight_of_inputs;
    delete [] weight_correction_term;
}
// Define base class member functions
void Processing_units::establish_array_of_processing_unit_inputs(void)
{
    processing_unit_input = new float[number_of_input_units];
    weight_of_inputs = new float[number_of_input_units];
    weight_correction_term = new float[number_of_input_units];
}
void Processing_units::establish_weight_vector_for_processing_units(void)
{
    for(int i = 0; i < number_of_input_units; i++)
    {
        // initial weights drawn uniformly from (-1.0, 1.0)
        weight_of_inputs[i] = 1.0 - (2.0 * bedlam(&gaset));
    }
}
void Processing_units::calculate_output_signal(int activation_function)
{
    // net input: y_in = bias + sum_i(x_i * w_i)
    sum_of_weighted_inputs = bias;
    for(int i = 0; i < number_of_input_units; i++)
    {
        sum_of_weighted_inputs += processing_unit_input[i] * weight_of_inputs[i];
    }
    switch(activation_function)
    {
        case 1: // binary sigmoid function
            output_signal = 1.0 / (1.0 + exp(-1.0 * sum_of_weighted_inputs));
            break;
        case 2: // bipolar sigmoid function
            output_signal = (2.0 / (1.0 + exp(-1.0 * sum_of_weighted_inputs))) - 1.0;
            break;
    }
}
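// The two activation choices, written out (both map the net input y_in):
//   binary sigmoid:  f(y_in) = 1 / (1 + e^(-y_in)),      range (0, 1)
//   bipolar sigmoid: f(y_in) = 2 / (1 + e^(-y_in)) - 1,  range (-1, 1)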
float Processing_units::calculate_output_signal_derivative(int afun)
{
    float derivative = 0.0; // defensively initialized; afun should be 1 or 2
    switch(afun) // derivative used is based on the activation function selected
    {
        case 1: // binary sigmoid: f'(y) = f(y) * (1 - f(y))
            derivative = output_signal * (1.0 - output_signal);
            break;
        case 2: // bipolar sigmoid: f'(y) = 0.5 * (1 + f(y)) * (1 - f(y))
            derivative = 0.5 * (1.0 + output_signal) * (1.0 - output_signal);
            break;
    }
    return derivative;
}
void Processing_units::calculate_weight_and_bias_correction_terms(float learning_rate)
{
    for(int i = 0; i < number_of_input_units; i++)
    {
        weight_correction_term[i] = learning_rate * error_information_term * processing_unit_input[i];
    }
    bias_correction_term = learning_rate * error_information_term;
    error_information_term = 0.0; // reset the accumulator for the next pass
    update_weights_and_biases();
}
void Processing_units::update_weights_and_biases(void)
{
    for(int i = 0; i < number_of_input_units; i++)
    {
        weight_of_inputs[i] = weight_of_inputs[i] + weight_correction_term[i];
    }
    bias = bias + bias_correction_term;
}
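// This is the standard delta-rule update used by backpropagation:
//   delta_w_i = alpha * delta * x_i     (alpha = learning_rate,
//   delta_b   = alpha * delta            delta = error_information_term)
// followed immediately by w_i += delta_w_i and bias += delta_b.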
// Declare a derived class "Hidden_units" for the hidden layers of the network
class Hidden_units : public Processing_units
{
public:
    void calculate_hidden_error_information_term(int afun);
};
// Define member functions for derived class "Hidden_units"
void Hidden_units::calculate_hidden_error_information_term(int afun)
{
    float output_signal_derivative = calculate_output_signal_derivative(afun);
    error_information_term = error_information_term * output_signal_derivative;
}
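// For a hidden unit j the backprop error term is
//   delta_j = f'(y_in_j) * sum_k(delta_k * w_jk).
// The summation over the downstream layer is assumed to have been
// accumulated into error_information_term by the training routine (defined
// later in the file) before this member function is called.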
// Declare a derived class "Output_units" for the output layer of the network
class Output_units : public Processing_units
{
public:
    void calculate_output_error_information_term(float target_value, int af);
    float absolute_error_difference;
    float error_difference_squared;
};
// Define member functions for derived class "Output_units"
void Output_units::calculate_output_error_information_term(float target_value, int af)
{
    float output_signal_derivative = calculate_output_signal_derivative(af);
    absolute_error_difference = fabs(target_value - output_signal);
    error_information_term = (target_value - output_signal) * output_signal_derivative;
    error_difference_squared = pow((target_value - output_signal), 2.0);
}
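// For an output unit k with target t_k and output y_k, this computes the
// standard backprop error term
//   delta_k = (t_k - y_k) * f'(y_in_k),
// along with |t_k - y_k| and (t_k - y_k)^2 for the performance statistics.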
// Create classes to contain the neural net specifications
class Hidden_layer
{
public:
    Hidden_units *node_in_hidden_layer;
    int nodes_in_hidden_layer;
    Hidden_layer();
    ~Hidden_layer();
};
Hidden_layer::Hidden_layer()
{ node_in_hidden_layer = 0; } // keep the destructor safe before allocation
Hidden_layer::~Hidden_layer()
{ delete [] node_in_hidden_layer; }
// The following class represents an artificial neural network, containing
// the topology, weights, training performance and testing performance
class Back_Topology
{
public:
    Hidden_layer *hidden_layer_number;   // array of hidden-layer descriptors
    Output_units *node_in_output_layer;  // array of output units
    int number_of_hidden_layers;
    int activation_function_for_hidden_layer;
    int nodes_in_output_layer;
    int activation_function_for_output_layer;
    int signal_dimensions;               // dimension of the input vector
    int number_of_tests;
    void establish_activation_functions(void);
    void construct_and_initialize_backprop_network(void);
    void upload_network(void);
    void savenet(void);
    ~Back_Topology();
};
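// Illustrative driver for this class (a sketch only; upload_network(),
// savenet(), and establish_activation_functions() are defined further down
// in the file, and the training loop is not shown in this excerpt):
//   Back_Topology net;
//   net.construct_and_initialize_backprop_network(); // prompts the user
//   net.establish_activation_functions();            // choose sigmoid types
//   net.savenet();                                   // store the topology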
void Back_Topology::construct_and_initialize_backprop_network(void)
{
    int nodes, inputs_to_output_node;
    char netcreate;
    int looploc = 0;
    do
    {
        cout << "\n";
        cout << "Do you wish to" << "\n\n";
        cout << "C. Create your own Backprop Network " << "\n";
        cout << "U. Upload an existing Backprop Network " << "\n\n";
        cout << "Your choice?: "; cin >> netcreate;
        netcreate = toupper(netcreate);
        cout << "\n";
        if((netcreate == 'C') || (netcreate == 'U')) { looploc = 1; }
    } while(looploc <= 0);
    if(netcreate == 'U')
    { upload_network(); }
    else
    {
        cout << "Please enter the dimensions of the input vector: ";
        cin >> signal_dimensions;
        cout << "\n\n";
        do
        {
            cout << "Please enter the number of hidden layers (0 - 2): ";
            cin >> number_of_hidden_layers;
            cout << "\n\n";
        } while(number_of_hidden_layers < 0 || number_of_hidden_layers > 2);
        if(number_of_hidden_layers > 0)
        {
            hidden_layer_number = new Hidden_layer[number_of_hidden_layers];
            for(int layer = 0; layer < number_of_hidden_layers; layer++)
            {
                cout << "Please enter the number of nodes in hidden layer " << layer + 1 << ": ";
                cin >> hidden_layer_number[layer].nodes_in_hidden_layer;
                cout << "\n\n";
            }
        }
        cout << "\n";
        cout << "Please enter the number of nodes in the output layer: ";
        cin >> nodes_in_output_layer;
        cout << "\n\n";
        // establish dynamic arrays for the nodes in the hidden and output layers
        if(number_of_hidden_layers > 0)
        {
            for(int layer = 0; layer < number_of_hidden_layers; layer++)
            {
                nodes = hidden_layer_number[layer].nodes_in_hidden_layer;
                hidden_layer_number[layer].node_in_hidden_layer = new Hidden_units[nodes];
            }
        }
        node_in_output_layer = new Output_units[nodes_in_output_layer];
        if(number_of_hidden_layers > 0)
        {
            // establish input connections between the signal and the first hidden layer
            for(nodes = 0; nodes < hidden_layer_number[0].nodes_in_hidden_layer; nodes++)
            {
                hidden_layer_number[0].node_in_hidden_layer[nodes].number_of_input_units = signal_dimensions;
                hidden_layer_number[0].node_in_hidden_layer[nodes].establish_array_of_processing_unit_inputs();
                hidden_layer_number[0].node_in_hidden_layer[nodes].establish_weight_vector_for_processing_units();
                hidden_layer_number[0].node_in_hidden_layer[nodes].bias = 1.0 - (2.0 * bedlam(&gaset));
            }
            if(number_of_hidden_layers > 1)
            {
                // establish connections between the first and second hidden layers
                for(nodes = 0; nodes < hidden_layer_number[1].nodes_in_hidden_layer; nodes++)
                {
                    hidden_layer_number[1].node_in_hidden_layer[nodes].number_of_input_units = hidden_layer_number[0].nodes_in_hidden_layer;
                    hidden_layer_number[1].node_in_hidden_layer[nodes].establish_array_of_processing_unit_inputs();
                    hidden_layer_number[1].node_in_hidden_layer[nodes].establish_weight_vector_for_processing_units();
                    hidden_layer_number[1].node_in_hidden_layer[nodes].bias = 1.0 - (2.0 * bedlam(&gaset));
                }
            }
        }
        // determine the number of inputs to the output layer
        if(number_of_hidden_layers > 0)
        { inputs_to_output_node = hidden_layer_number[number_of_hidden_layers - 1].nodes_in_hidden_layer; }
        else
        { inputs_to_output_node = signal_dimensions; }
        // establish input connections to the output layer
        for(nodes = 0; nodes < nodes_in_output_layer; nodes++)
        {
            node_in_output_layer[nodes].number_of_input_units = inputs_to_output_node;
            node_in_output_layer[nodes].establish_array_of_processing_unit_inputs();
            node_in_output_layer[nodes].establish_weight_vector_for_processing_units();
            node_in_output_layer[nodes].bias = 1.0 - (2.0 * bedlam(&gaset));
        }