// pittnet.cpp
float minimum_average_squared_error;
void delete_signal_data_array(void);
~Training();
};
Training::~Training()
{
delete [] signalpoint;
delete [] max_output_value;
delete [] min_output_value;
}
void Training::request_training_data(int net_no)
{
cout << "Please enter the file name containing the training data for neural net no. "<< net_no << "\n";
cin >> filename;
specify_signal_sample_size();
signalpoint = new signal_data[sample_number];
for(int i = 0; i < sample_number; i++) {signalpoint[i].signal_value = i;} // start with signals in file order
normalize_data_in_array();
}
void Training::scramble_data_in_array(void)
{
int swap1, swap2, hold_sample;
float hold_rank;
// randomly assign a rank to each signal; bedlam() appears to be the
// program's uniform random number generator (defined elsewhere in pittnet)
for(int sig = 0; sig < sample_number; sig++)
{signalpoint[sig].signal_rank = bedlam((long*)(gaset));}
// reorder the signals into ascending rank order (simple pairwise exchange sort)
for(swap1 = 0; swap1 < sample_number - 1; swap1++)
{
for(swap2 = swap1 + 1; swap2 < sample_number; swap2++)
{
if(signalpoint[swap1].signal_rank > signalpoint[swap2].signal_rank)
{
hold_sample = signalpoint[swap2].signal_value;
hold_rank = signalpoint[swap2].signal_rank;
signalpoint[swap2].signal_value = signalpoint[swap1].signal_value;
signalpoint[swap2].signal_rank = signalpoint[swap1].signal_rank;
signalpoint[swap1].signal_value = hold_sample;
signalpoint[swap1].signal_rank = hold_rank;
}
}
}
}
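// Illustrative sketch, not called anywhere in pittnet: the rank-and-sort
// shuffle above costs O(n^2) comparisons per epoch, while an in-place
// Fisher-Yates shuffle achieves the same random reordering in O(n).
// Assumes <stdlib.h> (for rand) is included at the top of this file;
// only signal_value needs swapping, since it alone fixes presentation order.
static void fisher_yates_shuffle(signal_data *sp, int n)
{
for(int i = n - 1; i > 0; i--)
{
int j = rand() % (i + 1); // pseudo-random index in [0, i]
int hold = sp[i].signal_value;
sp[i].signal_value = sp[j].signal_value;
sp[j].signal_value = hold;
}
}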
void Training::delete_signal_data_array(void)
{
delete [] signalpoint;
signalpoint = 0; // avoid a double delete when ~Training() later runs
delete_signal_array();
}
class Testing : public Training // Derived Class For Testing Data
{
public:
void request_testing_data(int net_no, int test); // Function to request data for testing
float average_squared_error;
};
void Testing::request_testing_data(int net_no, int test)
{
cout << "Please enter the file name containing the testing data for neural net no. "<< net_no << "\n\n";
cin >> filename;
cout << "\n\n";
cout << "For test #" << test + 1 << ":";
cout << "\n\n";
specify_signal_sample_size();
normalize_data_in_array();
}
//************************************************************************//
class NeuralB // class containing neural net structure for backpropagation
{ // along with training and testing data
private:
Training Training_Data; // file name and dynamic array for training
Testing *Test_Data; // files containing data to test the network
void initialize_training_storage_array(int N);
void establish_test_battery_size(void);
void train_net_with_backpropagation(void);
void test_neural_network(int BNET);
public:
Back_Topology Net_Design; // specifications for backpropagating network
int number_of_tests;
void establish_backprop_network(void);
void network_training_testing(int TT);
~NeuralB();
};
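// A plausible usage sequence, inferred from the members above (the
// network_training_testing driver is defined elsewhere in pittnet.cpp;
// its argument appears to identify the net number used in the prompts):
//   NeuralB net;
//   net.establish_backprop_network();  // build and initialize the topology
//   net.network_training_testing(1);   // train net no. 1, then run its tests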
//************************************************************************//
// these NeuralB class member functions transmit data from the topology
// to the data storage arrays
NeuralB::~NeuralB()
{delete [] Test_Data;}
void NeuralB::initialize_training_storage_array(int N)
{
Training_Data.acquire_net_info(Net_Design.signal_dimensions, Net_Design.nodes_in_output_layer);
Training_Data.request_training_data(N);
}
void NeuralB::establish_test_battery_size(void)
{
clrscr();
cout << "Please enter the number of tests you wish to run on the BP neural net: ";
cin >> number_of_tests; cout << "\n";
if(number_of_tests > 0)
{
Test_Data = new Testing[number_of_tests];
for(int i = 0; i < number_of_tests; i++)
{Test_Data[i].acquire_net_info(Net_Design.signal_dimensions, Net_Design.nodes_in_output_layer);}
}
else
{Test_Data = 0; number_of_tests = 0;} // keep the destructor's delete [] safe when no tests are requested
}
// define the establish_backprop_network function
void NeuralB::establish_backprop_network(void)
{
clrscr();
cout << " **** Feedforward network using backpropagation **** " << "\n\n\n";
Net_Design.construct_and_initialize_backprop_network();
} // end establish_backprop_network function
// set the activation functions of the nodes of the network
// define train_net_with_backpropagation function
void NeuralB::train_net_with_backpropagation(void)
{
char savefile;
float output_error, sum_of_error, real_error_difference, target_minimum_average_squared_error;
int sig, layers, sigdim, epoch, hidnode, hidnode2, outnode;
int loopexit = 0;
float *maxdifference;
float *meandifference;
ofstream savefile_ptr;
clrscr();
cout << "please enter the number of epochs you wish to use for training: ";
cin >> Training_Data.number_of_epochs; cout<< "\n";
cout << "please enter the learning rate constant for backpropagation (0-1): ";
cin >> Training_Data.rate_of_learning; cout << "\n";
cout << "please enter the minimum average squared error you wish to target" << "\n";
cin >> target_minimum_average_squared_error; cout << "\n";
do
{
cout << "do you wish to save the mean error, maximum error" << "\n";
cout << "and average squared error for each epoch to a file? (Y or N): "; cin >> savefile;
savefile = toupper(savefile);
if((savefile == 'Y') || (savefile == 'N')) {loopexit = 2;}
cout << "\n";
} while(loopexit <= 1);
if(savefile == 'Y')
{
cout << "please enter the name of the file which will hold the results of training:" << "\n";
cin >> Training_Data.resultsname; cout <<"\n";
savefile_ptr.open(Training_Data.resultsname, ios::out);
}
cout << "Do you want signal presentation in random or fixed order(R or F): ";
cin >> Training_Data.presentation_order; cout << "\n";
Training_Data.presentation_order = toupper(Training_Data.presentation_order); cout << "\n";
maxdifference = new float[Net_Design.nodes_in_output_layer];
meandifference = new float[Net_Design.nodes_in_output_layer];
for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
{maxdifference[outnode] = 0.0; meandifference[outnode] = 0.0;} // zero the accumulators before the first epoch
// initiate backpropagation for the appropriate number of epochs
epoch = 0;
do
{
sum_of_error = 0;
for(sig = 0; sig < Training_Data.sample_number; sig++)
{
output_error = 0;
for(sigdim = 0; sigdim < Training_Data.signal_dimensions; sigdim++)
{
if(Net_Design.number_of_hidden_layers == 0) // no hidden layers present
{
for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
{Net_Design.node_in_output_layer[outnode].processing_unit_input[sigdim] = Training_Data.number_of_samples[Training_Data.signalpoint[sig].signal_value].data_in_sample[sigdim];}
}
else // 1 or 2 hidden layers present
{
for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[0].nodes_in_hidden_layer; hidnode++)
{Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].processing_unit_input[sigdim] = Training_Data.number_of_samples[Training_Data.signalpoint[sig].signal_value].data_in_sample[sigdim];}
}
}
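// the current signal has now been loaded into the first layer;
// the blocks below propagate it forward, layer by layer, to the output nodes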
if(Net_Design.number_of_hidden_layers == 2) // two layers are present
{
for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[0].nodes_in_hidden_layer; hidnode++)
{
Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].calculate_output_signal(Net_Design.activation_function_for_hidden_layer);
for(hidnode2 = 0; hidnode2 < Net_Design.hidden_layer_number[1].nodes_in_hidden_layer; hidnode2++)
{Net_Design.hidden_layer_number[1].node_in_hidden_layer[hidnode2].processing_unit_input[hidnode] = Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].output_signal;}
}
}
if(Net_Design.number_of_hidden_layers > 0)
{
for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers -1].nodes_in_hidden_layer; hidnode++)
{
Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers -1].node_in_hidden_layer[hidnode].calculate_output_signal(Net_Design.activation_function_for_hidden_layer);
for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
{Net_Design.node_in_output_layer[outnode].processing_unit_input[hidnode] = Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers -1].node_in_hidden_layer[hidnode].output_signal;}
}
}
for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
{
Net_Design.node_in_output_layer[outnode].calculate_output_signal(Net_Design.activation_function_for_output_layer);
Net_Design.node_in_output_layer[outnode].calculate_output_error_information_term(Training_Data.number_of_samples[Training_Data.signalpoint[sig].signal_value].data_in_sample[Training_Data.signal_dimensions + outnode], Net_Design.activation_function_for_output_layer);
// calculate the instantaneous sum of squared errors (Haykin, 1994)
real_error_difference = (pow(Net_Design.node_in_output_layer[outnode].error_difference_squared, 0.5)) * (Training_Data.max_output_value[outnode] - Training_Data.min_output_value[outnode]);
output_error += 0.5 * pow(real_error_difference, 2.0);
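// i.e. E(n) = (1/2) * sum_k e_k(n)^2, where e_k(n) is the error of output
// node k for signal n, rescaled from normalized units back to real units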
// calculate maximum and mean absolute error difference for each node
real_error_difference = Net_Design.node_in_output_layer[outnode].absolute_error_difference * (Training_Data.max_output_value[outnode] - Training_Data.min_output_value[outnode]);
meandifference[outnode] += real_error_difference / float(Training_Data.sample_number);
if(sig == 0) {maxdifference[outnode] = real_error_difference;}
else
{
if(real_error_difference > maxdifference[outnode])
{maxdifference[outnode] = real_error_difference;}
}
}
// accumulate each signal's contribution to the epoch's average squared error
sum_of_error += output_error / float (Training_Data.sample_number);
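// epoch total: E_av = (1/N) * sum_n E(n), with N = sample_number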
// backpropagation of error will depend on the number of hidden layers
if(Net_Design.number_of_hidden_layers > 0)
{ // backpropagate from output node to adjacent hidden layer
for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
{
for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers - 1].nodes_in_hidden_layer; hidnode++)
{Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers - 1].node_in_hidden_layer[hidnode].error_information_term += Net_Design.node_in_output_layer[outnode].error_information_term * Net_Design.node_in_output_layer[outnode].weight_of_inputs[hidnode];}
}
// calculate the error information term for each node in the hidden layer
for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers - 1].nodes_in_hidden_layer; hidnode++)
{Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers - 1].node_in_hidden_layer[hidnode].calculate_hidden_error_information_term(Net_Design.activation_function_for_hidden_layer);}
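// calculate_hidden_error_information_term() presumably multiplies the sum
// accumulated above by f'(net_j), giving the standard hidden-layer delta:
// delta_j = f'(net_j) * sum_k delta_k * w_kj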
if(Net_Design.number_of_hidden_layers > 1)
{ // backpropagate error from hidden layer 2 to hidden layer 1
for(hidnode2 = 0; hidnode2 < Net_Design.hidden_layer_number[1].nodes_in_hidden_layer; hidnode2++)
{
for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[0].nodes_in_hidden_layer; hidnode++)
{Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].error_information_term += Net_Design.hidden_layer_number[1].node_in_hidden_layer[hidnode2].error_information_term * Net_Design.hidden_layer_number[1].node_in_hidden_layer[hidnode2].weight_of_inputs[hidnode];}
}
for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[0].nodes_in_hidden_layer; hidnode++)
{Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].calculate_hidden_error_information_term(Net_Design.activation_function_for_hidden_layer);}
}
}
// update the network's output nodes
for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
{Net_Design.node_in_output_layer[outnode].calculate_weight_and_bias_correction_terms(Training_Data.rate_of_learning);}
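// each correction-term call presumably applies the delta rule,
// delta_w_ij = eta * delta_j * x_i and delta_bias_j = eta * delta_j,
// with eta = Training_Data.rate_of_learning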
// update the network's hidden nodes (if any)
if(Net_Design.number_of_hidden_layers > 0)
{
for(layers = 0; layers < Net_Design.number_of_hidden_layers; layers++)
{
for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[layers].nodes_in_hidden_layer; hidnode++)
{Net_Design.hidden_layer_number[layers].node_in_hidden_layer[hidnode].calculate_weight_and_bias_correction_terms(Training_Data.rate_of_learning);}
}
}
} // end sig loop
// save error information (if required)
if(savefile == 'Y')
{
savefile_ptr << epoch + 1 << " ";
savefile_ptr << sum_of_error << " ";
for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
{savefile_ptr << maxdifference[outnode] << " " << meandifference[outnode] << " ";}
savefile_ptr << endl;
cout.width(6);
clrscr();
cout << "Epoch #"<< epoch + 1 <<" is completed " << endl;
}
if(epoch == 0)
{Training_Data.minimum_average_squared_error = sum_of_error;}
else
{
if(sum_of_error < Training_Data.minimum_average_squared_error)
{Training_Data.minimum_average_squared_error = sum_of_error;}
}
// scramble the order of signal presentation (if required)
if(Training_Data.presentation_order == 'R')
{Training_Data.scramble_data_in_array();}
for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
{ maxdifference[outnode] = 0.0; meandifference[outnode] = 0.0;}
if(Training_Data.minimum_average_squared_error <= target_minimum_average_squared_error)
{break;} // stop early once the best epoch error reaches the target
epoch = epoch + 1;
} while(epoch < Training_Data.number_of_epochs);
if(savefile == 'Y') {savefile_ptr.close();} // only close the results file if it was opened
// delete arrays holding the training data
Training_Data.delete_signal_data_array();
delete [] maxdifference;
delete [] meandifference;
} // end of backpropagation function
// define the function that tests the neural network
void NeuralB::test_neural_network(int BNET)
{
float output_error, sum_of_error, real_output;
int sig, sigdim, hidnode, hidnode2, outnode;
int bnet = BNET;
for(int t = 0; t < number_of_tests; t++)
{
Test_Data[t].request_testing_data(bnet, t);
sum_of_error = 0;
cout << "please enter the name of the file wich will hold the results of test: "<< t+1 << "\n";
cin >> Test_Data[t].resultsname; cout <<"\n";
ofstream savefile_ptr(Test_Data[t].resultsname);
for(sig = 0; sig < Test_Data[t].sample_number; sig++)
{
output_error = 0;
savefile_ptr << sig + 1 << " ";
for(sigdim = 0; sigdim < Test_Data[t].signal_dimensions; sigdim++)
{
if(Net_Design.number_of_hidden_layers == 0) // no hidden layers present
{
for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
{Net_Design.node_in_output_layer[outnode].processing_unit_input[sigdim] = Test_Data[t].number_of_samples[sig].data_in_sample[sigdim];}
}
else // 1 or 2 hidden layers present
{
for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[0].nodes_in_hidden_layer; hidnode++)
{Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].processing_unit_input[sigdim] = Test_Data[t].number_of_samples[sig].data_in_sample[sigdim];}
}
}
if(Net_Design.number_of_hidden_layers == 2) // two layers are present
{
for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[0].nodes_in_hidden_layer; hidnode++)
{
Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].calculate_output_signal(Net_Design.activation_function_for_hidden_layer);
for(hidnode2 = 0; hidnode2 < Net_Design.hidden_layer_number[1].nodes_in_hidden_layer; hidnode2++)
{Net_Design.hidden_layer_number[1].node_in_hidden_layer[hidnode2].processing_unit_input[hidnode] = Net_Design.hidden_layer_number[0].node_in_hidden_layer[hidnode].output_signal;}
}
}
if(Net_Design.number_of_hidden_layers > 0)
{
for(hidnode = 0; hidnode < Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers -1].nodes_in_hidden_layer; hidnode++)
{
Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers -1].node_in_hidden_layer[hidnode].calculate_output_signal(Net_Design.activation_function_for_hidden_layer);
for(outnode = 0; outnode < Net_Design.nodes_in_output_layer; outnode++)
{Net_Design.node_in_output_layer[outnode].processing_unit_input[hidnode] = Net_Design.hidden_layer_number[Net_Design.number_of_hidden_layers -1].node_in_hidden_layer[hidnode].output_signal;}
}
}
// (the remainder of test_neural_network is truncated in the source listing;
// it presumably computed the output-layer signals, wrote denormalized
// results to savefile_ptr, and set Test_Data[t].average_squared_error)
}
}
}