% bp_lv.m — BP feed-forward network (Levenberg-Marquardt) training/testing benchmark
function [TrainingTime, TestingTime, TrainingAccuracy, TestingAccuracy] = bp_lv(TrainingData_File, TestingData_File, Problem_Type, NumberofHiddenNeurons, Max_Epochs, Goal)
% Usage: bp_lv(TrainingData_File, TestingData_File, Problem_Type, NumberofHiddenNeurons, Max_Epochs, Goal)
% OR: [TrainingTime, TestingTime, TrainingAccuracy, TestingAccuracy] = bp_lv(TrainingData_File, TestingData_File, Problem_Type, NumberofHiddenNeurons, Max_Epochs, Goal)
%
% Input:
% TrainingData_File     - Filename of training data set (ASCII matrix: target in
%                         column 1, input attributes in the remaining columns)
% TestingData_File      - Filename of testing data set (same layout)
% Problem_Type          - 0 for regression; 1 for (both binary and multi-class) classification
% NumberofHiddenNeurons - Number of hidden neurons assigned to the network
% Max_Epochs            - Max number of training epochs
% Goal                  - The performance target represented in mean squared error (MSE)
%
% Output:
% TrainingTime     - Time (seconds) spent on training
% TestingTime      - Time (seconds) spent on predicting ALL testing data
% TrainingAccuracy - Training accuracy:
%                    RMSE for regression or correct classification rate for classification
% TestingAccuracy  - Testing accuracy:
%                    RMSE for regression or correct classification rate for classification
%
% MULTI-CLASS CLASSIFICATION: THE NUMBER OF OUTPUT NEURONS IS AUTOMATICALLY SET
% EQUAL TO THE NUMBER OF CLASSES. For example, with 7 classes there are 7 output
% neurons; if neuron 5 has the highest output, the input belongs to the 5-th class.
%
% Sample1 regression:     [TrainingTime, TestingTime, TrainingAccuracy, TestingAccuracy] = bp_lv('sinc_train', 'sinc_test', 0, 20, 100, 0.01)
% Sample2 classification: bp_lv('diabetes_train', 'diabetes_test', 1, 20, 100, 0.01)

%%%%%%%%%%% Macro definition
REGRESSION=0;
CLASSIFIER=1;

%%%%%%%%%%% Load training dataset (column 1 = target, remaining columns = inputs;
%%%%%%%%%%% transposed so samples are columns, as the toolbox expects)
train_data=load(TrainingData_File);
T=train_data(:,1)';
P=train_data(:,2:size(train_data,2))';
clear train_data;                           % Release raw training data array

%%%%%%%%%%% Load testing dataset
test_data=load(TestingData_File);
TV.T=test_data(:,1)';
TV.P=test_data(:,2:size(test_data,2))';
clear test_data;                            % Release raw testing data array

NumberofTrainingData=size(P,2);
NumberofTestingData=size(TV.P,2);
NumberofInputNeurons=size(P,1);
NumberofOutputNeurons=size(T,1);
% Half of the testing set (rounded) is split off below as a validation set
NumberofValidationData = round(NumberofTestingData / 2);

if Problem_Type~=REGRESSION
    %%%%%%%%%%%% Preprocessing the data of classification:
    % collect the distinct class labels occurring in training + testing targets
    sorted_target=sort(cat(2,T,TV.T),2);
    label=zeros(1,1);                       % Distinct class labels found so far
    label(1,1)=sorted_target(1,1);
    j=1;
    for i = 2:(NumberofTrainingData+NumberofTestingData)
        if sorted_target(1,i) ~= label(1,j)
            j=j+1;
            label(1,j) = sorted_target(1,i);
        end
    end
    number_class=j;
    NumberofOutputNeurons=number_class;

    %%%%%%%%%% Processing the targets of training: one-of-K coding in {-1,+1}
    temp_T=zeros(NumberofOutputNeurons, NumberofTrainingData);
    for i = 1:NumberofTrainingData
        for j = 1:number_class
            if label(1,j) == T(1,i)
                break;
            end
        end
        temp_T(j,i)=1;
    end
    T=temp_T*2-1;

    %%%%%%%%%% Processing the targets of testing: one-of-K coding in {-1,+1}
    temp_TV_T=zeros(NumberofOutputNeurons, NumberofTestingData);
    for i = 1:NumberofTestingData
        for j = 1:number_class
            if label(1,j) == TV.T(1,i)
                break;
            end
        end
        temp_TV_T(j,i)=1;
    end
    TV.T=temp_TV_T*2-1;
end
clear temp_T;                               % No-op for regression (temp_T never created)

%%%%%%%%%%% Split the first half of the testing set off as the validation set
VV.P = TV.P(:,1:NumberofValidationData);
VV.T = TV.T(:,1:NumberofValidationData);
TV.P(:,1:NumberofValidationData)=[];
TV.T(:,1:NumberofValidationData)=[];
NumberofTestingData = NumberofTestingData - NumberofValidationData;

%%%%%%%%%%% Two-layer feed-forward network (logsig hidden / purelin output)
%%%%%%%%%%% trained with the Levenberg-Marquardt algorithm
net=newff(minmax(P),[NumberofHiddenNeurons NumberofOutputNeurons],{'logsig','purelin'},'trainlm');
net.trainParam.show = 50;
net.trainParam.goal = Goal;
net.trainParam.lr = 0.2;
net.trainParam.epochs = Max_Epochs;
net.trainParam.mem_reduc = 1;               % Jacobian memory reduction factor (1 = no reduction)

%%%%%%%%%%% Train; passing VV enables early stopping on the validation set
startTime=cputime;
[trainedNet,trainingRecord]=train(net,P,T,[],[],VV);
endTime=cputime;
TrainingTime=endTime-startTime;             % Measures training only, not prediction

%%%%%%%%%%% Predict on training and testing data
Y=sim(trainedNet,P);
start_test_time=cputime;
TY=sim(trainedNet,TV.P);
end_test_time=cputime;
TestingTime=end_test_time-start_test_time;

if Problem_Type == REGRESSION
    %%%%%%%%%%% Regression accuracy: root mean squared error
    TrainingAccuracy=sqrt(mse(Y-T));
    TestingAccuracy=sqrt(mse(TY-TV.T));
end
if Problem_Type == CLASSIFIER
    %%%%%%%%%%% Classification accuracy: fraction of samples whose winning
    %%%%%%%%%%% output neuron matches the expected class
    MissClassificationRate_Training=0;
    MissClassificationRate_Testing=0;
    for i = 1 : size(T, 2)
        [x, label_index_expected]=max(T(:,i));
        [x, label_index_actual]=max(Y(:,i));
        if label_index_actual~=label_index_expected
            MissClassificationRate_Training=MissClassificationRate_Training+1;
        end
    end
    TrainingAccuracy=1-MissClassificationRate_Training/size(T,2);
    for i = 1 : size(TV.T, 2)
        [x, label_index_expected]=max(TV.T(:,i));
        [x, label_index_actual]=max(TY(:,i));
        if label_index_actual~=label_index_expected
            MissClassificationRate_Testing=MissClassificationRate_Testing+1;
        end
    end
    TestingAccuracy=1-MissClassificationRate_Testing/size(TV.T,2);
end
% End of bp_lv.m