📄 bp_stochastical.m
function [a1, a2, t1, t2] = BP_Stochastical(TrainingData_File, TestingData_File, nHiddenNeuron, lr, mc, Type)
% Input arguments:
% TrainingData_File, TestingData_File --- Training and testing data set files
% nHiddenNeuron --- Number of hidden neurons
% lr --- Learning rate
% mc --- Momentum constant
% Type --- 'Regression'/'Classifier'
%
% Output arguments:
% a1 --- Training RMSE (regression) or accuracy (classification)
% a2 --- Testing RMSE (regression) or accuracy (classification)
% t1 --- Training CPU time
% t2 --- Testing CPU time
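%
% Example call (a minimal sketch; the file names are hypothetical ASCII files
% whose first column is the target and whose remaining columns are the input
% attributes, matching the load() calls below):
%   [trainPerf, testPerf, trainTime, testTime] = ...
%       BP_Stochastical('train_set.txt', 'test_set.txt', 20, 0.1, 0.9, 'Classifier');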
%%%%%%%%%%% Load training dataset
train_data=load(TrainingData_File);
T=train_data(:,1)';                        % targets: first column
P=train_data(:,2:size(train_data,2))';     % inputs: remaining columns
clear train_data;                          % Release raw training data array
%%%%%%%%%%% Load testing dataset
test_data=load(TestingData_File);
TV.T=test_data(:,1)';
TV.P=test_data(:,2:size(test_data,2))';
clear test_data;                           % Release raw testing data array
NumberofTrainingData=size(P,2);
NumberofTestingData=size(TV.P,2);
NumberofInputNeurons=size(P,1);
NumberofOutputNeurons=size(T,1);
NumberofValidationData = round(NumberofTestingData / 2);    % reserved for a validation split; not used below
%%%%%%%%%%% Macro definition
Gain=1;    % sigmoid gain (defined but not used below; Lam_para plays this role)
if strcmp(Type, 'Classifier')
    %%%%%%%%%%%% Preprocessing the data of classification
    sorted_target=sort(cat(2,T,TV.T),2);
    label=zeros(1,1);                % Collect the distinct class labels found in the training and testing sets
    label(1,1)=sorted_target(1,1);
    j=1;
    for i = 2:(NumberofTrainingData+NumberofTestingData)
        if sorted_target(1,i) ~= label(1,j)
            j=j+1;
            label(1,j) = sorted_target(1,i);
        end
    end
    number_class=j;
    NumberofOutputNeurons=number_class;
    %%%%%%%%%% Processing the targets of training: one column per sample, coded as +1/-1
    temp_T=zeros(NumberofOutputNeurons, NumberofTrainingData);
    for i = 1:NumberofTrainingData
        for j = 1:number_class
            if label(1,j) == T(1,i)
                break;
            end
        end
        temp_T(j,i)=1;
    end
    T=temp_T*2-1;
    %%%%%%%%%% Processing the targets of testing
    temp_TV_T=zeros(NumberofOutputNeurons, NumberofTestingData);
    for i = 1:NumberofTestingData
        for j = 1:number_class
            if label(1,j) == TV.T(1,i)
                break;
            end
        end
        temp_TV_T(j,i)=1;
    end
    TV.T=temp_TV_T*2-1;
end
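% Example of the coding above: with class labels {1,2,3}, a sample of class 2
% is coded as the target column [-1; 1; -1] (+1 at the row of its class, -1 elsewhere).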
clear temp_T temp_TV_T;
% Transpose so that samples are stored in rows from here on
T=T';
P=P';
TV.T=TV.T';
TV.P=TV.P';
nTrainingData = size(P,1); nTestingData = size(TV.P,1);
%%%%% Build and initialize the network
nInputNeuron = size(P,2); nOutputNeuron = size(T,2);
InputWeight = rand(nHiddenNeuron, nInputNeuron)*2-1;     % input-to-hidden weights, uniform in [-1,1]
HiddenWeight = rand(nOutputNeuron, nHiddenNeuron)*2-1;   % hidden-to-output weights, uniform in [-1,1]
HiddenBias = rand(nHiddenNeuron,1)*2-1;                  % hidden-layer biases
OutputBias = rand(nOutputNeuron,1)*2-1;                  % output-layer biases
input_local_gradient = zeros(nHiddenNeuron,1);           % local gradients of the hidden layer
hidden_local_gradient = zeros(nOutputNeuron,1);          % local gradients of the output layer
Y = zeros(nTrainingData, nOutputNeuron);                 % network outputs on the training set
Lam_para=1;                                              % sigmoid slope (gain) parameter
t1 = cputime;
%%%%% Feedforward propagation and on-line weight update (single pass over the training data)
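% Stochastic (on-line) backpropagation: each training sample is propagated
% forward and the weights are updated immediately. From the second sample
% onward the updates are smoothed with a momentum term,
%   dW(n) = mc * dW(n-1) + (1 - mc) * lr * delta * x,
% where delta is the local gradient of the layer and x is the layer's input.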
for n = 1 : nTrainingData
    % Forward pass for the n-th training sample
    Input = P(n,:)';
    HiddenNet = InputWeight * Input + HiddenBias;
    HiddenOutput = 1 ./ (1 + exp(-Lam_para*HiddenNet));   % logistic hidden units
    HiddenInput = HiddenOutput;
    OutputNet = HiddenWeight * HiddenInput + OutputBias;
    OutputOutput = OutputNet;                              % linear output units
    Y(n,:) = OutputOutput';
    if n == 1
        % First sample: plain gradient step (no momentum history yet)
        for k = 1 : nOutputNeuron
            hidden_local_gradient(k) = T(n,k) - Y(n,k);    % output-layer local gradient (error)
            for j = 1 : nHiddenNeuron
                dHiddenWeight(k,j) = lr * hidden_local_gradient(k) * HiddenInput(j);
                HiddenWeight(k,j) = HiddenWeight(k,j) + dHiddenWeight(k,j);
            end
            dOutputBias(k) = lr * hidden_local_gradient(k) * 1;
            OutputBias(k) = OutputBias(k) + dOutputBias(k);
        end
        for j = 1 : nHiddenNeuron
            input_local_gradient(j) = HiddenOutput(j) * (1 - HiddenOutput(j)) * hidden_local_gradient' * HiddenWeight(:,j);
            for i = 1 : nInputNeuron
                dInputWeight(j,i) = lr * input_local_gradient(j) * Input(i);
                InputWeight(j,i) = InputWeight(j,i) + dInputWeight(j,i);
            end
            dHiddenBias(j) = lr * input_local_gradient(j) * 1;
            HiddenBias(j) = HiddenBias(j) + dHiddenBias(j);
        end
    else
        % Subsequent samples: momentum-smoothed updates
        for k = 1 : nOutputNeuron
            hidden_local_gradient(k) = T(n,k) - Y(n,k);
            for j = 1 : nHiddenNeuron
                dHiddenWeight(k,j) = mc * dHiddenWeight(k,j) + (1 - mc) * lr * hidden_local_gradient(k) * HiddenInput(j);
                HiddenWeight(k,j) = HiddenWeight(k,j) + dHiddenWeight(k,j);
            end
            dOutputBias(k) = mc * dOutputBias(k) + (1 - mc) * lr * hidden_local_gradient(k) * 1;
            OutputBias(k) = OutputBias(k) + dOutputBias(k);
        end
        for j = 1 : nHiddenNeuron
            input_local_gradient(j) = HiddenOutput(j) * (1 - HiddenOutput(j)) * hidden_local_gradient' * HiddenWeight(:,j);
            for i = 1 : nInputNeuron
                dInputWeight(j,i) = mc * dInputWeight(j,i) + (1 - mc) * lr * input_local_gradient(j) * Input(i);
                InputWeight(j,i) = InputWeight(j,i) + dInputWeight(j,i);
            end
            dHiddenBias(j) = mc * dHiddenBias(j) + (1 - mc) * lr * input_local_gradient(j) * 1;
            HiddenBias(j) = HiddenBias(j) + dHiddenBias(j);
        end
    end
end
t1 = cputime - t1;
%%%%% Forward pass over the training set with the final weights
for n = 1:nTrainingData
    Input = P(n,:)';
    HiddenNet = InputWeight * Input + HiddenBias;
    HiddenOutput = 1 ./ (1 + exp(-Lam_para*HiddenNet));
    HiddenInput = HiddenOutput;
    OutputNet = HiddenWeight * HiddenInput + OutputBias;
    OutputOutput = OutputNet;
    Y(n,:) = OutputOutput';
end
%%%%% Forward pass over the testing set with the final weights
TV.Y = zeros(nTestingData, nOutputNeuron);
t2 = cputime;
for n = 1:nTestingData
    Input = TV.P(n,:)';
    HiddenNet = InputWeight * Input + HiddenBias;
    HiddenOutput = 1 ./ (1 + exp(-Lam_para*HiddenNet));
    HiddenInput = HiddenOutput;
    OutputNet = HiddenWeight * HiddenInput + OutputBias;
    OutputOutput = OutputNet;
    TV.Y(n,:) = OutputOutput';
end
t2 = cputime - t2;
%%%%% Training and testing performance (RMSE for regression, accuracy for classification)
if strcmp(Type, 'Regression')
    a1 = sqrt(mse(T - Y));          % training RMSE
    a2 = sqrt(mse(TV.T - TV.Y));    % testing RMSE
elseif strcmp(Type, 'Classifier')
    % Predicted class = output neuron with the largest activation
    Miss = 0;
    for i = 1 : nTrainingData
        [x, label_index_expected] = max(T(i,:));
        [x, label_index_actual] = max(Y(i,:));
        if label_index_actual ~= label_index_expected
            Miss = Miss + 1;
        end
    end
    a1 = 1 - Miss / nTrainingData;
    Miss = 0;
    for i = 1 : nTestingData
        [x, label_index_expected] = max(TV.T(i,:));
        [x, label_index_actual] = max(TV.Y(i,:));
        if label_index_actual ~= label_index_expected
            Miss = Miss + 1;
        end
    end
    a2 = 1 - Miss / nTestingData;
end