⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 incrementalrandomneurons.m

📁 本人编写的incremental 随机神经元网络算法
💻 M
字号:
function [TrainingTime, TrainingAccuracy, TestingAccuracy] = IncrementalRandomNeurons(TrainingData_File, TestingData_File, MaxNumberofHiddenNeurons, ActivationFunction, Problem_Type)

% Incrementally grows a single-hidden-layer network by adding one randomly
% generated hidden neuron at a time (I-ELM style); each new neuron's output
% weight (Beta) is fitted analytically against the current residual error.
%
% Input:
% TrainingData_File        - Filename of training data set (column 1 = target,
%                            remaining columns = input features)
% TestingData_File         - Filename of testing data set (same layout)
% MaxNumberofHiddenNeurons - Maximum number of hidden neurons to add
% ActivationFunction       - Type of activation function:
%                              'sig'       Sigmoidal function
%                              'sin'       Sine function
%                              'hardlim'   Hardlim function
%                              'rbf'       Gaussian function (division)
%                              'rbf_gamma' Gaussian function (product)
% Problem_Type             - 0 for regression; 1 for classification
%
% Output:
% TrainingTime     - Cumulative CPU seconds after each neuron is added
%                    (1 x MaxNumberofHiddenNeurons)
% TrainingAccuracy - Per-neuron training RMSE curve for regression;
%                    NaN for classification (not computed by this code)
% TestingAccuracy  - Per-neuron testing RMSE curve for regression;
%                    NaN for classification (not computed by this code)
%
% Sample (regression):
%   [TrainingTime, TrainingAccuracy, TestingAccuracy] = ...
%       IncrementalRandomNeurons('sinc_train', 'sinc_test', 20, 'sig', 0)

                                        %%%%    Authors: Chen Lei
REGRESSION = 0;
CLASSIFIER = 1; %#ok<NASGU>   % classification metrics are not implemented yet

%%%%%%%%%%% Load training dataset
train_data=load(TrainingData_File);
T=train_data(:,1)';                  % targets: one row per output dimension
I=train_data(:,2:end)';              % inputs: one column per sample
clear train_data;                    % release raw training data array

%%%%%%%%%%% Load testing dataset
test_data=load(TestingData_File);
t_testing=test_data(:,1)';
x_testing=test_data(:,2:end)';
clear test_data;                     % release raw testing data array

NumberofTrainingData=size(I,2);
NumberofTestingData=size(x_testing,2);
NumberofInputNeurons=size(I,1);
NumberofOutputNeurons=size(T,1);

%%%%%%%%%%%%%%%%%%%%%%%%% preallocate per-neuron storage %%%%%%%%
InputWeight=zeros(NumberofInputNeurons,MaxNumberofHiddenNeurons);
HiddenBias=zeros(1,MaxNumberofHiddenNeurons);
HiddenNeuronActivation=zeros(NumberofTrainingData,MaxNumberofHiddenNeurons);
HiddenNeuronActivationTesting=zeros(NumberofTestingData,MaxNumberofHiddenNeurons);
Beta=zeros(NumberofOutputNeurons,MaxNumberofHiddenNeurons);
total_cputimesofar=zeros(1,MaxNumberofHiddenNeurons);
TrainingResidualError_Norm=zeros(1,MaxNumberofHiddenNeurons);
TestingResidualError_Norm=zeros(1,MaxNumberofHiddenNeurons);
SumofResidualHiddenActivation=zeros(NumberofOutputNeurons,1);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

% With zero hidden neurons the network output is zero, so the initial
% residual error equals the raw targets.
TrainingResidualError=T;
TestingResidualError=t_testing;

for L=1:MaxNumberofHiddenNeurons

    starting_cpu=cputime;

    % Randomly generate the new neuron's input weights and bias.
    [InputWeight(:,L),HiddenBias(L)]=random_neuron(NumberofInputNeurons,ActivationFunction);

    % Activation of the new neuron on the training set (column vector).
    HiddenNeuronActivation(:,L)=hidden_output(I,InputWeight(:,L),HiddenBias(L),ActivationFunction,NumberofInputNeurons)';

    % Least-squares output weight against the residual, per output row:
    % Beta = (e * h) / (h' * h).
    SumofSquareHiddenActivation=HiddenNeuronActivation(:,L)'*HiddenNeuronActivation(:,L);
    for i=1:NumberofOutputNeurons
        SumofResidualHiddenActivation(i)=TrainingResidualError(i,:)*HiddenNeuronActivation(:,L);
        Beta(i,L)=SumofResidualHiddenActivation(i)/SumofSquareHiddenActivation;
    end

    % Subtract the new neuron's contribution from the training residual.
    TrainingResidualError=TrainingResidualError-Beta(:,L)*HiddenNeuronActivation(:,L)';
    if Problem_Type==REGRESSION
        TrainingResidualError_Norm(L)=norm(TrainingResidualError)/sqrt(NumberofTrainingData);
    end

    % Cumulative CPU time spent on training after L neurons.
    if L==1
        total_cputimesofar(L)=double(cputime-starting_cpu);
    else
        total_cputimesofar(L)=total_cputimesofar(L-1)+cputime-starting_cpu;
    end

    % Same residual update on the testing set (evaluation only, not timed).
    HiddenNeuronActivationTesting(:,L)=hidden_output(x_testing,InputWeight(:,L),HiddenBias(L),ActivationFunction,NumberofInputNeurons)';
    TestingResidualError=TestingResidualError-Beta(:,L)*HiddenNeuronActivationTesting(:,L)';
    if Problem_Type==REGRESSION
        TestingResidualError_Norm(L)=norm(TestingResidualError)/sqrt(NumberofTestingData);
    end

end

TrainingTime=total_cputimesofar;

% The original code never assigned the accuracy outputs for classification,
% so MATLAB errored with "Output argument not assigned" whenever the caller
% requested them; return NaN curves instead so the call still succeeds.
TrainingAccuracy=NaN(1,MaxNumberofHiddenNeurons);
TestingAccuracy=NaN(1,MaxNumberofHiddenNeurons);
if Problem_Type==REGRESSION
    TrainingAccuracy=TrainingResidualError_Norm;
    TestingAccuracy=TestingResidualError_Norm;
end

function [w,b]=random_neuron(NumberofInputNeurons,ActivationFunction)
% Draw random parameters for one hidden neuron: input weights uniform in
% [-1,1]; the bias range depends on the activation function (RBF-style
% biases act as kernel widths/gammas and must stay positive).
w=2*rand(NumberofInputNeurons,1)-1;
switch lower(ActivationFunction)
    case {'rbf'}
        b=rand(1,1);            % positive width, Gaussian (division form)
    case {'rbf_gamma'}
        b=0.5*rand(1,1);        % positive gamma, Gaussian (product form)
    otherwise
        b=2*rand(1,1)-1;        % additive bias in [-1,1]
end

function y1=hidden_output(x,w,b,ActivationFunction,NumberofInputs)
% Compute the activation of one hidden neuron over all samples.
%
% x                  - input data, one column per sample
% w                  - input weight column vector (NumberofInputs x 1)
% b                  - scalar bias (kernel width/gamma for the RBF variants)
% ActivationFunction - activation name (see cases below)
% NumberofInputs     - number of input features (rows of x)
%
% Returns y1, a 1 x NumberofSamples row vector of activations.

switch lower(ActivationFunction)
    case {'sin','sine'}
        %%%%%%%% Sine
        y1=sin(w'*x+b);
    case {'rbf'}
        %%%%%%%% Gaussian RBF, division form: exp(-||x-w||^2 / b)
        NumberofTraining=size(x,2);
        ind=ones(1,NumberofTraining);

        extend_weight=w(:,ind);          % replicate w across all samples
        if NumberofInputs==1
            tempH=-((x-extend_weight).^2);
        else
            tempH=-sum((x-extend_weight).^2);
        end

        BiasMatrix=b(:,ind);
        tempH=tempH./BiasMatrix;
        clear extend_weight;

        % small positive offset keeps activations away from exactly zero
        y1=exp(tempH)+0.0001;
    case {'rbf_gamma'}
        %%%%%%%% Gaussian RBF, product form: exp(-b * ||x-w||^2)
        NumberofTraining=size(x,2);
        ind=ones(1,NumberofTraining);

        extend_weight=w(:,ind);          % replicate w across all samples
        if NumberofInputs==1
            tempH=-((x-extend_weight).^2);
        else
            tempH=-sum((x-extend_weight).^2);
        end

        BiasMatrix=b(:,ind);
        tempH=tempH.*BiasMatrix;
        clear extend_weight;

        % small positive offset keeps activations away from exactly zero
        y1=exp(tempH)+0.0001;
    case {'tri'}
        %%%%%%%% Triangle: 1-|x1|, clipped at zero.
        % BUGFIX: the original used scalar if-tests (if abs(x1)>1 ...) on x1,
        % which is a 1 x N vector when x holds multiple samples; MATLAB's
        % `if` on a vector is true only when ALL elements satisfy it, so the
        % result was wrong for vector input. Computed elementwise instead.
        x1=w'*x+b;
        y1=max(0,1-abs(x1));
    case {'hardlim'}
        %%%%%%%% Hardlimit (sign of the projection: -1, 0, or 1)
        x1=w'*x+b;
        y1=sign(x1);
    case {'gau'}
        %%%%%%%% Gaussian of the projection
        x1=w'*x+b;
        y1=exp(-x1.^2);
    case {'sig','sigmoid'}
        %%%%%%%% Sigmoid
        bias_vector=b*ones(1,size(x,2));
        y1=1./(1+exp(-(w'*x+bias_vector)));
    case {'windows'}
        %%%%%%%% Window: 1 inside [-1,1], 0 outside, plus small offset
        x1=w'*x+b;
        traina=x1<=1;
        trainb=x1>=-1;
        y1=traina.*trainb+0.0001;
        %%%%%%%% More activation functions can be added here

end

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -