⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 elm_fun.m

📁 本人修改过的elm的matlab算法
💻 M
字号:
function [TrainingTime, TrainingAccuracy, TestingAccuracy] = elm_fun(TrainingData_File, TestingData_File, NumberofHiddenNeurons, ActivationFunction, Elm_Type)

% ELM_FUN  Train and evaluate a basic Extreme Learning Machine (ELM).
%
% Usage: [TrainingTime, TrainingAccuracy, TestingAccuracy] = ...
%            elm_fun(TrainingData_File, TestingData_File, NumberofHiddenNeurons, ActivationFunction, Elm_Type)
%
% Input:
% TrainingData_File     - Filename of training data set; each row is
%                         [target, feature_1, ..., feature_d]
% TestingData_File      - Filename of testing data set (same layout)
% NumberofHiddenNeurons - Number of hidden neurons assigned to the ELM
% ActivationFunction    - Type of activation function:
%                           'sig'/'sigmoid' for Sigmoidal function
%                           'sin'/'sine'    for Sine function
%                           'hardlim'       for Hardlim function (via sign)
%                           'tri'           for Triangular basis function
%                           'gau'           for Gaussian function
%                           'rbf','rbf_gamma','windows' (see hidden_output)
% Elm_Type              - 0 for regression; 1 for (both binary and
%                         multi-class) classification
%
% Output:
% TrainingTime          - CPU time (seconds) spent on training the ELM
% TrainingAccuracy      - RMSE for regression, or correct-classification
%                         rate for classification
% TestingAccuracy       - RMSE for regression, or correct-classification
%                         rate for classification
%
% MULTI-CLASS CLASSIFICATION: the number of output neurons is set
% automatically to the number of classes. For example, with 7 classes there
% are 7 output neurons; the neuron with the highest output determines the
% predicted class.
%
% Sample1 regression:
%   [TrainingTime, TrainingAccuracy, TestingAccuracy] = elm_fun('sinc_train', 'sinc_test', 20, 'sig', 0)
% Sample2 classification:
%   elm_fun('diabetes_train', 'diabetes_test', 20, 'sig', 1)

    %%%%    Authors:    CHEN LEI
    %%%%    NANYANG TECHNOLOGICAL UNIVERSITY, SINGAPORE
    %%%%    EMAIL:      chen_lei@pmail.ntu.edu.sg
    %%%%    DATE:       APRIL 2006

%%%%%%%%%%% Macro definition
REGRESSION=0;
CLASSIFIER=1;

%%%%%%%%%%% Load training dataset: column 1 is the target, the rest inputs
train_data=load(TrainingData_File);
T=train_data(:,1)';                                 %   1 x N_train targets
P=train_data(:,2:size(train_data,2))';              %   d x N_train inputs
clear train_data;                                   %   Release raw training data array

%%%%%%%%%%% Load testing dataset (same layout)
test_data=load(TestingData_File);
TV.T=test_data(:,1)';
TV.P=test_data(:,2:size(test_data,2))';
clear test_data;                                    %   Release raw testing data array

NumberofTrainingData=size(P,2);
NumberofTestingData=size(TV.P,2);
NumberofInputNeurons=size(P,1);

if Elm_Type~=REGRESSION
    %%%%%%%%%%%% Preprocessing the data of classification:
    %%%%%%%%%%%% map each class label to a +/-1 one-hot target column.
    label=unique(cat(2,T,TV.T));                    %   sorted distinct class labels over both sets
    number_class=length(label);
    NumberofOutputNeurons=number_class;

    %%%%%%%%%% Processing the targets of training
    temp_T=zeros(NumberofOutputNeurons, NumberofTrainingData);
    for i = 1:NumberofTrainingData
        temp_T(label == T(1,i), i)=1;               %   1 at the row of this sample's class
    end
    T=temp_T*2-1;                                   %   {0,1} -> {-1,+1}

    %%%%%%%%%% Processing the targets of testing
    temp_TV_T=zeros(NumberofOutputNeurons, NumberofTestingData);
    for i = 1:NumberofTestingData
        temp_TV_T(label == TV.T(1,i), i)=1;
    end
    TV.T=temp_TV_T*2-1;
end                                                 %   end if of Elm_Type

%%%%%%%%%%% Randomly generate input weights & hidden biases, and build the
%%%%%%%%%%% hidden-layer output matrix (one column per hidden neuron)
Htrainout=zeros(NumberofTrainingData,NumberofHiddenNeurons);
InputWeight=zeros(NumberofInputNeurons,NumberofHiddenNeurons);
HiddenBias=zeros(1,NumberofHiddenNeurons);

start_time_train=cputime;
for L = 1:NumberofHiddenNeurons
    InputWeight(:,L)=2*rand(NumberofInputNeurons,1)-1;  %   uniform in [-1,1] for every activation
    switch lower(ActivationFunction)
        case {'rbf'}
            % RBF width is used as a divisor in hidden_output, so it must
            % be strictly positive: resample until it is not (near) zero.
            temp=rand(1,1);
            while temp<10^-3
                temp = rand(1,1);
            end
            HiddenBias(L)=temp;
        case {'rbf_gamma'}
            HiddenBias(L)=0.5*rand(1,1);            %   gamma in [0, 0.5)
        otherwise
            HiddenBias(L)=2*rand(1,1)-1;            %   additive bias in [-1,1]
    end
    Htrainout(:,L)=hidden_output(P,InputWeight(:,L),HiddenBias(L),ActivationFunction,NumberofInputNeurons)';
end

%%%%%%%%%%% Calculate output weights OutputWeight (beta_i):
%%%%%%%%%%% minimum-norm least-squares solution via the pseudo-inverse
OutputWeight=pinv(Htrainout) * T';
TrainingTime=cputime-start_time_train;              %   CPU time (seconds) spent training the ELM

%%%%%%%%%%% Calculate the training accuracy
Y=(Htrainout * OutputWeight)';                      %   Y: the actual output of the training data
if Elm_Type == REGRESSION
    % RMSE over all output elements; plain mean avoids the toolbox-only mse()
    TrainingAccuracy=sqrt(mean((T(:) - Y(:)).^2));
end

%%%%%%%%%%% Calculate the output of testing input
start_time_test=cputime;
HidenTestOutput=zeros(NumberofTestingData,NumberofHiddenNeurons);
for L = 1:NumberofHiddenNeurons
    HidenTestOutput(:,L)=hidden_output(TV.P,InputWeight(:,L),HiddenBias(L),ActivationFunction,NumberofInputNeurons)';
end
TY=(HidenTestOutput * OutputWeight)';               %   TY: the actual output of the testing data
TestingTime=cputime-start_time_test;                %   CPU seconds predicting all testing data (computed but not returned)

if Elm_Type == REGRESSION
    TestingAccuracy=sqrt(mean((TV.T(:) - TY(:)).^2));   %   testing RMSE for regression case
end

if Elm_Type == CLASSIFIER
    %%%%%%%%%% Classification accuracy: fraction of samples whose
    %%%%%%%%%% highest-scoring output neuron matches the expected class
    MissClassificationRate_Training=0;
    MissClassificationRate_Testing=0;

    for i = 1 : size(T, 2)
        [x, label_index_expected]=max(T(:,i));
        [x, label_index_actual]=max(Y(:,i));
        if label_index_actual~=label_index_expected
            MissClassificationRate_Training=MissClassificationRate_Training+1;
        end
    end
    TrainingAccuracy=1-MissClassificationRate_Training/size(T,2);

    for i = 1 : size(TV.T, 2)
        [x, label_index_expected]=max(TV.T(:,i));
        [x, label_index_actual]=max(TY(:,i));
        if label_index_actual~=label_index_expected
            MissClassificationRate_Testing=MissClassificationRate_Testing+1;
        end
    end
    TestingAccuracy=1-MissClassificationRate_Testing/size(TV.T,2);
end

function y1=hidden_output(x,w,b,ActivationFunction,NumberofInputs)
% HIDDEN_OUTPUT  Response of a single hidden neuron over all input samples.
%
% Input:
%   x                  - input data, NumberofInputs x NumberofSamples
%   w                  - the neuron's input weight column vector (NumberofInputs x 1)
%   b                  - scalar bias for additive activations; kernel width
%                        ('rbf') or gamma ('rbf_gamma') for the RBF cases
%   ActivationFunction - activation name (see switch below)
%   NumberofInputs     - number of input features (rows of x)
%
% Output:
%   y1                 - 1 x NumberofSamples row vector of neuron outputs

switch lower(ActivationFunction)
    case {'sin','sine'}
        %%%%%%%% Sine
        y1=sin(w'*x+b);
    case {'rbf'}
        %%%%%%%% RBF: exp(-||x - w||^2 / b); b acts as the kernel width
        NumberofTraining=size(x,2);
        ind=ones(1,NumberofTraining);

        extend_weight=w(:,ind);                     %%   replicate center w for every sample
        if NumberofInputs==1
            tempH=-((x-extend_weight).^2);          %   1-D: no sum needed
        else
            tempH=-sum((x-extend_weight).^2);       %   squared distance per sample (column sum)
        end

        BiasMatrix=b(:,ind);
        tempH=tempH./BiasMatrix;                    %   divide by width
        clear extend_weight;
        y1=exp(tempH);
%         y1=exp(tempH)+0.0001;
    case {'rbf_gamma'}
        %%%%%%%% RBF (gamma form): exp(-gamma * ||x - w||^2)
        NumberofTraining=size(x,2);
        ind=ones(1,NumberofTraining);

        extend_weight=w(:,ind);                     %%   replicate center w for every sample
        if NumberofInputs==1
            tempH=-((x-extend_weight).^2);
        else
            tempH=-sum((x-extend_weight).^2);
        end

        BiasMatrix=b(:,ind);
        tempH=tempH.*BiasMatrix;                    %   multiply by gamma (only difference from 'rbf')
        clear extend_weight;
        y1=exp(tempH);
%         y1=exp(tempH)+0.0001;
    case {'tri'}
        %%%%%%%% Triangular basis: 1-|x1| inside [-1,1], 0 outside.
        % Vectorized fix: the original scalar if/elseif chain was wrong for
        % multi-sample input, because a MATLAB `if` on the row vector
        % abs(x1)>1 only fires when ALL elements satisfy it.
        x1=w'*x+b;
        y1=max(0, 1-abs(x1));
    case {'hardlim'}
        %%%%%%%% Hardlimit
        % NOTE(review): sign() yields -1/0/+1, not the 0/1 of MATLAB's
        % hardlim(); kept as-is to preserve the original behavior.
        x1=w'*x+b;
        y1=sign(x1);
    case {'gau'}
        %%%%%%%% Gaussian
        x1=w'*x+b;
        y1=exp(-x1.^2);
    case {'sig','sigmoid'}
        %%%%%%%% Sigmoid
        bias_vector = b*ones(1,size(x,2));
        y1=1./(1+exp(-(w'*x+bias_vector)));
    case {'windows'}
        %%%%%%%% Window: 1.0001 inside [-1,1], 0.0001 outside
        x1=w'*x+b;
        traina = x1<=1;
        trainb = x1>=-1;
        y1 = traina.*trainb+0.0001;
        %%%%%%%% More activation functions can be added here
    otherwise
        % Previously an unknown name fell through with y1 undefined,
        % producing a confusing downstream error; fail fast instead.
        error('hidden_output: unknown activation function ''%s''.', ActivationFunction);
end

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -