📄 elm_de.m
F = 1; tolerance = 0.02;
start_time_validation = cputime;

if (NP < 5)
   NP = 5;
   fprintf(1,' NP increased to minimal value 5\n');
end
if ((CR < 0) | (CR > 1))
   CR = 0.5;
   fprintf(1,'CR should be from interval [0,1]; set to default value 0.5\n');
end
if (itermax <= 0)
   itermax = 200;
   fprintf(1,'itermax should be > 0; set to default value 200\n');
end
refresh = floor(refresh);

%-----Initialize population and some arrays-------------------------------
pop = zeros(NP,D);                % initialize pop to gain speed
%----pop is a matrix of size NPxD. It will be initialized-------------
%----with random values between the min and max values of the---------
%----parameters-------------------------------------------------------
for i = 1:NP
   pop(i,:) = XVmin + rand(1,D).*(XVmax - XVmin);
end

popold    = zeros(size(pop));     % toggle population
val       = zeros(1,NP);          % create and reset the "cost array"
bestmem   = zeros(1,D);           % best population member ever
bestmemit = zeros(1,D);           % best population member in iteration
nfeval    = 0;                    % number of function evaluations
brk       = 0;

%------Evaluate the best member after initialization----------------------
ibest = 1;                        % start with first population member
[val(1),OutputWeight] = ELM_X(Elm_Type,pop(ibest,:),P,T,VV,NumberofHiddenNeurons);
bestval    = val(1);              % best objective function value so far
nfeval     = nfeval + 1;
bestweight = OutputWeight;
for i = 2:NP                      % check the remaining members
   [val(i),OutputWeight] = ELM_X(Elm_Type,pop(i,:),P,T,VV,NumberofHiddenNeurons);
   nfeval = nfeval + 1;
   if (val(i) < bestval)          % if member is better
      ibest      = i;             % save its location
      bestval    = val(i);
      bestweight = OutputWeight;
   end
end
bestmemit = pop(ibest,:);         % best member of current iteration
bestvalit = bestval;              % best value of current iteration
bestmem   = bestmemit;            % best member ever

%------DE-Minimization---------------------------------------------
%------popold is the population which has to compete. It is--------
%------static through one iteration. pop is the newly--------------
%------emerging population.----------------------------------------
pm1 = zeros(NP,D);                % initialize population matrix 1
pm2 = zeros(NP,D);                % initialize population matrix 2
pm3 = zeros(NP,D);                % initialize population matrix 3
pm4 = zeros(NP,D);                % initialize population matrix 4
pm5 = zeros(NP,D);                % initialize population matrix 5
bm  = zeros(NP,D);                % initialize bestmember matrix
ui  = zeros(NP,D);                % intermediate population of perturbed vectors
mui = zeros(NP,D);                % mask for intermediate population
mpo = zeros(NP,D);                % mask for old population
rot  = (0:1:NP-1);                % rotating index array (size NP)
rotd = (0:1:D-1);                 % rotating index array (size D)
rt  = zeros(NP);                  % another rotating index array
rtd = zeros(D);                   % rotating index array for exponential crossover
a1  = zeros(NP);                  % index array
a2  = zeros(NP);                  % index array
a3  = zeros(NP);                  % index array
a4  = zeros(NP);                  % index array
a5  = zeros(NP);                  % index array
ind = zeros(4);

iter = 1;
while (~(iter > itermax))
   popold = pop;                  % save the old population

   ind = randperm(4);             % index pointer array

   a1 = randperm(NP);             % shuffle locations of vectors
   rt = rem(rot+ind(1),NP);       % rotate indices by ind(1) positions
   a2 = a1(rt+1);                 % rotate vector locations
   rt = rem(rot+ind(2),NP);
   a3 = a2(rt+1);
   rt = rem(rot+ind(3),NP);
   a4 = a3(rt+1);
   rt = rem(rot+ind(4),NP);
   a5 = a4(rt+1);

   pm1 = popold(a1,:);            % shuffled population 1
   pm2 = popold(a2,:);            % shuffled population 2
   pm3 = popold(a3,:);            % shuffled population 3
   pm4 = popold(a4,:);            % shuffled population 4
   pm5 = popold(a5,:);            % shuffled population 5

   for i = 1:NP                   % population filled with the best member
      bm(i,:) = bestmemit;        % of the last iteration
   end

   mui = rand(NP,D) < CR;         % all random numbers < CR are 1, 0 otherwise

   if (strategy > 5)
      st = strategy - 5;          % binomial crossover
   else
      st = strategy;              % exponential crossover
      mui = sort(mui');           % transpose, collect 1's in each column
      for i = 1:NP
         n = floor(rand*D);
         if n > 0
            rtd = rem(rotd+n,D);
            mui(:,i) = mui(rtd+1,i);   % rotate column i by n
         end
      end
      mui = mui';                 % transpose back
   end
   mpo = mui < 0.5;               % inverse mask to mui

   if (st == 1)                              % DE/best/1
      ui = bm + F*(pm1 - pm2);               % differential variation
      ui = popold.*mpo + ui.*mui;            % crossover
   elseif (st == 2)                          % DE/rand/1
      ui = pm3 + F*(pm1 - pm2);              % differential variation
      ui = popold.*mpo + ui.*mui;            % crossover
   elseif (st == 3)                          % DE/rand-to-best/1
      ui = popold + F*(bm - popold) + F*(pm1 - pm2);
      ui = popold.*mpo + ui.*mui;            % crossover
   elseif (st == 4)                          % DE/best/2
      ui = bm + F*(pm1 - pm2 + pm3 - pm4);   % differential variation
      ui = popold.*mpo + ui.*mui;            % crossover
   elseif (st == 5)                          % DE/rand/2
      ui = pm5 + F*(pm1 - pm2 + pm3 - pm4);  % differential variation
      ui = popold.*mpo + ui.*mui;            % crossover
   end

%-----Select which vectors are allowed to enter the new population------------
   for i = 1:NP
      [tempval,OutputWeight] = ELM_X(Elm_Type,ui(i,:),P,T,VV,NumberofHiddenNeurons);  % check cost of competitor
      nfeval = nfeval + 1;
      if (tempval <= val(i))      % if competitor is better than value in "cost array"
         pop(i,:) = ui(i,:);      % replace old vector with new one (for new iteration)
         val(i)   = tempval;      % save value in "cost array"

         %----we update bestval only in case of success to save time-----------
         if bestval - tempval > tolerance*bestval           % clearly better than the best one ever
            bestval    = tempval;                           % new best value
            bestmem    = ui(i,:);                           % new best parameter vector ever
            bestweight = OutputWeight;
         elseif abs(tempval - bestval) < tolerance*bestval  % tie within tolerance: prefer the smaller output-weight norm
            if norm(OutputWeight,2) < norm(bestweight,2)
               bestval    = tempval;                        % new best value
               bestmem    = ui(i,:);                        % new best parameter vector ever
               bestweight = OutputWeight;
            end
         end
      end
   end   %---end for imember=1:NP

   bestmemit = bestmem;           % freeze the best member of this iteration for the coming
                                  % iteration. This is needed for some of the strategies.

%----Output section----------------------------------------------------------
   if (refresh > 0)
      if (rem(iter,refresh) == 0)
         fprintf(1,'Iteration: %d, Best: %f, F: %f, CR: %f, NP: %d\n',iter,bestval,F,CR,NP);
%        for n = 1:D
%           fprintf(1,'best(%d) = %f\n',n,bestmem(n));
%        end
      end
   end

   iter = iter + 1;
end   %---end while ((iter < itermax) ...

end_time_validation = cputime;
TrainingTime = end_time_validation - start_time_validation

%%%%%%%%%%%%% Testing the performance of the best population
Beta = mean(abs(bestweight))               % mean absolute output weight of the best individual
NumberInputNeurons   = size(P, 1);
NumberofTrainingData = size(P, 2);
NumberofTestingData  = size(TV.P, 2);
Gain = 1;

% Decode the best individual into input weights and hidden biases
temp_weight_bias    = reshape(bestmem, NumberofHiddenNeurons, NumberInputNeurons+1);
InputWeight         = temp_weight_bias(:, 1:NumberInputNeurons);
BiasofHiddenNeurons = temp_weight_bias(:, NumberInputNeurons+1);

tempH = InputWeight*P;
ind = ones(1,NumberofTrainingData);
BiasMatrix = BiasofHiddenNeurons(:,ind);   % extend the bias vector BiasofHiddenNeurons to match the dimension of H
tempH = tempH + BiasMatrix;
clear BiasMatrix
H = 1 ./ (1 + exp(-Gain*tempH));           % sigmoid hidden-layer output for the training data
clear tempH;
% OutputWeight=pinv(H') * T';
Y = (H' * bestweight)';                    % training outputs of the evolved network

tempH_test = InputWeight*TV.P;
ind = ones(1,NumberofTestingData);
BiasMatrix = BiasofHiddenNeurons(:,ind);   % extend the bias vector BiasofHiddenNeurons to match the dimension of H_test
tempH_test = tempH_test + BiasMatrix;
H_test = 1 ./ (1 + exp(-Gain*tempH_test)); % sigmoid hidden-layer output for the testing data
TY = (H_test' * bestweight)';              % testing outputs of the evolved network

if Elm_Type == 0
    TrainingAccuracy = sqrt(mse(T - Y))        % training accuracy (RMSE) for regression case
    TestingAccuracy  = sqrt(mse(TV.T - TY))    % testing accuracy (RMSE) for regression case
end

if Elm_Type == 1
    %%%%%%%%%% Calculate training & testing classification accuracy
    MissClassificationRate_Testing  = 0;
    MissClassificationRate_Training = 0;

    for i = 1 : size(T, 2)
        [x, label_index_expected] = max(T(:,i));
        [x, label_index_actual]   = max(Y(:,i));
        if label_index_actual ~= label_index_expected
            MissClassificationRate_Training = MissClassificationRate_Training + 1;
        end
    end
    TrainingAccuracy = 1 - MissClassificationRate_Training/size(T,2)

    for i = 1 : size(TV.T, 2)
        [x, label_index_expected] = max(TV.T(:,i));
        [x, label_index_actual]   = max(TY(:,i));
        if label_index_actual ~= label_index_expected
            MissClassificationRate_Testing = MissClassificationRate_Testing + 1;
        end
    end
    TestingAccuracy = 1 - MissClassificationRate_Testing/size(TV.T,2)
end
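The fitness function ELM_X called above is not part of this listing. The sketch below is a hypothetical reconstruction, assuming the setup implied by the rest of the script: each DE individual encodes the input weights and hidden biases, the output weights are obtained from the Moore-Penrose pseudoinverse on the training data, and the fitness is the error on the validation set VV (RMSE for regression, misclassification rate for classification). The name, signature, and the decoding scheme are taken from how the script calls and uses the function; everything else is an assumption.

function [fitness, OutputWeight] = ELM_X(Elm_Type, individual, P, T, VV, NumberofHiddenNeurons)
% Hypothetical sketch of the fitness function; the real ELM_X.m is not shown here.
NumberInputNeurons = size(P, 1);
Gain = 1;

% Decode one DE individual exactly as the testing section of elm_de.m does
temp_weight_bias    = reshape(individual, NumberofHiddenNeurons, NumberInputNeurons+1);
InputWeight         = temp_weight_bias(:, 1:NumberInputNeurons);
BiasofHiddenNeurons = temp_weight_bias(:, NumberInputNeurons+1);

% Sigmoid hidden-layer output on the training data
H = 1 ./ (1 + exp(-Gain*(InputWeight*P + BiasofHiddenNeurons*ones(1, size(P,2)))));

% Least-squares output weights (the standard analytical ELM solution)
OutputWeight = pinv(H') * T';

% Hidden-layer output and predictions on the validation set VV
H_v = 1 ./ (1 + exp(-Gain*(InputWeight*VV.P + BiasofHiddenNeurons*ones(1, size(VV.P,2)))));
Y_v = (H_v' * OutputWeight)';

if Elm_Type == 0                                   % regression: validation RMSE
    fitness = sqrt(mse(VV.T - Y_v));
else                                               % classification: validation error rate
    errors = 0;
    for i = 1 : size(VV.T, 2)
        [x, label_index_expected] = max(VV.T(:,i));
        [x, label_index_actual]   = max(Y_v(:,i));
        if label_index_actual ~= label_index_expected
            errors = errors + 1;
        end
    end
    fitness = errors / size(VV.T, 2);
end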
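elm_de.m is a plain script, so it expects its data and DE settings to already exist in the workspace. The snippet below is an illustrative driver, not part of the original package: the variable names are the ones the script reads, while the concrete parameter values and the assumption that the data sets are already loaded are my own.

% Hypothetical driver for elm_de.m (illustrative values only).
% P, T (training), VV.P, VV.T (validation) and TV.P, TV.T (testing) are
% assumed to be in the workspace already, with samples stored column-wise.

Elm_Type = 1;                       % 0 = regression, 1 = classification
NumberofHiddenNeurons = 20;         % assumed network size
NumberInputNeurons = size(P, 1);

D     = NumberofHiddenNeurons*(NumberInputNeurons + 1);  % genes: input weights + hidden biases
XVmin = -ones(1, D);                % assumed search range for every gene
XVmax =  ones(1, D);

NP       = 30;                      % population size (the script forces NP >= 5)
CR       = 0.8;                     % crossover probability
itermax  = 50;                      % number of DE generations
strategy = 7;                       % 7-5 = 2 -> DE/rand/1 with binomial crossover
refresh  = 10;                      % print progress every 10 generations

elm_de                              % run the script; it prints TrainingTime, Beta and the accuracies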