% lincTrainWidro.m
function [recogRate, coef, allRecogRate, allCoef]=lincTrainWidro(DS, trainParam, plotOpt)
% lincTrainWidro: Linear classifier (Perceptron) training using (revised) Widrow-Hoff method
% Usage: [recogRate, coef, allRecogRate, allCoef] = lincTrainWidro(DS, trainParam, plotOpt)
%	DS: dataset struct with DS.input (dim x dataNum) and DS.output (1 x dataNum, exactly two distinct class labels)
%	trainParam: struct with fields eta (learning rate), maxIter, useMre; defaults used when empty
%	plotOpt: 1 to animate the decision boundary (2-D input only); default 0
%	recogRate: best recognition rate over all iterations
%	coef: (dim+1)-vector of hyperplane coefficients achieving recogRate
%
% Type "lincTrainWidro" for a demo.
% Roger Jang, 20040910
if nargin<1, selfdemo; return; end
[dim, dataNum]=size(DS.input);
if nargin<2 || isempty(trainParam)	% || short-circuits, so isempty is skipped when trainParam is absent
	trainParam.eta=0.001;
	trainParam.maxIter=500;
	trainParam.useMre=1;
end
if nargin<3, plotOpt=0; end
if plotOpt==1 && dim==2
	dcprDataPlot(DS);
	axis image
	limit=axis;
	lineH = line(limit(1:2), limit(3:4), 'linewidth', 2, 'erase', 'xor', 'color', 'k');
end
uniqueOutput=unique(DS.output);
if length(uniqueOutput)~=2, error('Must be 2-class problem!'); end
% Convert DS.output into -1 or 1
index=find(DS.output==max(uniqueOutput));
DS.output=-ones(1, size(DS.output,2));
DS.output(index)=1;
allRecogRate=-ones(1, trainParam.maxIter);	% -1 marks unused slots; trimmed after the loop
allCoef=cell(1, trainParam.maxIter);
if trainParam.useMre==1
	[allRecogRate(1), coef]=lincTrainMre(DS);	% Initial parameters via lincTrainMre
else
	coef=randn(dim+1,1);	% Initial parameters via random numbers
end
% ====== The main loop
for i=1:trainParam.maxIter
	computed = lincEval(DS, coef);
	allCoef{i}=coef;
	allRecogRate(i)=sum(DS.output==computed)/dataNum;
	fprintf('%d/%d: %g%%\n', i, trainParam.maxIter, allRecogRate(i)*100);
	% Vectorized version, suitable for small data set
	% coef=coef+trainParam.eta*sum([DS.input; ones(1,dataNum)]*diag(DS.output-computed), 2);
	% For-loop version, suitable for big data set
	grad=0*coef;
	for j=1:dataNum
		grad=grad+(computed(j)-DS.output(j))*[DS.input(:,j); 1];
	end
	gradLength=norm(grad);
	if gradLength==0, break; end	% Zero gradient ==> every sample classified correctly
	coef=coef-trainParam.eta*grad/gradLength;	% Normalized steepest descent (reuse precomputed norm)
	if plotOpt==1 && dim==2
		set(lineH, 'ydata', (-coef(3)-coef(1)*limit(1:2))/coef(2));
		drawnow
	end
	% ====== Update step size adaptively from the last 5 recognition rates
	if i>=5
		if all(diff(allRecogRate(i-4:i))>0)	% Monotonic improvement ==> speed up
			trainParam.eta = 1.1*trainParam.eta;
			fprintf('Increase trainParam.eta to %f\n', trainParam.eta);
		end
		if all(sign(diff(allRecogRate(i-4:i)))==[-1 1 -1 1]) || all(sign(diff(allRecogRate(i-4:i)))==[1 -1 1 -1])	% Oscillation ==> slow down
			trainParam.eta = 0.9*trainParam.eta;
			fprintf('Decrease trainParam.eta to %f\n', trainParam.eta);
		end
	end
end
% ====== Keep only the iterations actually performed, then pick the best
index=find(allRecogRate<0);
allRecogRate(index)=[];
allCoef(index)=[];
[recogRate, index]=max(allRecogRate);
coef=allCoef{index};
if plotOpt
	figure
	plot(1:length(allRecogRate), allRecogRate*100, 1:length(allRecogRate), allRecogRate*100, '.');
	xlabel('No. of iteration');
	ylabel('Recognition rate (%)');
	grid on
	line(index, allRecogRate(index)*100, 'marker', 'o', 'color', 'r');
	fprintf('Max. recog. rate = %.2f at %d iteration.\n', allRecogRate(index)*100, index);
end
% ====== Self demo
function coef=selfdemo
% selfdemo: Demonstrate lincTrainWidro on 2000 random 2-D points
% separated by the line x1+x2=0.
sampleCount = 2000;
DS.input = 2*rand(2, sampleCount)-1;	% Uniform samples in [-1, 1]^2
DS.output = -ones(1, sampleCount);
DS.output(DS.input(1,:)+DS.input(2,:)>0) = 1;	% Points above the line get label 1
plotOpt = 1;
coef = feval(mfilename, DS, [], plotOpt);