corr_lda.m
function v=corr_LDA(X,C,nh,h)
% Manli Zhu and Aleix Martinez, "Pruning Noisy Bases in Discriminant
% Analysis", IEEE Transactions on Neural Networks.
%
% Use a correlation-based criterion to pick principal components and
% reconstruct SigmaX^{-1}*SigmaB from them.
%
% Input:
%   X  -- n-by-p training data (samples of each class stored in consecutive rows)
%   C  -- number of classes
%   nh -- 1-by-C vector with the number of samples in each class
%   h  -- confidence level used to decide how many PCs to keep
%
% Output:
%   v  -- the eigenvectors (pruned discriminant bases)
%
% Copyrighted code
% (c) Manli Zhu & Aleix M Martinez
%
% For additional information contact the authors
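%
% Example call (a minimal sketch; the data below is purely illustrative and
% not part of the original code -- it assumes two Gaussian classes stacked
% row-wise in X, as required above):
%   Xtr = [randn(20,10)+1; randn(20,10)-1];  % 40 samples, 10 features, 2 classes
%   nh  = [20 20];                           % samples per class, in row order
%   v   = corr_LDA(Xtr, 2, nh, 0.95);        % h = 0.95 confidence level
%   Ytr = Xtr * v;                           % project data onto the pruned bases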
n = size(X,1);
p = size(X,2);
% class means -- samples of class i are assumed to occupy rows
% start+1:start+nh(i) of X
classmean = zeros(C,p);
start = 0;
for i=1:C
    temp = X(start+1:start+nh(i),:);
    classmean(i,:) = mean(temp,1);
    start = sum(nh(1:i));
end
clear temp;
%calculate the between-class scatter matrix from the class means
meanx = mean(X);
SigmaB = zeros(p,p);
for i=1:C
    temp = classmean(i,:)-meanx;
    SigmaB = SigmaB + temp'*temp*nh(i);
end
SigmaB = SigmaB/n;
clear temp;
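% In matrix notation the loop above computes
%   SigmaB = (1/n) * sum_i nh(i) * (m_i - m)' * (m_i - m),
% where m_i is the i-th class mean (a row vector) and m is the global mean.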
opts.disp=0;
% eigenvectors/eigenvalues of the between-class scatter
rankB = rank(SigmaB);
[vb,db] = eigs(SigmaB,rankB,'LM',opts);
db = diag(db)';
% principal components of the (biased) total covariance SigmaX = (1/n)*Xc'*Xc,
% where Xc is the centered data
temp = X-repmat(meanx,n,1);
SigmaX = temp'*temp/n;
rankX = rank(SigmaX);
[vx,dx] = eigs(SigmaX,rankX,'LM',opts);
dx = diag(dx)';
% %When p > n, use the following (eigendecomposition of the n-by-n Gram
% %matrix) to get vx and dx instead:
% covx=temp*temp';
% covx=covx/(n);
% rankX=rank(covx);
% [vx,dx] = eigs(covx,rankX,'LM',opts);
% vx = n^(-0.5)*temp'*vx*dx^(-0.5);
% dx=diag(dx)';
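% This works because Xc*Xc'/n and Xc'*Xc/n share their nonzero eigenvalues:
% if (Xc*Xc'/n)*u = d*u, then w = Xc'*u/sqrt(n*d) is a unit eigenvector of
% Xc'*Xc/n with the same eigenvalue d, which is what the commented line
% vx = n^(-0.5)*temp'*vx*dx^(-0.5) computes column by column.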
% sort the PCs by decreasing eigenvalue
[dx,ind] = sort(real(dx));
ind = fliplr(ind);
dx = fliplr(dx);
vx = vx(:,ind);
% correlation-based criterion: I(j) measures how much of the between-class
% subspace spanned by vb is captured by the j-th PC
I = sum(((vx'*vb).^2)');
% re-sort the eigenvectors and eigenvalues by decreasing correlation value
[I,ind] = sort(I);
ind = fliplr(ind);
vx = vx(:,ind);
dx = dx(ind);
I = fliplr(I);
I = I/(C-1);   % normalize by C-1, the maximum rank of SigmaB
lambda = max(I);
% exponential confidence-interval cutoff: solving 1 - exp(-lambda*s) = h
% for s gives the number of PCs to keep
s = floor(-log(1-h)/lambda);
a = min(s,rankX);
dx = dx(1:a);
vx = vx(:,1:a);
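% Worked instance of the cutoff (illustrative numbers only): with h = 0.95
% and lambda = 1, s = floor(-log(0.05)/1) = floor(2.996) = 2, so at most two
% PCs survive the pruning step (and never more than rankX).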
% reconstruct SigmaX^{-1}*SigmaB on the retained PCs:
% rat(i,j) = db(i) * (vb(:,i)'*vx(:,j)) / dx(j)
rat1 = db'*(1./dx);
rat2 = vb'*vx;
rat = rat1.*rat2;
clear rat1 rat2;
new_mtx = zeros(p,p);
for i=1:rankB
    tmpvx = vx*rat(i,:)';
    new_mtx = new_mtx + tmpvx*vb(:,i)';
end
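% The loop above amounts to
%   new_mtx = vx * diag(1./dx) * vx' * SigmaB,
% i.e. the pseudo-inverse of SigmaX restricted to the retained PCs, times the
% between-class scatter; its leading eigenvectors are the pruned LDA bases.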
[v,d]=eigs(new_mtx,rank(new_mtx),'LM',opts);