
pcalda.m
PCA-LDA for face recognition (MATLAB)

function [PcaEigenVal, LdaEigenVal, LdaMat, SphereMat] = pcalda(Sample, nPcaVecNum, nLdaVecNum, fThPcaEnergyRatio)
% PCA-LDA linear discriminant analysis.
% Sample: one sample per row; columns 1:end-1 are features, the last column is the class label.
% fThPcaEnergyRatio: PCA components whose energy ratio (|eigenvalue| / total) falls below this threshold are discarded (e.g. 1e-7).
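%
% Example call (illustrative sketch; X is an N-by-d feature matrix and
% labels an N-by-1 vector of class ids, both assumed to exist already):
%   Sample = [X, labels];
%   [PcaEig, LdaEig, LdaMat, SphereMat] = pcalda(Sample, 40, 20, 1e-7);
%   Y = X * LdaMat;   % one row of LDA features per sample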

if nargin < 1
    disp('Invalid number of input arguments');
    return;
end
if nargin == 1
    nPcaVecNum = size(Sample,2)-1;
    nLdaVecNum = nPcaVecNum;
    fThPcaEnergyRatio = eps;
end
if nargin == 2
    nLdaVecNum = nPcaVecNum;
    fThPcaEnergyRatio = eps;
end
if nargin == 3
    fThPcaEnergyRatio = eps;
end
if fThPcaEnergyRatio < eps
    fThPcaEnergyRatio = eps;
end
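% Defaults: keep up to size(Sample,2)-1 PCA directions, the same number of
% LDA directions, and an effectively-zero energy threshold (eps).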

% ====== Initialization
data_n = size(Sample, 1);
feature_n = size(Sample,2)-1;
featureMatrix = Sample(:, 1:end-1); 
classLabel = Sample(:, end);
clear Sample;
[diffClassLabel, classSize] = countele(classLabel); % countele (not a built-in): distinct class labels and their counts; see the sketch at the end of this file
class_n = length(diffClassLabel);
sampleMean = mean(featureMatrix);

% ====== Compute B and W
% ====== B: between-class scatter matrix
% ====== W:  within-class scatter matrix
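% In matrix terms the loop below accumulates (with mu = sampleMean and
% mu_i = the mean of class i):
%   B = sum_i (mu_i - mu)' * (mu_i - mu)
%   W = sum_i sum_{x in class i} (x - mu_i)' * (x - mu_i)
% (The commented-out variant weights each class term of B by its size.)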
B = zeros(feature_n,feature_n);
W = zeros(feature_n,feature_n);
for i = 1:class_n
    index = find(classLabel==diffClassLabel(i));
    X_i = featureMatrix(index, :);
    classMean = mean(X_i,1);
    % B = B + length(index) * (classMean - sampleMean)' * (classMean - sampleMean);
    % candidate: B = B + (classMean - sampleMean)' * (classMean - sampleMean);
    B = B + (classMean - sampleMean)' * (classMean - sampleMean);
    % same as W = W + (length(index)-1) * cov(X_i);
    for j = 1:feature_n
        X_i(:,j) = X_i(:,j) - classMean(1,j);
    end
    W = W + X_i' * X_i;
end

% calculate V such that V'BV = D (eigenValues)
[VMat, DMat] = eig(B);
PcaEigenVal = diag(DMat);
%ordering
[Temp, nIndex] = sort(PcaEigenVal); % PcaEigenVal should be positive in this case unless caused by calculation errors
clear Temp;
nIndex = flipud(nIndex);
PcaEigenVal = PcaEigenVal(nIndex);
VMat = VMat(:,nIndex);
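% eigen-pairs are now in descending order of eigenvalue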
% keep the leading components whose eigenvalue exceeds eps and whose energy ratio exceeds fThPcaEnergyRatio
SumEig = sum(abs(PcaEigenVal));
nPcaVecNum = min(nPcaVecNum, length(PcaEigenVal)); % cannot keep more directions than exist
nNewPcaVecNum = 1; % fallback so nNewPcaVecNum is defined even if no component passes the test below
for i = nPcaVecNum : -1 : 1
    Ratio = abs(PcaEigenVal(i))/SumEig;
    if ( (PcaEigenVal(i)>eps) && (Ratio>fThPcaEnergyRatio) )
        nNewPcaVecNum = i;
        break;
    end
end
if nLdaVecNum > nNewPcaVecNum
    nLdaVecNum = nNewPcaVecNum;
end
PcaEigenVal = PcaEigenVal(1:nNewPcaVecNum);
VMat = VMat(:,1:nNewPcaVecNum); % same as one step VMat = VMat(:,nIndex(1:nNewPcaVecNum));
DMat = diag(PcaEigenVal);

% calculate Z =  V*D^-0.5
Temp = zeros(nNewPcaVecNum,nNewPcaVecNum);
for i = 1 : nNewPcaVecNum
    Temp(i,i) = DMat(i,i)^-0.5;
end
ZMat = VMat * Temp;
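% By construction Z' * B * Z is the identity on the retained subspace:
% V' * B * V = D for the kept eigenvectors, and D^-0.5 whitens it.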

% calculate U such that U'(Z'WZ)U = D (eigenValues)
[UMat, DMat] = eig( ZMat' * W * ZMat );
LdaEigenVal = diag(DMat);
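% In the whitened coordinates the between-class scatter is the identity,
% so the eigenvectors of Z'*W*Z with the SMALLEST eigenvalues maximize the
% Fisher ratio; that is why the smallest ones are kept below.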
%for i = 1 : nNewPcaVecNum
%    if ( LdaEigenVal<eps )
%        LdaEigenVal = eps;
%    end
%end
%ordering
[Temp, nIndex] = sort(abs(LdaEigenVal));
clear Temp;
%nIndex = flipud(nIndex);
LdaEigenVal = LdaEigenVal(nIndex);
DMat = DMat(:,nIndex);
UMat = UMat(:,nIndex);
% keep the nLdaVecNum eigenvectors with the smallest eigenvalues (least within-class scatter after whitening)
LdaEigenVal = LdaEigenVal(1:nLdaVecNum);
UMat = UMat(:,1:nLdaVecNum); % same as one step UMat = UMat(:,nIndex(1:nLdaVecNum));
DMat = diag(LdaEigenVal);

% calculate LDA matrix
LdaMat = ZMat * UMat;
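% LdaMat' * W * LdaMat equals diag(LdaEigenVal), so projecting feature rows
% with X * LdaMat * SphereMat (SphereMat = D^-0.5) yields features whose
% within-class scatter is the identity.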

% calculate sphering matrix SphereMat = D^-0.5 (a classification matrix would be LdaMat * SphereMat)
SphereMat = zeros(nLdaVecNum,nLdaVecNum);
for i = 1 : nLdaVecNum
    SphereMat(i,i) = DMat(i,i)^-0.5;
end
% ClassifyMat = LdaMat * SphereMat;

return;
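
% ----------------------------------------------------------------------
% countele is not a MATLAB built-in. The local function below is a minimal
% stand-in inferred only from how countele is called above (distinct class
% labels plus their per-label counts); if the original countele shipped
% with this package is on your path, delete this local sketch and use that
% version instead.
function [uniqueVal, valCount] = countele(x)
% Distinct values of x (sorted) and how many times each occurs (assumed behavior).
uniqueVal = unique(x(:));
valCount = zeros(size(uniqueVal));
for k = 1:length(uniqueVal)
    valCount(k) = sum(x(:) == uniqueVal(k));
end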
