lda_bb.m
function [newSample, discrim_vec] = lda(sample, discrim_vec_n)
%LDA Linear discriminant analysis
%   Usage:
%   [NEWSAMPLE, DISCRIM_VEC] = lda(SAMPLE, DISCRIM_VEC_N)
%       SAMPLE: Sample data with class information
%           (Each row of SAMPLE is a sample point, with the
%           last column being the class label ranging from 1 to
%           no. of classes.)
%       DISCRIM_VEC_N: No. of discriminant vectors
%       NEWSAMPLE: New sample after projection
%
%   Reference:
%       J. Duchene and S. Leclercq, "An Optimal Transformation for
%       Discriminant and Principal Component Analysis," IEEE Trans. on
%       Pattern Analysis and Machine Intelligence, Vol. 10, No. 6,
%       November 1988.
%
% Type "lda" for a self-demo.
% Roger Jang, 990829
if nargin == 0, selfdemo; return; end
if nargin < 2, discrim_vec_n = size(sample,2)-1; end
% ====== Initialization
data_n = size(sample, 1);
feature_n = size(sample,2)-1;
featureMatrix = sample(:, 1:end-1);
classLabel = sample(:, end);
[diffClassLabel, classSize] = countele(classLabel);   % distinct labels and their counts (toolbox helper, not a builtin)
class_n = length(diffClassLabel);
sampleMean = mean(featureMatrix);
% ====== Compute B and W
% ====== B: between-class scatter matrix
% ====== W: within-class scatter matrix
% MMM = \sum_k m_k*mu_k*mu_k^T
MMM = zeros(feature_n, feature_n);
for i = 1:class_n
    index = find(classLabel==diffClassLabel(i));
    classMean = mean(featureMatrix(index, :));
    MMM = MMM + length(index)*classMean'*classMean;
end
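% Scatter-matrix identities (a short derivation sketch, with X = featureMatrix
% and mu = sampleMean):
%   W = \sum_k \sum_{x in class k} (x - mu_k)'*(x - mu_k) = X'*X - MMM
%   B = \sum_k m_k*(mu_k - mu)'*(mu_k - mu) = MMM - data_n*mu'*mu
% These let the next two lines avoid a second per-class loop.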
W = featureMatrix'*featureMatrix - MMM;
B = MMM - data_n*sampleMean'*sampleMean;
% ====== Find the best discriminant vectors
invW = inv(W);   % assumes W is nonsingular, i.e., enough samples relative to feature_n
Q = invW*B;
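% (A numerically safer alternative, sketched here but not used: solve the
% generalized eigenproblem eig(B, W) directly instead of forming inv(W).)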
D = [];
for i = 1:discrim_vec_n
    [eigVec, eigVal] = eig(Q);
    [maxEigVal, index] = max(abs(diag(eigVal)));
    D = [D, eigVec(:, index)];   % Each column of D is an eigenvector
    % Deflate Q: after this update D'*Q = 0, so the dominant eigenvector
    % found in the next iteration is orthogonal to all columns of D.
    Q = (eye(feature_n)-invW*D*inv(D'*invW*D)*D')*invW*B;
end
newSample = [featureMatrix*D(:,1:discrim_vec_n) classLabel];
discrim_vec = D;
%---------------------------------------------------
function selfdemo
% ====== Self demo using IRIS dataset
% ====== 1. Plot IRIS data after LDA for dimension reduction to 2D
load iris.dat
[data, discrim_vec] = feval(mfilename, iris);
index1 = find(iris(:,5)==1);
index2 = find(iris(:,5)==2);
index3 = find(iris(:,5)==3);
figure;
plot(data(index1, 1), data(index1, 2), '*', ...
     data(index2, 1), data(index2, 2), 'o', ...
     data(index3, 1), data(index3, 2), 'x');
legend('Class 1', 'Class 2', 'Class 3');
title('LDA projection of IRIS data onto the first 2 discriminant vectors');
looError = looknn([data(:, 1:2) iris(:, end)]);   % leave-one-out KNN error count (toolbox helper, not a builtin)
xlabel(['Leave-one-out misclassification count = ', int2str(looError)]);
axis equal; axis tight;
figure;
plot(data(index1, 3), data(index1, 4), '*', ...
     data(index2, 3), data(index2, 4), 'o', ...
     data(index3, 3), data(index3, 4), 'x');
legend('Class 1', 'Class 2', 'Class 3');
title('LDA projection of IRIS data onto the last 2 discriminant vectors');
looError = looknn([data(:, 3:4) iris(:, end)]);
xlabel(['Leave-one-out misclassification count = ', int2str(looError)]);
axis equal; axis tight;
% ====== 2. Leave-one-out errors after using LDA for dimension reduction
load iris.dat;
dataNum = size(iris, 1);
fprintf('Leave-one-out analysis:\n');
fprintf('\tFull data:\n');
wrong = looknn(iris);
correct = size(iris, 1) - wrong;
fprintf('\t\tLOO error count = %g\n', wrong);
fprintf('\t\tRecognition rate = %g/%g = %5.2f%%\n', correct, dataNum, ...
    correct/dataNum*100);
newdata = feval(mfilename, iris);   % use mfilename (as above) so the demo works under any saved file name
for n = 4:-1:1
    fprintf('\tPartial data after LDA (dimension = %g):\n', n);
    wrong = looknn([newdata(:, 1:n) newdata(:, end)]);
    correct = size(iris, 1) - wrong;
    fprintf('\t\tLOO error count = %g\n', wrong);
    fprintf('\t\tRecognition rate = %g/%g = %5.2f%%\n', correct, dataNum, ...
        correct/dataNum*100);
end
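% (Usage note, a sketch of the demo's assumptions: the self-demo needs
% iris.dat plus the helper functions countele and looknn on the MATLAB path.
% MATLAB dispatches on the saved file name, so if the file is kept as
% lda_bb.m, type "lda_bb" at the prompt to run it.)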