posteriorprobs.m
% ==========================================================
%
% Neural Networks A Classroom Approach
% Satish Kumar
% Copyright Tata McGraw Hill, 2004
%
% MATLAB code that plots the posterior probabilities for
% a two-class one dimensional Gaussian distributed data
% Reference: Table 7.1; Page 223
%
% ==========================================================
q1 = 500; % Number of points Class 1
q2 = 300; % Number of points Class 2
mu1 = 1.5; % Set means and variances for both the classes
mu2 = 3.0;
var1 = 0.25;
var2 = 0.25;
nbins = 35;
data1 = (sqrt(var1) * randn(1, q1) + mu1)'; % Generate scatter
data2 = (sqrt(var2) * randn(1, q2) + mu2)';
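% randn draws samples from N(0,1); scaling by sqrt(var_k) and shifting by
% mu_k turns them into samples from N(mu_k, var_k) for each class.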
figure;
x = linspace(0, 5, nbins); % Generate the x-space for the p.d.f.
norm_factor1 = (sqrt(2 * pi * var1)); % Generate the p.d.f.
norm_factor2 = (sqrt(2 * pi * var2));
p1 = exp(-((x - mu1) .^ 2) /(2 * var1))/norm_factor1;
p2 = exp(-((x - mu2) .^ 2) /(2 * var2))/norm_factor2;
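% The two lines above evaluate the univariate Gaussian density
%   p(x|Ck) = exp(-(x - mu_k)^2 / (2*var_k)) / sqrt(2*pi*var_k),  k = 1, 2,
% on the grid x, using the class means and variances set above.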
subplot(3,1,1); % Plot the p.d.f.
plot(x,p1, 'k', x, p2, 'k--');
legend('PDF Class 1','PDF Class 2','Location','northeast');
title('(a) Probability density functions of 2-class data');
subplot(3,1,2);
hold on;
n1 = hist(data1(:,1),x); % Get the number of points in each bin
n2 = hist(data2(:,1),x);
hist(data1(:,1),x); % Plot the histograms
hist(data2(:,1),x);
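% hist is called twice per class: once with an output argument to capture the
% bin counts (n1, n2) used below, and once without to draw the bars;
% the vector x supplies the bin centres in both cases.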
h = findobj(gca,'Type','patch'); % Get the histogram bar patches
set(h,'FaceColor','r','EdgeColor','w'); % Colour the bars red with white outlines
title('(b) Histogram distribution of 2-class data')
subplot(3,1,3);
hold on;
Pc1=q1/(q1+q2); % Compute the class priors
Pc2=q2/(q1+q2);
Pxl=(n1+n2)/(q1+q2); % Estimate the bin-wise unconditional probability P(x)
Pxl_c1=n1/q1; % Compute the class-conditional densities
Pxl_c2=n2/q2;
Posteriorc1_xl= (Pxl_c1*Pc1)./Pxl; % And finally the posteriors
Posteriorc2_xl= (Pxl_c2*Pc2)./Pxl;
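% The two lines above apply Bayes' rule bin-by-bin:
%   P(Ck|x) = P(x|Ck) P(Ck) / P(x).
% Because Pxl_c1*Pc1 + Pxl_c2*Pc2 = Pxl, the two posteriors sum to 1 in every
% non-empty bin; bins with no samples give 0/0 = NaN and show up as gaps in
% the stairs plots below.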
stairs(x,Posteriorc1_xl,'k');
stairs(x,Posteriorc2_xl,'k-.');
legend('Posterior Class 1','Posterior Class 2','Location','northeast');
title('(c) Plot of class-posterior probabilities');
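% ----------------------------------------------------------
% Optional extension (not in the original listing): overlay the exact Bayes
% posteriors computed from the analytic densities p1 and p2, so the bin-wise
% estimates above can be compared against the closed-form result. A minimal
% sketch reusing the variables already defined in this script:
% ----------------------------------------------------------
evidence = p1 * Pc1 + p2 * Pc2;        % analytic unconditional density p(x)
post1_exact = (p1 * Pc1) ./ evidence;  % exact P(C1|x) from Bayes' rule
post2_exact = (p2 * Pc2) ./ evidence;  % exact P(C2|x)
plot(x, post1_exact, 'k:', x, post2_exact, 'k:', 'HandleVisibility', 'off'); % dotted overlays on subplot (c)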