% blk_mdlg.m
%
% Modeling: Comparison of the conventional LMS and BLMS algorithms.
% (Time-domain implementation)
%
% System-identification experiment: a plant w_o driven by colored noise
% (white Gaussian noise shaped by the coloring filter h) plus additive
% plant noise is identified by (1) the sample-by-sample LMS algorithm and
% (2) the block LMS (BLMS) algorithm with block length L.  Squared errors
% are ensemble-averaged over several independent runs and the two learning
% curves are plotted on a log scale.
%
% Requires: corlnm2 (project function) to build the N-by-N correlation
% matrix of the colored input — only used for the optional diagnostics.
%
% Last updated on April 28, 1998
%
itn=input('\n No. of iterations? ');
sigman2=input('\n Variance of the plant noise? ');
sigman=sqrt(sigman2);                 % standard deviation of plant noise
wo=input('\n Plant impulse response (vector, w_o)? ');
% Force w_o into a column vector regardless of how it was typed.
a=size(wo);
if a(1)<a(2)
wo=wo';
end
N=input('\n Length of the model (N)? ');
L=input('\n Block length (L)? ');     % FIX: added missing semicolon so L is not echoed
h=input('\n Coloring filter impulse response (vector, h)? ');
% Force h into a column vector as well.
a=size(h);
if a(1)<a(2)
h=h';
end
Misad=input('\n Misadjustment (e.g., 0.1 for 10%) ? ');
% Step size chosen from the desired misadjustment:
%   mu = M / (N * E[x^2]),  with input power E[x^2] = h'*h
% (unit-variance white noise filtered by h).
mu=Misad/(N*(h'*h));
a=input('\n Do you wish to see the values of \n eigenvalue spread, expected MSE, ... (Y/N)? ','s');
if (a=='y')|(a=='Y')
MMSE=sigman2;                         % minimum MSE equals the plant-noise variance
R=corlnm2(h,N);                       % N-by-N input correlation matrix
lambda=eig(R);
eignsprd=max(lambda)/min(lambda);
% Learning-curve time constants: tau_i = 1/(4*mu*lambda_i).
taumax=1/(4*mu*min(lambda));
taumin=1/(4*mu*max(lambda));
MSEaprx=MMSE*(1+Misad);               % expected steady-state MSE
disp(' ')
disp(' ')
disp([' Eigenvalue spread = ' num2str(eignsprd)])
disp([' Maximum time constant of the learning curve = ' num2str(taumax)])
disp([' Minimum time constant of the learning curve = ' num2str(taumin)])
disp([' Expected steady-state MSE = ' num2str(MSEaprx)])
end
runs=input('\n \n No. of runs (for ensemble averaging)? ');
% (FIX: removed a redundant recomputation of mu that duplicated the line above.)
muB=L*mu;                             % BLMS step size scaled by the block length
xi=zeros(itn,1);                      % ensemble-averaged squared error, LMS
xiB=zeros(itn,1);                     % ensemble-averaged squared error, BLMS
for k=1:runs
x=filter(h,1,randn(itn,1));           % colored input sequence
d=filter(wo,1,x)+sigman*randn(itn,1); % desired output = plant output + noise
w=zeros(N,1);                         % LMS weight vector
counter=0;                            % samples accumulated in the current block
XB=zeros(L,N);                        % block data matrix (one tap-delay row per sample)
dB=zeros(L,1);                        % block of desired samples
wB=w;                                 % BLMS weight vector
for n=N:itn
   xtdl=x(n:-1:n-N+1);                % tap-delay-line vector at time n
   % --- conventional LMS: update every sample ---
   e=d(n)-w'*xtdl;
   w=w+2*mu*e*xtdl;
   xi(n)=xi(n)+e^2;
   % --- BLMS: accumulate L samples, then do one block update ---
   counter=counter+1;
   XB(counter,:)=xtdl';
   dB(counter)=d(n);
   if counter==L
      eB=dB-XB*wB;                    % block error vector
      wB=wB+2*(muB/L)*(XB'*eB);       % gradient averaged over the block
      % Spread the block's mean squared error over its L time slots.
      xiB(n:-1:n-L+1)=xiB(n:-1:n-L+1)+ones(L,1)*(eB'*eB)/L;
      counter=0;
   end
end
end
xi=xi/runs;
xiB=xiB/runs;
n=[1:itn];
semilogy(n,xi,'b',n,xiB,'r')
title('blue curve: conventional LMS; red curve: BLMS')
xlabel('NO. OF ITERATIONS')
ylabel('MSE')