analogy.m
%Computer experiments on adaptive equalization. Experiment 2: LMS algorithm
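% The adaptation loop below implements the standard LMS recursion, with d(n) the
% desired (measured) signal, x(n) the 6-sample reference vector, w the tap-weight
% vector and u the step size:
%   e(n)   = d(n) - w(n)'*x(n)
%   w(n+1) = w(n) + u*e(n)*x(n)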
clear;close all;clc;
load('TEK00007.DAT'); % measured data record; load creates the variable TEK00007 used below
%load('TEK00009.DAT');%noise
%signal=TEK00011';%load('TEK00004.DAT') 10004*1
signal=TEK00007';
noise=signal(1,6001:10000); % last 4000 samples of the record, used as the noisy reference
W1=zeros(1,4000); % per-iteration history of the six tap weights
W2=zeros(1,4000); % (indices up to N-7 = 3983 are written in the loop below)
W3=zeros(1,4000);
W4=zeros(1,4000);
W5=zeros(1,4000);
W6=zeros(1,4000);
%noise=TEK00009';%load('TEK00003.DAT') 10004*1
N=3990;
%delta=7;
u3=[2.35 0.025 0.0075]; % step sizes compared; the plot labels below are taken from u3
wk_mean=zeros(3,6); % averaged 6-tap weight vector, one row per step size
err_mean=zeros(3,N-7); % ensemble-average squared-error learning curve, one row per step size
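% Note (standard LMS theory, not from the original file): a common rule of thumb
% for stable adaptation is 0 < u < 2/(M*Px), where M is the number of taps (6 here)
% and Px is the average power of the reference samples; smaller u converges more
% slowly but leaves less excess mean-square error.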
for ii=1:3
% w=3.3;
u=u3(ii); % u: LMS step size for this run (the commented-out w above is the channel amplitude-distortion factor)
wk=zeros(100,6); % 6-tap weight vector, one row per independent trial
err=zeros(100,N-7); % a priori error for each trial and time step
% power_v=0.001;
% hn=[0,0.5*(1+cos(2*pi/w*((1:3)-2)))];%The impulse response of the channel
% Am=sqrt(power_v); % Power of additive gaussian white noise
z=zeros(1,N-7); % equalizer output for the current trial
y=zeros(1,N-7); % not used below
for nn=1:100
% bernuli=randsrc(1,N); % Bernoulli (+/-1) training sequence
% vn=Am*randn(1,N+3); % additive white Gaussian noise
% signal_in=[zeros(1,10),conv(bernuli,hn)+vn]; % input to the adaptive transversal equalizer
% bernuli_delta=[zeros(1,delta),bernuli]; % desired response delayed by delta
for m=1800:N-7
alc_data=noise(m+5:-1:m); % 6-sample reference vector x(m), newest sample first
% alc_data=noise(m); % single-tap alternative (level+noise)
z(m)=alc_data*wk(nn,:)'; % filter output y(m) = w'*x(m): filtered reference noise
err(nn,m)=signal(m)-z(m); % a priori error e(m) = d(m) - y(m)
wk(nn,:)=wk(nn,:)+u*err(nn,m)*alc_data; % LMS update: w <- w + u*e(m)*x(m)
% wk(nn,:)=wk(nn,:)+u*err(nn,m)*noise(m); % single-tap alternative update
W1(1,m)=wk(nn,1); % record the trajectory of each tap weight
W2(1,m)=wk(nn,2);
W3(1,m)=wk(nn,3);
W4(1,m)=wk(nn,4);
W5(1,m)=wk(nn,5);
W6(1,m)=wk(nn,6);
end
end
wk_mean(ii,:)=mean(wk); % average weight vector over the 100 trials
err_mean(ii,:)=mean(err.^2); % ensemble-average squared error (learning curve)
end
% Averaged tap weights for each step size
subplot(3,1,1);
stem(wk_mean(1,:));title(['u=' num2str(u3(1))]);axis([0 11 -1 1.8]);grid on;
subplot(3,1,2);
stem(wk_mean(2,:));title(['u=' num2str(u3(2))]);axis([0 11 -1 1.8]);grid on;
subplot(3,1,3);
stem(wk_mean(3,:));title(['u=' num2str(u3(3))]);axis([0 11 -1 1.8]);grid on;
figure;
idx=1800:N-7; % range actually processed by the adaptation loop
semilogy(idx,err_mean(1,idx),idx,err_mean(2,idx),idx,err_mean(3,idx));
xlabel('Number of iterations n');ylabel('Ensemble-average squared error');
legend(['u=' num2str(u3(1))],['u=' num2str(u3(2))],['u=' num2str(u3(3))]);title('LMS algorithm');
%% Pre-processing for the subsequent analysis (commented out)
%sk=zeros(1,7993);
%for j=1:7993
%    sk(1,j)=err(1,j);
%end
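%% Optional: minimal LMS sketch on synthetic data (illustration only).
% This is a self-contained sketch, not part of the experiment above: the 6-tap
% system h_demo, the noise level, the step size mu_demo and the run_demo flag are
% assumptions chosen only to show the same recursion (e = d - w'*x, w <- w + u*e*x)
% in isolation, without the TEK data files. Set run_demo = true to execute it.
run_demo = false;
if run_demo
    rng(0);                                  % reproducible random data (assumes R2011a or later)
    Ns      = 2000;                          % number of samples
    x_demo  = randn(1,Ns);                   % white reference input
    h_demo  = [0.9 0.5 0.2 0.1 0.05 0.02];   % hypothetical 6-tap system to identify
    d_demo  = filter(h_demo,1,x_demo) + 0.01*randn(1,Ns); % desired = filtered input + noise
    mu_demo = 0.01;                          % LMS step size
    w_demo  = zeros(1,6);                    % adaptive tap weights
    e_demo  = zeros(1,Ns);                   % error history
    for n = 6:Ns
        xv        = x_demo(n:-1:n-5);        % 6 most recent samples, newest first
        e_demo(n) = d_demo(n) - xv*w_demo.'; % a priori error
        w_demo    = w_demo + mu_demo*e_demo(n)*xv; % LMS weight update
    end
    figure;
    semilogy(6:Ns, e_demo(6:Ns).^2); grid on;
    xlabel('Iteration n'); ylabel('e^2(n)'); title('LMS sketch on synthetic data');
end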