% nnidbp.m
% Identification of nonlinearFn1.m with the BP algorithm
% Note: computing y(k) requires the values of y(k-1) and y(k-2)
% songying, 2005-6-12
% June 13: abnormal behaviour during simulation:
%   when the learning rate eta is raised from 0.01 to a larger value (e.g. 0.1), NaN appears
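% A plausible (unverified) explanation: with a larger step size the
% full-batch gradient updates overshoot, the weights grow without bound,
% and the squared error overflows, which then propagates as NaN.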
clear
clc
close all
eta=0.01;
N=1000;
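% Generate training data: un is the excitation input, uniform in [-1,1].
% The plant nonlinearFn1 is driven by it; the regressor for sample i is
% pp(:,i) = [y(k); y(k-1); u(k); u(k-1)], and y(k) (stored in tt) is also
% the training target.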
un=2*rand(1,N)-1;
pp=zeros(4,N);
y_1=0;
y_2=0;
u_1=0;
u_2=0;
for i=1:N
tt(i)=feval('nonlinearFn1',[y_1;y_2;u_1;u_2]);
pp(:,i)=[tt(i);y_1;un(i);u_1];
u_2=u_1;
u_1=un(i);
y_2=y_1;
y_1=tt(i);
end
maxt=max(tt);
mint=min(tt);
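% Scale each regressor row (PHI) and the target (Y) linearly to [-1,1];
% maxt/mint are kept so the network output can be mapped back later.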
for i=1:4
PHI(i,:)=2*(pp(i,:)-min(pp(i,:)))/(max(pp(i,:))-min(pp(i,:)))-1;
end
Y=2*(tt-mint)/(maxt-mint)-1;
[inputs,N]=size(PHI);
[outputs,N]=size(Y);
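% Two-layer feedforward network: 4 inputs, 'hidden' tanh units, one
% linear output; both weight matrices are initialised uniformly in
% [-0.5, 0.5]. No bias terms are used.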
hidden=5;
W1=rand(hidden,inputs)-0.5;
W2=rand(outputs,hidden)-0.5;
maxIter=4000;
PI_vector=zeros(maxIter,1);
y1=zeros(hidden,N);
delta1=y1;
y2=zeros(outputs,N);
delta2=y2;
% stopping criteria
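% Training stops when the criterion PI drops below critmin, or when the
% criterion change, the gradient estimate and the weight change are all
% below their thresholds (critterm, gradterm, paramterm).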
critmin=0;
gradterm=1e-4;
paramterm=1e-3;
critterm=1e-7;
critdif=critterm+1;
tic
% start training
for iter=1:maxIter
%hidden-layer output
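% 2./(1+exp(-2*x))-1 equals tanh(x), so y1 = tanh(W1*PHI)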
y1=2./(1+exp(-2*W1*PHI))-1;
%output-layer output
y2=W2*y1;
E=Y-y2;
SSE=sum(sum(E.*E));
%backpropagation error
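% The output layer is linear, so its error signal is simply E; the hidden
% error is backpropagated through W2 and scaled by the tanh derivative (1 - y1.^2).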
delta2=E;
E1=W2'*delta2;
delta1=(1-y1.*y1).*E1;
G2=delta2*y1';
G1=delta1*PHI';
%update weights
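% G2, G1 are the batch gradients of -(1/2)*SSE w.r.t. W2, W1, so adding
% eta*G performs plain full-batch gradient descent on the squared error.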
W2=W2+eta*G2;
W1=W1+eta*G1;
%******** updates for next iteration********
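% PI = SSE/(2N) is the training criterion (half the mean squared error);
% critdif is its change between iterations, paramdif the largest single
% weight change in this step, and gradmax a per-sample gradient estimate.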
PI=SSE/(2*N);
PI_vector(iter)=PI;
if iter>1
critdif=abs(PI_vector(iter-1)-PI);
end
paramdif=eta*max(max(max(abs(G1))),max(max(abs(G2))));
gradmax=paramdif/eta/N;
if (PI<critmin || (critdif<critterm && gradmax<gradterm && paramdif<paramterm))
break;
end
end
% *********end of network training
toc
PI_vector=PI_vector(1:iter);
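% Map the network output y2 from [-1,1] back to the original scale of tt
% (inverse of the normalisation above) and compute the unnormalised error.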
normY2=(y2+1)*(maxt-mint)*0.5+mint;
normE=tt-normY2;
t=1:N;
subplot 211
plot(t,Y,'b',t,y2,'r');
title('Normalized actual output vs. network output');
xlabel('Sample');ylabel('Output');
legend('y','yn');
subplot 212
plot(t,E)
title('Normalized network training error');
xlabel('Sample');ylabel('Error');
%
figure
subplot 211
plot(t,tt,'b',t,normY2,'r');
title('Unnormalized actual output vs. network output');
xlabel('Sample');ylabel('Output');
legend('y','yn');
subplot 212
plot(t,normE)
title('Unnormalized network training error');
xlabel('Sample');ylabel('Error');
figure
semilogy(PI_vector); % y axis is logarithmic (base 10)
title('Criterion evaluated after each iteration');
xlabel('Iteration (epoch)');
ylabel('Criterion');
grid;
%test nn
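% Generalisation test: generate a fresh random input sequence, build the
% regressors and normalise them in the same way as for training, then run
% a single forward pass through the trained weights W1, W2.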
un=2*rand(1,N)-1; % sample input data for phi; using only rand(4,N) works less well than rand(4,N)-0.5 (zero-mean input)
pp=zeros(4,N);
y_2=0;
y_1=0;
u_1=0;
u_2=0;
for i=1:N
tt(i)=feval('nonlinearFn1',[y_1;y_2;u_1;u_2]);
pp(:,i)=[tt(i);y_1;un(i);u_1];
u_2=u_1;
u_1=un(i);
y_2=y_1;
y_1=tt(i);
end
maxt=max(tt);
mint=min(tt);
for i=1:4
PHI2(i,:)=2*(pp(i,:)-min(pp(i,:)))/(max(pp(i,:))-min(pp(i,:)))-1;
end
Y2=2*(tt-mint)/(maxt-mint)-1;
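% 1-2./(1+exp(2*x)) is algebraically identical to 2./(1+exp(-2*x))-1,
% i.e. tanh(x), so this is the same hidden layer as during training.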
yn=W2*(1-2./(1+exp(2*W1*PHI2)));
normYn=(yn+1)*(maxt-mint)*0.5+mint;
En=Y2-yn;
normEn=tt-normYn;
%
figure
subplot 211
plot(t,Y2,'b',t,yn,'r');
title('Normalized actual output vs. network output (test data)');
xlabel('Sample');ylabel('Output');
legend('y','yn');
subplot 212
plot(t,En)
title('Normalized network test error');
xlabel('Sample');ylabel('Error');
%
figure
subplot 211
plot(t,tt,'b',t,normYn,'r');
title('Unnormalized actual output vs. network output (test data)');
xlabel('Sample');ylabel('Output');
legend('y','yn');
subplot 212
plot(t,normEn)
title('Unnormalized network test error');
xlabel('Sample');ylabel('Error');
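% ------------------------------------------------------------------
% Note: the plant nonlinearFn1 is defined in a separate file that is not
% part of this listing; from the call above it takes the regression
% vector [y(k-1); y(k-2); u(k-1); u(k-2)] and returns y(k). A minimal
% sketch of what such a file (nonlinearFn1.m) could look like is shown
% below for illustration only -- the assumed difference equation is an
% example and may differ from the one actually used:
%
%   function y = nonlinearFn1(x)
%   % x = [y_1; y_2; u_1; u_2]: previous outputs and inputs
%   % assumed example plant: a second-order nonlinear difference equation
%   y = x(1)*x(2)*(x(1)+2.5)/(1 + x(1)^2 + x(2)^2) + x(3);
%   end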