%daoru.m
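%Trains a three-layer BP (backpropagation) network using the legacy MATLAB Neural
%Network Toolbox functions (nntwarn, initff, tansig, deltatan, learnbp): inputs and
%targets are loaded from X.txt and T.txt, the first TRAINNUM samples are used for
%training, and the trained network is then evaluated on the full data set.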
clear all;
%turn off warning messages
nntwarn off;
%number of hidden-layer neurons
NUMBER=9;
%NUMBER2=4;
%learning rate
lr=0.04;
%error goal
err_goal=0.01;
%maximum number of training epochs
max_epoch=20000;
%number of training samples
TRAINNUM=20;
load X.txt
load T.txt
%input data set
% X=[0.3333333 0.4266667 0.6500000 0.1080000 0.4300000 0.5880000;
% 0.4333333 0.4800000 0.0700000 0.2880000 0.1000000 0.2860000;
% 0.5666667 0.5733333 0.7500000 0.8200000 0.3600000 0.5000000;
% 0.3333333 0.3733333 0.6500000 0.1600000 0.5400000 0.4360000;
% 0.5000000 0.5866667 0.6500000 0.1240000 0.4700000 0.8200000;
% 0.3666667 0.4133333 0.6000000 0.0680000 0.4000000 0.2980000;
% 0.4000000 0.5400000 0.7000000 0.2920000 0.2600000 0.6400000;
% 0.9000000 0.7066667 0.6000000 0.1280000 0.3200000 0.0420000;
% 0.1666667 0.3133333 0.7000000 0.2080000 0.5600000 0.7480000 ;
% 0.4000000 0.6933333 0.7500000 0.2080000 0.0700000 0.1280000;
% 0.3333333 0.6266667 0.8000000 0.3240000 0.1800000 0.1244000;
% 0.3666667 0.6666667 0.7000000 0.2240000 0.3900000 0.5840000;
% 0.3000000 0.3733333 0.6500000 0.1600000 0.5400000 0.5920000;
% 0.5166667 0.7066667 0.6700000 0.3360000 0.0800000 0.1020800;
% 0.2900000 0.4133333 0.6300000 0.1400000 0.4700000 0.4209200;
% 0.7533333 0.8800000 0.7550000 0.0680000 0.4100000 0.4974800;
% 0.3766667 0.6000000 0.8050000 0.3120000 0.1900000 0.1231200;
% 0.5200000 0.5866667 0.6550000 0.1160000 0.5400000 0.6800800;
% 0.2500000 0.7400000 0.8650000 0.3240000 0.2400000 0.0419600;
% 0.3266667 0.4600000 0.6250000 0.1000000 0.3700000 0.5262800;
% 0.3433333 0.4133333 0.6800000 0.1240000 0.4100000 0.5528800;
% 0.3300000 0.6266667 0.7000000 0.2400000 0.3400000 0.4741600;
% 0.3133333 0.4466667 0.5850000 0.0680000 0.4100000 0.2688400;
% 0.4266667 0.7133333 0.2800000 0.0960000 0.4800000 0.7786800;
% 0.7333333 0.8866667 0.7500000 0.0560000 0.4000000 0.3592000;
% 0.3000000 0.7066667 0.9000000 0.3320000 0.2100000 0.0388800;
% 0.4666667 0.6933333 0.2500000 0.0920000 0.4300000 0.9040000;
% 0.4666667 0.7466667 0.7000000 0.3520000 0.0700000 0.1272000;];
%output data set
%T=[0.0137455;0.9636364;0.4727273;0.0050909;0.0073455;0.0108909;0.8490545;0.1663273;
%0.0138182;0.3272727;0.8001818;0.8538545;0.0032182;0.3075455;0.0057455;0.0131636;
%0.7545273;0.0055455;0.0448364;0.0132909;0.0135636;0.5631818;0.0164182;0.1688182;
%0.0150545;0.0430909;0.1098182;0.2908364;];
% T=[0.0126000;0.8833333;0.4333333;0.0046667;0.0067333;0.0099833;0.7783000;0.1524667;
% 0.0126667;0.3000000;0.7335000;0.7827000;0.0029500;0.2819167;0.0052667;0.0120667;
% 0.6916500;0.0050833;0.0411000;0.0121833;0.0124833;0.5162500;0.0150500;0.1547500;
% 0.0136000;0.0395000;0.1006667;0.2666000;];
%transpose the input and output data sets
X=X';T=T';
%select the training subset (the first TRAINNUM samples)
%X_test=[X(:,1) X(:,2) X(:,3) X(:,4) X(:,5) X(:,6) X(:,7)];
%T_test=[T(:,1) T(:,2) T(:,3) T(:,4) T(:,5) T(:,6) T(:,7)];
X_test=X(:,1:TRAINNUM);
T_test=T(:,1:TRAINNUM);
%number of input neurons M and number of training samples N
[M,N]=size(X_test);
%range (minimum and maximum) of each input; all inputs are normalized to [0,1]
Xr=[zeros(1,M);ones(1,M)]';
%create the BP network
[Wij,bj,Wjk,bk]=initff(Xr,NUMBER,'tansig',1,'tansig');
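%initff returns the hidden-layer weights Wij and biases bj plus the output-layer
%weights Wjk and bias bk for a NUMBER-neuron tansig hidden layer feeding a single
%tansig output neuron
%keep a copy of the initial weights and biases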
Wijs=Wij;bjs=bj;
Wjks=Wjk;bks=bk;
%start timer
tic
%training loop
for epoch=1:max_epoch
%hidden-layer output
Oj=tansig(Wij*X_test,bj);
%output-layer output
Ok=tansig(Wjk*Oj,bk);
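%note: the two-argument legacy form tansig(Z,b) adds the bias b to every column of Z
%before applying the transfer function, so Oj = tansig(Wij*X_test + bj) and
%Ok = tansig(Wjk*Oj + bk) column-wise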
%output error
E=T_test-Ok;
%delta of the output layer (tansig derivative, matching the tansig output layer)
deltak=deltatan(Ok,E);
%delta of the hidden layer, backpropagated through Wjk
deltaj=deltatan(Oj,deltak,Wjk);
%update the output-layer weights and biases
[dWjk,dbk]=learnbp(Oj,deltak,lr);
Wjk=Wjk+dWjk;bk=bk+dbk;
%update the hidden-layer weights and biases
[dWij,dbj]=learnbp(X_test,deltaj,lr);
Wij=Wij+dWij;bj=bj+dbj;
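%learnbp applies the basic delta rule: the weight change is lr*delta*input' and
%the bias change is lr times the deltas accumulated over the training batch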
%sum-squared error after the weight update
SSE=sumsqr(T_test-tansig(Wjk*tansig(Wij*X_test,bj),bk));
%stop training once the error goal is reached
if(SSE<err_goal)
break;
end
end
%stop timer
toc
%display the number of epochs used
epoch
%evaluate on the full data set, including both training and test samples
%X_test=[X(:,linspace(TRAINNUM+1,36,36-TRAINNUM))];
%T_test=[T(:,linspace(TRAINNUM+1,36,36-TRAINNUM))];
X_test=X;
T_test=T;
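%PredictNet21 is a user-supplied helper (not a toolbox function); judging from its
%outputs it forward-propagates X_test through the trained network and returns the
%network outputs Out_test, the per-sample errors Out_E and the sum-squared error Out_SSE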
[tmp1,Out_test,Out_E,Out_SSE]=PredictNet21(Wij,bj,Wjk,bk,X_test,T_test);
%plot the output errors
%plot(linspace(0,36-TRAINNUM-1,36-TRAINNUM),Out_E,'bo-');hold on;
%plot(linspace(0,36-TRAINNUM-1,36-TRAINNUM),Out_SSE,'rv--');grid on;
figure(2);
plot(Out_E.^2,'bo-');grid on;
%T
%Out_test