% whk_g.m — RBF network trained by gradient descent, with and without input noise
function main()
% MAIN  Compare RBF-network gradient training without vs. with input noise.
%
% Samples y = sin(3*(x+0.8)^2) on [-1,1], adds Gaussian output noise to the
% training targets, trains two RBF networks from the same initial parameters
% (one with extra input noise injected during training), and reports/plots
% the train and test sum-squared errors.
%
% NOTE(review): relies on Neural Network Toolbox helpers (rands, dist,
% radbas, sumsqr) and on RBFGrad / RBFGradNoiseIn defined below.
SamNum = 15;            % number of training samples
TargetSamNum = 201;     % number of test points (matches -1:0.01:1)
InDim = 1;              % input dimension
UnitNum = 15;           % number of hidden RBF units
NoiseVar = 0.4;         % std of output noise added to training targets
% Build sample inputs/outputs from the target function
SamIn = -1+2/SamNum : 2/SamNum : 1;
SamOutNoNoise = sin(3*(SamIn+0.8).^2);
TargetIn = -1:0.01:1;
TargetOut = sin(3*(TargetIn+0.8).^2);
lr = 0.006;             % learning rate
MaxEpoch = 1000;        % maximum training epochs
E0 = 0.01;              % SSE goal (early-stop threshold)
InNoiseVar = 0.1;       % std of input noise for the noisy-input run
TestNum = 1;            % number of independent trials
tpGradRBF = [lr, MaxEpoch, E0];
tpGradRBFNoiseIn = [lr, MaxEpoch, E0, InNoiseVar];
% Error accumulators over all trials
AllTrainSSENoNoise = [];
AllTestSSENoNoise = [];
AllTrainSSENoiseIn = [];
AllTestSSENoiseIn = [];
for test = 1:TestNum
    % Add output noise to the training targets (fresh seed per run)
    rand('state', sum(100*clock));
    Noise = NoiseVar * randn(1, SamNum);
    SamOut = SamOutNoNoise + Noise;
    % Same initial network parameters for both training variants
    Center = rands(InDim, UnitNum);
    SP = 0.2*rand(1, UnitNum) + 0.1;
    W = 0.1*rands(1, UnitNum);
    % --- Training without input noise ---
    [NewCenter, NewW, NewSP, ErrHistory] = RBFGrad(Center, W, SP, SamIn, SamOut, tpGradRBF);
    [xx, TrainedEpoch] = size(ErrHistory);
    TrainSSENoNoise = ErrHistory(TrainedEpoch);
    AllTrainSSENoNoise = [AllTrainSSENoNoise TrainSSENoNoise];
    % Test the network trained without input noise
    TestNNOutNoNoise = RBF(NewCenter, NewW, NewSP, TargetIn);
    TestSSENoNoise = sumsqr(TargetOut - TestNNOutNoNoise);
    AllTestSSENoNoise = [AllTestSSENoNoise TestSSENoNoise];
    % --- Training with input noise ---
    [NewCenter, NewW, NewSP, ErrHistory] = RBFGradNoiseIn(Center, W, SP, SamIn, SamOut, tpGradRBFNoiseIn);
    [xx, TrainedEpoch] = size(ErrHistory);
    TrainSSENoiseIn = ErrHistory(TrainedEpoch);
    AllTrainSSENoiseIn = [AllTrainSSENoiseIn TrainSSENoiseIn];
    % Test the network trained with input noise
    TestNNOutNoiseIn = RBF(NewCenter, NewW, NewSP, TargetIn);
    TestSSENoiseIn = sumsqr(TargetOut - TestNNOutNoiseIn);
    AllTestSSENoiseIn = [AllTestSSENoiseIn TestSSENoiseIn];
    [test TrainSSENoNoise TestSSENoNoise TrainSSENoiseIn TestSSENoiseIn]
end
% Mean/std of each of the four error records over all trials
[mean(AllTrainSSENoNoise) std(AllTrainSSENoNoise)]
[mean(AllTestSSENoNoise) std(AllTestSSENoNoise)]
[mean(AllTrainSSENoiseIn) std(AllTrainSSENoiseIn)]
[mean(AllTestSSENoiseIn) std(AllTestSSENoiseIn)]
% Plot samples, target curve, and both fitted curves
figure
hold on
axis([-1 1 -2 2])
axis on
xlabel('Input x');
ylabel('Output y');
plot(SamIn, SamOut, 'k+')
plot(TargetIn, TargetOut, 'k--')
plot(TargetIn, TestNNOutNoNoise, 'k-.')
plot(TargetIn, TestNNOutNoiseIn, 'k-')
% Plot the learning-error curve of the last (noisy-input) run
figure
hold on
grid
[xx, Num] = size(ErrHistory);
plot(1:Num, ErrHistory, 'k-');
function NNOut=RBF(Center,W,SP,DataIn)
% RBF  Forward pass of a radial-basis network.
%   NNOut = W * radbas(dist(Center',DataIn) ./ spreads)
% Center : InDim-by-UnitNum unit centers
% W      : 1-by-UnitNum output weights
% SP     : 1-by-UnitNum per-unit spreads
% DataIn : InDim-by-N input samples; NNOut is 1-by-N.
[xxx, nSamples] = size(DataIn);
UnitToSample = dist(Center', DataIn);          % unit-to-sample distances
SpreadCols = repmat(SP', 1, nSamples);          % one spread column per sample
HiddenOut = radbas(UnitToSample ./ SpreadCols); % Gaussian hidden activations
NNOut = W * HiddenOut;                          % linear output layer
function [NewCenter,NewW,NewSP,ErrHistory]=RBFGrad(Center,W,SP,...
    SamIn,SamOut,tp)
% RBFGRAD  Gradient-descent training of an RBF network (no input noise).
% tp = [lr, MaxEpoch, E0]: learning rate, max epochs, SSE stop goal.
% Returns the trained Center/W/SP and the per-epoch SSE history.
%
% NOTE(review): the original was an empty stub referring to "Appendix D"
% (with a duplicated output name, a syntax error); implemented here as the
% noise-free counterpart of RBFGradNoiseIn below.
lr = tp(1);
MaxEpoch = tp(2);
E0 = tp(3);
[xxx, SamNum] = size(SamIn);
[xxx, UnitNum] = size(Center);
ErrHistory = [];  % records the training error after each epoch
for epoch = 1:MaxEpoch
    % Forward pass
    AllDist = dist(Center', SamIn);
    SPMat = repmat(SP', 1, SamNum);
    UnitOut = radbas(AllDist ./ SPMat);
    NetOut = W * UnitOut;
    Error = SamOut - NetOut;
    % Stop-learning check
    SSE = sumsqr(Error);
    ErrHistory = [ErrHistory SSE];
    if SSE < E0, break, end
    % Per-unit gradient updates of center, spread, and weight
    for i = 1:UnitNum
        CentGrad = (SamIn - repmat(Center(:,i),1,SamNum)) ...
            *(Error.*UnitOut(i,:)*W(i)/(SP(i)^2))';
        SPGrad = AllDist(i,:).^2*(Error.*UnitOut(i,:)*W(i)/(SP(i)^3))';
        WGrad = Error*UnitOut(i,:)';
        Center(:,i) = Center(:,i) + lr*CentGrad;
        SP(i) = SP(i) + lr*SPGrad;
        W(i) = W(i) + lr*WGrad;
    end
end
NewCenter = Center;
NewW = W;
NewSP = SP;
function [NewCenter,NewW,NewSP,ErrHistory]=RBFGradNoiseIn(Center,W,SP,SamIn,SamOut,tp)
% RBFGRADNOISEIN  Gradient-descent training of an RBF network with input
% noise injected each epoch (a regularization technique).
% tp = [lr, MaxEpoch, E0, InNoiseVar]: learning rate, max epochs, SSE stop
% goal, and std of the Gaussian noise added to the inputs.
% Returns the trained Center/W/SP and the per-epoch SSE history.
%
% Transcription fixes vs. the original listing: removed duplicated output
% name, 'repmat(SP'',1SamNum)' missing comma, 'SamOUT'/'Centr' typos,
% 'Center(,i)' syntax error, missing semicolon on NewCenter.
lr = tp(1);
MaxEpoch = tp(2);
E0 = tp(3);
InNoiseVar = tp(4);
[xxx, SamNum] = size(SamIn);
[xxx, UnitNum] = size(Center);
ErrHistory = [];  % records the training error after each parameter update
for epoch = 1:MaxEpoch
    % Fresh input noise every epoch
    InNoise = InNoiseVar * randn(1, SamNum);
    % Forward pass on the perturbed inputs
    AllDist = dist(Center', SamIn + InNoise);
    SPMat = repmat(SP', 1, SamNum);
    UnitOut = radbas(AllDist ./ SPMat);
    NetOut = W * UnitOut;
    Error = SamOut - NetOut;
    % Stop-learning check
    SSE = sumsqr(Error);
    ErrHistory = [ErrHistory SSE];
    if SSE < E0, break, end
    % Per-unit gradient updates of center, spread, and weight
    for i = 1:UnitNum
        CentGrad = (SamIn - repmat(Center(:,i),1,SamNum)) ...
            *(Error.*UnitOut(i,:)*W(i)/(SP(i)^2))';
        SPGrad = AllDist(i,:).^2*(Error.*UnitOut(i,:)*W(i)/(SP(i)^3))';
        WGrad = Error*UnitOut(i,:)';
        Center(:,i) = Center(:,i) + lr*CentGrad;
        SP(i) = SP(i) + lr*SPGrad;
        W(i) = W(i) + lr*WGrad;
    end
end
NewCenter = Center;
NewW = W;
NewSP = SP;
% (Removed: web code-viewer footer — keyboard-shortcut help text that was
% copied along with the listing and is not part of this script.)