% whk_k.m — Resource-Allocating RBF Network (RAN-style) demo: incremental
% growth of hidden units plus gradient fine-tuning on a 1-D regression task.
function main()
% Incrementally trains a resource-allocating RBF network on the target
% function y = 1.1*(1 - x + 2x^2)*exp(-x^2/2), presenting one training
% sample at a time. A new hidden unit is allocated when both the output
% error and the distance to the nearest existing center exceed thresholds;
% otherwise existing parameters are fine-tuned by gradient descent.
TrainSamNum=400;
TestSamNum=201;
InDim=1;
OutDim=1;
% Build training/test samples from the target function
TrainSamIn=4*rands(1,TrainSamNum);
TrainSamOut=1.1*(1-TrainSamIn+2*TrainSamIn.^2).*exp(-TrainSamIn.^2/2);
TestSamIn=-4:0.04:4;
TestSamOut=1.1*(1-TestSamIn+2*TestSamIn.^2).*exp(-TestSamIn.^2/2);
TestSTD=std(TestSamOut);
OverLapCoe=0.87;      % overlap coefficient for new-unit spreads
Dist_Max=2.0;         % initial novelty-distance threshold
Dist_Min=0.2;         % floor of the novelty-distance threshold
ErrLimit=0.02;        % error threshold for unit allocation / tuning stop
Decay=0.977;          % per-sample decay of the distance threshold
lr=0.05;              % fine-tuning learning rate
MaxEpoch=100;         % max gradient epochs per fine-tuning call
DistLimit=Dist_Max;
b2=TrainSamOut(:,1);  % output bias initialized to the first sample's output
w2=[];
UnitCenters=[];
SpreadConstant=[];
UnitNum=0;            % FIX: was [], which made "UnitNum==0" an empty (false) test
AllUnitNum=[];
AllTestRSME=[];
tp=[ErrLimit lr MaxEpoch];
for TrainedNum=2:TrainSamNum
    TrainedNum
    NewInput=TrainSamIn(:,TrainedNum);
    NewOutput=TrainSamOut(:,TrainedNum);
    NetOut=RBFNN(NewInput,UnitCenters,w2,b2,SpreadConstant);
    NewErr=NewOutput-NetOut;
    % Novelty criterion: distance from the new input to the nearest center
    if (UnitNum==0)
        NewDist=Dist_Max;
    else
        AllDist=dist(UnitCenters',NewInput);
        NewDist=min(AllDist);
    end
    if (norm(NewErr)>=ErrLimit && NewDist>=DistLimit)
        % FIX: original split the assignment across two lines with no "=",
        % discarding the updated network parameters
        [UnitCenters,w2,SpreadConstant]=AddNewUnit(NewInput,NewErr,NewDist,...
            UnitCenters,w2,SpreadConstant,OverLapCoe);
        UnitNum=UnitNum+1;
    else
        [UnitCenters,w2,b2,SpreadConstant]=FineTuning(NewInput,NewOutput,...
            UnitCenters,w2,b2,SpreadConstant,tp);
    end
    % Anneal the novelty-distance threshold toward its floor
    if DistLimit>Dist_Min
        DistLimit=DistLimit*Decay;
    else
        DistLimit=Dist_Min;
    end
    AllUnitNum=[AllUnitNum UnitNum];
    TestNNOut=RBFNN(TestSamIn,UnitCenters,w2,b2,SpreadConstant);
    TestRSME=sqrt(sumsqr(TestNNOut-TestSamOut)/TestSamNum)/TestSTD;
    AllTestRSME=[AllTestRSME TestRSME];
end
% Plot target curve against the network's output
TestNNOut=RBFNN(TestSamIn,UnitCenters,w2,b2,SpreadConstant);
[xxx,PtNum]=size(TestSamOut);
figure
echo off
axis([0 PtNum -0.2 3.0])
axis on
grid
hold on
plot(1:PtNum,TestSamOut,'b-')
plot(1:PtNum,TestNNOut,'k-')
UnitNum
TestRSME
% Plot growth of the hidden-unit count
[xxx,PtNum]=size(AllUnitNum);
figure
echo off
axis([0 PtNum 0 40])
axis on
grid
hold on
plot(1:PtNum,AllUnitNum,'b-')
% Plot evolution of the normalized test RMSE
[xxx,PtNum]=size(AllTestRSME);
figure
echo off
axis on
grid
hold on
plot(1:PtNum,AllTestRSME,'b-')
function NetOut=RBFNN(NewInput,UnitCenters,w2,b2,SpreadConstant)
% Evaluates the RBF network on a batch of inputs.
%   NewInput       : InDim-by-InNum input matrix (one column per sample)
%   UnitCenters    : InDim-by-UnitNum matrix of hidden-unit centers
%   w2             : OutDim-by-UnitNum output weights (empty if no units yet)
%   b2             : output bias
%   SpreadConstant : UnitNum-by-1 column of spread widths
% Returns NetOut, the OutDim-by-InNum network output.
% FIX: original read "[UnitNum,OutDim]=size(w2)", swapping the dimensions —
% w2 is OutDim-by-UnitNum (grown column-wise in AddNewUnit). The empty
% check happened to work, but the names were wrong and would break for
% OutDim > 1.
[OutDim,UnitNum]=size(w2);
[xxx,InNum]=size(NewInput);
if (UnitNum==0)
    % No hidden units yet: output is just the bias, replicated per sample
    NetOut=repmat(b2,1,InNum);
else
    SpreadMat=repmat(SpreadConstant,1,InNum);
    AllDist=dist(UnitCenters',NewInput);     % UnitNum-by-InNum distances
    a1=radbas(AllDist./SpreadMat);           % Gaussian hidden activations
    NetOut=w2*a1+b2;
end
% Grow the network by one hidden unit (RAN allocation step)
function [UnitCenters,w2,SpreadConstant]=AddNewUnit(NewInput,...
NewErr,NewDist,UnitCenters,w2,SpreadConstant,OverLapCoe)
% The new unit's center is the novel input itself, its output weight is the
% current residual error (so the new unit cancels it exactly at that point),
% and its spread is OverLapCoe times the distance to the nearest old center.
newSpread=OverLapCoe*NewDist;
UnitCenters=cat(2,UnitCenters,NewInput);
w2=cat(2,w2,NewErr);
SpreadConstant=cat(1,SpreadConstant,newSpread);
% Gradient-descent fine-tuning of network parameters for one sample
function [UnitCenters,w2,b2,SpreadConstant]=FineTuning(NewInput,...
NewOutput,UnitCenters,w2,b2,SpreadConstant,tp)
% Runs up to tp(3) gradient steps on bias, output weights and centers to
% reduce the error on (NewInput, NewOutput), stopping early once the error
% norm falls below tp(1). tp = [ErrLimit lr MaxEpoch].
[xxx,UnitNum]=size(UnitCenters);
% With no hidden units the only trainable parameter is the bias
if (UnitNum==0), b2=NewOutput; return, end
ErrLimit=tp(1);
lr=tp(2);
MaxEpoch=tp(3);
for epoch=1:MaxEpoch
    AllDist=dist(UnitCenters',NewInput);
    a1=radbas(AllDist./SpreadConstant);   % hidden activations, UnitNum-by-1
    NetOut=w2*a1+b2;
    NewErr=NewOutput-NetOut;
    if (norm(NewErr)<ErrLimit), break, end
    b2=b2+lr*NewErr;
    w2=w2+lr*NewErr*a1';
    % Move each center toward/away from the input along the error gradient
    for i=1:UnitNum   % FIX: original used undefined variable "Unit"
        CentInc=2*(NewInput-UnitCenters(:,i))...
            *a1(i)*NewErr*w2(i)/(SpreadConstant(i)^2);
        UnitCenters(:,i)=UnitCenters(:,i)+lr*CentInc;
    end
end