ran.m
function ran()
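% Incrementally trained RBF network: each new training sample is first passed
% through the current network; if both the output error and the distance to
% the nearest existing centre exceed the current thresholds, a new hidden unit
% is allocated at that sample, otherwise the weights, bias and centres are
% fine-tuned by gradient descent. The distance threshold decays geometrically
% from Dist_Max towards Dist_Min as training proceeds.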
TrainSamNum=400; % number of training samples
TestSamNum=201; % number of test samples
InDim=1; % input dimension of each sample
OutDim=1; % output dimension of each sample
% Generate training and test samples from the target function
TrainSamIn=4*rands(1,TrainSamNum);
TrainSamOut=1.1*(1-TrainSamIn+2*TrainSamIn.^2).*exp(-TrainSamIn.^2/2);
TestSamIn=-4:0.04:4;
TestSamOut=1.1*(1-TestSamIn+2*TestSamIn.^2).*exp(-TestSamIn.^2/2);
TestSTD=std(TestSamOut);
OverLapCoe=0.87; % overlap coefficient used to set the spread of new units
Dist_Max=2.0; % maximum (initial) distance resolution
Dist_Min=0.2; % minimum (final) distance resolution
ErrLimit=0.02; % error resolution (threshold for adding a new unit)
Decay=0.977; % decay constant for the distance resolution
lr=0.05; % learning rate
MaxEpoch=100; % maximum number of fine-tuning epochs per sample
DistLimit=Dist_Max; % current distance resolution
b2=TrainSamOut(:,1); % initialize the output bias with the first sample's output
w2=[]; % output-layer weights, one column per hidden unit
UnitCenters=[]; % hidden-unit centres, one column per unit
SpreadConstant=[]; % hidden-unit spreads
UnitNum=0; % current number of hidden units
AllUnitNum=0; % history of the hidden-unit count
AllTestRSME=[]; % history of the normalized test RMSE
tp=[ErrLimit lr MaxEpoch]; % training parameters passed to FineTuning
for TrainedNum=2:TrainSamNum % sample 1 was used to initialize b2
TrainedNum % display training progress
NewInput=TrainSamIn(:,TrainedNum);
NewOutput=TrainSamOut(:,TrainedNum);
NetOut=RBFNN(NewInput,UnitCenters,w2,b2,SpreadConstant);
NewErr=NewOutput-NetOut;
if(UnitNum==0)
NewDist=Dist_Max;
else
AllDist=dist(UnitCenters',NewInput); % distances from the new input to all centres
NewDist=min(AllDist);
end
if(norm(NewErr)>=ErrLimit && NewDist>=DistLimit) % novelty test: decide whether to add a new hidden unit
[UnitCenters,w2,SpreadConstant]=...
AddNewUnit(NewInput,NewErr,NewDist,UnitCenters,w2,SpreadConstant,OverLapCoe);
UnitNum=UnitNum+1;
else
[UnitCenters,w2,b2,SpreadConstant]=...
FineTuning(NewInput,NewOutput,UnitCenters,w2,b2,SpreadConstant,tp);
end
if DistLimit>Dist_Min % decay the distance resolution
DistLimit=DistLimit*Decay;
else
DistLimit=Dist_Min;
end
AllUnitNum=[AllUnitNum UnitNum];
TestNNOut=RBFNN(TestSamIn,UnitCenters,w2,b2,SpreadConstant);
TestRSME=sqrt(sumsqr(TestNNOut-TestSamOut)/TestSamNum)/TestSTD; % test RMSE normalized by the target's standard deviation
AllTestRSME=[AllTestRSME TestRSME];
end
% Plot the target curve and the network output curve
TestNNOut=RBFNN(TestSamIn,UnitCenters,w2,b2,SpreadConstant);
[xxx,PtNum]=size(TestSamOut);
figure
echo off
axis([0 PtNum -0.2 3.0])
axis on
grid
hold on
plot(1:PtNum,TestSamOut,'b-')
plot(1:PtNum,TestNNOut,'k-')
UnitNum % display the final number of hidden units
TestRSME % display the final normalized test RMSE
% Plot the growth of the number of hidden units
[xxx,PtNum]=size(AllUnitNum);
figure
echo off
axis([0 PtNum 0 40])
axis on
grid
hold on
plot(1:PtNum,AllUnitNum,'b-')
% Plot the evolution of the normalized test RMSE
[xxx,PtNum]=size(AllTestRSME);
figure
echo off
axis on
grid
hold on
plot(1:PtNum,AllTestRSME,'b-');
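% Evaluate the RBF network for the column inputs in NewInput; before any
% hidden unit has been added, the output is simply the bias b2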
function NetOut=RBFNN(NewInput,UnitCenters,w2,b2,SpreadConstant)
[OutDim,UnitNum]=size(w2); % w2 is OutDim-by-UnitNum
[xxx,InNum]=size(NewInput);
if(UnitNum==0)
NetOut=repmat(b2,1,InNum);
else
SpreadMat=repmat(SpreadConstant,1,InNum);
AllDist=dist(UnitCenters',NewInput);
a1=radbas(AllDist./SpreadMat);
NetOut=w2*a1+b2;
end
% Allocate a new hidden unit: centre it at the new input, use the current
% output error as its weight, and set its spread from the distance to the
% nearest existing centre
function [UnitCenters,w2,SpreadConstant]=AddNewUnit(NewInput,NewErr,NewDist,UnitCenters,w2,SpreadConstant,OverLapCoe)
UnitCenters=[UnitCenters NewInput];
w2=[w2 NewErr];
SpreadConstant=[SpreadConstant;OverLapCoe*NewDist];
% Fine-tune the output weights, bias and unit centres by gradient descent on the current sample
function [UnitCenters,w2,b2,SpreadConstant]=FineTuning(NewInput,NewOutput,UnitCenters,w2,b2,SpreadConstant,tp)
[xxx,UnitNum]=size(UnitCenters);
if(UnitNum==0),b2=NewOutput;return,end
ErrLimit=tp(1);
lr=tp(2);
MaxEpoch=tp(3);
for epoch=1:MaxEpoch
AllDist=dist(UnitCenters',NewInput);
NewDist=min(AllDist);
a1=radbas(AllDist./SpreadConstant);
NetOut=w2*a1+b2;
NewErr=NewOutput-NetOut;
if(norm(NewErr)<ErrLimit),break,end
b2=b2+lr*NewErr;
w2=w2+lr*NewErr*a1';
for i=1:UnitNum % gradient step on each centre (scalar-output case)
CentInc=2*(NewInput-UnitCenters(:,i))*a1(i)*NewErr*w2(i)/(SpreadConstant(i)^2);
UnitCenters(:,i)=UnitCenters(:,i)+lr*CentInc;
end
end
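Usage sketch (an assumption about the intended workflow, not part of the original listing): save the code above as ran.m on the MATLAB path and make sure the Neural Network Toolbox is installed, since rands, dist, radbas and sumsqr are toolbox functions; then run

ran

from the command window. The script trains the network sample by sample and opens three figures: the fitted curve against the target, the growth of the hidden-unit count, and the normalized test RMSE.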