
📄 somfunction.m

📁 Source code for the SOM (self-organizing map) algorithm, applied to a pole-balancing task
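Note: this is a single MATLAB function file. Every parameter it uses (map size, unit counts, pole physics, training lengths) is read from global variables that are never assigned anywhere in the listing, so the file cannot run on its own. The driver below is a minimal sketch of how those globals might be set before calling the function; the variable names come from the listing, but the numeric values are illustrative assumptions, not the original project's settings, and the script name run_som.m is hypothetical.

% run_som.m -- hypothetical driver with assumed parameter values
global MIN_REAL MAX_REAL ROWS COLS N C M TRAIN_STEPS BALANCED L Mc Mp G T STEPS

MIN_REAL = -realmax;  MAX_REAL = realmax;   % sentinels for the min-distance search
ROWS = 25;  COLS = 25;                      % Kohonen grid size (assumed)
N = 2;                                      % input units: previous and current pole angle
C = ROWS*COLS;                              % one Kohonen unit per grid cell
M = 1;                                      % output unit: force applied to the cart
TRAIN_STEPS = 10000;  BALANCED = 100;       % training length and "balanced" horizon (assumed)
L = 1;  Mc = 1;  Mp = 0.1;  G = 9.81;       % pole length, cart mass, pole mass, gravity (assumed)
T = 0.1;  STEPS = 60;                       % control interval and Euler sub-steps (assumed)

Net = somfunction();                        % MATLAB dispatches on the file name, somfunction.m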
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% SOM Main
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [Net]=SOMFunction
% All parameters are read from these globals; they are never assigned in this
% file and must be set by the caller before the function is run (see the
% driver sketch above).
global MIN_REAL MAX_REAL ROWS COLS N C M TRAIN_STEPS BALANCED L Mc Mp G T STEPS

Net=struct();   % build the network up as a struct; assigning fields to the numeric 0 would error

InitializeRandoms();
Net=GenerateNetwork(Net);
Net=RandomWeights(Net);
Net=InitializeApplication(Net);
Net=TrainNet(Net);
WriteNet(Net);
FinalizeApplication(Net);

return;



%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%sqr(x)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [result]=sqr(x)

result=x*x;

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%pow(x,n)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [result]=pow(x,n)
result=(x)^(n);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%InitializeRandoms
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function InitializeRandoms()
rng('shuffle');   % seed the random number generator; a bare 'rand;' call only draws a number
return;

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%RandomEqualREAL(low,high)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [result]=RandomEqualREAL(low,high)
% Uniform random number in [low, high]: rand() already returns a value uniform
% on (0,1); the original's division by 32767 (C's RAND_MAX) collapsed every draw to low.
result=rand()*(high-low)+low;

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%RandomNormalREAL(Mu,Sigma)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [result]=RandomNormalREAL(Mu,Sigma)
% Draw from N(Mu, Sigma^2), restricted to Mu +/- 3*Sigma, by rejection sampling.

x=0;
fx=0;

x=RandomEqualREAL(Mu-3*Sigma,Mu+3*Sigma);
fx=(1/(sqrt(2*pi)*Sigma))*exp(-sqr(x-Mu)/(2*sqr(Sigma)));

while(fx<RandomEqualREAL(0,1))
   x=RandomEqualREAL(Mu-3*Sigma,Mu+3*Sigma);
   fx=(1/(sqrt(2*pi)*Sigma))*exp(-sqr(x-Mu)/(2*sqr(Sigma)));
end
result=x; 

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%InitializeApplication(Net)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [Net]=InitializeApplication(Net)
% Reset the per-unit step sizes and score means and truncate the log file som.txt.

for i=1:Net.KohonenLayer.Units
    Net.KohonenLayer.StepSize(i)=1;
    Net.KohonenLayer.dScoreMean(i)=0;
end
f=fopen('som.txt','w');
fclose(f);

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%WriteNet(Net)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function WriteNet(Net)
% Append the trained map to som.txt: one line per Kohonen unit with its two
% input weights (wOld, wNew) and its output weight (the force).
global ROWS COLS

f=fopen('som.txt','a');
fprintf(f,'\t\n\t\n\t\n');
for  r=1:ROWS
    for c=1:COLS
        x=Net.KohonenLayer.Weight((r-1)*COLS+c,1);
        y=Net.KohonenLayer.Weight((r-1)*COLS+c,2);
        z=Net.OutputLayer.Weight(1,(r-1)*COLS+c);
        fprintf(f,'%5.1f  %5.1f %5.1f \t\n',x,y,z);
    end
    fprintf(f,'\t\n');
end
fclose(f);

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%FinalizeApplication(Net)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function FinalizeApplication(Net)
return;

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%InitializePole(Pole)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [Pole]=InitializePole(Pole)
% Reset the pole to rest with a random nonzero initial angle in [-30, 30] degrees.

Pole.x=0;
Pole.xDot=0;
Pole.w=RandomEqualREAL(-30,30);
Pole.wDot=0;
Pole.F=0;
    
while Pole.w==0
    Pole.x=0;
    Pole.xDot=0;
    Pole.w=RandomEqualREAL(-30,30);
    Pole.wDot=0;
    Pole.F=0;
end

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%SimulatePole(Pole)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [Pole]=SimulatePole(Pole)
% Integrate the cart-pole dynamics over one control interval T using STEPS Euler sub-steps.
global L Mc Mp G T STEPS

% Unpack the pole state; angles are stored in degrees and simulated in radians.
x=Pole.x;
xDot=Pole.xDot;
w=(Pole.w/180)*pi;
wDot=(Pole.wDot/180)*pi;
F=Pole.F;


for s=1:STEPS
    wDotDot=(G*sin(w)+cos(w)*((-F-Mp*L*sqr(wDot)*sin(w))/(Mc+Mp)))/(L*(4/3-(Mp*sqr(cos(w)))/(Mc+Mp)));
    xDotDot=(F+Mp*L*(sqr(wDot)*sin(w)-wDotDot*cos(w)))/(Mc+Mp);
    x=x+(T/STEPS)*xDot;
    xDot=xDot+(T/STEPS)*xDotDot;
    w=w+(T/STEPS)*wDot;
    wDot=wDot+(T/STEPS)*wDotDot;
end
Pole.x=x;
Pole.xDot=xDot;
Pole.w=(w/pi)*180;
Pole.wDot=(wDot/pi)*180;

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%PoleStillBalanced(Pole)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [result]=PoleStillBalanced(Pole)
% The pole counts as balanced while its angle stays within +/- 60 degrees.

if Pole.w>=-60 && Pole.w<=60
    result=1;
else
    result=0;
end

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%ScoreOfPole(Pole)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [result]=ScoreOfPole(Pole)
% Score is the negative squared angle: the closer to upright, the higher the score.

result=-sqr(Pole.w);

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%GenerateNetwork(Net)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [Net]=GenerateNetwork(Net)
% Allocate the three layers: N input units, C Kohonen units and M output units.
global N C M

Output=zeros(1,N);
Weight=zeros(N,N);
StepSize=zeros(1,N);
dScoreMean=zeros(1,N);

Net.InputLayer.Units=N;
Net.InputLayer.Output=Output;
Net.InputLayer.Weight=Weight;
Net.InputLayer.StepSize=StepSize;
Net.InputLayer.dScoreMean=dScoreMean;


Output=zeros(1,C);
Weight=zeros(C,M);
StepSize=zeros(1,C);
dScoreMean=zeros(1,C);

Net.KohonenLayer.Units=C;
Net.KohonenLayer.Output=Output;
Net.KohonenLayer.Weight=Weight;
Net.KohonenLayer.StepSize=StepSize;
Net.KohonenLayer.dScoreMean=dScoreMean;


Output=zeros(1,M);
Weight=zeros(M,C);
StepSize=zeros(1,M);
dScoreMean=zeros(1,M);

Net.OutputLayer.Units=M;
Net.OutputLayer.Output=Output;
Net.OutputLayer.Weight=Weight;
Net.OutputLayer.StepSize=StepSize;
Net.OutputLayer.dScoreMean=dScoreMean;

Net.Winner=0;
Net.Alpha=0;
Net.Alpha_=0;
Net.Alpha__=0;
Net.Gamma=0;
Net.Sigma=0;


%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%RandomWeights(Net)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [Net]=RandomWeights(Net)
for i=1:Net.KohonenLayer.Units
    for j=1:Net.InputLayer.Units
        Net.KohonenLayer.Weight(i,j)=RandomEqualREAL(-30,30);
    end
end

for i=1:Net.OutputLayer.Units
    for j=1:Net.KohonenLayer.Units
        Net.OutputLayer.Weight(i,j)=0;
    end
end

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%SetInput(Net,Input)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [Net]=SetInput(Net,Input)

for i=1:Net.InputLayer.Units
    Net.InputLayer.Output(i)=Input(i);
end

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%GetOutput(Net)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [Output]=GetOutput(Net)

Output=zeros(1,Net.OutputLayer.Units);
for i=1:Net.OutputLayer.Units
    Output(i)=Net.OutputLayer.Output(i);
end

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%PropagateToKohonen(Net)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [Net]=PropagateToKohonen(Net)
% Competition: each Kohonen unit outputs its Euclidean distance to the input;
% the unit with the smallest distance becomes the winner.
global MAX_REAL

for i=1:Net.KohonenLayer.Units
    Sum=0;
    for j=1:Net.InputLayer.Units
        Out=Net.InputLayer.Output(j);
        Weight=Net.KohonenLayer.Weight(i,j);
        Sum=Sum+sqr(Out-Weight);
    end
    Net.KohonenLayer.Output(i)=sqrt(Sum);
end

MinOut=MAX_REAL;

for i=1:Net.KohonenLayer.Units
    if Net.KohonenLayer.Output(i)<MinOut
        MinOut=Net.KohonenLayer.Output(i);
        Net.Winner=i;
    end
end

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%PropagateToOutput(Net)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [Net]=PropagateToOutput(Net)
% The network output is the output-layer weight attached to the winning Kohonen unit.

for i=1:Net.OutputLayer.Units
    Net.OutputLayer.Output(i)=Net.OutputLayer.Weight(i,Net.Winner);
end

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%PropagateNet(Net)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [Net]=PropagateNet(Net)

Net=PropagateToKohonen(Net);
Net=PropagateToOutput(Net);

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%Neighborhood(Net,i)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [result]=Neighborhood(Net,i)
% Gaussian neighborhood strength between unit i and the winning unit on the COLS-wide grid.
global COLS

iRow=floor((i-1)/COLS);
iCol=mod(i-1,COLS);
jRow=floor((Net.Winner-1)/COLS);
jCol=mod(Net.Winner-1,COLS);

Distance=sqrt(sqr(iRow-jRow)+sqr(iCol-jCol));

result=exp(-sqr(Distance)/(2*sqr(Net.Sigma)));

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%TrainKohonen(Net,Input)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [Net]=TrainKohonen(Net,Input)
% Move every unit's weight vector toward the input, scaled by the learning rate
% Alpha and its neighborhood to the winner, and decay the per-unit step sizes.

for i=1:Net.KohonenLayer.Units
    for j=1:Net.InputLayer.Units
        Out=Input(j);
        Weight=Net.KohonenLayer.Weight(i,j);
        Lambda=Neighborhood(Net,i);
        Net.KohonenLayer.Weight(i,j)=Net.KohonenLayer.Weight(i,j)+Net.Alpha*Lambda*(Out-Weight);
    end
    StepSize=Net.KohonenLayer.StepSize(i);
    Net.KohonenLayer.StepSize(i)=Net.KohonenLayer.StepSize(i)+Net.Alpha__*Lambda*(-StepSize);
end

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%TrainOutput(Net,Output)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [Net]=TrainOutput(Net,Output)
% Move each output weight toward the desired output, scaled by the
% corresponding Kohonen unit's neighborhood to the winner.

for i=1:Net.OutputLayer.Units
    for j=1:Net.KohonenLayer.Units
        Out=Output(i);
        Weight=Net.OutputLayer.Weight(i,j);
        Lambda=Neighborhood(Net,j);
        Net.OutputLayer.Weight(i,j)=Net.OutputLayer.Weight(i,j)+Net.Alpha_*Lambda*(Out-Weight);
    end
end

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%TrainUnits(Net,Input,Output)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [Net]=TrainUnits(Net,Input,Output)
fprintf(1,'*\n');
Net=TrainKohonen(Net,Input);
Net=TrainOutput(Net,Output);

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%TrainNet(Net)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
function [Net]=TrainNet(Net)
% Training loop: repeatedly simulate the pole, let the network pick a force
% (plus exploration noise scaled by the winner's step size), and train only
% when the resulting score change beats the winner's running mean.
global N M TRAIN_STEPS BALANCED T

Input=zeros(1,N);
Output=zeros(1,M);
Target=zeros(1,M);

n=0;
Pole=struct();   % InitializePole adds the fields; a numeric 0 cannot take field assignments
f=fopen('som.txt','a');

while(n<TRAIN_STEPS)
    t=0;
    Pole=InitializePole(Pole);
    fprintf(f,'Time      Angle      Force\t\n');
    fprintf(f,'%4.1fS    %5.1fD   %5.1fN\n\n',t*T,Pole.w,Pole.F);
    wOld=Pole.w;
    ScoreOld=ScoreOfPole(Pole);
    Pole=SimulatePole(Pole);
    wNew=Pole.w;
    ScoreNew=ScoreOfPole(Pole);
    while(PoleStillBalanced(Pole)==1 && t<BALANCED)
        n=n+1;
        t=t+1;
        Net.Alpha=0.5*pow(0.01,n/TRAIN_STEPS);
        Net.Alpha_=0.5*pow(0.01,n/TRAIN_STEPS);
        Net.Alpha__=0.005;
        Net.Gamma=0.05;
        Net.Sigma=6.0*pow(0.02,n/TRAIN_STEPS);
        Input(1)=wOld;
        Input(2)=wNew;
        Net=SetInput(Net,Input);
        Net=PropagateNet(Net);
        Output=GetOutput(Net);
        Pole.F=Output(1);
        StepSize=Net.KohonenLayer.StepSize(Net.Winner);
        Pole.F=Pole.F+StepSize*RandomNormalREAL(0,10);
        fprintf(f,'%4.1fS    %5.1fD   %5.1fN\t\n',t*T,Pole.w,Pole.F);
        wOld=Pole.w;
        ScoreOld=ScoreOfPole(Pole);
        Pole=SimulatePole(Pole);
        wNew=Pole.w;
        ScoreNew=ScoreOfPole(Pole);
        dScore=ScoreNew-ScoreOld;
        dScoreMean=Net.KohonenLayer.dScoreMean(Net.Winner);
        if dScore>dScoreMean
            Target(1)=Pole.F;
            Net=TrainUnits(Net,Input,Target);   % train toward the noisy force that improved the score
        end
        Net.KohonenLayer.dScoreMean(Net.Winner)=Net.KohonenLayer.dScoreMean(Net.Winner)+Net.Gamma*(dScore-dScoreMean);
    end
    if PoleStillBalanced(Pole)
        fprintf(f,'Pole still balanced after %0.1fs...\t\n\t\n',t*T);
    else
        fprintf(f,'Pole fallen after %0.1fs...\t\n\t\n',(t+1)*T);
    end
end

fclose(f);
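For a quick look at what the map has learned, the snippet below (not part of the original file) plots the trained Kohonen codebook vectors returned by the function; it assumes the globals have been set and Net obtained as in the driver sketch at the top of the page.

W = Net.KohonenLayer.Weight;         % C-by-N codebook returned by somfunction
plot(W(:,1), W(:,2), 'k.');          % each point is one unit's (wOld, wNew) prototype, in degrees
xlabel('previous pole angle [deg]');
ylabel('current pole angle [deg]');
title('Kohonen codebook after training');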
