📄 nlinreg.m
% A non-linear SVM regression example;
% Reference: "A Tutorial on Support Vector Regression", p. 6, Eq. 16;
% Parameters:
% N: number of randomly produced samples per unit length; the default is 5;
% e: width of the epsilon-tube in the algorithm (Fig. 1); the default is 0.1;
% C: the penalty parameter; the default is 1;
% Li: 0 - do both linear and nonlinear regression
%     1 - do linear regression only
%     2 - do nonlinear regression only
%
% Another fixed parameter:
% s: standard deviation of the Gaussian noise added to the data, set to 0.05;
%
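% Example call (a hypothetical invocation with the default values written out):
%   b = nLinReg(5, 0.1, 1, 0);
%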
function b=nLinReg(N,e,C,Li)
% Fill in default values for any arguments that were not supplied
if nargin<1, N=5; end % number of data points per unit length
if nargin<2, e=0.1; end % width of the epsilon-tube (control parameter)
if nargin<3, C=1; end % penalty parameter
if nargin<4, Li=0; end % 0: both fits, 1: linear only, 2: nonlinear only
close all;
fclose all;
start=datestr(now)
Fc=0; % Select kernel function: 0 - radial basis function (RBF); 1 - polynomial
sg=3; % Width of the RBF kernel exp(-(xi-xj)^2/sg)
p=10; % Order of the polynomial kernel (xi*xj-1)^p
% Produce the nonlinearly distributed data set [X,Y]
% Theoretical expectation curve; the nonlinear data will be scattered around it
Y0=[4.35, 4.35, 3.75, 3.7, 3.65, 3.6, 3.56, 3.55, 3.54, 3.53, 3.52]; % reference y-values (the green curve, if plotted)
X0=[0.0, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4, 4.5, 5];
s=0.05; % Standard deviation of the Gaussian noise added to the data
X=[];
Y=[];
for i=1:length(X0)-1
ns=round(N*(X0(i+1)-X0(i))); % number of samples in this segment
RX=(X0(i)+rand(1,ns)*(X0(i+1)-X0(i))); % x-values drawn uniformly within the segment
RX=sort(RX);
RY=randn(1,ns)*s; % Gaussian noise with standard deviation s
k=(Y0(i+1)-Y0(i))/(X0(i+1)-X0(i)); % slope of the local linear trend
dy=Y0(i)+k*(RX-min(RX)); % local linear trend evaluated at the sampled x-values
RY=RY+dy;
X=[X,RX];
Y=[Y,RY];
end
figure(1);
whitebg(1,'k');
plot(X,Y,'g+');
hold on
%plot(X0,Y0,'g');
clear X0 Y0 RX RY ns k dy N s i;
X=X';
Y=Y';
%----------------------------------
n=length(X) % total number of training samples (no semicolon, so it is displayed)
Do=input('Continue (1) or stop (0)? ');
% To do linear optimization
%-------------------
if ((Li==0)||(Li==1)) && (Do==1),
% Set up the optimization problem; the control parameters e and C have already been set
a=zeros(2*n,1); % Initial value of the optimization variables: a(1:n) holds alpha, a(n+1:2*n) holds alpha*
A=[]; % No inequality constraints;
B=[];
Aeq=[ones(1,n),-ones(1,n)]; % Equality constraint: sum(alpha) - sum(alpha*) = 0;
Beq=0;
LB=zeros(2*n,1); % Box constraints 0 <= a_i <= C;
UB=ones(2*n,1)*C;
a0=a;
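% For reference, the dual problem being set up here (with K_ij = x_i*x_j in the linear case) is:
%   maximize   -1/2*sum_ij (alpha_i-alpha_i*)*(alpha_j-alpha_j*)*K_ij
%              - e*sum_i (alpha_i+alpha_i*) + sum_i y_i*(alpha_i-alpha_i*)
%   subject to sum_i (alpha_i-alpha_i*) = 0,   0 <= alpha_i, alpha_i* <= C
% Since fmincon minimizes, the objective function below is assumed to return the negated value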
% Pass the necessary parameters to the objective function through a temporary file
fid=fopen('a.dat','w');
fwrite(fid,n,'float');
fwrite(fid,e,'float');
fwrite(fid,X,'float');
fwrite(fid,Y,'float');
fclose(fid);
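% LinRegFUN.m is a separate file not included in this listing. A minimal sketch of
% what it presumably computes (the negated dual objective, with the parameters read
% back from a.dat) might look like:
%   function f=LinRegFUN(a)
%   fid=fopen('a.dat','r');
%   n=fread(fid,1,'float'); e=fread(fid,1,'float');
%   X=fread(fid,n,'float'); Y=fread(fid,n,'float');
%   fclose(fid);
%   d=a(1:n)-a(n+1:2*n);               % alpha - alpha*
%   f=0.5*(d'*X)*(X'*d)+e*sum(a)-Y'*d; % negated dual objective (Eq. 9)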
% Optimizing a (Eq.9)
a=fmincon(@LinRegFUN,a0,A,B,Aeq,Beq,LB,UB);
% Calculating w (Eq. 10): w = sum_i (alpha_i - alpha_i*) * x_i
w=(a(1:n)-a((n+1):2*n))'*X;
% Calculating the intercept b (Eq. 13)
Sar=0; % flag: b has not been calculated yet
MoP=-1; % sign of the e term: -1 for an alpha, +1 for an alpha*
a=round(a*1000)/1000; % round off numerical noise left by fmincon
for i=1:2*n,
if (a(i)>0) && (a(i)<C), % a free support vector
j=i;
if j>n,
j=j-n; % map alpha* indices back onto the data index
MoP=1; % this multiplier is an alpha*
end
plot(X(j),Y(j),'w^'); % mark the support vector
if Sar==0, % the intercept b has not been calculated yet
b=Y(j)-w*X(j)+MoP*e;
Sar=1;
end
end
end
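% Note: only multipliers strictly inside the box (0 < a_i < C) are used, since the
% corresponding points lie exactly on the e-tube boundary and Eq. 13 then holds with
% equality; the first such point found above fixes b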
% The optimal regression function
% A more direct equation could be used, y=(a(1:n)-a((n+1):2*n))'*X*X'+b, which already contains the calculation of w;
y=w*X+b;
yu=y+e;
yl=y-e;
plot(X,y,'m') % Linear regression curve
plot(X,yu,':m') % Its upper boundary (the e-tube)
plot(X,yl,':m') % Its lower boundary
delete a.dat
clear a i j Sar MoP y yu yl; % b is kept so it can be returned when only the linear fit is requested (Li==1)
% Linear optimization is over
% --------------------------------
end
if ((Li==0)||(Li==2)) && (Do==1),
% To do non-linear optimization
%---------------------------------
% Set up the optimization problem; the control parameters e and C have already been set
n=length(X);
a=zeros(2*n,1); % Initial value of the optimization variables: a(1:n) holds alpha, a(n+1:2*n) holds alpha*
A=[]; % No inequality constraints;
B=[];
Aeq=[ones(1,n),-ones(1,n)]; % Equality constraint: sum(alpha) - sum(alpha*) = 0;
Beq=0;
LB=zeros(2*n,1); % Box constraints 0 <= a_i <= C;
UB=ones(2*n,1)*C;
a0=a;
% Pass the necessary parameters to the objective function through a temporary file
fid=fopen('a.dat','w');
fwrite(fid,n,'float');
fwrite(fid,e,'float');
fwrite(fid,sg,'float');
fwrite(fid,Fc,'short');
fwrite(fid,p,'short');
fwrite(fid,X,'float');
fwrite(fid,Y,'float');
fclose(fid);
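% nLinRegFUN.m is likewise a separate file not included here. A rough sketch of the
% negated dual of Eq. 16, built on the kernel matrix K selected by Fc, might be:
%   function f=nLinRegFUN(a)
%   fid=fopen('a.dat','r');
%   n=fread(fid,1,'float'); e=fread(fid,1,'float'); sg=fread(fid,1,'float');
%   Fc=fread(fid,1,'short'); p=fread(fid,1,'short');
%   X=fread(fid,n,'float'); Y=fread(fid,n,'float');
%   fclose(fid);
%   I=ones(1,n);
%   if Fc==0, K=exp(-(((X*I)-(X*I)').^2)./sg); % RBF kernel
%   else K=(X*X'-1).^p; end                    % polynomial kernel
%   d=a(1:n)-a(n+1:2*n);
%   f=0.5*d'*K*d+e*sum(a)-Y'*d;                % negated dual objective (Eq. 16)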
% Optimizing a (Eq. 16); the kernel selected by Fc above is used in nLinRegFUN
a=fmincon(@nLinRegFUN,a0,A,B,Aeq,Beq,LB,UB);
% Unlike the linear case, no explicit weight vector is calculated here; it is contained implicitly in the kernel expansion of the regression function.
% The intercept b, however, still has to be computed.
% Substitute Eq. (17) into Eq. (13) to calculate b.
Sar=0; % flag: b has not been calculated yet
MoP=-1; % sign of the e term: -1 for an alpha, +1 for an alpha*
a=round(a*1000)/1000; % round off numerical noise left by fmincon
for i=1:2*n,
if (a(i)>0) && (a(i)<C), % a free support vector
j=i;
if j>n,
j=j-n; % map alpha* indices back onto the data index
MoP=1; % this multiplier is an alpha*
end
plot(X(j),Y(j),'cs'); % mark the support vector
if Sar==0, % the intercept b has not been calculated yet
if Fc==0,
b=Y(j)-(a(1:n)-a(n+1:2*n))'*exp(-((X-X(j)).^2)./sg)+MoP*e; % RBF kernel is used
else
b=Y(j)-(a(1:n)-a(n+1:2*n))'*((X*X(j)-1).^p)+MoP*e; % Polynomial kernel is used
end
Sar=1;
end
end
end
clear i j Sar MoP;
% The optimal non-linear regression function; the 'slope' is contained implicitly in the kernel expansion
%
I=ones(1,length(X));
if Fc==0,
y=(a(1:n)-a(n+1:2*n))'*exp(-(((X*I)-(X*I)').^2)./sg)+b;
else
y=(a(1:n)-a(n+1:2*n))'*((X*X'-1).^p)+b;
end
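% The lines above evaluate the kernel-expansion form of the regression function,
% f(x) = sum_i (alpha_i - alpha_i*) * k(x_i, x) + b (cf. Eq. 17), at the training x-values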
yu=y+e;
yl=y-e;
plot(X,y,'y') % Nonlinear regression curve
plot(X,yu,':y') % Its upper boundary (the e-tube)
plot(X,yl,':y') % Its lower boundary
delete a.dat
Y1=Y-y'; % residuals of the nonlinear fit
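% Figure 2 shows these residuals: a scatter plot over X in the top panel and a histogram in the bottom-right panel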
figure(2)
subplot(2,1,1);plot(X,Y1,'c+')
subplot(2,2,4);hist(Y1)
% Non-linear optimization is over
% -------------------
['The operation was started at ',start,' and is finished at ',datestr(now)]
end
whitebg(2,'k')