⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 example.m

📁 支持向量机 这是我的老师编写的matlab源文件 希望对大家有用
💻 M
字号:
clear;

% SVC demo 1 of 2: two Gaussian classes.
% Class 1 ~ N([0,0], I), class 2 ~ N([3,3], I); each sample is a 2-D row vector.
nTrn1 = 100;                    % number of training samples in class 1
nTrn2 = 100;                    % number of training samples in class 2
nTrn  = nTrn1 + nTrn2;          % total number of training samples
Xtrn1 = randn(nTrn1,2);         % class-1 training data: zero mean, unit covariance
Xtrn2 = randn(nTrn2,2) + 3;     % class-2 training data: mean [3,3], unit covariance
TrainX = [Xtrn1; Xtrn2];        % stacked training inputs
TrainY = [ones(nTrn1,1); -ones(nTrn2,1)];   % labels: +1 for class 1, -1 for class 2
nTst1 = 500; nTst2 = 500; nTst = nTst1 + nTst2;
TestX = [randn(nTst1,2); randn(nTst2,2)+3]; % test inputs drawn from the same two Gaussians
TestY = [ones(nTst1,1); -ones(nTst2,1)];    % test labels (unknown in a real application)

% Scatter plot of the training set: class 1 in red, class 2 in blue.
figure(1);
clf;
plot(TrainX(1:nTrn1,1), TrainX(1:nTrn1,2), 'r.');
hold on;
plot(TrainX(nTrn1+1:nTrn,1), TrainX(nTrn1+1:nTrn,2), 'b.')
hold off;

% Train the SVC with a Gaussian RBF kernel.
ker = 'rbf';    % kernel type (Gaussian RBF, not linear)
C = 10;         % soft-margin penalty
global p1;      % RBF kernel width (variance); read by the toolbox kernel routine
p1 = 5;
[nsv, alpha, bias] = svc(TrainX, TrainY, ker, C);          % train the classifier
figure(2); clf;
svcplot(TrainX, TrainY, ker, alpha, bias);                 % plot decision boundary (2-D data only)
Out = svcoutput(TrainX, TrainY, TestX, ker, alpha, bias);  % predict labels for the test set
rate = sum(Out==TestY)/length(TestY)                       % test accuracy (no semicolon: echoed)

% SVC demo 2 of 2: two interleaved spirals (not linearly separable).
t = [0:.1:4*pi]';               % spiral parameter for the training set
nTrn1 = length(t);
nTrn2 = length(t);
nTrn  = nTrn1 + nTrn2;
k = 2;                          % spiral growth rate (radius = k*t)
% Archimedean spirals: class 2 is class 1 rotated by pi; vertical offsets of
% -0.5 / +0.5 shift the two arms apart.
Xtrn1 = [(k*t).*cos(t),    (k*t).*sin(t)-0.5];
Xtrn2 = [(k*t).*cos(t-pi), (k*t).*sin(t-pi)+0.5];
TrainX = [Xtrn1; Xtrn2];
TrainY = [ones(nTrn1,1); -ones(nTrn2,1)];
t = [0:0.03:4*pi]';             % denser parameter grid for the test set
nTst1 = length(t);
nTst2 = nTst1;
nTst  = nTst1 + nTst2;
Xtst1 = [(k*t).*cos(t),    (k*t).*sin(t)-0.5];
Xtst2 = [(k*t).*cos(t-pi), (k*t).*sin(t-pi)+0.5];
TestX = [Xtst1; Xtst2];
TestY = [ones(nTst1,1); -ones(nTst2,1)];

% Scatter plot of the two training spirals: class 1 in red, class 2 in blue.
figure(3);
clf;
plot(TrainX(1:nTrn1,1), TrainX(1:nTrn1,2), 'r.');
hold on;
plot(TrainX(nTrn1+1:nTrn,1), TrainX(nTrn1+1:nTrn,2), 'b.');
hold off;

% Train the SVC with a Gaussian RBF kernel.
ker = 'rbf';    % kernel type (Gaussian RBF, not linear)
C = 100;        % soft-margin penalty
global p1;      % RBF kernel width (variance); read by the toolbox kernel routine
p1 = 3;
[nsv, alpha, bias] = svc(TrainX, TrainY, ker, C);          % train the classifier
figure(4); clf;
svcplot(TrainX, TrainY, ker, alpha, bias);                 % plot decision boundary (2-D data only)
Out = svcoutput(TrainX, TrainY, TestX, ker, alpha, bias);  % predict labels for the test set
rate = sum(Out==TestY)/length(TestY);                      % test accuracy (semicolon: suppressed)

% SVR demo: regress the sampling function sinc(x) = sin(x)/x from noisy samples.
% The removable singularity at x = 0 is handled by inserting the limit value 1
% explicitly between the negative and positive sample grids.
xNeg = [-15:0.5:-0.2]';         % negative-side training abscissae
xPos = [0.5:0.5:15]';           % positive-side training abscissae
TrainX = [xNeg; 0; xPos];
nTrn = length(TrainX);
TrainY = [sin(xNeg)./xNeg; 1; sin(xPos)./xPos] + 0.1*randn(nTrn,1);  % sinc + Gaussian noise
xNeg = [-15:0.11:-0.1]';        % denser test grid, negative side
xPos = [0.11:0.11:15]';         % denser test grid, positive side
TestX = [xNeg; 0; xPos];
nTst = length(TestX);
TestY = [sin(xNeg)./xNeg; 1; sin(xPos)./xPos] + 0.1*randn(nTst,1);
figure(5); clf;
plot(TrainX, TrainY);           % show the noisy training curve

% Train the SVR with a Gaussian RBF kernel and epsilon-insensitive loss.
ker = 'rbf';            % kernel type
C = 10;                 % regularization penalty
loss = 'eInsensitive';  % epsilon-insensitive loss function
e = 5e-2;               % half-width of the epsilon tube
global p1;              % RBF kernel width (variance); read by the toolbox kernel routine
p1 = 5;
[nsv, beta, bias] = svr(TrainX, TrainY, ker, C, loss, e);  % train the regressor
figure(6); clf;
svrplot(TrainX, TrainY, ker, beta, bias, e);               % plot fitted function and epsilon tube
Out = svroutput(TrainX, TestX, ker, beta, bias);           % predict on the test grid
% Test-set RMSE. Renamed from `error`, which shadowed MATLAB's built-in
% error() function (a standard M-Lint warning).
rmse = sqrt((Out-TestY)'*(Out-TestY)/length(TestY));






⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -