adfd.m
clear;
%two examples of support vector machines for classification: 1. two-class Gaussian example and 2. two-class spiral example
numtrnsam1=100;%the number of training examples of the first class
numtrnsam2=100;%the number of training examples of the second class
numtrnsam=numtrnsam1+numtrnsam2;%total number of training examples of two classes
TrainX1=randn(numtrnsam1,2);%training examples of the first class with the normal distribution of zero mean and unit covariance
% each example is a 2-dim row vector;
TrainX2=randn(numtrnsam2,2)+3;%training examples of the second class with the normal distribution of mean [3,3] and unit covariance
TrainX=[TrainX1;TrainX2;];% total training examples
TrainY=[ones(numtrnsam1,1);-ones(numtrnsam2,1);];%Desired output of training examples
numtstsam1=500;numtstsam2=500;numtstsam=numtstsam1+numtstsam2;
TestX=[randn(numtstsam1,2);randn(numtstsam2,2)+3;];%testing examples of two classes
TestY=[ones(numtstsam1,1);-ones(numtstsam2,1);];%Desired output of testing examples (unknown in an actual application)
figure(1);%plot training examples
clf;
plot(TrainX(1:numtrnsam1,1),TrainX(1:numtrnsam1,2),'r.');
hold on;
plot(TrainX(numtrnsam1+1:numtrnsam,1),TrainX(numtrnsam1+1:numtrnsam,2),'b.')
hold off;
%Training SVC
ker='rbf';% RBF (Gaussian) kernel is used
C=10;
global p1; %only valid for the rbf (Gaussian) kernel; it is the width parameter of the rbf kernel
p1=5;
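% For reference: the toolbox's 'rbf' kernel is assumed here to be the Gaussian
% K(u,v) = exp(-||u-v||^2/(2*p1^2)); the anonymous function below is only an
% illustrative sketch of that formula and is not used by the training call:
rbf_k = @(u,v) exp(-(u-v)*(u-v)'/(2*p1^2)); % u,v are 1x2 row vectors, e.g. rbf_k(TrainX(1,:),TrainX(2,:))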
[nsv,alpha,bias]=svc(TrainX,TrainY,ker,C);%Training SVC; svc returns the support-vector coefficients alpha and the bias used below (the original call to knnclassification never produced them)
figure(2);clf;
svcplot(TrainX,TrainY,ker,alpha,bias);%Plot the training results; only valid for 2-dimensional examples
Out = svcoutput(TrainX,TrainY,TestX,ker,alpha,bias);%Predict the output for the test examples
rate=sum(Out==TestY)/length(TestY);
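% A minimal follow-up sketch (assuming Out holds the predicted +/-1 labels, as the
% accuracy computation above implies): report the test accuracy and visualise the
% test examples, circling the misclassified ones.
fprintf('Test set accuracy: %.2f%%\n',100*rate);
figure(3);clf;
plot(TestX(Out==1,1),TestX(Out==1,2),'r.');hold on;
plot(TestX(Out==-1,1),TestX(Out==-1,2),'b.');
miss=(Out~=TestY);%logical index of misclassified test examples
plot(TestX(miss,1),TestX(miss,2),'ko');hold off;
title('Predicted classes on test examples (misclassifications circled)');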