svmtrain.m
function svm = svmTrain(svmType,X,Y,ker,p1,p2)
% SVM Classification:
%    svm = svmTrain('svc_c',x,y,ker,C);
% SVM Regression:
%    svm = svmTrain('svr_epsilon',x,y,ker,C,e);
%
% Inputs:
%    X   - training samples, n-by-d matrix (n samples, d dimensions)
%    Y   - training targets, n-by-1 vector; +1/-1 labels for classification,
%          real values for regression
%    ker - kernel parameters (struct) with the following fields:
%          type   - linear : k(x,y) = x'*y
%                   poly   : k(x,y) = (x'*y+c)^d
%                   gauss  : k(x,y) = exp(-0.5*(norm(x-y)/s)^2)
%                   tanh   : k(x,y) = tanh(g*x'*y+c)
%          degree - degree d of the polynomial kernel (positive scalar)
%          offset - offset c of the polynomial and tanh kernels (scalar, negative for tanh)
%          width  - width s of the Gaussian kernel (positive scalar)
%          gamma  - slope g of the tanh kernel (positive scalar)
%    p1  - regularization parameter C
%    p2  - epsilon of the insensitive loss ('svr_epsilon' only)
%
% Output:
%    svm - trained support vector machine (struct) with the following fields:
%          type - SVM type, 'svc_c' or 'svr_epsilon'
%          ker  - kernel parameters
%          x    - training samples, n-by-d matrix
%          y    - training targets, n-by-1 vector
%          a    - Lagrange multipliers, n-by-1 vector
% ------------------------------------------------------------%
% Set up the quadprog options (medium-scale algorithm, no iterative display).
options = optimset;
options.LargeScale = 'off';
options.Display = 'off';

switch svmType
    case 'svc_c'
        % C-SVC dual: min 0.5*a'*H*a - sum(a)  s.t.  Y'*a = 0, 0 <= a <= C
        C = p1;
        n = length(Y);
        H = (Y*Y').*kernel(ker,X,X);
        f = -ones(n,1);
        A = [];
        b = [];
        Aeq = Y';
        beq = 0;
        lb = zeros(n,1);
        ub = C*ones(n,1);
        a0 = zeros(n,1);
        [a,fval,exitflag,output,lambda] = quadprog(H,f,A,b,Aeq,beq,lb,ub,a0,options);
    case 'svr_epsilon'
        % epsilon-SVR dual in the stacked variable a = [alpha; alpha*]:
        % min 0.5*a'*H*a + f'*a  s.t.  sum(alpha) - sum(alpha*) = 0, 0 <= a <= C
        C = p1;
        e = p2;
        n = length(Y);
        Q = kernel(ker,X,X);
        H = [Q,-Q;-Q,Q];
        f = [e*ones(n,1)-Y; e*ones(n,1)+Y];
        A = [];
        b = [];
        Aeq = [ones(1,n),-ones(1,n)];
        beq = 0;
        lb = zeros(2*n,1);
        ub = C*ones(2*n,1);
        a0 = zeros(2*n,1);
        [a,fval,exitflag,output,lambda] = quadprog(H,f,A,b,Aeq,beq,lb,ub,a0,options);
        a = a(1:n) - a(n+1:end);   % net coefficients alpha - alpha*
    otherwise
        error('Unknown svmType: %s', svmType);
end

% ------------------------------------------------------------%
% Assemble the output struct.
svm.type = svmType;
svm.ker = ker;
svm.x = X;
svm.y = Y;
svm.a = a;
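
A minimal usage sketch (not part of svmtrain.m; run it as a separate demo script). It assumes the companion kernel.m called above is on the MATLAB path; the data is synthetic and the parameter values and the 1e-6 threshold are arbitrary.

% Synthetic two-class problem in 2-D.
n = 40;
X = [randn(n/2,2)+1; randn(n/2,2)-1];
Y = [ones(n/2,1); -ones(n/2,1)];

% Gaussian kernel of width 1, regularization C = 10.
ker = struct('type','gauss','width',1);
svmC = svmTrain('svc_c',X,Y,ker,10);
fprintf('C-SVC: %d of %d multipliers are active\n', sum(svmC.a > 1e-6), n);

% Epsilon-SVR on a 1-D regression problem, C = 10, epsilon = 0.1.
Xr = linspace(-3,3,60)';
Yr = sin(Xr) + 0.1*randn(size(Xr));
svmR = svmTrain('svr_epsilon',Xr,Yr,ker,10,0.1);
fprintf('eps-SVR: %d of %d coefficients are non-zero\n', ...
        sum(abs(svmR.a) > 1e-6), numel(Yr));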
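
The Gram-matrix helper kernel(ker,X1,X2) is not included in this listing. A minimal sketch consistent with the kernel types documented in the header is shown below; the toolbox's own kernel.m may differ.

function K = kernel(ker,X1,X2)
% Sketch of the Gram-matrix helper assumed by svmTrain (not the original kernel.m).
% K(i,j) = k(X1(i,:), X2(j,:)) for the kernel described by the struct ker.
switch ker.type
    case 'linear'
        K = X1*X2';
    case 'poly'
        K = (X1*X2' + ker.offset).^ker.degree;
    case 'gauss'
        % Squared distances via |x-y|^2 = |x|^2 + |y|^2 - 2*x'*y
        D = bsxfun(@plus, sum(X1.^2,2), sum(X2.^2,2)') - 2*(X1*X2');
        K = exp(-0.5*D/ker.width^2);
    case 'tanh'
        K = tanh(ker.gamma*(X1*X2') + ker.offset);
    otherwise
        error('Unknown kernel type: %s', ker.type);
end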