📄 hssvctrain.m
字号:
function [alpha,bias, svi, nsv] = hsvctrain(samples,labels,kernel,kernelparam,C,threshold)
%HSVCTRAIN  Train a Hidden Space Support Vector Classifier.
%
%   [ALPHA,BIAS,SVI,NSV] = HSVCTRAIN(SAMPLES,LABELS,KERNEL,KERNELPARAM,C,THRESHOLD)
%
%   Inputs:
%     samples     - n-by-d training data, one sample per row
%     labels      - n-by-1 vector of class labels (+1/-1)
%     kernel      - kernel type, passed through to evalkernel
%     kernelparam - kernel parameter, passed through to evalkernel
%     C           - box constraint: upper bound on the dual variables
%     threshold   - nonzero => train with an explicit bias term, adding
%                   the equality constraint labels'*beta = 0
%
%   Outputs:
%     alpha - n-by-1 expansion coefficients in the hidden (kernel) space,
%             alpha = K*(labels.*beta)
%     bias  - explicit bias (0 when threshold is false or no margin SVs)
%     svi   - indices of support vectors (|alpha(i)| > tolerance)
%     nsv   - number of support vectors
%
%   NOTE(review): the file is named hssvctrain.m but the function is
%   hsvctrain; MATLAB dispatches on the file name, so callers invoke
%   hssvctrain. Consider renaming one of the two for consistency.

if nargin ~= 6                        % check correct number of arguments
    help hssvc                        % NOTE(review): verify hssvc.m exists; file is hssvctrain.m
    % Assign the declared outputs so callers requesting them do not error.
    alpha = []; bias = []; svi = []; nsv = 0;
    return
end

fprintf('Hidden Space Support Vector Classification\n')
fprintf('__________________________________________\n')

n = size(samples,1);
% Tolerance for Support Vector Detection
tol = 1e-5;

% Construct the Kernel matrix: K(i,j) = k(sample_i, sample_j)
fprintf('Constructing ...\n');
st = cputime;
K = evalkernel(samples,samples,kernel,kernelparam);

% Set up the parameters for the Optimisation problem:
%   minimise 0.5*beta'*H*beta + c'*beta  s.t.  0 <= beta <= C
% In hidden-space SVC the Hessian uses K'*K (samples are mapped to the
% rows of the kernel matrix), not K itself.
H = (labels*labels').*(K'*K);
H = (H + H')/2;                       % enforce exact symmetry for quadprog
c = -ones(n,1);
vlb = zeros(n,1);                     % lower bounds: betas >= 0
vub = C*ones(n,1);                    % upper bounds: betas <= C
x0 = zeros(n,1);                      % starting point

if threshold
    Aeq = labels';                    % equality constraint labels'*beta = 0
    beq = 0;
else
    Aeq = [];
    beq = [];
end

% Solve the Optimisation Problem.
% BUG FIX: the original passed the constraint in the INEQUALITY slots
% (quadprog(H,c,A,b,...)), i.e. labels'*beta <= 0. The SVM dual requires
% the EQUALITY labels'*beta = 0, which belongs in the Aeq/beq slots.
fprintf('Optimising ...\n');
[beta, fval, exitflag] = quadprog(H, c, [], [], Aeq, beq, vlb, vub, x0); %#ok<ASGLU>
if exitflag <= 0
    warning('hsvctrain:noConvergence', ...
        'quadprog did not converge (exitflag = %d); results may be unreliable.', exitflag);
end
fprintf('Execution time: %4.1f seconds\n',cputime - st);

% Expansion coefficients in hidden space: alpha(i) = sum_j y_j*beta_j*K(i,j),
% i.e. alpha = K*(labels.*beta).
% (Replaces the original O(n^2) scalar double loop with a mat-vec product.)
alpha = K*(labels.*beta);

% Support vectors: samples with a non-negligible coefficient.
svi = find(abs(alpha) > tol);
nsv = length(svi);
fprintf('Support Vectors : %d (%3.1f%%)\n',nsv,100*nsv/n);

% Implicit bias, bias
bias = 0;

% Explicit bias, bias
if threshold
    % Average over support vectors strictly inside the box (0 < beta < C):
    % these sit exactly on the margin, where y_i*(K(i,:)*alpha + bias) = 1.
    svii = find(beta > tol & beta < (C - tol));
    if ~isempty(svii)
        bias = (1/length(svii))*sum(labels(svii) - K(svii,:)*alpha);
    else
        fprintf('No support vectors on margin - cannot compute bias.\n');
    end
end
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -