svc.m
  a(num_points + 1) = [];
else
  disp ('No convergence')
end

% If this minimization went OK, find the
% support vectors and throw all points from this batch
% out of the pool.
%
% OK means: There is at least one support vector
% (i.e., a(i) > a_crit for some i) and we do not have
% a vector of NaN's or an empty vector.
if ((prod(a <= (a_crit * ones (size (a,1),1))) == 0) & ...
    (isempty(find(isnan(a)==1))) & ...
    (~isempty(a)) & ...
    (delta + 0.001 >= max(a)))

  % Throw the set of processed points out of the
  % set of remaining points.
  points_left = points_left - num_points;
  xo(r,:) = [];
  yo(r) = [];

  % Find all support vectors in the current
  % batch. Plot 'real' support vectors in red,
  % overlapping vectors (i.e., errors) in magenta.
  support_size = 0;
  for i = 1:num_points
    if (a(i) > a_crit)
      if (dographics)
        if (a(i) < delta-0.0001)
          plot (x(i,1), x(i,2), 'ro');
        else
          plot (x(i,1), x(i,2), 'mo');
        end
      end
      support_size = support_size + 1;
      xs(support_size, :) = x(i, :);
      ys(support_size, :) = y(i);
    end
  end

  % Optimal hyperplane: calculate w.
  if (method == linear)
    wo = zeros (feature_dimension, 1);
    for i = 1:input_dimension
      for j = 1:num_points
        wo(i) = wo(i) + a(j)*y(j)*x(j,i);
      end
    end
  end

  % Calculate b, the offset of the classifying function.
  % Note that this only works for 'real' support vectors,
  % i.e. non-errors. We believe that we can discriminate
  % between these on the basis that errors have a(i) == delta,
  % unlike real support vectors. This is a bit tricky, however.
  real_sv = find ((a > a_crit) & (a < delta - 0.0001));
  if (isempty(real_sv))
    real_sv = find (a > a_crit);
  end
  bo = zeros (size (real_sv));
  for j = 1:size(real_sv,1)
    r = real_sv(j);
    g = 0;
    for i = 1:num_points
      if (method == linear)
        g = g + y(i) * a(i) * (x(r,:) * x(i,:)');
      elseif (method == polynomial)
        g = g + y(i) * a(i) * ((x(r,:) * x(i,:)' + 1)^feature_dimension);
      elseif (method == potential)
        g = g + y(i) * a(i) * exp (- vectdist (x(r,:), x(i,:)') / po_sigma);
      elseif (method == radial_basis)
        g = g + y(i) * a(i) * exp (- (vectdist (x(r,:), x(i,:)') / rb_sigma)^2);
      elseif (method == neural)
        g = g + y(i) * a(i) * tanh (nn_v * x(r,:) * x(i,:)' + nn_c);
      else
        disp ('Error: unknown dot product convolution.')
        return
      end
    end
    bo(j) = y(r) - g;
  end
  % Average the per-vector offsets into a single b.
  b = mean(bo);
  bo = b;

  % Draw the classifier and its margins.
  if (dographics & (method == linear))
    h = line ([-mx mx], [((-wo(1)*(-mx))-b  )/wo(2) ((-wo(1)*mx)-b  )/wo(2)]);
    set (h, 'Color', 'w');
    h = line ([-mx mx], [((-wo(1)*(-mx))-b-1)/wo(2) ((-wo(1)*mx)-b-1)/wo(2)]);
    set (h, 'Color', 'r');
    h = line ([-mx mx], [((-wo(1)*(-mx))-b+1)/wo(2) ((-wo(1)*mx)-b+1)/wo(2)]);
    set (h, 'Color', 'r');
  end

  % Throw out all points which lie beyond the margin. Quote the Raven:
  % 'These will become a support vector .... NeverMore.'
  %
  % This does not work if we do not consider points that have been
  % rejected. Solution: build a set of the points in the *entire*
  % dataset that do not satisfy constraint eqn. 10 (David).
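  % In every kernel branch below, the decision function is the usual
  % support vector expansion g(x) = sum_i a(i)*y(i)*K(x, x(i,:)), so a
  % training point with label y(i) satisfies the margin constraint
  % exactly when y(i)*(g(x(i,:)) + b) >= 1. The pass below recomputes
  % this quantity for every point in the original dataset and keeps
  % only the violators (r < 1), since only those can still turn up as
  % support vectors in a later batch.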
  if (throwaway)
    r = zeros (size (xorg, 1), 1);
    for i = 1:size(r,1)
      g = 0;
      for j = 1:num_points
        if (method == linear)
          g = g + y(j) * a(j) * (xorg(i,:) * x(j,:)');
        elseif (method == polynomial)
          g = g + y(j) * a(j) * ((xorg(i,:) * x(j,:)' + 1)^feature_dimension);
        elseif (method == potential)
          g = g + y(j) * a(j) * exp (- vectdist (xorg(i,:), x(j,:)') / po_sigma);
        elseif (method == radial_basis)
          g = g + y(j) * a(j) * exp (- (vectdist (xorg(i,:), x(j,:)') / rb_sigma)^2);
        elseif (method == neural)
          g = g + y(j) * a(j) * tanh (nn_v * xorg(i,:) * x(j,:)' + nn_c);
        else
          disp ('Error: unknown dot product convolution.')
          return
        end
      end
      r(i) = yorg(i) * (g + b);
    end
    xo = xorg (find (r < 1),:);
    yo = yorg (find (r < 1),:);
  end

  % !%#(^%&!^#$ We now get the possibility of having a
  % support vector in set xs *and* in xo because r < 1
  % (soft margin classifier). Therefore: kick any
  % points in xs out of xo.
  k = size (xo,1);
  for j = 1:support_size
    i = 1;
    while i <= k
      if (vectdist(xo(i,:), xs(j,:)') == 0)
        xo(i,:) = [];
        yo(i,:) = [];
        k = k - 1;
      else
        i = i + 1;
      end
    end
  end

  % Clip a (throw away non-support vector values).
  a = a(find (a > a_crit));
  % Clip the set of support vectors.
  xs = xs(1:support_size, :);
  ys = ys(1:support_size);

  if (dodebug)
    wait ('Press any key');
  end
  s = fprintf ('%d vectors to go, %d support \n', ...
               size (xo, 1), support_size);
  retries = 0;

else
  retries = retries + 1;
  % If all else fails, start over....
  if (retries > max_retries)
    if (dodebug)
      disp ('Re-starting algorithm.')
    end
    support_size = 0;
    xs = [];
    ys = [];
    a = [];
    xo = xorg;
    yo = yorg;
    retries = 0;
  end
end

if (dodebug)
  disp ('Number of points left to process:')
  size(xo,1)
end
end  % closes the loop over remaining points (opened earlier in the file)

if (~dodebug)
  clf;
  hold on;
end

% Plot the classifier, all datapoints and mark the support vectors.
if (dographics)
  if (method == linear)
    h = line ([-mx mx], [((-wo(1)*(-mx))-b  )/wo(2) ((-wo(1)*mx)-b  )/wo(2)]);
    set (h, 'Color', 'w');
    h = line ([-mx mx], [((-wo(1)*(-mx))-b-1)/wo(2) ((-wo(1)*mx)-b-1)/wo(2)]);
    set (h, 'Color', 'r');
    h = line ([-mx mx], [((-wo(1)*(-mx))-b+1)/wo(2) ((-wo(1)*mx)-b+1)/wo(2)]);
    set (h, 'Color', 'r');
  end
  plot (xorg(find(yorg<0),1), xorg(find(yorg<0),2), 'y+');
  plot (xorg(find(yorg>0),1), xorg(find(yorg>0),2), 'y*');
  plot (xs(find(a< delta-0.0001),1), xs(find(a< delta-0.0001),2), 'ro');
  plot (xs(find(a>=delta-0.0001),1), xs(find(a>=delta-0.0001),2), 'mo');
  axis (ax);
  axis ('square');
  drawnow;
end

if (dographics3d & (input_dimension == 2))
  fprintf ('Plotting function in 3D\n')
  figure (2);
  [fx,fy] = meshgrid (-mx:.03:(mx*1.01), -mx:.03:(mx*1.01));
  for i = 1:size(fx, 1)
    for j = 1:size(fx, 2)
      fz(i, j) = b;
      for k = 1:support_size
        if (method == linear)
          fz(i, j) = fz(i, j) + ys(k) * a(k) * ...
              ([fx(i,j) fy(i,j)] * xs(k,:)');
        elseif (method == polynomial)
          fz(i, j) = fz(i, j) + ys(k) * a(k) * ...
              (([fx(i,j) fy(i,j)] * xs(k,:)' + 1)^feature_dimension);
        elseif (method == potential)
          fz(i, j) = fz(i, j) + ys(k) * a(k) * ...
              exp (- vectdist ([fx(i,j) fy(i,j)], xs(k,:)') / po_sigma);
        elseif (method == radial_basis)
          fz(i, j) = fz(i, j) + ys(k) * a(k) * ...
              exp (- (vectdist ([fx(i,j) fy(i,j)], xs(k,:)') / rb_sigma)^2);
        elseif (method == neural)
          fz(i, j) = fz(i, j) + ys(k) * a(k) * ...
              tanh (nn_v * [fx(i,j) fy(i,j)] * xs(k,:)' + nn_c);
        else
          disp ('Error: unknown dot product convolution.')
          return
        end
      end
    end
  end
  figure (2);
  clf;
  hold on;
  surf (fx, fy, fz);
  shading flat;
  colormap ('default');
  colormap (colormap/2);
  contour3 (fx, fy, fz, [ 0  0], 'k');
  contour3 (fx, fy, fz, [-1 -1], 'w-.');
  contour3 (fx, fy, fz, [ 1  1], 'w-.');
  plot3 (xorg(find(yorg<0),1), xorg(find(yorg<0),2), ...
         10*max(max(fz))*ones(size(find(yorg<0),1),1), 'w+');
  plot3 (xorg(find(yorg>0),1), xorg(find(yorg>0),2), ...
         10*max(max(fz))*ones(size(find(yorg>0),1),1), 'w*');
  plot3 (xs(:,1), xs(:,2), 100*ones(support_size,1), 'ko');
  colorbar
  axis ([min(min(fx)) max(max(fx)) min(min(fy)) max(max(fy))]);
  axis ('square');
end
return
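% ----------------------------------------------------------------------
% vectdist is called in every kernel evaluation above but is not defined
% in this listing. From its use in the potential and radial_basis
% kernels, it presumably returns the Euclidean distance between a row
% vector and a column vector. A minimal sketch under that assumption,
% kept in its own vectdist.m:

function d = vectdist (u, v)
% Euclidean distance between row vector u and column vector v
% (assumed semantics; the original helper is not shown here).
d = norm (u(:) - v(:));

% Once xs, ys, a and b are available, a new point xt (a row vector) can
% be classified by reusing the same kernel expansion. The helper name
% svc_classify is hypothetical and only the linear kernel is sketched;
% the other kernels would follow the branches used above.

function c = svc_classify (xt, xs, ys, a, b)
% Sign of the decision function g(xt) + b for the linear kernel.
g = 0;
for k = 1:size (xs, 1)
  g = g + ys(k) * a(k) * (xt * xs(k,:)');
end
c = sign (g + b);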