📄 svmtrain.m
字号:
% NOTE(review): this chunk begins mid-way through svmtrain's option-parsing
% switch -- the enclosing for-loop over name/value pairs, the switch(k)
% header and "case 1" (kernel_function) start before this excerpt.  The
% line breaks and indentation below were reconstructed from a
% whitespace-collapsed copy; all code tokens are unchanged.
            otherwise
                error('Bioinfo:svmtrain:UnknownKernelFunction',...
                    'Unknown Kernel Function %s.',kfun);
        end % nested switch mapping kernel-name strings to function handles
    elseif isa (pval, 'function_handle')
        % caller supplied the kernel directly as a function handle
        kfun = pval;
    else
        error('Bioinfo:svmtrain:BadKernelFunction',...
            'The kernel function input does not appear to be a function handle\nor valid function name.')
    end
case 2 % method: 'QP' (quadratic programming) or 'LS' (least squares)
    if strncmpi(pval,'qp',2)
        useQuadprog = true;
        % QP needs quadprog from the Optimization Toolbox; fall back to
        % least squares when it is not on the path.
        if isempty(which('quadprog'))
            warning('Bioinfo:svmtrain:NoOptim',...
                'The Optimization Toolbox is required to use the quadratic programming method.')
            useQuadprog = false;
        end
    elseif strncmpi(pval,'ls',2)
        useQuadprog = false;
    else
        error('Bioinfo:svmtrain:UnknownMethod',...
            'Unknown method option %s. Valid methods are ''QP'' and ''LS''',pval);
    end
case 3 % display: plot the training data (2-D data only)
    % opttf presumably coerces true/false-style input to logical and
    % returns [] when it cannot -- helper not visible here; TODO confirm.
    plotflag = opttf(pval);
    if isempty(plotflag)
        error('Bioinfo:svmtrain:PlotflagNotLogical','%s must be a logical value, true or false.',...
            upper(char(okargs(k))));
    end
    if pval == true
        if size(training,2) == 2
            plotflag = true;
        else
            % NOTE(review): plotflag is left true in this branch even though
            % the data cannot be plotted, so the plotting code further down
            % will still run; it likely should be reset to false here.
            warning('Bioinfo:svmtrain:OnlyPlot2D',...
                'The display option can only plot 2D training data.')
        end
    end
case 4 % kfunargs: extra arguments forwarded to the kernel function
    if iscell(pval)
        kfunargs = pval;
    else
        kfunargs = {pval}; % wrap a single argument in a cell array
    end
case 5 % quadprog_opts: options merged into the quadprog options structure
    if isstruct(pval)
        qp_opts = optimset(qp_opts,pval);
    elseif iscell(pval)
        qp_opts = optimset(qp_opts,pval{:});
    else
        error('Bioinfo:svmtrain:BadQuadprogOpts',...
            'QUADPROG_OPTS must be an opts structure.');
    end
case 6 % polyorder: order of the polynomial kernel (positive integer)
    if ~isscalar(pval) || ~isnumeric(pval)
        error('Bioinfo:svmtrain:BadPolyOrder',...
            'POLYORDER must be a scalar value.');
    end
    if pval ~=floor(pval) || pval < 1
        error('Bioinfo:svmtrain:PolyOrderNotInt',...
            'The order of the polynomial kernel must be a positive integer.')
    end
    kfunargs = {pval};
    setPoly = true; % remembered so we can warn if the kernel is not polynomial
case 7 % mlpparams: the two scalar parameters of the MLP kernel
    if numel(pval)~=2
        error('Bioinfo:svmtrain:BadMLPParams',...
            'MLP_PARAMS must be a two element array.');
    end
    if ~isscalar(pval(1)) || ~isscalar(pval(2))
        error('Bioinfo:svmtrain:MLPParamsNotScalar',...
            'The parameters of the multi-layer perceptron kernel must be scalar.');
    end
    kfunargs = {pval(1),pval(2)};
    setMLP = true;
case 8 % box constraint: it can be a positive numeric scalar
    % or a numeric vector of the same length as there are
    % data points
    if isscalar(pval) && isnumeric(pval) && pval > 0
        % scalar input: adjust to group size and transform into vector,
        % giving each class a weight inversely proportional to its size
        n1 = length(find(groupIndex==1));
        n2 = length(find(groupIndex==-1));
        c1 = 0.5 * pval * nPoints / n1;
        c2 = 0.5 * pval * nPoints / n2;
        boxconstraint(groupIndex==1) = c1;
        boxconstraint(groupIndex==-1) = c2;
    elseif isvector(pval) && isnumeric(pval) && all(pval > 0)
        % vector input: one constraint per training point
        if length(pval) ~= nPoints
            error('Bioinfo:svmtrain:BoxConstraintNotScalar',...
                'If box constraint is passed as vector, its size must equal the number of training points');
        end
        boxconstraint = pval;
    else
        error('Bioinfo:svmtrain:BoxConstraintNotScalar',...
            'The box constraint must be a numeric scalar or vector > 0.');
    end
case 9 % rbf sigma: width parameter of the radial basis function kernel
    if isscalar(pval) && isnumeric(pval)
        kfunargs = {pval};
        setSigma = true;
    else
        error('Bioinfo:svmtrain:RBFSigmaNotScalar',...
            'Sigma must be a numeric scalar.');
    end
case 10 % autoscale: shift/scale the training data before training
    autoScale = opttf(pval);
    if isempty(autoScale)
        error('Bioinfo:svmtrain:AutoscaleNotLogical','%s must be a logical value, true or false.',...
            upper(char(okargs(k))));
    end
end % switch(k) over recognized parameter names
end % else-branch of the parameter-name lookup (opened before this excerpt)
end % for-loop over name/value argument pairs (opened before this excerpt)

% warn when kernel-specific parameters were supplied for a different kernel
if setPoly && ~usePoly
    warning('Bioinfo:svmtrain:PolyOrderNotPolyKernel',...
        'You specified a polynomial order but not a polynomial kernel');
end
if setMLP && ~useMLP
    warning('Bioinfo:svmtrain:MLPParamNotMLPKernel',...
        'You specified MLP parameters but not an MLP kernel');
end
if setSigma && ~useSigma
    warning('Bioinfo:svmtrain:RBFParamNotRBFKernel',...
        'You specified radial basis function parameters but not a radial basis function kernel');
end

% plot the data if requested
if plotflag
    [hAxis,hLines] = svmplotdata(training,groupIndex);
    legend(hLines,cellstr(groupString));
end

% autoscale data if required, we can't use the zscore function here,
% because we need the shift and scale values.
scaleData = [];
if autoScale
    scaleData.shift = - mean(training);
    stdVals = std(training);
    scaleData.scaleFactor = 1./stdVals;
    % leave zero-variance data unscaled:
    scaleData.scaleFactor(~isfinite(scaleData.scaleFactor)) = 1;
    % shift and scale columns of data matrix:
    for c = 1:size(training, 2)
        training(:,c) = scaleData.scaleFactor(c) * ...
            (training(:,c) + scaleData.shift(c));
    end
end

% calculate kernel function and add additional term required
% for two-norm soft margin
try
    kx = feval(kfun,training,training,kfunargs{:});
    % ensure function is symmetric
    kx = (kx+kx')/2 + diag(1./boxconstraint);
catch
    % NOTE(review): lasterr is deprecated in modern MATLAB; newer code
    % would use "catch theException" and report its message instead.
    error('Bioinfo:svmtrain:UnknownKernelFunction',...
        'Error calculating the kernel function:\n%s\n', lasterr);
end

% create Hessian of the dual problem: H(i,j) = y_i * y_j * k(x_i,x_j)
H =((groupIndex * groupIndex').*kx);

if useQuadprog
    % solve the dual QP:
    % X=QUADPROG(H,f,A,b,Aeq,beq,LB,UB,X0,opts)
    [alpha, fval, exitflag, ...
        output, lambda] = quadprog(H,-ones(nPoints,1),[],[],...
        groupIndex',0,zeros(nPoints,1), ...
        Inf *ones(nPoints,1), ones(nPoints,1), ...
        qp_opts); %#ok
    if exitflag <= 0
        error('Bioinfo:svmtrain:UnsolvableOptimization',...
            'Unable to solve the optimization problem:\n%s\n', output.message);
    end
    % The support vectors are the non-zeros of alpha.
    % We could also use the zero values of the Lagrangian (fifth output of
    % quadprog) though the method below seems to be good enough.
    svIndex = find(alpha > sqrt(eps));
    sv = training(svIndex,:);
    % calculate the parameters of the separating line from the support
    % vectors.
    alphaHat = groupIndex(svIndex).*alpha(svIndex);
    % Calculate the bias by applying the indicator function to the support
    % vector with largest alpha.
    [maxAlpha,maxPos] = max(alpha); %#ok
    bias = groupIndex(maxPos) - sum(alphaHat.*kx(svIndex,maxPos));
    % an alternative method is to average the values over all support vectors
    % bias = mean(groupIndex(sv)' - sum(alphaHat(:,ones(1,numSVs)).*kx(sv,sv)));
    % An alternative way to calculate support vectors is to look for zeros of
    % the Lagrangians (fifth output from QUADPROG).
    %
    % [alpha,fval,output,exitflag,t] = quadprog(H,-ones(nPoints,1),[],[],...
    %     groupIndex',0,zeros(nPoints,1),inf *ones(nPoints,1),zeros(nPoints,1),opts);
    %
    % sv = t.lower < sqrt(eps) & t.upper < sqrt(eps);
else % Least-Squares
    % now build up compound matrix for solver; in this formulation every
    % training point ends up acting as a support vector
    A = [0 groupIndex';groupIndex,H];
    b = [0;ones(size(groupIndex))];
    x = A\b;
    % calculate the parameters of the separating line from the support
    % vectors.
    sv = training;
    bias = x(1);
    alphaHat = groupIndex.*x(2:end);
end

% package the trained classifier into the output structure
svm_struct.SupportVectors = sv;
svm_struct.Alpha = alphaHat;
svm_struct.Bias = bias;
svm_struct.KernelFunction = kfun;
svm_struct.KernelFunctionArgs = kfunargs;
svm_struct.GroupNames = groupnames;
svm_struct.ScaleData = scaleData;
svm_struct.FigureHandles = [];
if plotflag
    % overlay the support vectors on the plot created above
    hSV = svmplotsvs(hAxis,hLines,groupString,svm_struct);
    svm_struct.FigureHandles = {hAxis,hLines,hSV};
end
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -