
multisvc.asv

Support vector machines: a MATLAB source file written by my teacher. I hope it is useful to everyone.
ASV (MATLAB autosave format)
function [nsv, alpha, b0, t] = multisvc(X,Y,ker,C)
%MULTISVC Multi-class Support Vector Classification (one-vs-rest, labels 0-9)
%
%  Usage: [nsv, alpha, b0, t] = multisvc(X,Y,ker,C)
%
%  Parameters: X      - Training inputs
%              Y      - Training targets
%              ker    - kernel function
%              C      - upper bound (non-separable case)
%              nsv    - number of support vectors (accumulated over the ten subproblems)
%              alpha  - Lagrange multipliers (of the last subproblem solved)
%              b0     - bias term (of the last subproblem solved)
%              t      - total optimisation time in seconds
%
%  Author: Ren weihua
 if (nargin < 2 || nargin > 4) % check correct number of arguments
    help multisvc
 else
      fprintf('Support Vector Multiclassification\n')
      fprintf('_____________________________\n')
      if (nargin<4), C = Inf;        end
      if (nargin<3), ker = 'linear'; end
       % tolerance for Support Vector Detection
       epsilon = svtol(C(end,end));
       % Construct the Kernel matrix
       fprintf('Constructing ...\n')
       t=0;
       svi=[];
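       % t accumulates quadprog cpu time; svi collects support-vector indices over all classes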
       for k=0:9                            % one-vs-rest: class k against the other nine digits
           Yk = ((Y==k)-(Y~=k));            % binary targets: +1 for class k, -1 otherwise
           n = size(X,1);
           H1 = zeros(n,n);                 % preallocate the kernel matrix
           for i=1:n
               for j=1:n
                   H1(i,j)=svkernel(ker,X(i,:),X(j,:));
               end 
           end
           Dist = zeros(n,n);
           for i=1:n
               for j=1:n
                   Dist(i,j) = H1(i,i)+H1(j,j)-2*H1(i,j);   % squared feature-space distance
               end
           end
           if size(Yk,2)~=1
               Yk = Yk';                    % ensure the targets form a column vector
           end
           H = (Yk*Yk').*H1;                % dual Hessian: H(i,j) = y_i*y_j*K(x_i,x_j)
           c = -ones(n,1); 
           Rmax = max(max(Dist));           % largest squared distance (alternative: 2-2*sum(sum(H1))/(n*n))
           clear H1;
           % Add small amount of zero order regularisation to 
           % avoid problems when Hessian is badly conditioned. 
           H = H+1e-10*eye(size(H));
           % Set up the parameters for the Optimisation problem
           vlb = zeros(n,1);      % Set the bounds: alphas >= 0
    if size(C,1)==1 && size(C,2)==1     %  alphas <= C

       vub = C*ones(n,1);
    elseif size(C,1)==1
       vub=C';
    elseif size(C,2)==1
       vub=C;
    end   
    
    x0 = zeros(n,1);       % The starting point is [0 0 0   0]
    neqcstr = nobias(ker); % Set the number of equality constraints (1 or 0)  
    if neqcstr
       A = Yk'; b = 0;     % Set the equality constraint A*alpha = b
    else
       A = []; b = [];
    end

    % Solve the Optimisation Problem
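    % quadprog below solves the standard SVM dual in alpha:
    %    min  0.5*alpha'*H*alpha + c'*alpha        (c = -ones, i.e. maximise sum(alpha))
    %    s.t. A*alpha = b  (Yk'*alpha = 0, enforced only when nobias(ker) is true)
    %         vlb <= alpha <= vub
    % H,c map onto quadprog's (H,f); A,b onto (Aeq,beq); vlb,vub onto (lb,ub).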
    
    fprintf('Optimising ...\n');
    st = cputime;
    
   %[alpha lambda how] = qp(H, c, A, b, vlb, vub, x0, neqcstr);
    options=optimset('MaxIter',5000);
    [alpha,fval,exitflag]=quadprog(H,c,[],[],A,b,vlb,vub,x0,options);


    t=t+cputime - st;
     fprintf('Status : %d\n',exitflag);
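     % w2 = ||w||^2 in feature space, since H(i,j) = y_i*y_j*K(x_i,x_j)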
     w2 = alpha'*H*alpha;
    fprintf('Margin    : %f\n',2/sqrt(w2));
     fprintf('Sum alpha : %f\n',sum(alpha));
     
    fprintf('the max R : %f,     %f\n',Rmax,Rmax*w2);
     svi_k = find(alpha > epsilon);      % support vectors of this one-vs-rest subproblem
     nsv = length(svi_k);
     svi = [svi; svi_k];                 % accumulate SV indices over all ten classes
     b0 = 0;

    % Explicit bias, b0
    if nobias(ker) ~= 0
      % find b0 from the average over support vectors on the margin
      % SVs on the margin have alphas: 0 < alpha < C
      svii = find( alpha > epsilon & alpha < (vub - epsilon));
      if ~isempty(svii)
        b0 = (1/length(svii))*sum(Yk(svii) - H(svii,svi_k)*alpha(svi_k).*Yk(svii));
      else
        fprintf('No support vectors on margin - cannot compute bias.\n');
      end
    end
   end   % for k=0:9
        
        
 
      
    fprintf('Execution time: %4.1f seconds\n',t);
            % Compute the number of Support Vectors
    
    nsv = length(svi);   % total accumulated over the ten one-vs-rest subproblems
    fprintf('Support Vectors : %d (%3.1f%%)\n',nsv,100*nsv/n);
    end
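
A minimal usage sketch (not part of the original file): it assumes the helper functions svkernel, svtol and nobias from Gunn's MATLAB SVM toolbox are on the path, and it uses small synthetic data with digit labels 0-9 purely for illustration.

% Hypothetical example data: 200 two-dimensional points with labels 0..9
X = randn(200, 2);
Y = mod((1:200)', 10);                     % toy labels in the 0-9 range expected by the class loop
% Train with a linear kernel and box constraint C = 10
[nsv, alpha, b0, t] = multisvc(X, Y, 'linear', 10);
fprintf('accumulated SVs: %d, last-class bias: %f, time: %.1f s\n', nsv, b0, t);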
    

   
    
