
📄 svm_learn.cpp

📁 Support vector machine classifier (for text classification)
💻 CPP
                retrain=identify_one_misclassified(lin,label,unlabeled,totdoc,
                    model,&inconsistentnum,inconsistent);
            }
            if(retrain) 
            {
                if(kernel_parm->kernel_type == LINEAR)
                { /* reinit shrinking */
                    learn_parm->epsilon_crit=2.0;
                } 
                sprintf(temstr," Now %ld inconsistent examples.\n",inconsistentnum);
                printm(temstr);
            }
        }
  } /* end of loop */
  
  free(chosen);
  free(last_suboptimal_at);
  free(key);
  free(selcrit);
  free(selexam);
  free(a_old);
  free(aicache);
  free(working2dnum);
  free(active2dnum);
  free(qp.opt_ce);
  free(qp.opt_ce0);
  free(qp.opt_g);
  free(qp.opt_g0);
  free(qp.opt_xinit);
  free(qp.opt_low);
  free(qp.opt_up);
  free(weights);
  
  learn_parm->epsilon_crit=epsilon_crit_org; /* restore org */
  
  return(iteration);
}


double compute_objective_function(double *a,double *lin,long *label,long *active2dnum)
{
    long i,ii;
    double criterion;
    /* calculate value of objective function */
    criterion=0;
    for(ii=0;active2dnum[ii]>=0;ii++) 
    {
        i=active2dnum[ii];
        criterion=criterion-a[i]+0.5*a[i]*label[i]*lin[i];
    } 
    return(criterion);
}
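
/*
   Note on the loop above: with lin[i] = sum_j a[j]*label[j]*K(x_i,x_j),
   'criterion' accumulates the dual objective in its minimization form,

       W(a) = - sum_i a_i + (1/2) * sum_i sum_j a_i*a_j*y_i*y_j*K(x_i,x_j),

   evaluated over the active variables, so a smaller value means a better
   dual solution.
*/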

void clear_index(long *index)
/* initializes and empties index */
{
    index[0]=-1;
} 

void add_to_index(long *index,long elem)
/* appends elem to the -1 terminated index */
{
    register long i;
    for(i=0;index[i] != -1;i++);  /* scan to the -1 terminator */
    index[i]=elem;
    index[i+1]=-1;
}

long compute_index(long *binfeature,long range,long *index)
/* create an inverted index of binfeature */
{               
    register long i,ii;
    
    ii=0;
    for(i=0;i<range;i++) 
    {
        if(binfeature[i]) 
        {
            index[ii]=i;
            ii++;
        }
    }
    for(i=0;i<4;i++) 
    {   /* terminate the index with several -1 sentinels */
        index[ii+i]=-1;
    }
    return(ii);
}
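
/* Illustrative only (not part of the original source): how the three index
   helpers above fit together. Sizes here are made up; a real index buffer
   must hold all entries plus the trailing -1 sentinels. */
static void index_helpers_example(void)
{
    long binfeature[8]={0,1,0,1,1,0,0,0}; /* dense 0/1 membership flags */
    long index[8+4];                      /* room for the -1 sentinels */
    long n;

    clear_index(index);       /* index = {-1}: the empty list */
    add_to_index(index,3);    /* index = {3,-1,...} */
    add_to_index(index,5);    /* index = {3,5,-1,...} */

    /* build the inverted index of binfeature, overwriting the above */
    n=compute_index(binfeature,8,index); /* index = {1,3,4,-1,...} */
    (void)n;                             /* n==3: number of set flags */
}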

void optimize_svm(
                  DOC *docs,               /* Do optimization on the working set. */
                  long *label,long *unlabeled,long *chosen,long *active2dnum,
                  MODEL *model,
                  long totdoc,long *working2dnum,long varnum,
                  double *a,double *lin,
                  LEARN_PARM *learn_parm,
                  CFLOAT *aicache,
                  KERNEL_PARM *kernel_parm,
                  QP *qp,
                  double *epsilon_crit_target
                  )
{
    long i;
    double *a_v;
    
    compute_matrices_for_optimization(docs,label,unlabeled,chosen,active2dnum,
        working2dnum,model,a,lin,varnum,
        totdoc,learn_parm,aicache,kernel_parm,
        qp);
    
    if(com_pro.show_compute_3) 
    {
        sprintf(temstr,"Running optimizer..."); 
        printm(temstr);
    }
    
    /* call the qp-subsolver */
    a_v=optimize_qp(qp,epsilon_crit_target, learn_parm->svm_maxqpsize,&(model->b),learn_parm);
    for(i=0;i<varnum;i++) 
    {
        a[working2dnum[i]]=a_v[i];
    }
}
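
/* optimize_qp() (the core QP solver) returns its solution for the working-set
   variables only; the loop above scatters that solution back into the full
   alpha array through working2dnum[], leaving all other alphas unchanged. */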

void compute_matrices_for_optimization(
                                       DOC *docs,
                                       long *label,long *unlabeled,long *chosen,long *active2dnum,long *key,
                                       MODEL *model,
                                       double *a,double *lin,
                                       long varnum,long totdoc,
                                       LEARN_PARM *learn_parm,
                                       CFLOAT *aicache,
                                       KERNEL_PARM *kernel_parm,
                                       QP *qp)
{
    register long ki,kj,i,j;
    register double kernel_temp;
    if(com_pro.show_compute_3) 
    {
        sprintf(temstr,"Computing qp-matrices (type %ld kernel [degree %ld, rbf_gamma %f, coef_lin %f, coef_const %f])...",kernel_parm->kernel_type,kernel_parm->poly_degree,kernel_parm->rbf_gamma,kernel_parm->coef_lin,kernel_parm->coef_const);
        printm(temstr);
    }
    
    
    qp->opt_n=varnum;
    qp->opt_ce0[0]=0; /* compute the constant for equality constraint */
    for(j=1;j<model->sv_num;j++) { /* start at 1 */
        if(!chosen[(model->supvec[j])->docnum])
        {
            qp->opt_ce0[0]+=model->alpha[j];
        }
    } 
    if(learn_parm->biased_hyperplane) 
        qp->opt_m=1;
    else 
        qp->opt_m=0;  /* eq-constraint will be ignored */
    
    /* init linear part of objective function */
    for(i=0;i<varnum;i++)
    {
        qp->opt_g0[i]=lin[key[i]];
    }
    
    for(i=0;i<varnum;i++)
    {
        ki=key[i];
        
        /* Compute the matrix for equality constraints */
        qp->opt_ce[i]=label[ki];
        qp->opt_low[i]=0;
        qp->opt_up[i]=learn_parm->svm_cost[ki];
        
        kernel_temp=(double)kernel(kernel_parm,&(docs[ki]),&(docs[ki])); 
        /* compute linear part of objective function */
        qp->opt_g0[i]-=(kernel_temp*a[ki]*(double)label[ki]); 
        /* compute quadratic part of objective function */
        qp->opt_g[varnum*i+i]=kernel_temp;
        for(j=i+1;j<varnum;j++) {
            kj=key[j];
            kernel_temp=(double)kernel(kernel_parm,&(docs[ki]),&(docs[kj]));
            /* compute linear part of objective function */
            qp->opt_g0[i]-=(kernel_temp*a[kj]*(double)label[kj]);
            qp->opt_g0[j]-=(kernel_temp*a[ki]*(double)label[ki]); 
            /* compute quadratic part of objective function */
            qp->opt_g[varnum*i+j]=(double)label[ki]*(double)label[kj]*kernel_temp;
            qp->opt_g[varnum*j+i]=(double)label[ki]*(double)label[kj]*kernel_temp;
        }
        
        
        if((i % 20 == 0) && com_pro.show_compute_2) 
        {
            sprintf(temstr,"%ld..",i); 
            printm(temstr);
        }
    }
    
    for(i=0;i<varnum;i++) 
    {
        /* assure starting at feasible point */
        qp->opt_xinit[i]=a[key[i]];
        /* set linear part of objective function */
        qp->opt_g0[i]=-1.0+qp->opt_g0[i]*(double)label[key[i]];    
    }
    
    
    
}
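
/*
   Taken together, the fields filled in above define the subproblem handed
   to optimize_qp() (key[] maps subproblem position i to document ki):

       minimize    opt_g0'*a + (1/2)*a'*opt_g*a
       subject to  opt_ce'*a = opt_ce0      (only if biased_hyperplane)
                   opt_low <= a <= opt_up   (i.e. 0 <= a_i <= C_i)

   with opt_g[i,j] = y_ki*y_kj*K(x_ki,x_kj), and opt_g0 adjusted so that the
   subproblem objective agrees with the full dual when all variables outside
   the working set are held fixed.
*/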

long calculate_svm_model(
                         DOC *docs,              /* Compute decision function based on current values */
                         long *label,long *unlabeled, /* of alpha. */
                         double *lin,double *a,double *a_old,
                         LEARN_PARM *learn_parm,
                         long *working2dnum,
                         MODEL *model)
{
    long i,ii,pos,b_calculated=0;
    double ex_c;
    
    
    //    sprintf(temstr,"Calculating model...");  printm(temstr);
    
    
    if(!learn_parm->biased_hyperplane)
    {
        model->b=0;
        b_calculated=1;
    }
    
    for(ii=0;(i=working2dnum[ii])>=0;ii++) 
    {
        if((a_old[i]>0) && (a[i]==0)) { /* remove from model */
            pos=model->index[i]; 
            model->index[i]=-1;
            (model->sv_num)--;
            model->supvec[pos]=model->supvec[model->sv_num];
            model->alpha[pos]=model->alpha[model->sv_num];
            model->index[(model->supvec[pos])->docnum]=pos;
        }
        else if((a_old[i]==0) && (a[i]>0)) 
        { /* add to model */
            model->supvec[model->sv_num]=&(docs[i]);
            model->alpha[model->sv_num]=a[i]*(double)label[i];
            model->index[i]=model->sv_num;
            (model->sv_num)++;
        }
        else if(a_old[i]==a[i]) 
        { /* nothing to do */
        }
        else
        {  /* just update alpha */
            model->alpha[model->index[i]]=a[i]*(double)label[i];
        }
        
        ex_c=learn_parm->svm_cost[i]-learn_parm->epsilon_a;
        if((a_old[i]>=ex_c) && (a[i]<ex_c)) 
        { 
            (model->at_upper_bound)--;
        }
        else if((a_old[i]<ex_c) && (a[i]>=ex_c))
        { 
            (model->at_upper_bound)++;
        }
        
        if((!b_calculated) 
            && (a[i]>learn_parm->epsilon_a) && (a[i]<ex_c))
        {   /* calculate b */
            model->b=(-(double)label[i]+lin[i]);
            b_calculated=1;
        }
    }      
    
    
    
    
    /* If no alpha in the working set is strictly between its bounds, keep
       model->b from the last iteration or the value provided by the core
       optimizer. */
    
    return(model->sv_num-1); /* subtract one, since element 0 is unused */
}
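
/*
   Threshold recovery used above: for any free support vector, i.e.
   epsilon_a < a_i < C_i - epsilon_a, the KKT conditions force

       y_i*(lin[i] - b) = 1   =>   b = lin[i] - y_i

   (since y_i is +1 or -1), which is exactly the assignment
   model->b = -label[i]+lin[i] in the loop.
*/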

long check_optimality(
                      MODEL *model,            /* Check KT-conditions */
                      long *label,long *unlabeled,
                      double *a,double *lin,
                      long totdoc,
                      LEARN_PARM *learn_parm,
                      double *maxdiff,double epsilon_crit_org,
                      long *misclassified,
                      long *inconsistent,long *active2dnum,long *last_suboptimal_at,long iteration,
                      KERNEL_PARM *kernel_parm)
{
    long i,ii,retrain;
    double dist,ex_c;
    
    if(kernel_parm->kernel_type == LINEAR)
    {  /* be optimistic */
        learn_parm->epsilon_shrink=-learn_parm->epsilon_crit+epsilon_crit_org;  
    }
    else
    {  /* be conservative */
        learn_parm->epsilon_shrink=learn_parm->epsilon_shrink*0.7+(*maxdiff)*0.3; 
    }
    retrain=0;
    (*maxdiff)=0;
    (*misclassified)=0;
    for(ii=0;(i=active2dnum[ii])>=0;ii++) 
    {
        if((!inconsistent[i]) && label[i])
        {
            dist=(lin[i]-model->b)*(double)label[i];/* 'distance' from hyperplane*/
            ex_c=learn_parm->svm_cost[i]-learn_parm->epsilon_a;
            if(dist <= 0)
            {       
                (*misclassified)++;  /* does not work due to deactivation of var */
            }
            if((a[i]>learn_parm->epsilon_a) && (dist > 1)) 
            {
                if((dist-1.0)>(*maxdiff))  /* largest violation */
                    (*maxdiff)=dist-1.0;
            }
            else if((a[i]<ex_c) && (dist < 1))
            {
                if((1.0-dist)>(*maxdiff))  /* largest violation */
                    (*maxdiff)=1.0-dist;
            }
            /* Count how long a variable was at lower/upper bound (and optimal).*/
            /* Variables, which were at the bound and optimal for a long */
            /* time are unlikely to become support vectors. In case our */
            /* cache is filled up, those variables are excluded to save */
            /* kernel evaluations. (See chapter 'Shrinking').*/ 
            if((a[i]>(learn_parm->epsilon_a)) && (a[i]<ex_c)) 
            { 
                last_suboptimal_at[i]=iteration;         /* not at bound */
            }
            else if((a[i]<=(learn_parm->epsilon_a)) 
                && (dist < (1.0+learn_parm->epsilon_shrink)))
            {
                last_suboptimal_at[i]=iteration;         /* not likely optimal */
            }
            else if((a[i]>=ex_c)
                && (dist > (1.0-learn_parm->epsilon_shrink)))  
            { 
                last_suboptimal_at[i]=iteration;         /* not likely optimal */
            }
        }   
    }
    /* termination criterion */
    if((!retrain) && ((*maxdiff) > learn_parm->epsilon_crit)) 
    {  
        retrain=1;
    }
    return(retrain);
}
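
/*
   Written out, the KKT conditions tested above are (with
   dist_i = y_i*(lin[i]-b) and C_i = svm_cost[i]):

       a_i = 0        =>  dist_i >= 1   (outside or on the margin)
       0 < a_i < C_i  =>  dist_i  = 1   (exactly on the margin)
       a_i = C_i      =>  dist_i <= 1   (inside the margin or misclassified)

   *maxdiff records the largest violation over the active set and training
   continues while it exceeds epsilon_crit; the last_suboptimal_at[]
   timestamps feed the shrinking heuristic, which deactivates variables
   that stay at a bound and optimal for many iterations.
*/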

long identify_inconsistent(
                           double *a,
                           long *label,long *unlabeled,long totdoc,
                           LEARN_PARM *learn_parm,
                           long *inconsistentnum,long *inconsistent)
{
    long i,retrain;
    
    /* Throw out examples with multipliers at upper bound. This */
    /* corresponds to the -i 1 option. */
    /* ATTENTION: this is just a heuristic for finding a close */
    /*            to minimum number of examples to exclude to */
    /*            make the problem separable with desired margin */
    retrain=0;
    for(i=0;i<totdoc;i++)
    {
