/* svm.cpp */

  if(m>0) {  
    sum=0;             /* dual linear component for eq constraints */
    for(j=0;j<n;j++) {
      for(k=0;k<n;k++) {
	sum+=(ce[k]*ig[k*n+j]*g0[j]); 
      }
    }
    d0[2*n]=ce0[0]+sum;
    d0[2*n+1]=-ce0[0]-sum;
  }
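  /* The double loop above computes sum = ce^T * ig * g0, so d0[2*n] and
     d0[2*n+1] now hold +(ce0[0]+sum) and -(ce0[0]+sum): the linear terms of
     the two extra dual variables that handle the single equality constraint
     as a pair of opposing inequalities. */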

  maxviol=999999;
  iter=0;
  retrain=1;
  maxfaktor=1;
  scalemaxiter=maxiter/5;
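  /* Outer loop of the HIDEO optimizer: run coordinate-wise updates on the
     dual until they stop changing, reconstruct the primal variables, check
     the Kuhn-Tucker conditions, and, if they are still violated, tighten
     `precision` and extend the iteration budget (see the retrain block at
     the end of the loop). */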
  while((retrain) && (maxviol > 0) && (iter < (scalemaxiter*maxfaktor))) {
    iter++;
    
    while((maxviol > precision) && (iter < (scalemaxiter*maxfaktor))) {
      iter++;
      maxviol=0;
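      /* Inner sweep: Gauss-Seidel-style coordinate descent on the dual.
         Each dual[i] is set to
         -(d0[i] + sum_{j!=i} d[i][j]*dual_old[j]) / d[i][i]
         and then projected onto dual[i] >= 0.  maxviol records the largest
         single-coordinate change; the sweep stops once it falls below
         `precision`. */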
      for(i=0;i<2*(n+m);i++) {
	sum=d0[i];
	for(j=0;j<2*(n+m);j++) {
	  sum+=d[i*2*(n+m)+j]*dual_old[j];
	}
	sum-=d[i*2*(n+m)+i]*dual_old[i];
	dual[i]=-sum/d[i*2*(n+m)+i];
	if(dual[i]<0) dual[i]=0;
	
	viol=fabs(dual[i]-dual_old[i]);
	if(viol>maxviol) 
	  maxviol=viol;
	dual_old[i]=dual[i];
      }
      /*
      sprintf(temstr,"%d) maxviol=%20f precision=%f\n",iter,maxviol,precision); 
      */
    }
  
    if(m>0) {
      for(i=0;i<n;i++) {
	temp[i]=dual[i]-dual[i+n]+ce[i]*(dual[n+n]-dual[n+n+1])+g0[i];
      }
    } 
    else {
      for(i=0;i<n;i++) {
	temp[i]=dual[i]-dual[i+n]+g0[i];
      }
    }
    for(i=0;i<n;i++) {
      primal[i]=0;             /* calc value of primal variables */
      for(j=0;j<n;j++) {
	primal[i]+=ig[i*n+j]*temp[j];
      }
      primal[i]*=-1.0;
      if(primal[i]<=(low[i])) {  /* clip conservatively */
	primal[i]=low[i];
      }
      else if(primal[i]>=(up[i])) {
	primal[i]=up[i];
      }
    }

    if(m>0) 
      model_b=dual[n+n+1]-dual[n+n];
    else
      model_b=0;

    epsilon_hideo=EPSILON_HIDEO;
    for(i=0;i<n;i++) {           /* check precision of alphas */
      dist=-model_b*ce[i]; 
      dist+=(g0[i]+1.0);
      for(j=0;j<i;j++) {
	dist+=(primal[j]*g[j*n+i]);
      }
      for(j=i;j<n;j++) {
	dist+=(primal[j]*g[i*n+j]);
      }
      if((primal[i]<(up[i]-epsilon_hideo)) && (dist < (1.0-epsilon_crit))) {
	epsilon_hideo=(up[i]-primal[i])*2.0;
      }
      else if((primal[i]>(low[i]+epsilon_hideo)) &&(dist>(1.0+epsilon_crit))) {
	epsilon_hideo=(primal[i]-low[i])*2.0;
      }
    }
     /*sprintf(temstr,"\nEPSILON_HIDEO=%.30f\n",epsilon_hideo); */


    for(i=0;i<n;i++) {           /* clip alphas to bounds */
      if(primal[i]<=(low[i]+epsilon_hideo)) {
	primal[i]=low[i];
      }
      else if(primal[i]>=(up[i]-epsilon_hideo)) {
	primal[i]=up[i];
      }
    }

    retrain=0;
    primal_optimal=1;
    at_bound=0;
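    /* Kuhn-Tucker check: dist accumulates g0[i] + 1 + sum_j primal[j]*g[i][j]
       - model_b*ce[i], reading only the upper triangle of the symmetric
       matrix g.  An alpha strictly inside its box (more than epsilon_a away
       from low[i] and up[i]) must have dist within
       [1-epsilon_crit, 1+epsilon_crit]; otherwise retraining is requested.
       at_bound counts the alphas sitting numerically on a bound. */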
    for(i=0;(i<n);i++) {  /* check primal KT-Conditions */
      dist=-model_b*ce[i]; 
      dist+=(g0[i]+1.0);
      for(j=0;j<i;j++) {
	dist+=(primal[j]*g[j*n+i]);
      }
      for(j=i;j<n;j++) {
	dist+=(primal[j]*g[i*n+j]);
      }
      if((primal[i]<(up[i]-epsilon_a)) && (dist < (1.0-epsilon_crit))) {
	retrain=1;
	primal_optimal=0;
      }
      else if((primal[i]>(low[i]+epsilon_a)) && (dist > (1.0+epsilon_crit))) {
	retrain=1;
	primal_optimal=0;
      }
      if((primal[i]<=(low[i]+epsilon_a)) || (primal[i]>=(up[i]-epsilon_a))) {
	at_bound++;
      }
      /*    sprintf(temstr,"HIDEOtemp: a[%ld]=%.30f, dist=%.6f, b=%f, at_bound=%ld\n",i,primal[i],dist,model_b,at_bound);  */
    }
    if(m>0) {
      eq=-ce0[0];               /* check precision of eq-constraint */
      for(i=0;i<n;i++) { 
	eq+=(ce[i]*primal[i]);
      }
      if((EPSILON_EQ < fabs(eq)) 
	 /*
	 && !((goal==PRIMAL_OPTIMAL) 
	       && (at_bound==n)) */
	 ) {
	retrain=1;
	primal_optimal=0;
      }
      /* sprintf(temstr,"\n eq=%.30f ce0=%f at-bound=%ld\n",eq,ce0[0],at_bound);  */
    }

    if(retrain) {
      precision/=10;
      if(((goal == PRIMAL_OPTIMAL) && (maxfaktor < 50000))
	 || (maxfaktor < 5)) {
	maxfaktor++;
      }
    }
  }

  if(!primal_optimal) {
    for(i=0;i<n;i++) {
      primal[i]=0;             /* calc value of primal variables */
      for(j=0;j<n;j++) {
	primal[i]+=ig[i*n+j]*temp[j];
      }
      primal[i]*=-1.0;
      if(primal[i]<=(low[i]+epsilon_a)) {  /* clip conservatively */
	primal[i]=low[i];
      }
      else if(primal[i]>=(up[i]-epsilon_a)) {
	primal[i]=up[i];
      }
    }
  }

  isnantest=0;
  for(i=0;i<n;i++) {           /* check for isnan */
    isnantest+=primal[i];
  }

  if(m>0) {
    temp1=dual[n+n+1];   /* copy the dual variables for the eq */
    temp2=dual[n+n];     /* constraints to a handier location */
    for(i=n+n+1;i>=2;i--) {
      dual[i]=dual[i-2];
    }
    dual[0]=temp2;
    dual[1]=temp1;
    isnantest+=temp1+temp2;
  }
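  /* Exit status: NAN_SOLUTION if the primal variables summed to NaN,
     PRIMAL_OPTIMAL if the Kuhn-Tucker conditions were satisfied,
     DUAL_OPTIMAL if the dual variables stopped changing exactly, and
     MAXITER_EXCEEDED otherwise. */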

  if(isnan(isnantest)) {
    return((int)NAN_SOLUTION);
  }
  else if(primal_optimal) {
    return((int)PRIMAL_OPTIMAL);
  }
  else if(maxviol == 0.0) {
    return((int)DUAL_OPTIMAL);
  }
  else {
    return((int)MAXITER_EXCEEDED);
  }
}


void CSVM::linvert_matrix(
double *matrix,
long depth,
double *inverse,double lindep_sensitivity,
long *lin_dependent)  /* indicates the active parts of matrix on input and output */
{
  long i,j,k;
  double factor;

  for(i=0;i<depth;i++) {
    /*    lin_dependent[i]=0; */
    for(j=0;j<depth;j++) {
      inverse[i*depth+j]=0.0;
    }
    inverse[i*depth+i]=1.0;
  }
  for(i=0;i<depth;i++) {
    if(lin_dependent[i] || (fabs(matrix[i*depth+i])<lindep_sensitivity)) {
      lin_dependent[i]=1;
    }
    else {
      for(j=i+1;j<depth;j++) {
	factor=matrix[j*depth+i]/matrix[i*depth+i];
	for(k=i;k<depth;k++) {
	  matrix[j*depth+k]-=(factor*matrix[i*depth+k]);
	}
	for(k=0;k<depth;k++) {
	  inverse[j*depth+k]-=(factor*inverse[i*depth+k]);
	}
      }
    }
  }
  for(i=depth-1;i>=0;i--) {
    if(!lin_dependent[i]) {
      factor=1/matrix[i*depth+i];
      for(k=0;k<depth;k++) {
	inverse[i*depth+k]*=factor;
      }
      matrix[i*depth+i]=1;
      for(j=i-1;j>=0;j--) {
	factor=matrix[j*depth+i];
	matrix[j*depth+i]=0;
	for(k=0;k<depth;k++) {
	  inverse[j*depth+k]-=(factor*inverse[i*depth+k]);
	}
      }
    }
  }
}
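/* linvert_matrix runs Gauss-Jordan elimination on `matrix` (which is
   overwritten) while applying the same row operations to `inverse`, which
   starts out as the identity.  Rows whose pivot is smaller than
   lindep_sensitivity, or that are already flagged in lin_dependent, are
   marked lin_dependent[i]=1 and skipped, so only the non-degenerate part of
   the matrix is inverted.

   Minimal usage sketch (illustrative only; `svm` stands for an assumed CSVM
   instance and the sensitivity value is arbitrary):

     double m[4]   = { 4.0, 1.0,
                       1.0, 3.0 };
     double inv[4];
     long   dep[2] = { 0, 0 };            // no rows pre-flagged as dependent
     svm.linvert_matrix(m, 2, inv, 1e-11, dep);
     // inv now holds the inverse of the 2x2 matrix; m is reduced in place.
*/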

void CSVM::ladd_matrix(
double *matrix,
long depth,
double scalar)
{
  long i,j;
  for(i=0;i<depth;i++) {
    for(j=0;j<depth;j++) {
      matrix[i*depth+j]+=scalar;
    }
  }
}

void CSVM::lcopy_matrix(
double *matrix,
long depth,
double *matrix2)
{
  long i;
  
  for(i=0;i<(depth)*(depth);i++) {
    matrix2[i]=matrix[i];
  }
}

void CSVM::lswitch_rows_matrix(
double *matrix,
long depth,long r1,long r2)
{
  long i;
  double temp;

  for(i=0;i<depth;i++) {
    temp=matrix[r1*depth+i];
    matrix[r1*depth+i]=matrix[r2*depth+i];
    matrix[r2*depth+i]=temp;
  }
}

void CSVM::lswitchrk_matrix(
double *matrix,
long depth,long rk1,long rk2)
{
  long i;
  double temp;

  for(i=0;i<depth;i++) {
    temp=matrix[rk1*depth+i];
    matrix[rk1*depth+i]=matrix[rk2*depth+i];
    matrix[rk2*depth+i]=temp;
  }
  for(i=0;i<depth;i++) {
    temp=matrix[i*depth+rk1];
    matrix[i*depth+rk1]=matrix[i*depth+rk2];
    matrix[i*depth+rk2]=temp;
  }
}

double CSVM::calculate_qp_objective(
long opt_n,
double *opt_g,double *opt_g0,double *alpha)
{
  double obj;
  long i,j;
  obj=0;  /* calculate objective  */
  for(i=0;i<opt_n;i++) {
    obj+=(opt_g0[i]*alpha[i]);
    obj+=(0.5*alpha[i]*alpha[i]*opt_g[i*opt_n+i]);
    for(j=0;j<i;j++) {
      obj+=(alpha[j]*alpha[i]*opt_g[j*opt_n+i]);
    }
  }
  return(obj);
}
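/* calculate_qp_objective evaluates

     obj(alpha) = sum_i opt_g0[i]*alpha[i]
                  + 0.5 * sum_i sum_j alpha[i]*alpha[j]*opt_g[i*opt_n+j]

   while reading only the diagonal and one triangle of opt_g, i.e. it relies
   on opt_g being symmetric: each off-diagonal pair contributes once with
   weight 1 instead of twice with weight 0.5. */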
/********************************svm_hideo****************************/


/********************************svm_learn****************************/
/* Learns an SVM model based on the training data in docs/label. The resulting
model is returned in the structure model. */
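/* Hypothetical call sketch (illustrative only; assumes the caller has already
   loaded the training data into docs/label and filled in learn_parm and
   kernel_parm):

     CSVM svm;
     MODEL model;
     svm.svm_learn(docs, label, totdoc, totwords,
                   &learn_parm, &kernel_parm, kernel_cache, &model);
     // `model` now holds the support vectors, their alphas and the threshold b.
*/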

void CSVM::svm_learn(
               DOC *docs,                  /* training examples */
               long *label,                /* class labels: +1/-1, 0 = unlabeled (transduction) */
               long totdoc,                /* number of examples in docs/label */
               long totwords,              /* number of features */
               LEARN_PARM *learn_parm,     /* learning parameters */
               KERNEL_PARM *kernel_parm,   /* kernel parameters */
               KERNEL_CACHE *kernel_cache, /* cache of kernel evaluations */
               MODEL *model                /* learned model, filled in on return */
               )
{
    long *inconsistent,i;
    long inconsistentnum;
    long misclassified,upsupvecnum;
    double loss,model_length,example_length;
    double maxdiff,*lin,*a;
    long runtime_start,runtime_end;
    long iterations;
    long *unlabeled,transduction;
    long heldout;
    long loo_count=0,loo_count_pos=0,loo_count_neg=0,trainpos=0,trainneg=0;
    long loocomputed=0,runtime_start_loo=0,runtime_start_xa=0;
    double heldout_c=0,r_delta_sq=0,r_delta,r_delta_avg;
    
    double *xi_fullset; /* buffer for storing xi on full sample in loo */
    double *a_fullset;  /* buffer for storing alpha on full sample in loo */
    TIMING timing_profile;
    SHRINK_STATE shrink_state;
    
    runtime_start=get_runtime();

    timing_profile.time_kernel=0;
    timing_profile.time_opti=0;
    timing_profile.time_shrink=0;
    timing_profile.time_update=0;
    timing_profile.time_model=0;
    timing_profile.time_check=0;
    timing_profile.time_select=0;
    
    com_result.kernel_cache_statistic=0;
    
    learn_parm->totwords=totwords;
    
    /* make sure -n value is reasonable */
    if((learn_parm->svm_newvarsinqp < 2) || (learn_parm->svm_newvarsinqp > learn_parm->svm_maxqpsize))
    {
        learn_parm->svm_newvarsinqp=learn_parm->svm_maxqpsize;
    }
    
    init_shrink_state(&shrink_state,totdoc,(long)10000);
    
    inconsistent = (long *)my_malloc(sizeof(long)*totdoc);
    unlabeled = (long *)my_malloc(sizeof(long)*totdoc);
    a = (double *)my_malloc(sizeof(double)*totdoc);
    a_fullset = (double *)my_malloc(sizeof(double)*totdoc);
    xi_fullset = (double *)my_malloc(sizeof(double)*totdoc);
    lin = (double *)my_malloc(sizeof(double)*totdoc);
    learn_parm->svm_cost = (double *)my_malloc(sizeof(double)*totdoc);
    model->supvec = (DOC **)my_malloc(sizeof(DOC *)*(totdoc+2));
    model->alpha = (double *)my_malloc(sizeof(double)*(totdoc+2));
    model->index = (long *)my_malloc(sizeof(long)*(totdoc+2));

    
    model->at_upper_bound=0;
    model->b=0;        
    model->supvec[0]=0;  /* element 0 reserved and empty for now */
    model->alpha[0]=0;
    model->lin_weights=NULL;
    model->totwords=totwords;
    model->totdoc=totdoc;
    model->kernel_parm=(*kernel_parm);
    model->sv_num=1;
    model->loo_error=-1;
    model->loo_recall=-1;
    model->loo_precision=-1;
    model->xa_error=-1;
    model->xa_recall=-1;
    model->xa_precision=-1;
    inconsistentnum=0;
    transduction=0;
    
    r_delta=estimate_r_delta(docs,totdoc,kernel_parm);
    r_delta_sq=r_delta*r_delta;
    
    r_delta_avg=estimate_r_delta_average(docs,totdoc,kernel_parm);
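    /* If no C was supplied (svm_c == 0), it defaults to 1/r_delta_avg^2,
       where r_delta_avg is the data-dependent radius estimate returned by
       estimate_r_delta_average above. */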
    if(learn_parm->svm_c == 0.0)
    {  /* default value for C */
        learn_parm->svm_c=1.0/(r_delta_avg*r_delta_avg);
        if(com_pro.show_compute_1)
        {
            sprintf(temstr,"Setting default regularization parameter C=%.4f\n",learn_parm->svm_c);
            printm(temstr);
        }
    }
    
    for(i=0;i<totdoc;i++) 
    {    /* various inits */
        inconsistent[i]=0;
        a[i]=0;
        lin[i]=0;
        unlabeled[i]=0;
        if(label[i] == 0) 
        {
            unlabeled[i]=1;
            transduction=1;
        }
        if(label[i] > 0)
        {
            learn_parm->svm_cost[i]=learn_parm->svm_c*learn_parm->svm_costratio*
                fabs((double)label[i]);
            label[i]=1;
            trainpos++;
        }
        else if(label[i] < 0) 
        {
            learn_parm->svm_cost[i]=learn_parm->svm_c*fabs((double)label[i]);
            label[i]=-1;
            trainneg++;
        }
        else
