
📄 svm_c.cpp

📁 This is a support vector machine (SVM) program package, written in C++, that I found
💻 CPP
📖 Page 1 of 4
  SVMINT i,j;
  SVMINT pos_i, pos_j;
  SVMFLOAT* kernel_row;
  SVMFLOAT sum_WS;
  for(pos_i=0;pos_i<working_set_size;pos_i++){
    i = working_set[pos_i];
    // put row sort_i in hessian
    kernel_row = kernel->get_row(i);
    sum_WS=0;
    //    for(pos_j=0;pos_j<working_set_size;pos_j++){
    for(pos_j=0;pos_j<pos_i;pos_j++){
      j = working_set[pos_j];
      // put all elements K(i,j) in hessian, where j in WS
      if(((which_alpha[pos_j] < 0) && (which_alpha[pos_i] < 0)) ||
         ((which_alpha[pos_j] > 0) && (which_alpha[pos_i] > 0))){
        // both i and j positive or negative
        (qp.H)[pos_i*working_set_size+pos_j] = kernel_row[j];
        (qp.H)[pos_j*working_set_size+pos_i] = kernel_row[j];
      }
      else{
        // one of i and j positive, one negative
        (qp.H)[pos_i*working_set_size+pos_j] = -kernel_row[j];
        (qp.H)[pos_j*working_set_size+pos_i] = -kernel_row[j];
      };
    };
    for(pos_j=0;pos_j<working_set_size;pos_j++){
      j = working_set[pos_j];
      sum_WS+=all_alphas[j]*kernel_row[j];
    };
    // set main diagonal
    (qp.H)[pos_i*working_set_size+pos_i] = kernel_row[i];
    // linear and box constraints
    if(which_alpha[pos_i]<0){
      // alpha
      (qp.A)[pos_i] = -1;
      // lin(alpha) = y_i+eps-sum_{i not in WS} alpha_i K_{ij}
      //            = y_i+eps-sum_i+sum_{i in WS}
      (qp.c)[pos_i] = all_ys[i]+epsilon_pos-sum[i]+sum_WS;
      primal[pos_i] = -all_alphas[i];
      (qp.u)[pos_i] = Cpos;
    }
    else{
      // alpha^*
      (qp.A)[pos_i] = 1;
      (qp.c)[pos_i] = -all_ys[i]+epsilon_neg+sum[i]-sum_WS;
      primal[pos_i] = all_alphas[i];
      (qp.u)[pos_i] = Cneg;
    };
  };
  if(parameters->quadraticLossNeg){
    for(pos_i=0;pos_i<working_set_size;pos_i++){
      if(which_alpha[pos_i]>0){
        (qp.H)[pos_i*(working_set_size+1)] += 1/Cneg;
        (qp.u)[pos_i] = infinity;
      };
    };
  };
  if(parameters->quadraticLossPos){
    for(pos_i=0;pos_i<working_set_size;pos_i++){
      if(which_alpha[pos_i]<0){
        (qp.H)[pos_i*(working_set_size+1)] += 1/Cpos;
        (qp.u)[pos_i] = infinity;
      };
    };
  };
  time_update += get_time() - time_start;
};

svm_result svm_c::test(example_set_c* test_examples, int verbose){
  svm_result the_result;
  test_set = test_examples;
  SVMINT i;
  SVMFLOAT MAE=0;
  SVMFLOAT MSE=0;
  SVMFLOAT actloss=0;
  SVMFLOAT theloss=0;
  SVMFLOAT theloss_pos=0;
  SVMFLOAT theloss_neg=0;
  SVMINT countpos=0;
  SVMINT countneg=0;
  // for pattern:
  SVMINT correct_pos=0;
  SVMINT correct_neg=0;
  SVMINT total_pos=0;
  SVMINT total_neg=0;
  SVMFLOAT prediction;
  SVMFLOAT y;
  svm_example example;
  for(i=0;i<test_set->size();i++){
    example = test_set->get_example(i);
    prediction = predict(example);
    y = examples->unscale_y(test_set->get_y(i));
    MAE += abs(y-prediction);
    MSE += (y-prediction)*(y-prediction);
    actloss=loss(prediction,y);
    theloss+=actloss;
    if(y < prediction-parameters->epsilon_pos){
      theloss_pos += actloss;
      countpos++;
    }
    else if(y > prediction+parameters->epsilon_neg){
      theloss_neg += actloss;
      countneg++;
    };
    // if pattern!
    if(is_pattern){
      if(y>0){
        if(prediction>0){
          correct_pos++;
        };
        total_pos++;
      }
      else{
        if(prediction<=0){
          correct_neg++;
        };
        total_neg++;
      };
    };
  };
  if(countpos != 0){
    theloss_pos /= (SVMFLOAT)countpos;
  };
  if(countneg != 0){
    theloss_neg /= (SVMFLOAT)countneg;
  };
  the_result.MAE = MAE / (SVMFLOAT)test_set->size();
  the_result.MSE = MSE / (SVMFLOAT)test_set->size();
  the_result.loss = theloss/test_set->size();
  the_result.loss_pos = theloss_pos;
  the_result.loss_neg = theloss_neg;
  the_result.number_svs = 0;
  the_result.number_bsv = 0;
  if(is_pattern){
    the_result.accuracy = ((SVMFLOAT)(correct_pos+correct_neg))/((SVMFLOAT)(total_pos+total_neg));
    the_result.precision = ((SVMFLOAT)correct_pos/((SVMFLOAT)(correct_pos+total_neg-correct_neg)));
    the_result.recall = ((SVMFLOAT)correct_pos/(SVMFLOAT)total_pos);
  }
  else{
    the_result.accuracy = -1;
    the_result.precision = -1;
    the_result.recall = -1;
  };
  if(verbose){
    cout << "Average loss  : "<<(theloss/test_set->size())<<endl;
    cout << "Avg. loss pos : "<<theloss_pos<<"\t ("<<countpos<<" occurrences)"<<endl;
    cout << "Avg. loss neg : "<<theloss_neg<<"\t ("<<countneg<<" occurrences)"<<endl;
    cout << "Mean absolute error : "<<the_result.MAE<<endl;
    cout << "Mean squared error  : "<<the_result.MSE<<endl;
    if(is_pattern){
      // output precision, recall and accuracy
      cout<<"Accuracy  : "<<the_result.accuracy<<endl;
      cout<<"Precision : "<<the_result.precision<<endl;
      cout<<"Recall    : "<<the_result.recall<<endl;
      // nice printout ;-)
      int rows = (int)(1+log10((SVMFLOAT)(total_pos+total_neg)));
      int now_digits = rows+2;
      int i,j;
      cout<<endl;
      cout<<"Predicted values:"<<endl;
      cout<<"   |";
      for(i=0;i<rows;i++){ cout<<" "; };
      cout<<"+  |";
      for(j=0;j<rows;j++){ cout<<" "; };
      cout<<"-"<<endl;
      cout<<"---+";
      for(i=0;i<now_digits;i++){ cout<<"-"; };
      cout<<"-+-";
      for(i=0;i<now_digits;i++){ cout<<"-"; };
      cout<<endl;
      cout<<" + |  ";
      now_digits=rows-(int)(1+log10((SVMFLOAT)correct_pos))-1;
      for(i=0;i<now_digits;i++){ cout<<" "; };
      cout<<correct_pos<<"  |  ";
      now_digits=rows-(int)(1+log10((SVMFLOAT)(total_pos-correct_pos)))-1;
      for(i=0;i<now_digits;i++){ cout<<" "; };
      cout<<total_pos-correct_pos<<"    (true pos)"<<endl;
      cout<<" - |  ";
      now_digits=rows-(int)(1+log10((SVMFLOAT)(total_neg-correct_neg)))-1;
      for(i=0;i<now_digits;i++){ cout<<" "; };
      cout<<(total_neg-correct_neg)<<"  |  ";
      now_digits=rows-(int)(1+log10((SVMFLOAT)correct_neg))-1;
      for(i=0;i<now_digits;i++){ cout<<" "; };
      cout<<correct_neg<<"    (true neg)"<<endl;
      cout<<endl;
    };
  };
  return the_result;
};

void svm_c::optimize(){
  // optimizer-specific call
  // get time
  long time_start = get_time();
  qp.n = working_set_size;
  SVMINT i;
  SVMINT j;
  // equality constraint
  qp.b[0]=0;
  for(i=0;i<working_set_size;i++){
    qp.b[0] += all_alphas[working_set[i]];
  };
  // set initial optimization parameters
  SVMFLOAT new_target=0;
  SVMFLOAT old_target=0;
  SVMFLOAT target_tmp;
  for(i=0;i<working_set_size;i++){
    target_tmp = primal[i]*qp.H[i*working_set_size+i]/2;
    for(j=0;j<i;j++){
      target_tmp+=primal[j]*qp.H[j*working_set_size+i];
    };
    target_tmp+=qp.c[i];
    old_target+=target_tmp*primal[i];
  };
  SVMFLOAT new_constraint_sum=0;
  SVMFLOAT my_is_zero = is_zero;
  SVMINT sv_count=working_set_size;
  qp.n = working_set_size;
  // optimize
  int KKTerror=1;
  int convError=0;
  smo.set_max_allowed_error(convergence_epsilon);
  // loop while some KKT condition is not valid (alpha=0)
  int result;
  if(biased){
    result = smo.smo_solve(&qp,primal);
    lambda_WS = smo.get_lambda_eq();
  }
  else{
    result = smo.smo_solve_single(&qp,primal);
    lambda_WS = 0.0;
  };
  /////////// new
  SVMINT it=3;
  if(! is_pattern){
    // iterate optimization 3 times with changed sign on variables, if KKT conditions are not satisfied
    SVMFLOAT lambda_lo;
    while(KKTerror && (it>0)){
      KKTerror = 0;
      it--;
      for(i=0;i<working_set_size;i++){
        if(primal[i]<is_zero){
          lambda_lo = epsilon_neg + epsilon_pos - qp.c[i];
          for(j=0;j<working_set_size;j++){
            lambda_lo -= primal[j]*qp.H[i*working_set_size+j];
          };
          if(qp.A[i] > 0){
            lambda_lo -= lambda_WS;
          }
          else{
            lambda_lo += lambda_WS;
          };
          if(lambda_lo<-convergence_epsilon){
            // change sign of i
            KKTerror=1;
            qp.A[i] = -qp.A[i];
            which_alpha[i] = -which_alpha[i];
            primal[i] = -primal[i];
            qp.c[i] = epsilon_neg + epsilon_pos - qp.c[i];
            if(qp.A[i]>0){
              qp.u[i] = Cneg;
            }
            else{
              qp.u[i] = Cpos;
            };
            for(j=0;j<working_set_size;j++){
              qp.H[i*working_set_size+j] = -qp.H[i*working_set_size+j];
              qp.H[j*working_set_size+i] = -qp.H[j*working_set_size+i];
            };
            if(parameters->quadraticLossNeg){
              if(which_alpha[i]>0){
                (qp.H)[i*(working_set_size+1)] += 1/Cneg;
                (qp.u)[i] = infinity;
              }
              else{
                // previous was neg
                (qp.H)[i*(working_set_size+1)] -= 1/Cneg;
              };
            };
            if(parameters->quadraticLossPos){
              if(which_alpha[i]<0){
                (qp.H)[i*(working_set_size+1)] += 1/Cpos;
                (qp.u)[i] = infinity;
              }
              else{
                // previous was pos
                (qp.H)[i*(working_set_size+1)] -= 1/Cpos;
              };
            };
          };
        };
      };
      result = smo.smo_solve(&qp,primal);
      if(biased){
        lambda_WS = smo.get_lambda_eq();
      }; // otherwise keep 0
    };
  };
  KKTerror = 1;
  //////////////////////
  if(parameters->verbosity>=5){
    cout<<"smo ended with result "<<result<<endl;
    cout<<"lambda_WS = "<<lambda_WS<<endl;
    cout<<"smo: Resulting values:"<<endl;
    for(i=0;i<working_set_size;i++){
      cout<<i<<": "<<primal[i]<<endl;
    };
  };
  while(KKTerror){
    // clip
    sv_count=working_set_size;
    new_constraint_sum=qp.b[0];
    if(biased){
      // more checks for feasibility
      for(i=0;i<working_set_size;i++){
        // check if at bound
        if(primal[i] <= my_is_zero){
          // at lower bound
          primal[i] = qp.l[i];
          sv_count--;
        }
        else if(qp.u[i]-primal[i] <= my_is_zero){
          // at upper bound
          primal[i] = qp.u[i];
          sv_count--;
        };
        new_constraint_sum -= qp.A[i]*primal[i];
      };
      // enforce equality constraint
      if(sv_count>0){
        new_constraint_sum /= (SVMFLOAT)sv_count;
        if(parameters->verbosity>=5){
          cout<<"adjusting "<<sv_count<<" alphas by "<<new_constraint_sum<<endl;
        };
        for(i=0;i<working_set_size;i++){
          if((primal[i] > qp.l[i]) &&
             (primal[i] < qp.u[i])){
            // real sv
            primal[i] += qp.A[i]*new_constraint_sum;
          };
        };
      }
      else if(abs(new_constraint_sum)>(SVMFLOAT)working_set_size*is_zero){
        // error, can't get feasible point
        if(parameters->verbosity>=5){
          cout<<"WARNING: No SVs, constraint_sum = "<<new_constraint_sum<<endl;
        };
        old_target = -infinity;
        //is_ok=0;
        convError=1;
      };
    };
    // test descend
    new_target=0;
    for(i=0;i<working_set_size;i++){
      // attention: loqo changes one triangle of H!
      target_tmp = primal[i]*qp.H[i*working_set_size+i]/2;
      for(j=0;j<i;j++){
        target_tmp+=primal[j]*qp.H[j*working_set_size+i];
      };
      target_tmp+=qp.c[i];
      new_target+=target_tmp*primal[i];
    };
    if(new_target < old_target){
      KKTerror = 0;
      if(parameters->descend < old_target - new_target){
        target_count=0;
      }
      else{
        convError=1;
      };
      if(parameters->verbosity>=5){
        cout<<"descend = "<<old_target-new_target<<endl;
      };
    }
    else if(sv_count > 0){
      // less SVs
      // set my_is_zero to min_i(primal[i]-qp.l[i], qp.u[i]-primal[i])
      my_is_zero = 1e20;
      for(i=0;i<working_set_size;i++){
        if((primal[i] > qp.l[i]) && (primal[i] < qp.u[i])){
          if(primal[i] - qp.l[i] < my_is_zero){
            my_is_zero = primal[i]-qp.l[i];
          };
          if(qp.u[i] - primal[i] < my_is_zero){
            my_is_zero = qp.u[i] - primal[i];
          };
        };
      };
      if(target_count == 0){
        my_is_zero *= 2;
      };
      if(parameters->verbosity>=5){
        cout<<"WARNING: no descend ("<<old_target-new_target
            <<" <= "<<parameters->descend
            // <<", alpha_diff = "<<alpha_diff
            <<"), adjusting is_zero to "<<my_is_zero<<endl;
        cout<<"new_target = "<<new_target<<endl;
      };
    }
    else{
      // nothing we can do
      if(parameters->verbosity>=5){
        cout<<"WARNING: no descend ("<<old_target-new_target
            <<" <= "<<parameters->descend<<"), stopping."<<endl;
      };
      KKTerror=0;
      convError=1;
    };
  };
  if(1 == convError){
    target_count++;
    //    sigfig_max+=0.05;
    if(old_target < new_target){
      for(i=0;i<working_set_size;i++){
        primal[i] = qp.A[i]*all_alphas[working_set[i]];
      };
      if(parameters->verbosity>=5){
        cout<<"WARNING: Convergence error, restoring old primals"<<endl;
      };
    };
  };
  if(target_count>50){
    // non-recoverable numerical error
    convergence_epsilon*=2;
    feasible_epsilon = convergence_epsilon;
    //    sigfig_max=-log10(is_zero);
    if(parameters->verbosity>=1)
      cout<<"WARNING: reducing KKT precision to "<<convergence_epsilon<<endl;
    target_count=0;
  };
  time_optimize += get_time() - time_start;
};

void svm_c::predict(example_set_c* test_examples){
  test_set = test_examples;
  SVMINT i;
  SVMFLOAT prediction;
  svm_example example;
  for(i=0;i<test_set->size();i++){
    example = test_set->get_example(i);
    prediction = predict(example);
    test_set->put_y(i,prediction);
  };
  test_set->set_initialised_y();
  test_set->put_b(examples->get_b());
  if(parameters->verbosity>=4){
    cout<<"Prediction generated"<<endl;
  };
};

SVMFLOAT svm_c::predict(svm_example example){
  SVMINT i;
  svm_example sv;
  SVMFLOAT the_sum=examples->get_b();
  for(i=0;i<examples_total;i++){
    if(all_alphas[i] != 0){
      sv = examples->get_example(i);
      the_sum += all_alphas[i]*kernel->calculate_K(sv,example);
    };
  };
  the_sum = examples->unscale_y(the_sum);
  if(parameters->use_min_prediction){
    if(the_sum < parameters->min_prediction){
      the_sum = parameters->min_prediction;
    };
  };
  return the_sum;
};

SVMFLOAT svm_c::predict(SVMINT i){
  //   return (sum[i]+examples->get_b());
  // numerically more stable:
  return predict(examples->get_example(i));
};

SVMFLOAT svm_c::loss(SVMINT i){
  return loss(predict(i),examples->unscale_y(all_ys[i]));
};

SVMFLOAT svm_c::loss(SVMFLOAT prediction, SVMFLOAT value){
  SVMFLOAT theloss = prediction - value;
  if(is_pattern){
    if(((value > 0) && (prediction > 0)) ||
       ((value <= 0) && (prediction <= 0))){
      theloss = 0;
    }
  };
  if(theloss > parameters->epsilon_pos){
    if(parameters->quadraticLossPos){
      theloss = parameters->Lpos*(theloss-parameters->epsilon_pos)
        *(theloss-parameters->epsilon_pos);
    }
    else{
      theloss = parameters->Lpos*(theloss-parameters->epsilon_pos);
    };
  }
  else if(theloss >= -parameters->epsilon_neg){
    theloss = 0;
  }
  else{
    if(parameters->quadraticLossNeg){
      theloss = parameters->Lneg*(-theloss-parameters->epsilon_neg)
        *(-theloss-parameters->epsilon_neg);
    }
    else{
      theloss = parameters->Lneg*(-theloss-parameters->epsilon_neg);
    };
  };
  return theloss;
};

void svm_c::print_special_statistics(){
  // nothing special here!
};

svm_result svm_c::print_statistics(){
  // # SV, # BSV, pos&neg, Loss, VCdim
  // Pattern: Acc, Rec, Pred
  if(parameters->verbosity>=2){
    cout<<"----------------------------------------"<<endl;
  };
  svm_result the_result;
  if(test_set->size() <= 0){
    if(parameters->verbosity>= 0){
      cout << "No training set given" << endl;
    };
    the_result.loss = -1;
    return the_result;  // undefined
  };
  SVMINT i;
  SVMINT svs = 0;
  SVMINT bsv = 0;
  SVMFLOAT actloss = 0;
  SVMFLOAT theloss = 0;
  SVMFLOAT theloss_pos=0;
  SVMFLOAT theloss_neg=0;
  SVMINT countpos=0;
  SVMINT countneg=0;
  SVMFLOAT min_alpha=infinity;
  SVMFLOAT max_alpha=-infinity;
  SVMFLOAT norm_w=0;
  SVMFLOAT max_norm_x=0;
  SVMFLOAT min_norm_x=1e20;
  SVMFLOAT norm_x=0;
  SVMFLOAT loo_loss_estim=0;
  // for pattern:
  SVMFLOAT correct_pos=0;
  SVMINT correct_neg=0;
  SVMINT total_pos=0;
  SVMINT total_neg=0;
  SVMINT estim_pos=0;
  SVMINT estim_neg=0;
  SVMFLOAT MSE=0;
  SVMFLOAT MAE=0;
  SVMFLOAT alpha;
  SVMFLOAT prediction;
  SVMFLOAT y;
  SVMFLOAT xi;
  for(i=0;i<examples_total;i++){
    // needed before test-loop for performance estimators
    norm_w+=all_alphas[i]*sum[i];
    alpha=all_alphas[i];
    if(alpha!=0){
      norm_x = kernel->calculate_K(i,i);
      if(norm_x>max_norm_x){
        max_norm_x = norm_x;
      };
      if(norm_x<min_norm_x){
        min_norm_x = norm_x;
      };
    };
  };
  SVMFLOAT r_delta = max_norm_x;
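
Editor's note: the sketch below is not part of svm_c.cpp. It is a minimal, self-contained C++ illustration of two pieces of the code above: the asymmetric epsilon-insensitive loss computed in svm_c::loss (linear case only) and the accuracy/precision/recall bookkeeping done in svm_c::test for pattern (classification) mode. The function eps_insensitive_loss, the Counts struct, and all numeric values are hypothetical stand-ins for the package's SVMFLOAT, parameters, and svm_result types.

#include <iostream>

// Linear epsilon-insensitive loss as in svm_c::loss (quadratic variants omitted).
// Lpos/Lneg weight errors above resp. below the [-epsilon_neg, epsilon_pos] tube.
double eps_insensitive_loss(double prediction, double value,
                            double epsilon_pos, double epsilon_neg,
                            double Lpos = 1.0, double Lneg = 1.0) {
  double diff = prediction - value;
  if (diff > epsilon_pos)   return Lpos * (diff - epsilon_pos);   // over-prediction
  if (diff >= -epsilon_neg) return 0.0;                           // inside the tube
  return Lneg * (-diff - epsilon_neg);                            // under-prediction
}

// Confusion-matrix counts as accumulated in svm_c::test:
// correct_pos = true positives, total_pos = positive examples,
// total_neg - correct_neg = false positives.
struct Counts { int correct_pos, correct_neg, total_pos, total_neg; };

int main() {
  std::cout << eps_insensitive_loss(1.3, 1.0, 0.1, 0.1) << std::endl;   // 0.2
  std::cout << eps_insensitive_loss(1.05, 1.0, 0.1, 0.1) << std::endl;  // 0 (inside tube)

  Counts c{40, 45, 50, 50};  // hypothetical test-set counts
  double accuracy  = double(c.correct_pos + c.correct_neg) / (c.total_pos + c.total_neg);
  double precision = double(c.correct_pos) / (c.correct_pos + (c.total_neg - c.correct_neg));
  double recall    = double(c.correct_pos) / c.total_pos;
  std::cout << accuracy << " " << precision << " " << recall << std::endl;  // 0.85 0.888889 0.8
  return 0;
}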
