svm_learn_main.c
SVM-light Version llf_dqy_hhu
Language: C
Page 1 of 2
/***********************************************************************/
/*                                                                     */
/*   svm_learn_main.c                                                  */
/*                                                                     */
/*   Command line interface to the learning module of the              */
/*   Support Vector Machine.                                           */
/*                                                                     */
/*   Author: Thorsten Joachims                                         */
/*   Date: 02.07.02                                                    */
/*                                                                     */
/*   Copyright (c) 2000  Thorsten Joachims - All rights reserved       */
/*                                                                     */
/*   This software is available for non-commercial use only. It must   */
/*   not be modified and distributed without prior permission of the   */
/*   author. The author is not responsible for implications from the   */
/*   use of this software.                                             */
/*                                                                     */
/***********************************************************************/

/* uncomment, if you want to use svm-learn out of C++ */
/* extern "C" { */
# include "svm_common.h"
# include "svm_learn.h"
/* } */

char docfile[200];           /* file with training examples */
char modelfile[200];         /* file for resulting classifier */

void   read_input_parameters(int, char **, char *, char *, long *, long *,
			     LEARN_PARM *, KERNEL_PARM *);
void   wait_any_key();
//void   print_help();

//////////////////////////////////////////////////////////////////////
#define NUMBER 3             /* argc value handed to the original CLI parser */
char dataFile[30];           /* training data file name (typed by the user) */
char dataFilec[30];          /* test data file name (typed by the user) */
//char resultFile[30];
char resultFile1c[30];       /* prediction output file name (typed by the user) */

int main (int argc, char* argv[])
{
  // input data
  DOC *docs;  /* training examples */
  long max_docs,max_words_doc;
  long totwords,totdoc,ll,i;
  long kernel_cache_size;
  double *target;
  KERNEL_CACHE kernel_cache;
  LEARN_PARM learn_parm;
  KERNEL_PARM kernel_parm;
  MODEL model;
  /////////////////////////////////////////////////////////
  printf("Enter the name of the training data file: ");
  scanf("%s",dataFile);
  //printf("Enter the name of the file to store the model: ");
  //scanf("%s",resultFile);
  //printf("%s",&resultFile);
  //&resultFile="model.txt";
  printf("Enter the name of the test data file: ");
  scanf("%s",dataFilec);
  printf("Enter the name of the file to store the prediction results: ");
  scanf("%s",resultFile1c);
  /* Splice the file names typed above into argc/argv, so that the
     original command-line parser below can be reused unchanged. */
  argc=NUMBER;
  argv[1]=dataFile;
  //argv[2]=resultFile;
  argv[2]="model.txt";       /* fixed model file name */

  ////////////////////////////////////////////////////////
  printf("******************************训练******************************\n");
  /////////////////////////////////////////////////
  read_input_parameters(argc,argv,docfile,modelfile,&verbosity,
			&kernel_cache_size,&learn_parm,&kernel_parm);

  if(verbosity>=1) {
    printf("Scanning examples..."); fflush(stdout);
  }
  nol_ll(docfile,&max_docs,&max_words_doc,&ll); /* scan size of input file */
  max_words_doc+=2;
  ll+=2;
  max_docs+=2;
  if(verbosity>=1) {
    printf("done\n"); fflush(stdout);
  }

  docs = (DOC *)my_malloc(sizeof(DOC)*max_docs);         /* feature vectors */
  target = (double *)my_malloc(sizeof(double)*max_docs); /* target values */

  read_documents(docfile,docs,target,max_words_doc,ll,&totwords,&totdoc);

  /* The original dispatch on learn_parm.type and kernel_parm.kernel_type is
     commented out below; this modified version always takes the non-linear
     regression path. */
  /*if(kernel_parm.kernel_type == LINEAR) { // don't need the cache //
    if(learn_parm.type == CLASSIFICATION) {
      svm_learn_classification(docs,target,totdoc,totwords,&learn_parm,
			       &kernel_parm,NULL,&model);
    }
    else if(learn_parm.type == REGRESSION) {
      svm_learn_regression(docs,target,totdoc,totwords,&learn_parm,
			   &kernel_parm,NULL,&model);
    }
    else if(learn_parm.type == RANKING) {
      svm_learn_ranking(docs,target,totdoc,totwords,&learn_parm,
			&kernel_parm,NULL,&model);
    }
  }
  else {
    if(learn_parm.type == CLASSIFICATION) {
      // Always get a new kernel cache. It is not possible to use the//
      //same cache for two different training runs //
      kernel_cache_init(&kernel_cache,totdoc,kernel_cache_size);
      svm_learn_classification(docs,target,totdoc,totwords,&learn_parm,
			       &kernel_parm,&kernel_cache,&model);
      // Free the memory used for the cache. //
      kernel_cache_cleanup(&kernel_cache);
    }
    else
		if(learn_parm.type == REGRESSION) {*/
      /* Always get a new kernel cache. It is not possible to use the
         same cache for two different training runs */
      kernel_cache_init(&kernel_cache,2*totdoc,kernel_cache_size);
      svm_learn_regression(docs,target,totdoc,totwords,&learn_parm,
			   &kernel_parm,&kernel_cache,&model);
      /* Free the memory used for the cache. */
      kernel_cache_cleanup(&kernel_cache);
    /*}
    else if(learn_parm.type == RANKING) {
      printf("Learning rankings is not implemented for non-linear kernels in this version!\n");
      exit(1);
    }
  }*/

  /* Warning: The model contains references to the original data 'docs'.
     If you want to free the original data, and only keep the model, you
     have to make a deep copy of 'model'. */
  write_model(modelfile,&model);

  free(model.supvec);
  free(model.alpha);
  free(model.index);
  for(i=0;i<totdoc;i++)
    free(docs[i].words);
  free(docs);
  free(target);
  printf("******************************测试******************************\n");  predict(dataFilec,argv[2],resultFile1c);  return(0);}/*---------------------------------------------------------------------------*/void read_input_parameters(int argc,char *argv[],char *docfile,char *modelfile,			   long *verbosity,long *kernel_cache_size,			   LEARN_PARM *learn_parm,KERNEL_PARM *kernel_parm){  long i;  char type[100];    /* set default */  strcpy (modelfile, "svm_model");  strcpy (learn_parm->predfile, "trans_predictions");  strcpy (learn_parm->alphafile, "");  (*verbosity)=1;  learn_parm->biased_hyperplane=1;  learn_parm->remove_inconsistent=0;  learn_parm->skip_final_opt_check=0;  learn_parm->svm_maxqpsize=10;  learn_parm->svm_newvarsinqp=0;  learn_parm->svm_iter_to_shrink=-9999;  (*kernel_cache_size)=40;  learn_parm->svm_c=0.0;/////  learn_parm->eps=0.1;/////////////  learn_parm->transduction_posratio=-1.0;  learn_parm->svm_costratio=1.0;  learn_parm->svm_costratio_unlab=1.0;  learn_parm->svm_unlabbound=1E-5;  learn_parm->epsilon_crit=0.001;  learn_parm->epsilon_a=1E-15;  learn_parm->compute_loo=0;  learn_parm->rho=1.0;  learn_parm->xa_depth=0;  kernel_parm->kernel_type=2;////////选rbf  kernel_parm->poly_degree=3;  kernel_parm->rbf_gamma=1.0;/////////  kernel_parm->coef_lin=1;  kernel_parm->coef_const=1;  strcpy(kernel_parm->custom,"empty");  strcpy(type,"r");/////////////
  for(i=1;(i<argc) && ((argv[i])[0] == '-');i++) {    /*switch ((argv[i])[1])       {       //case '?': print_help(); exit(0);      case 'z': i++; strcpy(type,argv[i]); break;      case 'v': i++; (*verbosity)=atol(argv[i]); break;      case 'b': i++; learn_parm->biased_hyperplane=atol(argv[i]); break;      case 'i': i++; learn_parm->remove_inconsistent=atol(argv[i]); break;      case 'f': i++; learn_parm->skip_final_opt_check=!atol(argv[i]); break;      case 'q': i++; learn_parm->svm_maxqpsize=atol(argv[i]); break;      case 'n': i++; learn_parm->svm_newvarsinqp=atol(argv[i]); break;      case 'h': i++; learn_parm->svm_iter_to_shrink=atol(argv[i]); break;      case 'm': i++; (*kernel_cache_size)=atol(argv[i]); break;      case 'c': i++; learn_parm->svm_c=atof(argv[i]); break;      case 'w': i++; learn_parm->eps=atof(argv[i]); break;      case 'p': i++; learn_parm->transduction_posratio=atof(argv[i]); break;      case 'j': i++; learn_parm->svm_costratio=atof(argv[i]); break;      case 'e': i++; learn_parm->epsilon_crit=atof(argv[i]); break;      case 'o': i++; learn_parm->rho=atof(argv[i]); break;      case 'k': i++; learn_parm->xa_depth=atol(argv[i]); break;      case 'x': i++; learn_parm->compute_loo=atol(argv[i]); break;      case 't': i++; kernel_parm->kernel_type=atol(argv[i]); break;      case 'd': i++; kernel_parm->poly_degree=atol(argv[i]); break;      case 'g': i++; kernel_parm->rbf_gamma=atof(argv[i]); break;
