
📄 svm_learn_main.c

📁 Machine learning method: Support Vector Machine (SVM) source program
💻 C
📖 Page 1 of 2
/***********************************************************************/
/*                                                                     */
/*   svm_learn_main.c                                                  */
/*                                                                     */
/*   Command line interface to the learning module of the              */
/*   Support Vector Machine.                                           */
/*                                                                     */
/*   Author: Thorsten Joachims                                         */
/*   Date: 22.07.00                                                    */
/*                                                                     */
/*   Copyright (c) 2000  Universitaet Dortmund - All rights reserved   */
/*                                                                     */
/*   This software is available for non-commercial use only. It must   */
/*   not be modified and distributed without prior permission of the   */
/*   author. The author is not responsible for implications from the   */
/*   use of this software.                                             */
/*                                                                     */
/***********************************************************************/

/* uncomment, if you want to use svm-learn out of C++ */
/* extern "C" { */
# include "svm_common.h"
# include "svm_learn.h"
/* } */

char docfile[200];           /* file with training examples */
char modelfile[200];         /* file for resulting classifier */

void   read_input_parameters(int, char **, char *, char *, long *, long *,
			     LEARN_PARM *, KERNEL_PARM *);
void   wait_any_key();
void   print_help();
int    omain (int, char **);

int main(void)
{
	int  i;
	int  argc = 13;
	char** argv;

	/* Build a synthetic argument vector holding the hard-coded options below. */
	argv = (char **)malloc(sizeof(char *)*argc);
	for (i=0;i<argc;i++)
	{
		argv[i] = (char *)malloc(sizeof(char)*128);
	}

	strcpy(argv[0],"@");
	strcpy(argv[1],"-c");
	strcpy(argv[2],"10");
	strcpy(argv[3],"-p");
	strcpy(argv[4],"1");
	strcpy(argv[5],"-t");
	strcpy(argv[6],"2");
	strcpy(argv[7],"-g");
	strcpy(argv[8],"0.01");
	strcpy(argv[9],"-m");
	strcpy(argv[10],"60");
	
	strcpy(argv[11], "f:\\8_FDNRV\\datas\\facesvm2\\smpl\\facesmpl2_1.dat");
	strcpy(argv[12], "f:\\8_FDNRV\\datas\\facesvm2\\mdl\\fmdl0r502_1.dat");
	
//	strcpy(argv[11], "f:\\8_FDNRV\\datas\\headsvm2\\smpl\\headsmplo2_1.dat");
//	strcpy(argv[12], "f:\\8_FDNRV\\datas\\headsvm2\\mdl\\hmdlr52_1.dat");
	
	//strcpy(argv[11], "f:\\8_FDNRV\\datas\\facesvm1i\\smpl\\faceicrsmpl1_12345.dat");
	//strcpy(argv[12], "f:\\8_FDNRV\\datas\\facesvm1i\\mdl\\facesvmmdl_12345.dat");	

	omain(argc, argv);

	for (i=0;i<argc;i++)
	{
		free(argv[i]);
	}
	free(argv);

	return 0;
}
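
/* For reference, and assuming the learner is built as SVM-light's usual
   svm_learn binary (an assumption; the binary name does not appear in this
   file), the hard-coded argv above corresponds roughly to the command line

       svm_learn -c 10 -p 1 -t 2 -g 0.01 -m 60 <training file> <model file>

   with the two f:\8_FDNRV\...\*.dat paths above as training and model file.
   Per the option parsing in read_input_parameters() below, -c sets the
   soft-margin parameter C, -p the transduction positive ratio, and -m the
   kernel cache size (in MB); in SVM-light, -t 2 selects the RBF kernel and
   -g its gamma (those two options are parsed in the part of this file that
   continues on page 2). */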
	

int omain (int argc, char** argv)
{  
  DOC *docs;  /* training examples */
  long max_docs,max_words_doc;
  long totwords,totdoc,ll,i;
  long kernel_cache_size;
  double *target;
  KERNEL_CACHE kernel_cache;
  LEARN_PARM learn_parm;
  KERNEL_PARM kernel_parm;
  MODEL model;

  read_input_parameters(argc,argv,docfile,modelfile,&verbosity,
			&kernel_cache_size,&learn_parm,&kernel_parm);

  if(verbosity>=1) {
    printf("Scanning examples..."); fflush(stdout);
  }
  nol_ll(docfile,&max_docs,&max_words_doc,&ll); /* scan size of input file */
  max_words_doc+=2;
  ll+=2;
  max_docs+=2;
  if(verbosity>=1) {
    printf("done\n"); fflush(stdout);
  }

  docs = (DOC *)my_malloc(sizeof(DOC)*max_docs);         /* feature vectors */
  target = (double *)my_malloc(sizeof(double)*max_docs); /* target values */

  read_documents(docfile,docs,target,max_words_doc,ll,&totwords,&totdoc);

  if(kernel_parm.kernel_type == LINEAR) { /* don't need the cache */
    if(learn_parm.type == CLASSIFICATION) {
      svm_learn_classification(docs,target,totdoc,totwords,&learn_parm,
			       &kernel_parm,NULL,&model);
    }
    else if(learn_parm.type == REGRESSION) {
      svm_learn_regression(docs,target,totdoc,totwords,&learn_parm,
			   &kernel_parm,NULL,&model);
    }
  }
  else {
    if(learn_parm.type == CLASSIFICATION) {
      kernel_cache_init(&kernel_cache,totdoc,kernel_cache_size);
      svm_learn_classification(docs,target,totdoc,totwords,&learn_parm,
			       &kernel_parm,&kernel_cache,&model);
      kernel_cache_cleanup(&kernel_cache);
    }
    else if(learn_parm.type == REGRESSION) {
      kernel_cache_init(&kernel_cache,2*totdoc,kernel_cache_size);
      svm_learn_regression(docs,target,totdoc,totwords,&learn_parm,
			   &kernel_parm,&kernel_cache,&model);
      kernel_cache_cleanup(&kernel_cache);
    }
  }

  /* Warning: The model contains references to the original data 'docs'.
     If you want to free the original data, and only keep the model, you
     have to make a deep copy of 'model'. */
  write_model(modelfile,&model);

  free(model.supvec);
  free(model.alpha);
  free(model.index);
  for(i=0;i<totdoc;i++)
    free(docs[i].words);
  free(docs);
  free(target);

  return(0);
}

/*---------------------------------------------------------------------------*/

void read_input_parameters(int argc,char *argv[],char *docfile,char *modelfile,
			   long *verbosity,long *kernel_cache_size,
			   LEARN_PARM *learn_parm,KERNEL_PARM *kernel_parm)
{
  long i;
  char type[100];

  /* set default */
  strcpy (modelfile, "svm_model");
  strcpy (learn_parm->predfile, "trans_predictions");
  strcpy (learn_parm->alphafile, "");
  (*verbosity)=1;
  learn_parm->biased_hyperplane=1;
  learn_parm->remove_inconsistent=0;
  learn_parm->skip_final_opt_check=0;
  learn_parm->svm_maxqpsize=10;
  learn_parm->svm_newvarsinqp=0;
  learn_parm->svm_iter_to_shrink=-9999;
  (*kernel_cache_size)=40;
  learn_parm->svm_c=0.0;
  learn_parm->eps=0.1;
  learn_parm->transduction_posratio=-1.0;
  learn_parm->svm_costratio=1.0;
  learn_parm->svm_costratio_unlab=1.0;
  learn_parm->svm_unlabbound=1E-5;
  learn_parm->epsilon_crit=0.001;
  learn_parm->epsilon_a=1E-15;
  learn_parm->compute_loo=0;
  learn_parm->rho=1.0;
  learn_parm->xa_depth=0;
  kernel_parm->kernel_type=0;
  kernel_parm->poly_degree=3;
  kernel_parm->rbf_gamma=1.0;
  kernel_parm->coef_lin=1;
  kernel_parm->coef_const=1;
  strcpy(kernel_parm->custom,"empty");
  strcpy(type,"c");

  for(i=1;(i<argc) && ((argv[i])[0] == '-');i++) {
    switch ((argv[i])[1])
      {
      case '?': print_help(); exit(0);
      case 'z': i++; strcpy(type,argv[i]); break;
      case 'v': i++; (*verbosity)=atol(argv[i]); break;
      case 'b': i++; learn_parm->biased_hyperplane=atol(argv[i]); break;
      case 'i': i++; learn_parm->remove_inconsistent=atol(argv[i]); break;
      case 'f': i++; learn_parm->skip_final_opt_check=!atol(argv[i]); break;
      case 'q': i++; learn_parm->svm_maxqpsize=atol(argv[i]); break;
      case 'n': i++; learn_parm->svm_newvarsinqp=atol(argv[i]); break;
      case 'h': i++; learn_parm->svm_iter_to_shrink=atol(argv[i]); break;
      case 'm': i++; (*kernel_cache_size)=atol(argv[i]); break;
      case 'c': i++; learn_parm->svm_c=atof(argv[i]); break;
      case 'w': i++; learn_parm->eps=atof(argv[i]); break;
      case 'p': i++; learn_parm->transduction_posratio=atof(argv[i]); break;
      case 'j': i++; learn_parm->svm_costratio=atof(argv[i]); break;
      case 'e': i++; learn_parm->epsilon_crit=atof(argv[i]); break;
      case 'o': i++; learn_parm->rho=atof(argv[i]); break;
      case 'k': i++; learn_parm->xa_depth=atol(argv[i]); break;
      case 'x': i++; learn_parm->compute_loo=atol(argv[i]); break;
