/* svm_struct_main.c */
  }
  strcpy (trainfile, argv[i]);
  if((i+1)<argc) {
    strcpy (modelfile, argv[i+1]);
  }
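  /* -9999 appears to be a sentinel meaning "-h was not given on the command
     line" (the initialization is outside this excerpt); if so, fall back to
     the documented default of 100 shrinking iterations. */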
  if(learn_parm->svm_iter_to_shrink == -9999) {
    learn_parm->svm_iter_to_shrink=100;
  }
  if((learn_parm->skip_final_opt_check)
     && (kernel_parm->kernel_type == LINEAR)) {
    printf("\nIt does not make sense to skip the final optimality check for linear kernels.\n\n");
    learn_parm->skip_final_opt_check=0;
  }
  if((learn_parm->skip_final_opt_check)
     && (learn_parm->remove_inconsistent)) {
    printf("\nIt is necessary to do the final optimality check when removing inconsistent \nexamples.\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if((learn_parm->svm_maxqpsize<2)) {
    printf("\nMaximum size of QP-subproblems not in valid range: %ld [2..]\n",learn_parm->svm_maxqpsize);
    wait_any_key();
    print_help();
    exit(0);
  }
  if((learn_parm->svm_maxqpsize<learn_parm->svm_newvarsinqp)) {
    printf("\nMaximum size of QP-subproblems [%ld] must be larger than the number of\n",learn_parm->svm_maxqpsize);
    printf("new variables [%ld] entering the working set in each iteration.\n",learn_parm->svm_newvarsinqp);
    wait_any_key();
    print_help();
    exit(0);
  }
  if(learn_parm->svm_iter_to_shrink<1) {
    printf("\nMaximum number of iterations for shrinking not in valid range: %ld [1,..]\n",learn_parm->svm_iter_to_shrink);
    wait_any_key();
    print_help();
    exit(0);
  }
  if(struct_parm->C<0) {
    printf("\nYou have to specify a value for the parameter '-c' (C>0)!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
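  /* Note: the message demands C>0 while the test is C<0. This makes sense if
     struct_parm->C is initialized to a negative sentinel earlier in the file
     (not shown in this excerpt), so C<0 means -c was never supplied. */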
  if(((*alg_type) < 1) || ((*alg_type) > 4)) {
    printf("\nAlgorithm type must be either '1', '2', '3', or '4'!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if(learn_parm->transduction_posratio>1) {
    printf("\nThe fraction of unlabeled examples to classify as positives must\n");
    printf("be less than 1.0 !!!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if(learn_parm->svm_costratio<=0) {
    printf("\nThe COSTRATIO parameter must be greater than zero!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if(struct_parm->epsilon<=0) {
    printf("\nThe epsilon parameter must be greater than zero!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if((struct_parm->slack_norm<1) || (struct_parm->slack_norm>2)) {
    printf("\nThe norm of the slacks must be either 1 (L1-norm) or 2 (L2-norm)!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if((struct_parm->loss_type != SLACK_RESCALING)
     && (struct_parm->loss_type != MARGIN_RESCALING)) {
    printf("\nThe loss type must be either 1 (slack rescaling) or 2 (margin rescaling)!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if(learn_parm->rho<0) {
    printf("\nThe parameter rho for xi/alpha-estimates and leave-one-out pruning must\n");
    printf("be greater than zero (typically 1.0 or 2.0, see T. Joachims, Estimating the\n");
    printf("Generalization Performance of an SVM Efficiently, ICML, 2000.)!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if((learn_parm->xa_depth<0) || (learn_parm->xa_depth>100)) {
    printf("\nThe parameter depth for ext. xi/alpha-estimates must be in [0..100] (zero\n");
    printf("for switching to the conventional xa/estimates described in T. Joachims,\n");
    printf("Estimating the Generalization Performance of an SVM Efficiently, ICML, 2000.)\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  parse_struct_parameters(struct_parm);
}
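/* At this point every accepted option has been range-checked: each violation
   above either corrects the offending value in place or prints a diagnostic,
   shows the help screen, and exits, so the caller only ever receives a
   mutually consistent parameter set. */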
void wait_any_key()
{
  printf("\n(more)\n");
  (void)getc(stdin);
}
void print_help()
{
  printf("\nSVM-struct learning module: %s, %s, %s\n",INST_NAME,INST_VERSION,INST_VERSION_DATE);
  printf(" includes SVM-struct %s for learning complex outputs, %s\n",STRUCT_VERSION,STRUCT_VERSION_DATE);
  printf(" includes SVM-light %s quadratic optimizer, %s\n",VERSION,VERSION_DATE);
  copyright_notice();
  printf(" usage: svm_struct_learn [options] example_file model_file\n\n");
  printf("Arguments:\n");
  printf(" example_file-> file with training data\n");
  printf(" model_file -> file to store learned decision rule in\n");
  printf("General options:\n");
  printf(" -? -> this help\n");
  printf(" -v [0..3] -> verbosity level (default 1)\n");
  printf(" -y [0..3] -> verbosity level for svm_light (default 0)\n");
  printf("Learning options:\n");
  printf(" -c float -> C: trade-off between training error\n");
  printf(" and margin (default 0.01)\n");
  printf(" -p [1,2] -> L-norm to use for slack variables. Use 1 for L1-norm,\n");
  printf(" use 2 for squared slacks. (default 1)\n");
  printf(" -o [1,2] -> Rescaling method to use for loss.\n");
  printf(" 1: slack rescaling\n");
  printf(" 2: margin rescaling\n");
  printf(" (default %d)\n",DEFAULT_RESCALING);
  printf(" -l [0..] -> Loss function to use.\n");
  printf(" 0: zero/one loss\n");
  printf(" (default %d)\n",DEFAULT_LOSS_FCT);
  printf("Kernel options:\n");
  printf(" -t int -> type of kernel function:\n");
  printf(" 0: linear (default)\n");
  printf(" 1: polynomial (s a*b+c)^d\n");
  printf(" 2: radial basis function exp(-gamma ||a-b||^2)\n");
  printf(" 3: sigmoid tanh(s a*b + c)\n");
  printf(" 4: user defined kernel from kernel.h\n");
  printf(" -d int -> parameter d in polynomial kernel\n");
  printf(" -g float -> parameter gamma in rbf kernel\n");
  printf(" -s float -> parameter s in sigmoid/poly kernel\n");
  printf(" -r float -> parameter c in sigmoid/poly kernel\n");
  printf(" -u string -> parameter of user defined kernel\n");
  printf("Optimization options (see [2][3]):\n");
  printf(" -w [1,2,3,4]-> choice of structural learning algorithm (default %d):\n",(int)DEFAULT_ALG_TYPE);
  printf(" 1: algorithm described in [2]\n");
  printf(" 2: joint constraint algorithm (primal) [to be published]\n");
  printf(" 3: joint constraint algorithm (dual) [to be published]\n");
  printf(" 4: joint constraint algorithm (dual) with constr. cache\n");
  printf(" -q [2..] -> maximum size of QP-subproblems (default 10)\n");
  printf(" -n [2..q] -> number of new variables entering the working set\n");
  printf(" in each iteration (default n = q). Set n<q to prevent\n");
  printf(" zig-zagging.\n");
  printf(" -m [5..] -> size of cache for kernel evaluations in MB (default 40)\n");
  printf(" (used only for -w 1 with kernels)\n");
  printf(" -f [5..] -> number of constraints to cache for each example\n");
  printf(" (default 5) (used with -w 4)\n");
  printf(" -e float -> eps: Allow that error for termination criterion\n");
  printf(" (default %f)\n",DEFAULT_EPS);
  printf(" -h [5..] -> number of iterations a variable needs to be\n");
  printf(" optimal before considered for shrinking (default 100)\n");
  printf(" -k [1..] -> number of new constraints to accumulate before\n");
  printf(" recomputing the QP solution (default 100) (-w 1 only)\n");
  printf(" -# int -> terminate QP subproblem optimization, if no progress\n");
  printf(" after this number of iterations. (default 100000)\n");
  printf("Output options:\n");
  printf(" -a string -> write all alphas to this file after learning\n");
  printf(" (in the same order as in the training set)\n");
  printf("Structure learning options:\n");
  print_struct_help();
  wait_any_key();
  printf("\nMore details in:\n");
printf("[1] T. Joachims, Learning to Align Sequences: A Maximum Margin Aproach.\n");
printf(" Technical Report, September, 2003.\n");
printf("[2] I. Tsochantaridis, T. Joachims, T. Hofmann, and Y. Altun, Large Margin\n");
printf(" Methods for Structured and Interdependent Output Variables, Journal\n");
printf(" of Machine Learning Research (JMLR), Vol. 6(Sep):1453-1484, 2005.\n");
printf("[3] T. Joachims, Making Large-Scale SVM Learning Practical. Advances in\n");
printf(" Kernel Methods - Support Vector Learning, B. Sch鰈kopf and C. Burges and\n");
printf(" A. Smola (ed.), MIT Press, 1999.\n");
printf("[4] T. Joachims, Learning to Classify Text Using Support Vector\n");
printf(" Machines: Methods, Theory, and Algorithms. Dissertation, Kluwer,\n");
printf(" 2002.\n\n");
}
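/*
 * Illustrative invocation, assembled from the options documented in
 * print_help() above (train.dat and model.dat are placeholder file names,
 * not part of the original source):
 *
 *   svm_struct_learn -c 0.01 -w 3 -e 0.1 train.dat model.dat
 *
 * This trains with trade-off C=0.01, the dual joint-constraint algorithm
 * (-w 3), and termination tolerance 0.1, reading training examples from
 * train.dat and writing the learned decision rule to model.dat.
 */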