📄 svm_struct_api.c
  }
  return(fvec);
}
double loss(LABEL y, LABEL ybar, STRUCT_LEARN_PARM *sparm)
{
  /* loss for correct label y and predicted label ybar. The loss for
     y==ybar has to be zero. sparm->loss_function is set with the -l option. */
  if(sparm->loss_function == 0) { /* type 0 loss: 0/1 loss */
    if(y.class == ybar.class)     /* return 0 if y==ybar, return 1 otherwise */
      return(0);
    else
      return(1);
  }
  else {
    /* Put your code for different loss functions here (a hedged sketch
       follows this function). But then
       find_most_violated_constraint_???(x, y, sm) has to return the
       highest scoring label with the largest loss. */
    printf("Unknown loss function\n");
    exit(1);
  }
}
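
/* A hedged sketch, not part of the original file: one way to fill in
   the else-branch above with a cost-sensitive 0/1 loss. LABEL.class and
   the signature come from this file; the function name and the cost
   values are illustrative assumptions. For this to be meaningful,
   find_most_violated_constraint_???(x, y, sm) would also have to
   search with this loss. */
static double example_cost_sensitive_loss(LABEL y, LABEL ybar,
                                          STRUCT_LEARN_PARM *sparm)
{
  (void)sparm;        /* unused in this sketch */
  if(y.class == ybar.class)
    return(0);        /* the loss for y==ybar has to be zero */
  if(y.class == 1)
    return(2.0);      /* illustrative: misses on class 1 cost double */
  return(1.0);        /* all other confusions cost 1 */
}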
void print_struct_learning_stats(SAMPLE sample, STRUCTMODEL *sm,
                                 CONSTSET cset, double *alpha,
                                 STRUCT_LEARN_PARM *sparm)
{
  /* This function is called after training and allows final touches to
     the model sm. But primarily it allows computing and printing any
     kind of statistic (e.g. training error) you might want; a sketch
     follows this function. */
}
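
/* A hedged sketch, not part of the original file: a possible body for
   print_struct_learning_stats() that reports the average training
   loss. classify_struct_example(), loss(), and free_label() are part
   of this API; the body itself is an illustrative assumption.
   {
     long i;
     double err=0;
     for(i=0;i<sample.n;i++) {
       LABEL ypred=classify_struct_example(sample.examples[i].x,sm,sparm);
       err+=loss(sample.examples[i].y,ypred,sparm);
       free_label(ypred);
     }
     printf("Average training loss: %lf\n",err/(double)sample.n);
   }
*/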
void write_struct_model(char *file, STRUCTMODEL *sm,
                        STRUCT_LEARN_PARM *sparm)
{
  /* Writes structural model sm to file file. */
  FILE *modelfl;
  long j,i,sv_num;
  MODEL *model=sm->svm_model;
  SVECTOR *v;

  if ((modelfl = fopen (file, "w")) == NULL)
  { perror (file); exit (1); }
  fprintf(modelfl,"SVM-multiclass Version %s\n",INST_VERSION);
  fprintf(modelfl,"%d # number of classes\n",
          sparm->num_classes);
  fprintf(modelfl,"%d # number of base features\n",
          sparm->num_features);
  fprintf(modelfl,"%d # loss function\n",
          sparm->loss_function);
  fprintf(modelfl,"%ld # kernel type\n",
          model->kernel_parm.kernel_type);
  fprintf(modelfl,"%ld # kernel parameter -d \n",
          model->kernel_parm.poly_degree);
  fprintf(modelfl,"%.8g # kernel parameter -g \n",
          model->kernel_parm.rbf_gamma);
  fprintf(modelfl,"%.8g # kernel parameter -s \n",
          model->kernel_parm.coef_lin);
  fprintf(modelfl,"%.8g # kernel parameter -r \n",
          model->kernel_parm.coef_const);
  fprintf(modelfl,"%s# kernel parameter -u \n",model->kernel_parm.custom);
  fprintf(modelfl,"%ld # highest feature index \n",model->totwords);
  fprintf(modelfl,"%ld # number of training documents \n",model->totdoc);

  sv_num=1;
  for(i=1;i<model->sv_num;i++) {
    for(v=model->supvec[i]->fvec;v;v=v->next)
      sv_num++;
  }
  fprintf(modelfl,"%ld # number of support vectors plus 1 \n",sv_num);
  fprintf(modelfl,"%.8g # threshold b, each following line is a SV (starting with alpha*y)\n",model->b);

  for(i=1;i<model->sv_num;i++) {
    for(v=model->supvec[i]->fvec;v;v=v->next) {
      fprintf(modelfl,"%.32g ",model->alpha[i]*v->factor);
      for (j=0; (v->words[j]).wnum; j++) {
        fprintf(modelfl,"%ld:%.8g ",
                (long)(v->words[j]).wnum,
                (double)(v->words[j]).weight);
      }
      fprintf(modelfl,"#%s\n",v->userdefined);
      /* NOTE: this could be made more efficient by summing the
         alpha's of identical vectors before writing them to the
         file. */
    }
  }
  fclose(modelfl);
}
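
/* For reference, a hedged illustration (not part of the original file)
   of the header that write_struct_model() above produces; every value
   below is made up, and read_struct_model() parses the same layout:
     SVM-multiclass Version V2.20
     3 # number of classes
     100 # number of base features
     0 # loss function
     0 # kernel type
     3 # kernel parameter -d
     1 # kernel parameter -g
     1 # kernel parameter -s
     1 # kernel parameter -r
     empty# kernel parameter -u
     300 # highest feature index
     50 # number of training documents
     12 # number of support vectors plus 1
     0 # threshold b, each following line is a SV (starting with alpha*y)
*/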
void print_struct_testing_stats(SAMPLE sample, STRUCTMODEL *sm,
                                STRUCT_LEARN_PARM *sparm,
                                STRUCT_TEST_STATS *teststats)
{
  /* This function is called after making all test predictions in
     svm_struct_classify and allows computing and printing any kind of
     evaluation (e.g. precision/recall) you might want. You can use
     the function eval_prediction to accumulate the necessary
     statistics for each prediction. */
}
void eval_prediction(long exnum, EXAMPLE ex, LABEL ypred,
                     STRUCTMODEL *sm, STRUCT_LEARN_PARM *sparm,
                     STRUCT_TEST_STATS *teststats)
{
  /* This function allows you to accumulate statistics on how well the
     prediction matches the labeled example. It is called from
     svm_struct_classify. See also the function
     print_struct_testing_stats, and the sketch after this function. */
  if(exnum == 0) { /* this is the first time the function is
                      called. So initialize the teststats */
  }
}
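
/* A hedged sketch, not part of the original file: counting test errors
   in eval_prediction(). It assumes STRUCT_TEST_STATS has been given a
   hypothetical field `long errors;` in svm_struct_api_types.h. On the
   first call (exnum == 0) the counter is initialized, then every
   mismatch between the true and the predicted class is counted.
   {
     if(exnum == 0)
       teststats->errors=0;
     if(ex.y.class != ypred.class)
       teststats->errors++;
   }
*/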
STRUCTMODEL read_struct_model(char *file, STRUCT_LEARN_PARM *sparm)
{
  /* Reads structural model sm from file file. This function is used
     only in the prediction module, not in the learning module. */
  FILE *modelfl;
  STRUCTMODEL sm;
  long i,queryid,slackid;
  double costfactor;
  long max_sv,max_words,ll,wpos;
  char *line,*comment;
  WORD *words;
  char version_buffer[100];
  MODEL *model;

  nol_ll(file,&max_sv,&max_words,&ll); /* scan size of model file */
  max_words+=2;
  ll+=2;

  words = (WORD *)my_malloc(sizeof(WORD)*(max_words+10));
  line = (char *)my_malloc(sizeof(char)*ll);
  model = (MODEL *)my_malloc(sizeof(MODEL));

  if ((modelfl = fopen (file, "r")) == NULL)
  { perror (file); exit (1); }

  fscanf(modelfl,"SVM-multiclass Version %s\n",version_buffer);
  if(strcmp(version_buffer,INST_VERSION)) {
    perror ("Version of model-file does not match version of svm_struct_classify!");
    exit (1);
  }
  fscanf(modelfl,"%d%*[^\n]\n", &sparm->num_classes);
  fscanf(modelfl,"%d%*[^\n]\n", &sparm->num_features);
  fscanf(modelfl,"%d%*[^\n]\n", &sparm->loss_function);
  fscanf(modelfl,"%ld%*[^\n]\n", &model->kernel_parm.kernel_type);
  fscanf(modelfl,"%ld%*[^\n]\n", &model->kernel_parm.poly_degree);
  fscanf(modelfl,"%lf%*[^\n]\n", &model->kernel_parm.rbf_gamma);
  fscanf(modelfl,"%lf%*[^\n]\n", &model->kernel_parm.coef_lin);
  fscanf(modelfl,"%lf%*[^\n]\n", &model->kernel_parm.coef_const);
  fscanf(modelfl,"%[^#]%*[^\n]\n", model->kernel_parm.custom);
  fscanf(modelfl,"%ld%*[^\n]\n", &model->totwords);
  fscanf(modelfl,"%ld%*[^\n]\n", &model->totdoc);
  fscanf(modelfl,"%ld%*[^\n]\n", &model->sv_num);
  fscanf(modelfl,"%lf%*[^\n]\n", &model->b);

  model->supvec = (DOC **)my_malloc(sizeof(DOC *)*model->sv_num);
  model->alpha = (double *)my_malloc(sizeof(double)*model->sv_num);
  model->index=NULL;
  model->lin_weights=NULL;

  for(i=1;i<model->sv_num;i++) {
    fgets(line,(int)ll,modelfl);
    if(!parse_document(line,words,&(model->alpha[i]),&queryid,&slackid,
                       &costfactor,&wpos,max_words,&comment)) {
      printf("\nParsing error while reading model file in SV %ld!\n%s",
             i,line);
      exit(1);
    }
    model->supvec[i] = create_example(-1,0,0,0.0,
                                      create_svector(words,comment,1.0));
  }
  fclose(modelfl);
  free(line);
  free(words);
  if(verbosity>=1) {
    fprintf(stdout, " (%d support vectors read) ",(int)(model->sv_num-1));
  }
  sm.svm_model=model;
  sm.sizePsi=model->totwords;
  sm.w=NULL;
  return(sm);
}
void write_label(FILE *fp, LABEL y)
{
  /* Writes label y to file handle fp. */
  fprintf(fp,"%d\n",y.class);
}

void free_pattern(PATTERN x) {
  /* Frees the memory of x. */
  free_example(x.doc,1);
}

void free_label(LABEL y) {
  /* Frees the memory of y. */
}

void free_struct_model(STRUCTMODEL sm)
{
  /* Frees the memory of model. */
  /* if(sm.w) free(sm.w); */ /* this is free'd in free_model */
  if(sm.svm_model) free_model(sm.svm_model,1);
  /* add free calls for user defined data here */
}

void free_struct_sample(SAMPLE s)
{
  /* Frees the memory of sample s. */
  int i;
  for(i=0;i<s.n;i++) {
    free_pattern(s.examples[i].x);
    free_label(s.examples[i].y);
  }
  free(s.examples);
}
void print_struct_help()
{
  /* Prints a help text that is appended to the common help text of
     svm_struct_learn. */
  printf(" none\n\n");
  printf("Based on multi-class SVM described in:\n");
  printf(" K. Crammer and Y. Singer. On the Algorithmic Implementation of\n");
  printf(" Multi-class SVMs, JMLR, 2001.\n");
}
void parse_struct_parameters(STRUCT_LEARN_PARM *sparm)
{
  /* Parses the command line parameters that start with --
     (an example of enabling one of the options follows this
     function). */
  int i;

  for(i=0;(i<sparm->custom_argc) && ((sparm->custom_argv[i])[0] == '-');i++) {
    switch ((sparm->custom_argv[i])[2])
      {
      case 'a': i++; /* strcpy(learn_parm->alphafile,argv[i]); */ break;
      case 'e': i++; /* sparm->epsilon=atof(sparm->custom_argv[i]); */ break;
      case 'k': i++; /* sparm->newconstretrain=atol(sparm->custom_argv[i]); */ break;
      }
  }
}
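
/* A hedged sketch, not part of the original file: enabling the "--e"
   case above, so that e.g. passing "--e 0.1" on the command line sets
   a user-defined epsilon parameter. sparm->epsilon is the field the
   commented-out line above already references; the value 0.1 is
   illustrative.
     case 'e': i++; sparm->epsilon=atof(sparm->custom_argv[i]); break;
*/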