📄 svm_struct_api_types.h
/***********************************************************************/
/* */
/* svm_struct_api_types.h (modified for PCFG parsing) */
/* */
/* Definition of the API types for implementing SVM learning of */
/* structures (e.g. parsing, multi-label classification, HMM) */
/* */
/* Author: Thorsten Joachims */
/* Date: 12.07.04 */
/* */
/* Copyright (c) 2004 Thorsten Joachims - All rights reserved */
/* */
/* This software is available for non-commercial use only. It must */
/* not be modified and distributed without prior permission of the */
/* author. The author is not responsible for implications from the */
/* use of this software. */
/* */
/***********************************************************************/
#ifndef svm_struct_api_types
#define svm_struct_api_types
#define MAXFEAT 28 /* maximum number of features in a rule */
#include "tree.h"
#include "vindex.h"
#include "grammar.h"
#include "svm_light/svm_common.h"
#include "svm_light/svm_learn.h"
#define INST_NAME "Context-Free Grammar"
#define INST_VERSION "V3.00"
#define INST_VERSION_DATE "23.10.06"
/* default precision for solving the optimization problem */
# define DEFAULT_EPS 0.1
/* default loss rescaling method: 1=slack_rescaling, 2=margin_rescaling */
# define DEFAULT_RESCALING 2
/* default loss function: */
# define DEFAULT_LOSS_FCT 0
/* default optimization algorithm to use: */
# define DEFAULT_ALG_TYPE 4
/* store Psi(x,y) once instead of recomputing it every time: */
# define USE_FYCACHE 1
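/* Note (added; not part of the original header): in the stock SVM-struct
   command-line frontend these defaults back the -e (epsilon), -o (rescaling
   method), -l (loss function) and -w (optimization algorithm) options; the
   PCFG build is assumed to keep that mapping. An illustrative invocation,
   where the binary and file names are assumptions:

     svm_cfg_learn -c 1.0 -e 0.1 -o 2 -w 4 train.dat cfg.model
*/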
typedef struct pattern {
/* this defines the x-part of a training example, e.g. the structure
for storing a natural language sentence in NLP parsing */
struct vindex sentence;
si_t si;
} PATTERN;
typedef struct label {
/* this defines the y-part (the label) of a training example,
e.g. the parse tree of the corresponding sentence. */
tree parse;
si_t si;
double prob;
double loss;
} LABEL;
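/* Usage sketch (added; illustrative only): PATTERN and LABEL form the (x,y)
   pair of one training example. Assuming the standard SVM-struct API in
   svm_struct_api.h and the EXAMPLE/SAMPLE containers from
   svm_struct_common.h are unchanged in this PCFG build, prediction and loss
   evaluation for one sentence look roughly like:

     PATTERN x    = sample.examples[i].x;     (the sentence to parse)
     LABEL   y    = sample.examples[i].y;     (the gold parse tree)
     LABEL   ybar = classify_struct_example(x, &sm, &sparm);
     double  l    = loss(y, ybar, &sparm);

   where `sample`, `sm` (a STRUCTMODEL) and `sparm` (a STRUCT_LEARN_PARM)
   are illustrative variable names. */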
typedef struct struct_learn_parm {
double epsilon; /* precision for which to solve
quadratic program */
double newconstretrain; /* number of new constraints to
accumulate before recomputing the QP
solution */
int ccache_size; /* maximum number of constraints to
cache for each example (used in w=4
algorithm) */
double C; /* trade-off between margin and loss */
char custom_argv[20][300]; /* string set with the -u command line option */
int custom_argc; /* number of -u command line options */
int slack_norm; /* norm to use in objective function
for slack variables; 1 -> L1-norm,
2 -> L2-norm */
int loss_type; /* selected loss function from -r
command line option. Select between
slack rescaling (1) and margin
rescaling (2) */
int loss_function; /* select between different loss
functions via -l command line
option */
/* further parameters that are passed to init_struct_model() */
int maxsentlen;
si_t si;
int parent_annotation;
int feat_borders;
int feat_parent_span_length;
int feat_children_span_length;
int feat_diff_children_length;
} STRUCT_LEARN_PARM;
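/* Usage sketch (added; illustrative only): custom_argv/custom_argc collect
   the application-specific command-line switches, which the SVM-struct API
   convention decodes in parse_struct_parameters() in svm_struct_api.c; the
   PCFG-specific fields above (maxsentlen, parent_annotation, feat_*) are
   presumably filled from such switches. A typical decoding loop looks
   roughly like this, with the option letters being assumptions:

     for(i = 0; i < sparm->custom_argc; i++) {
       switch(sparm->custom_argv[i][2]) {
         case 'p': sparm->parent_annotation = 1; break;
         case 'b': sparm->feat_borders = 1; break;
       }
     }
*/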
typedef struct structmodel {
double *w; /* pointer to the learned weights */
MODEL *svm_model; /* the learned SVM model */
long sizePsi; /* maximum number of weights in w */
/* other information that is needed for the structural model can be
   added here, e.g. the grammar rules for NLP parsing */
grammar grammar;
si_t si;
vihashl weightid_ht;
STRUCT_LEARN_PARM *sparm;
} STRUCTMODEL;
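/* Usage sketch (added; illustrative only): after training, the linear model
   lives in sm->w with sm->sizePsi weights (SVM-light convention: feature
   numbers start at 1), so scoring a candidate parse y for a sentence x is a
   sparse dot product between w and the joint feature vector:

     SVECTOR *fvec  = psi(x, y, &sm, &sparm);
     double   score = sprod_ns(sm.w, fvec);
     free_svector(fvec);

   psi() is declared in svm_struct_api.h; sprod_ns() and free_svector()
   come from svm_light/svm_common.h. */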
typedef struct struct_test_stats {
/* you can add variables for keeping statistics when evaluating the
   test predictions in svm_struct_classify. These can be used in the
   functions eval_prediction and print_struct_testing_stats. */
long parsed_sentences;
long test_bracket_sum;
long parse_bracket_sum;
long common_bracket_sum;
} STRUCT_TEST_STATS;
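/* Evaluation sketch (added; illustrative only): the bracket counters above
   are the usual PARSEVAL ingredients; print_struct_testing_stats() would
   presumably report something along the lines of:

     double precision = (double)common_bracket_sum / parse_bracket_sum;
     double recall    = (double)common_bracket_sum / test_bracket_sum;
     double f1        = 2.0 * precision * recall / (precision + recall);
*/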
#endif