
📄 property.cc

📁 Amis - A maximum entropy estimator (a statistical tool for maximum entropy models)
  StringHash<FeatureWeightType> feature_weight_hash;
  feature_weight_hash["alpha"]  = ALPHA;
  feature_weight_hash["lambda"] = LAMBDA;
  opt.setOpt( new OptNamedInt<FeatureWeightType>( "FEATURE_WEIGHT", "--feature-weight", "-w", &feature_weight_type, feature_weight_hash, LAMBDA, "The scale of weights of features" ) );

  // NUM_NEWTON_ITERATIONS
  opt.setOpt( new Opt<int>( "NUM_NEWTON_ITERATIONS", "--num-newton-iterations", "-n", &num_newton_iterations, 200, "The number of iterations of Newton's method" ) );

  // MEMORY_SIZE
  opt.setOpt( new Opt<int>( "MEMORY_SIZE", "--memory-size", "-s", &memory_size, 5, "The memory size of BFGS/LMVM algorithms" ) );

  // EVENT_ON_MEMORY
  opt.setOpt( new OptToggle( "EVENT_ON_MEMORY", "--event-on-memory", "-eom", &event_on_file, true, "Use event on memory during the estimation" ) );

  event_on_file_stream = NULL;

  // TILL_CONVERGENCE
  opt.setOpt( new OptToggle( "TILL_CONVERGENCE", "--till-convergence", "-tc", &till_convergence, false, "Run estimation till convergence (when using LMVM, LMVMMAP, and LMVMBC, TILL_CONVERGENCE is always true)" ) );

  // VALSUM_THRESHOLD
  opt.setOpt( new Opt<double>( "VALSUM_THRESHOLD", "--valsum-threshold", "-vthr", &valsum_threshold, 0.0, "valsum_threshold ..." ) );

  // COUNT_THRESHOLD
  opt.setOpt( new Opt<double>( "COUNT_THRESHOLD", "--count-threshold", "-cthr", &count_threshold, 1.0, "count_threshold ..." ) );

  // MAKE_INIT_MODEL
  opt.setOpt( new OptToggle( "MAKE_INIT_MODEL", "--make-init-model", "-mim", &make_init_model_mode, false, "Make init model" ) );

  // LOADING_ONLY
  opt.setOpt( new OptToggle( "LOADING_ONLY", "--loading-only", "-lo", &loading_only, false, "loading_only ..." ) );

  // KERNEL_FUNCTION_TYPE
  StringHash<KernelFunctionType> kernel_type_hash;
  kernel_type_hash[ "InnerProduct" ] = INNER;
  kernel_type_hash[ "Polynomial"   ] = POLY;
  kernel_type_hash[ "Gaussian"     ] = GAUSSIAN;
  opt.setOpt( new OptNamedInt<KernelFunctionType>( "KERNEL_FUNCTION_TYPE", "--kernel-function-type", "-kft", &kernel_function_type, kernel_type_hash, INNER, "The type of the kernel function" ) );

  // KERNEL_FUNCTION_ARGS
  opt.setOpt( new Opt<std::string>( "KERNEL_FUNCTION_ARGS", "--kernel-function-args", "-kfa", &kernel_function_args, "", "The parameters of the kernel function:\n Inner: ...\nPolynomial: ...\nGaussian: ..." ) );

  // KERNEL_REFERENCE_EVENT_FILE
  opt.setOpt( new Opt< std::vector<std::string> >( "KERNEL_REFERENCE_EVENT_FILE", "--kernel-reference-event-file", "-kref", &kernel_reference_event_file_list, std::vector<std::string>(), "..." ) );

  // ORIGINAL_MODEL_FILE
  opt.setOpt( new Opt< std::vector<std::string> >( "ORIGINAL_MODEL_FILE", "--original-model-file", "-omf", &original_model_file_list, std::vector<std::string>( 1, "amis.org.model" ), "..." ) );

  // ANALYZE_LOOP
  opt.setOpt( new OptToggle( "ANALYZE_LOOP", "--analyze-loop", "-al", &analyze_loop, false, "Enter into the analyzing loop at the end of the program" ) );

  // ANALYZE_INPUT
  opt.setOpt( new Opt<std::string>( "ANALYZE_INPUT", "--analyze-input", "-ai", &analyze_input, "stdin", "The input for the analyzing loop (\"stdin\" can be used)" ) );

  // ANALYZE_OUTPUT
  opt.setOpt( new Opt<std::string>( "ANALYZE_OUTPUT", "--analyze-output", "-ao", &analyze_output, "stdout", "The output for the analyzing loop (\"stdout\" can be used)" ) );

  // ANALYZE_OUTPUT_BEST
  opt.setOpt( new OptToggle( "ANALYZE_OUTPUT_BEST", "--analyze-output-best", "-aob", &analyze_output_best, false, "..." ) );

  // UBIQUITOUS_RATIO
  opt.setOpt( new Opt<double>( "UBIQUITOUS_RATIO", "--ubiquitous-ratio", "-ub", &ubiquitous_ratio, 1.0, "Ubiquitous ratio" ) );

  // ROOT_DIR
  opt.setOpt( new Opt<std::string>( "CANT_SPECIFY_IN_PROPERTY_FILE_ROOT_DIR", "--root-dir", "-rd", &root_dir, ".", "Specify the root directory of all the files. All filenames are relative to this directory" ) );

  // POST_NORMALIZE1
  opt.setOpt( new OptToggle( "POST_NORMALIZE1", "--post-normalize1", "-pn1", &post_normalize1, false, "Normalize kerneled event using L1 norm" ) );

  // POST_NORMALIZE2
  opt.setOpt( new OptToggle( "POST_NORMALIZE2", "--post-normalize2", "-pn2", &post_normalize2, false, "Normalize kerneled event using L2 norm" ) );

  // KERNEL_CACHE_NONZERO
  // (default value assumed to be false, following the pattern of the other OptToggle options)
  opt.setOpt( new OptToggle( "KERNEL_CACHE_NONZERO", "--kernel-cache-nonzero", "-kcn", &kernel_cache_nonzero, false, "Cache kerneled events" ) );

  // MAP_SIGMA
  opt.setOpt( new Opt<double>( "MAP_SIGMA", "--map-sigma", "-ms", &map_sigma, 1.0, "Sigma for Gaussian MAP estimation" ) );

  // MAP_VARIANCE_TYPE
  StringHash<MAPGaussianPrior::VarianceType> map_variance_hash;
  map_variance_hash[ "single"  ] = MAPGaussianPrior::SINGLE;
  map_variance_hash[ "inv_emp" ] = MAPGaussianPrior::INV_EMP;
  map_variance_hash[ "bayes_bin" ] = MAPGaussianPrior::BAYES_BIN;
  map_variance_hash[ "bayes_bin_raw_sigma" ] = MAPGaussianPrior::BAYES_BIN_RAW_SIGMA;
  map_variance_hash[ "bayes_bin_hist" ] = MAPGaussianPrior::BAYES_BIN_HIST;
  map_variance_hash[ "bayes_bin_hist_raw_sigma" ] = MAPGaussianPrior::BAYES_BIN_HIST_RAW_SIGMA;
  opt.setOpt( new OptNamedInt<MAPGaussianPrior::VarianceType>( "MAP_VARIANCE_TYPE", "--map-variance-type", "-mvt", &map_variance_type, map_variance_hash, MAPGaussianPrior::SINGLE, "The type of MAP estimation. \"single\" uses an identical sigma value for all the features. \"inv_emp\" uses a different sigma for each feature depending on the empirical expectation of the feature." ) );

  // FEATURE_FREEZE_TYPE
  StringHash<Model::FeatureFreezeType> feature_freeze_hash;
  feature_freeze_hash[ "no_freeze" ] = Model::NO_FREEZE;
  feature_freeze_hash[ "count"     ] = Model::FREEZE_BY_COUNT;
  feature_freeze_hash[ "bayes_bin" ] = Model::FREEZE_BY_BAYES_BIN;
  feature_freeze_hash[ "bayes_bin_raw_sigma" ] = Model::FREEZE_BY_BAYES_BIN_RAW_SIGMA;
  feature_freeze_hash[ "bayes_bin_hist" ] = Model::FREEZE_BY_BAYES_BIN_HIST;
  feature_freeze_hash[ "bayes_bin_hist_raw_sigma" ] = Model::FREEZE_BY_BAYES_BIN_HIST_RAW_SIGMA;
  opt.setOpt( new OptNamedInt<Model::FeatureFreezeType>( "FEATURE_FREEZE_TYPE", "--feature-freeze-type", "-fft", &feature_freeze_type, feature_freeze_hash, Model::NO_FREEZE, "The type of feature freezing. Freezing takes place just after the calculation of empirical expectations, using the information available at that time, such as feature counts, Bayesian variances, etc." ) );

  // FEATURE_FREEZE_THRESHOLD
  opt.setOpt( new Opt<double>( "FEATURE_FREEZE_THRESHOLD", "--feature-freeze-threshold", "-ffthr", &feature_freeze_threshold, 1.0, "The threshold for the feature freezing. The meaning of the value depends on FEATURE_FREEZE_TYPE" ) );

  // FEATURE_IGNORE_RATIO
  opt.setOpt( new Opt<double>( "FEATURE_IGNORE_RATIO", "--feature-ignore-ratio", "-fir", &feature_ignore_ratio, 1.0, "The rate of ignoring features (according to the absolute value of lambda). 0.0 means ignoring all the features; 1.0 means ignoring no feature. Lambda is set to zero after loading if the feature is ignored, so this option affects only the analyze mode" ) );

  // USE_BOX_CONSTRAINT
  opt.setOpt( new OptToggle( "USE_BOX_CONSTRAINT", "--use-box-constraint", "-ubc", &use_box_constraint, false, "Use box constraint" ) );

  // BOX_CONSTRAINT_TYPE
  StringHash<ModelBC::ConstraintType> box_constraint_hash;
  box_constraint_hash[ "single" ] = ModelBC::SINGLE;
  box_constraint_hash[ "each"   ] = ModelBC::EACH;
  box_constraint_hash[ "inv_cnt" ] = ModelBC::INV_CNT;
  box_constraint_hash[ "inv_cnt2" ] = ModelBC::INV_CNT2;
  box_constraint_hash[ "bayes_bin" ] = ModelBC::BAYES_BIN;
  box_constraint_hash[ "bayes_bin_raw_sigma" ] = ModelBC::BAYES_BIN_RAW_SIGMA;
  box_constraint_hash[ "bayes_bin_change_emp" ] = ModelBC::BAYES_BIN_CHANGE_EMP;
  box_constraint_hash[ "bayes_bin_hist" ] = ModelBC::BAYES_BIN_HIST;
  box_constraint_hash[ "bayes_bin_hist_raw_sigma" ] = ModelBC::BAYES_BIN_HIST_RAW_SIGMA;
  box_constraint_hash[ "bayes_bin_hist_change_emp" ] = ModelBC::BAYES_BIN_HIST_CHANGE_EMP;
  opt.setOpt( new OptNamedInt<ModelBC::ConstraintType>( "BOX_CONSTRAINT_TYPE", "--box-constraint-type", "-bct", &box_constraint_type, box_constraint_hash, ModelBC::SINGLE, "The type of box constraint" ) );

  // BOX_CONSTRAINT_FACTOR
  opt.setOpt( new Opt<double>( "BOX_CONSTRAINT_FACTOR", "--box-constraint-factor", "-bcf", &box_constraint_factor, 0.01, "The factor in box constraint (see BOX_CONSTRAINT_TYPE for how it is used)" ) );

  // BOX_CONSTRAINT_ALPHA
  opt.setOpt( new Opt<double>( "BOX_CONSTRAINT_ALPHA", "--box-constraint-alpha", "-bca", &box_constraint_alpha, 1.0, "A parameter in box constraint (see BOX_CONSTRAINT_TYPE for how it is used)" ) );

  // BOX_CONSTRAINT_BETA
  opt.setOpt( new Opt<double>( "BOX_CONSTRAINT_BETA", "--box-constraint-beta", "-bcb", &box_constraint_beta, 2.0, "A parameter in box constraint (see BOX_CONSTRAINT_TYPE for how it is used)" ) );

  // BOX_CONSTRAINT_INIT_VALUE
  opt.setOpt( new Opt<double>( "BOX_CONSTRAINT_INIT_VALUE", "--box-constraint-init-value", "-bciv", &box_constraint_init_value, 1.0, "You can set the initial values for ulambdas and llambdas." ) );

  // KERNEL_FIRST_REF_NUM
  opt.setOpt( new Opt<int>( "KERNEL_FIRST_REF_NUM", "--kernel-first-ref-num", "-kfrn", &kernel_first_ref_num, -1, "This number of first events are used as the candidates of kernel reference events. The default is to use all (-1)" ) );

  // TAKE_ALL_KERNEL
  opt.setOpt( new OptToggle( "TAKE_ALL_KERNEL", "--take-all-kernel", "-tak", &take_all_kernel, false, "Take kernel with all the candidate reference events determined by KERNEL_FIRST_REF_NUM" ) );

  // CHECK_KKT
  opt.setOpt( new OptToggle( "CHECK_KKT", "--check-kkt", "-ck", &check_KKT, true, "Check KKT condition (in box constraint estimation)" ) );

  // KKT_TOLERANCE
  opt.setOpt( new Opt<double>( "KKT_TOLERANCE", "--kkt-tolerance", "-kktt", &KKT_tolerance, 1.0E-3, "Tolerance of the KKT condition check" ) );

  // KKT_BAD_FEATURE_RATIO
  opt.setOpt( new Opt<double>( "KKT_BAD_FEATURE_RATIO", "--kkt-bad-feature-ratio", "-kbfr", &KKT_bad_feature_ratio, 0.05, "The ratio of tolerable bad (ul_active) features" ) );

  // KKT_MAX_FAILURE_FACTOR
  opt.setOpt( new Opt<double>( "KKT_MAX_KKT_FAITURE_FACTOR", "--kkt-max-failure-factor", "-kmff", &KKT_max_failure_factor, 10.0, "precision * <this factor> failures of the KKT check are allowed. If the ratio of bad features <= KKT_BAD_FEATURE_RATIO, the training stops. If not, precision * <this factor> * 10 failures can be made before the training terminates" ) );

  // SOFT_BOX_CONSTANT
  opt.setOpt( new Opt<double>( "SOFT_BOX_CONSTANT", "--soft-box-constant", "-sbc", &soft_box_constant, 1.0E10, "C of soft box constraint" ) );

  // MAKE_COMB_FEATURE
  opt.setOpt( new OptToggle( "MAKE_COMB_FEATURE", "--make-comb-feature", "-mcf", &make_comb_feature, false, "Make combination features when loading event files. This is an inefficient implementation, so it might be reimplemented in a future version" ) );

  // KERNEL_EXCLUDE_SELF
  opt.setOpt( new OptToggle( "KERNEL_EXCLUDE_SELF", "--kernel-exclude-self", "-kes", &kernel_exclude_self, false, "Exclude the event itself from the candidate kernel reference events" ) );

  // KERNEL_INCLUDE_ORIGINAL_HISTORY
  opt.setOpt( new OptToggle( "KERNEL_INCLUDE_ORIGINAL_HISTORY", "--kernel-include-original-history", "-kioh", &kernel_include_original_history, false, "Include original histories in addition to the kerneled histories." ) );

  // INCLUDE_STABILIZING_HISTORY
  opt.setOpt( new OptToggle( "INCLUDE_STABILIZING_HISTORY", "--include-stabilizing-history", "-ish", &include_stabilizing_history, false, "Include a virtual history, which always fires, to stabilize the estimation (seems not to be useful)" ) );
}

Property::~Property() {
  if ( event_on_file_stream != NULL ) {
    delete event_on_file_stream;
  }
}
*/

AMIS_NAMESPACE_END

// end of Property.cc
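
The constructor above registers every configuration key with an Opt, OptToggle, or OptNamedInt object that ties together a long flag, a short flag, a target variable, a default value, and a help string. The OptNamedInt entries additionally carry a StringHash that maps a user-supplied name (for example "alpha" or "Gaussian") to an enum value, falling back to the default when the name is not registered. Below is a minimal, self-contained sketch of that name-to-enum lookup; it is an illustration only and assumes nothing about the real Amis classes: std::map stands in for StringHash, and parse_named_int is a hypothetical helper rather than part of the Amis API.

// Minimal sketch of the name-to-enum lookup behind OptNamedInt-style options.
// FeatureWeightType and ALPHA/LAMBDA mirror the listing above; parse_named_int
// and the use of std::map (instead of StringHash) are illustrative assumptions.
#include <iostream>
#include <map>
#include <string>

enum FeatureWeightType { ALPHA, LAMBDA };

// Return the enum registered under `name`, or `fallback` when the name is unknown.
FeatureWeightType parse_named_int( const std::map<std::string, FeatureWeightType>& table,
                                   const std::string& name,
                                   FeatureWeightType fallback ) {
  std::map<std::string, FeatureWeightType>::const_iterator it = table.find( name );
  return it == table.end() ? fallback : it->second;
}

int main() {
  std::map<std::string, FeatureWeightType> feature_weight_table;
  feature_weight_table["alpha"]  = ALPHA;
  feature_weight_table["lambda"] = LAMBDA;

  // e.g. the value given to "--feature-weight" on the command line
  FeatureWeightType t = parse_named_int( feature_weight_table, "alpha", LAMBDA );
  std::cout << ( t == ALPHA ? "alpha" : "lambda" ) << std::endl;
  return 0;
}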
