// file: $isip/class/search/TrainNode/tnod_05.cc
// version: $Id: tnod_05.cc,v 1.8 2003/01/23 20:00:32 alphonso Exp $
//

// isip include files
//
#include "TrainNode.h"

// method: assign
//
// arguments:
//  const TrainNode& copy_node: (input) node to copy
//
// return: logical error status
//
// assign TrainNode from the copy
//
boolean TrainNode::assign(const TrainNode& copy_node_a) {

  // copy the pointers
  //
  reference_d = copy_node_a.reference_d;
  stat_model_d = copy_node_a.stat_model_d;

  // copy the values
  //
  score_d = copy_node_a.score_d;
  frame_d = copy_node_a.frame_d;
  alpha_d = copy_node_a.alpha_d;
  beta_d = copy_node_a.beta_d;
  is_valid_d = copy_node_a.is_valid_d;
  is_alpha_valid_d = copy_node_a.is_alpha_valid_d;
  is_beta_valid_d = copy_node_a.is_beta_valid_d;
  is_accum_valid_d = copy_node_a.is_accum_valid_d;

  // exit gracefully
  //
  return true;
}

// method: clear
//
// arguments:
//  Integral::CMODE cmode: (input) clear mode
//
// return: logical error status
//
// clear the contents of the TrainNode
//
boolean TrainNode::clear(Integral::CMODE cmode_a) {

  // clear the pointers
  //
  reference_d = (Context*)NULL;
  stat_model_d = (StatisticalModel*)NULL;

  // reset the values
  //
  frame_d = DEF_TIMESTAMP;
  alpha_d = Integral::DB_LOG_MIN_VALUE;
  beta_d = Integral::DB_LOG_MIN_VALUE;
  score_d = Integral::DB_LOG_MIN_VALUE;
  is_valid_d = false;
  is_alpha_valid_d = false;
  is_beta_valid_d = false;
  is_accum_valid_d = false;

  // exit gracefully
  //
  return true;
}

// method: eq
//
// arguments:
//  const TrainNode& compare_node: (input) TrainNode to compare
//
// return: true if the TrainNodes are equivalent, else false
//
// compare two TrainNodes. they are equivalent if they are the same
// object, i.e., they have the same address
//
boolean TrainNode::eq(const TrainNode& compare_node_a) const {

  // compare the address
  //
  return (this == &compare_node_a);
}

// method: update
//
// arguments:
//  VectorFloat& varfloor: (input) variance floor
//  long min_model: (input) minimum model count
//
// return: logical status
//
// updates the model using the accumulators generated during training
//
boolean TrainNode::update(VectorFloat& varfloor_a, long min_model_a) {

  // declare local variables
  //
  boolean status = false;

  // check to make sure that the model is valid
  //
  if (stat_model_d != (StatisticalModel*)NULL) {

    // update: call the statistical model
    //
    status = stat_model_d->update(varfloor_a, min_model_a);
  }

  // exit gracefully
  //
  return status;
}

// method: accumulate
//
// arguments:
//  double utter_prob: (input) utterance probability
//  Vector<VectorFloat>& data: (input) feature vectors
//  float min_mpd: (input) minimum model probability deviance
//  float min_occupancy: (input) floor on the occupancy probability
//
// return: logical status
//
// accumulates the statistics in training which are used to update the model
//
boolean TrainNode::accumulate(double utter_prob_a,
                              Vector<VectorFloat>& data_a,
                              float min_mpd_a,
                              float min_occupancy_a) {

  // declare local variables
  //
  VectorDouble param;
  boolean precomp = false;
  boolean status = false;

  // set up the parameter list
  //
  param.setLength(6);

  // assumptions:
  //
  //  param(0) - forward probability (alpha)
  //  param(1) - backward probability (beta)
  //  param(2) - utterance normalization, i.e., (1) above
  //  param(3) - minimum model probability deviance
  //  param(4) - floor on the occupancy probability
  //  param(5) - likelihood score
  //
  param(0) = alpha_d;
  param(1) = beta_d;
  param(2) = utter_prob_a;
  param(3) = (double)min_mpd_a;
  param(4) = (double)min_occupancy_a;
  param(5) = score_d;

  // check to make sure the model is valid
  //
  if (stat_model_d != (StatisticalModel*)NULL) {

    // accumulate: call the statistical model
    //
    status = stat_model_d->accumulate(param, data_a(frame_d), precomp);
  }

  // exit gracefully
  //
  return status;
}
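
// illustrative sketch (not part of the ISIP distribution): the parameter
// list assembled in accumulate() above carries the forward probability
// (alpha), the backward probability (beta), and the utterance
// normalization so the statistical model can form the standard Baum-Welch
// state-occupancy term, gamma = (alpha * beta) / P(O | model). a minimal,
// self-contained version of that computation is sketched below; the
// function name and arguments are hypothetical and are not part of the
// TrainNode or StatisticalModel interfaces.
//
#include <algorithm>
#include <cmath>

// compute the state occupancy for one frame from log-domain quantities,
// flooring the result at min_occupancy (cf. param(4) above)
//
double occupancy(double log_alpha, double log_beta,
                 double log_utter_prob, double min_occupancy) {

  // gamma = (alpha * beta) / P(O | model), evaluated in the log domain
  // for numerical stability and exponentiated at the end
  //
  double gamma = std::exp(log_alpha + log_beta - log_utter_prob);

  // apply the occupancy floor
  //
  return std::max(gamma, min_occupancy);
}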
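
// illustrative sketch (not part of the ISIP distribution): accumulate() is
// typically called once per frame to add occupancy-weighted statistics,
// and update() is called afterwards to re-estimate the model from those
// accumulators. the struct below shows that accumulate/update split for a
// simple mean estimate using invented types; it is not the ISIP
// StatisticalModel interface.
//
#include <vector>

struct MeanAccumulator {

  double occ = 0.0;                   // summed occupancy over all frames
  std::vector<double> sum;            // occupancy-weighted feature sum

  // accumulate one frame of statistics, weighted by its occupancy
  //
  void accumulate(double gamma, const std::vector<double>& frame) {
    if (sum.empty()) {
      sum.assign(frame.size(), 0.0);
    }
    occ += gamma;
    for (size_t i = 0; i < frame.size(); i++) {
      sum[i] += gamma * frame[i];
    }
  }

  // update: re-estimate the mean from the accumulators
  //
  std::vector<double> update() const {
    std::vector<double> mean(sum.size(), 0.0);
    if (occ > 0.0) {
      for (size_t i = 0; i < sum.size(); i++) {
        mean[i] = sum[i] / occ;
      }
    }
    return mean;
  }
};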