
hsrch_11.cc

Description: a program that extracts feature parameters from an audio signal
Category: CC
Page 1 of 5

      vertex->getItem()->setBetaValid(false);
      timestamp = vertex->getItem()->getFrame();

      if ((timestamp >= 0) && (timestamp < data_a.length())) {
        beta_value = vertex->getItem()->getBeta();
        if (beta_bound(timestamp) < beta_value) {
          beta_bound(timestamp) = beta_value;
        }
      }
    }

    // insert the parents of the vertex on the list
    //
    for (boolean more = vertex->gotoFirstParent();
         more; more = vertex->gotoNextParent()) {
      parent = vertex->getCurrParent()->getVertex();

      if (parent != (BiGraphVertex<TrainNode>*)NULL) {
        if ((parent->getItem() != (TrainNode*)NULL) &&
            !parent->getItem()->isBetaValid()) {
          active_list.insertLast(parent);
          parent->getItem()->setBetaValid(true);
        }
      }
    }
  }

  // second pass applies beta pruning using the maximum beta score
  //

  // add the term vertex to the active list
  //
  active_list.insertLast(trellis_d.getTerm());

  // loop until the active list is empty
  //
  while (!active_list.isEmpty()) {

    // remove the first element from the list
    //
    active_list.removeFirst(vertex);

    // prune the node if its beta score falls too far below the maximum
    // beta value recorded for its time frame
    //
    if (vertex->getItem() != (TrainNode*)NULL) {
      timestamp = vertex->getItem()->getFrame();
      if ((timestamp >= 0) && (timestamp < data_a.length())) {
        beta_value = vertex->getItem()->getBeta();
        if ((beta_bound(timestamp) - beta_value) > beta_threshold_a) {
          vertex->getItem()->setValidNode(false);
          vertex->getItem()->setBeta(Integral::DB_LOG_MIN_VALUE);
        }
      }
    }

    // insert the parents of the vertex on the list
    //
    for (boolean more = vertex->gotoFirstParent();
         more; more = vertex->gotoNextParent()) {
      parent = vertex->getCurrParent()->getVertex();

      if (parent != (BiGraphVertex<TrainNode>*)NULL) {
        if ((parent->getItem() != (TrainNode*)NULL) &&
            !parent->getItem()->isBetaValid()) {
          active_list.insertLast(parent);
          parent->getItem()->setBetaValid(true);
        }
      }
    }
  }

  // clear the data structures before exiting
  //
  active_list.clear();

  // free memory: each cache row is an array, so use the array form of delete
  //
  for (long i = 0; i < num_frames; i++) {
    delete [] model_cache[i];
    model_cache[i] = (float*)NULL;
  }

  delete [] model_cache;
  model_cache = (float**)NULL;

  // exit gracefully
  //
  return true;
}
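
// illustrative sketch: the second pass above keeps a node only when its beta
// score lies within beta_threshold_a of the best beta score recorded for the
// same frame. the helper below restates that test in isolation; the function
// and argument names are hypothetical and not part of this class.
//
static bool sketch_passes_beta_beam(double beta, double frame_best_beta,
                                    double threshold) {

  // a node survives when the gap to the frame's best score is small enough
  //
  return (frame_best_beta - beta) <= threshold;
}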

// method: computeAlpha
//
// arguments:
//  Vector<VectorFloat>& data: (input) feature vectors
//  BiGraphVertex<TrainNode>* vertex: (input) trellis vertex whose parent
//                                    states contribute to the forward
//                                    probability
//
// [1] L. Rabiner, B. H. Juang, "Fundamentals of Speech Recognition",
//     Prentice Hall PTR, New Jersey, 1993, pp. 337-338, ISBN 0-13-015157-2
//
// 1. Initialization:
//
//    Alpha(1)[i] = pi(i) * b(1)[i]                 1 <= i <= N
//      pi(i) = initial probability for state i
//
// 2. Induction:
//
//    Alpha(t+1)[j] = [sum over i of Alpha(t)[i] * a(i,j)] * b(t+1)[j]
//      i = [1, ..., N]
//      b = model evaluation for state j at time t + 1
//      a = state transition between i at time t and j at time t + 1
//
// return: logical error status
//
// computes the forward probability
//
boolean HierarchicalSearch::computeAlpha(Vector<VectorFloat>& data_a,
                                         BiGraphVertex<TrainNode>* vertex_a) {

  // declare local variables
  //
  double tmp_accum = 0.0;
  double accumulator = 0.0;
  boolean accum_valid = false;

  TrainNode* prev_node = (TrainNode*)NULL;
  TrainNode* train_node = (TrainNode*)NULL;

  // get the state to be evaluated
  //
  train_node = vertex_a->getItem();

  // make sure the state is not null
  //
  if (train_node == (TrainNode*)NULL) {
    return true;
  }

  // loop over all states at the previous time frame
  //
  accumulator = Integral::DB_LOG_MIN_VALUE;
  for (boolean more = vertex_a->gotoFirstParent();
       more; more = vertex_a->gotoNextParent()) {

    // get the train node associated with the state at the previous time frame
    //
    prev_node = vertex_a->getCurrParent()->getVertex()->getItem();

    // make sure that this node contributes to the alpha probability
    //
    if ((prev_node == (TrainNode*)NULL) || !prev_node->getValidNode()) {
      continue;
    }

    // compute the forward probability
    //
    tmp_accum = prev_node->getAlpha() +
      vertex_a->getCurrParent()->getWeight();

    // accumulate the forward probability over different states
    //
    accum_valid = true;
    accumulator = Integral::logAddLog(accumulator, tmp_accum);
  }

  // when the current state does not have a valid statistical model,
  // propagate the forward probabilities through
  //
  if (!train_node->getValidModel()) {
    if (accum_valid) {
      train_node->setAlpha(accumulator);
    }
  }

  // when the current state does have a valid statistical model, propagate
  // the forward probabilities through after model evaluation
  //
  else {
    if (accum_valid) {
      accumulator += train_node->getScore();
      train_node->setAlpha(accumulator);
    }
  }

  // exit gracefully
  //
  return true;
}
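
// illustrative sketch: one induction step of the forward recursion above,
// written for a plain array-based HMM instead of the trellis classes used in
// this file. the names (sketch_log_add, n_states, log_a, log_b_t1, ...) are
// hypothetical; Integral::logAddLog plays the role of sketch_log_add here.
//
#include <cmath>
#include <cfloat>

// log(exp(x) + exp(y)) computed without leaving the log domain
//
static double sketch_log_add(double x, double y) {
  if (x < y) { double t = x; x = y; y = t; }
  if (y <= -DBL_MAX) {
    return x;
  }
  return x + std::log1p(std::exp(y - x));
}

// Alpha(t+1)[j] = [sum over i of Alpha(t)[i] * a(i,j)] * b(t+1)[j],
// accumulated entirely in the log domain
//
static void sketch_alpha_step(int n_states, const double* log_alpha_t,
                              const double* const* log_a,
                              const double* log_b_t1, double* log_alpha_t1) {
  for (int j = 0; j < n_states; j++) {
    double acc = -DBL_MAX;
    for (int i = 0; i < n_states; i++) {
      acc = sketch_log_add(acc, log_alpha_t[i] + log_a[i][j]);
    }
    log_alpha_t1[j] = acc + log_b_t1[j];
  }
}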

// method: computeBeta
//
// arguments:
//  Vector<VectorFloat>& data: (input) feature vectors
//  BiGraphVertex<TrainNode>* vertex: (input) trellis vertex whose child
//                                    states contribute to the backward
//                                    probability
//  float** model_cache: (input) model cache
//
// [1] L. Rabiner, B. H. Juang, "Fundamentals of Speech Recognition",
//     Prentice Hall PTR, New Jersey, 1993, pp. 337-338, ISBN 0-13-015157-2
//
// 1. Initialization:
//
//    Beta(T)[i] = 1                                1 <= i <= N
//
// 2. Induction:
//
//    Beta(t)[i] = sum over j of a(i,j) * b(t+1)[j] * Beta(t+1)[j]
//      j = [1, ..., N]
//      b = model evaluation for state j at time t + 1
//      a = state transition between i at time t and j at time t + 1
//
// return: logical error status
//
// computes the backward probability
//
boolean HierarchicalSearch::computeBeta(Vector<VectorFloat>& data_a,
                                        BiGraphVertex<TrainNode>* vertex_a,
                                        float** model_cache_a) {

  // declare local variables
  //
  double tmp_accum = 0.0;
  double accumulator = 0.0;
  boolean accum_valid = false;

  TrainNode* next_node = (TrainNode*)NULL;
  TrainNode* train_node = (TrainNode*)NULL;

  SearchNode* search_node = (SearchNode*)NULL;

  Context* reference_symbol = (Context*)NULL;
  GraphVertex<SearchNode>* reference_vertex = (GraphVertex<SearchNode>*)NULL;

  // get the trace associated with the state
  //
  train_node = vertex_a->getItem();

  // make sure the state is not null
  //
  if (train_node == (TrainNode*)NULL) {
    return true;
  }

  // loop over all states at the next time frame
  //
  accumulator = Integral::DB_LOG_MIN_VALUE;
  for (boolean more = vertex_a->gotoFirstChild();
       more; more = vertex_a->gotoNextChild()) {

    // get the train node associated with the state at the next time frame
    //
    next_node = vertex_a->getCurrChild()->getVertex()->getItem();

    // make sure that this node contributes to the beta probability
    //
    if ((next_node == (TrainNode*)NULL) || !next_node->getValidNode()) {
      continue;
    }

    // compute the backward probability
    //
    float score = 0.0;

    long frame = next_node->getFrame();

    reference_vertex = (GraphVertex<SearchNode>*)NULL;
    if ((reference_symbol = next_node->getReference()) != (Context*)NULL) {
      reference_vertex = reference_symbol->getCentralVertex();
    }

    if ((reference_vertex != (GraphVertex<SearchNode>*)NULL) &&
        ((search_node = reference_vertex->getItem()) != (SearchNode*)NULL)) {

      if ((frame > -1) && (frame < data_a.length())) {

        StatisticalModel* stat_model = search_node->getStatisticalModel();
        if (stat_model != (StatisticalModel*)NULL) {
          long model_index = search_node->getModelId();

          // reuse the cached log likelihood for this frame and model if it
          // has already been computed, otherwise evaluate and cache it
          //
          if (model_cache_a[frame][model_index] !=
              Integral::DB_LOG_MIN_VALUE) {
            score = model_cache_a[frame][model_index];
          }
          else {
            score = stat_model->getLogLikelihood(data_a(frame));
            model_cache_a[frame][model_index] = score;
          }
        }
      }
    }

    // update the score
    //
    next_node->setScore(score);

    tmp_accum = next_node->getBeta() +
      vertex_a->getCurrChild()->getWeight() + score;

    // accumulate the backward probability over different states
    //
    accum_valid = true;
    accumulator = Integral::logAddLog(accumulator, tmp_accum);
  }

  // mark the train node so that it is used in the accumulate/update phase
  //
  train_node->setValidNode(true);

  // have we accumulated any statistics?
  //
  if (accum_valid) {
    train_node->setBeta(accumulator);
  }

  // exit gracefully
  //
  return true;
}
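
// illustrative sketch: computeBeta memoizes model log likelihoods in
// model_cache_a so that each (frame, model) pair is evaluated at most once.
// the fragment below restates that pattern with a hypothetical evaluate()
// callback; unset_value marks entries that have not been computed yet
// (Integral::DB_LOG_MIN_VALUE plays that role above).
//
static float sketch_cached_score(float** cache, long frame, long model_index,
                                 float unset_value,
                                 float (*evaluate)(long frame,
                                                   long model_index)) {

  // evaluate the model only on a cache miss, then remember the result
  //
  if (cache[frame][model_index] == unset_value) {
    cache[frame][model_index] = evaluate(frame, model_index);
  }
  return cache[frame][model_index];
}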

// method: traverseTraceLevels
//
// arguments: none
//
// return: logical error status
//
// do a single-step traversal of the hypotheses at each level
//
boolean HierarchicalSearch::traverseTraceLevels() {

  // define local variables
  //
  boolean status = false;

  // make sure we have some paths left to traverse
  //
  if (!pathsRemainTrace()) {
    return false;
  }

  // propagate traces forward until they are all ready to be evaluated
  //
  status = (propagateTraces() || status);

  // now we evaluate all traces that are in the lowest level and move
  // them forward one arc. viterbi pruning takes place at this level
  // as does beam pruning.
  //
  status = (evaluateTraceModels() || status);

  // exit gracefully
  //
  return status;
}

// method: traverseInstanceLevels
//
// arguments: none
//
// return: logical error status
//
// do a single-step traversal of the hypotheses at each level
//
boolean HierarchicalSearch::traverseInstanceLevels() {

  // define local variables
  //
  boolean status = false;

  // make sure we have some paths left to traverse
  //
  if (!pathsRemainInstance()) {
    return false;
  }

  // propagate instances forward until they are all ready to be evaluated
  //
  status = (propagateInstances() || status);

  // now we evaluate all instances that are in the lowest level and move
  // them forward one arc. viterbi pruning takes place at this level
  // as does beam pruning.
  //
  status = (evaluateInstanceModels() || status);

  // exit gracefully
  //
  return status;
}

// method: propagateInstances
//
// arguments: none
//
// return: logical error status
//
// move instances through the hierarchy until all active instances are ready
// to be evaluated
//
boolean HierarchicalSearch::propagateInstances() {

  // define local variables
  //
  long level_num = 0;
  boolean status = true;
  long num_levels = getNumLevels();

  // clear the hypothesis list since we are generating new hypotheses for
  // this frame
  //
  clearValidHypsInstance();

  // move all traces forward in the search space. we do this by pushing
  // traces up and down in the hierarchy until they all rest in a state
  // that is ready to be evaluated. if there is an infinite "skip" loop in
  // the search graph then this process will go into an infinite loop. an
  // infinite skip loop is of the form: A -> B && B -> A. this infinite
  // loop structure may be masked by the fact that it extends over
  // multiple search levels.
  //
  // when we exit the while loop below, all traces should be in the lowest
  // level and should be ready for evaluation.
  //
  while ((getActiveInstances() != getActiveInstances(num_levels - 1)) ||
         status) {

    // status indicates whether any movement in the traces happened
    //
    status = false;

    for (level_num = (long)initial_level_d;
         level_num < num_levels; level_num++) {
      if (getSearchLevel(level_num).useBeam()) {
        max_instance_scores_d(level_num) = Trace::INACTIVE_SCORE;
      }
    }

    // work from the bottom up until we reach a point where all traces
    // are ready to descend again
    //
    for (level_num = num_levels - 1;
         level_num >= (long)initial_level_d; level_num--) {

      if (debug_level_d >= Integral::DETAILED) {
        String output;
        output.assign(L"propagating instances up from level: ");
        output.concat(level_num);
        output.concat(L" in frame: ");
        output.concat(current_frame_d);
        Console::put(output);
      }

      status = (propagateInstancesUp(level_num) || status);
    }

    // propagate traces down the hierarchy, carrying out viterbi pruning
    //
    for (level_num = (long)initial_level_d;
         level_num < num_levels - 1; level_num++) {

      // beam pruning
      //
      if (getSearchLevel(level_num).useBeam()) {

        if (debug_level_d >= Integral::DETAILED) {
          Console::put(L"\nbeam pruning after propagation instance up");
        }

        beamPruneInstance(level_num);
