⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 hsrch_14.cc

📁 这是语音识别系统中分层搜索解码器（HierarchicalSearch）的一部分源代码，实现网络/词汇树搜索、实例传播与剪枝以及假设回溯输出
💻 CC
📖 第 1 页 / 共 5 页
字号:
  // NOTE(review): this chunk begins mid-function. the error message below
  // suggests the enclosing method is initializeLexNetworkDecoder; its
  // signature lies outside the visible region, so only the tail is shown.
  //
  // network/lexical-tree decoding cannot be used in training mode
  //
  if (search_mode_d == TRAIN) {
    return Error::handle(name(),
			 L"initializeLexNetworkDecoder: can't used in training mode",
			 HierarchicalSearch::ERR_LEVEL, __FILE__, __LINE__);
  }

  // initialize lexical trees: expand every search level that is
  // configured to use one
  //
  for (long i = 0; i < num_levels ; i++){
    if ( getSearchLevel(i).useLexicalTree()){
      expandLexicalTree(i);
    }
  }

  // generate initial instances for the search engine
  //
  networkLexStart();

  // exit gracefully
  //
  return true;
}

// method: networkLexStart
//
// arguments: none
//
// return: logical error status
//
// start the search engine with a network structure: seed the search with a
// single instance positioned at the start vertex of the initial search level
//
boolean HierarchicalSearch::networkLexStart() {

  // seed the top instance list with the start node of the search graph
  //
  Instance* tmp_instance = new Instance();

  // propagate the training mode to the new instance
  //
  if (search_mode_d == TRAIN) {
    tmp_instance->setInstanceMode(Instance::TRAIN);
  }

  // stamp the instance with the current frame index
  //
  tmp_instance->setFrame((ulong)current_frame_d);

  // has the history been previously generated? if so reuse the history,
  // if not generate a new history and add it to the history pool
  //
  tmp_instance->setHistory(history_pool_d.initAndAllocate());
  tmp_instance->setSymbolStack(history_pool_d.initAndAllocate());

  // set the start vertex
  //
  GraphVertex<SearchNode>* tmp_vert
    = search_levels_d((long)initial_level_d).getSubGraph(0).getStart();

  // descend this original instance to level 0; that's why we pass a level
  // number of -1: (-1 + 1 = 0)
  //
  changeHistory(tmp_instance, false, -1,
		tmp_vert);

  // the very first instance has no predecessor in the instance list
  //
  Instance* start_instance = (Instance*)NULL;

  // initialize the structure that holds the n-symbols
  //
  initializeNsymbolInstance(tmp_instance);

  // add to the instance list
  //
  addInstance( 0 , start_instance, tmp_instance, false);

  // exit gracefully
  //
  return true;
}

// method: traverseLexInstanceLevels
//
// arguments: none
//
// return: logical error status
//
// do a single-step traversal of the hypotheses at each level: first push
// all instances to the lowest level, then evaluate them against the models
//
boolean HierarchicalSearch::traverseLexInstanceLevels() {

  // define local variables
  //
  boolean status = false;

  // make sure we have some paths left to traverse
  //
  if (!pathsRemainInstance()) {
    return false;
  }

  // propagate instances forward until they are all ready to be evaluated
  //
  status = (propagateLexInstances() || status);

  // now we evaluate all instances that are in the lowest level and move
  // them forward one arc. viterbi pruning takes place at this level
  // as does beam pruning.
  //
  // lexical tree can't be at this level
  //
  status = (evaluateLexInstanceModels() || status);

  // exit gracefully
  //
  return status;
}

// method: propagateLexInstances
//
// arguments: none
//
// return: logical error status
//
// move instances through the hierarchy until all active instances are
// ready to be evaluated
//
boolean HierarchicalSearch::propagateLexInstances() {

  // define local variables
  //
  long level_num = 0;
  boolean status = true;
  long num_levels = getNumLevels();
  String out;

  // clear the hypothesis list since we are generating new hypotheses for
  // this frame
  //
  clearValidHypsLexInstance();

  // move all instances forward in the search space. we do this by
  // pushing instances up and down in the hierarchy until they all rest
  // in a state that is ready to be evaluated. if there is an infinite
  // "skip" loop in the search graph then this process will go into an
  // infinite loop. an infinite skip loop is of the form:
  // A -> B && B -> A.
  // this infinite loop structure may be masked by the fact that it
  // extends over multiple search levels.
  //
  // when we exit the while loop below, all instances should be in the
  // lowest level and should be ready for evaluation.
  //
  while ((getActiveInstances() != getActiveInstances(num_levels - 1)) ||
	 status) {

    // status indicates whether any movement in the instances happened
    //
    status = false;

    // reset the per-level best scores for every level that uses beam pruning
    //
    for (level_num = 0;  level_num < num_levels; level_num++) {
      if (getSearchLevel(level_num).useBeam()) {
	max_instance_scores_d(level_num) = Instance::INACTIVE_SCORE;
      }
    }

    // work from the bottom up until we reach a point where all instances
    // are ready to descend again. this segment takes care of
    // posterior scores such as nsymbol probabilities and applies
    // pruning (beam and instance) at each level.
    //
    for (level_num = num_levels - 1; level_num >= 0; level_num--) {

      if (debug_level_d >= Integral::DETAILED) {
	String output;

	output.assign(L"propagating instances up from level: ");
	output.concat(level_num);
	output.concat(L" in frame: ");
	output.concat(current_frame_d);
	Console::put(output);
      }

      // propagate up
      //
      status = (propagateLexInstancesUp(level_num) || status);
    }

    // propagate instances down the hierarchy, carrying out viterbi pruning
    // as we go.
    //
    for (level_num = (long)initial_level_d;  level_num < num_levels - 1; level_num++) {

      // beam pruning
      //
      if (getSearchLevel(level_num).useBeam()) {
	if (debug_level_d >= Integral::DETAILED) {
	  Console::put(L"\nbeam pruning after propagation instances up");
	}

	beamPruneLexInstance(level_num);
      }

      // instance pruning
      //
      if (getSearchLevel(level_num).useInstance()) {
	if (debug_level_d >= Integral::DETAILED) {
	  Console::put(L"\ninstance pruning after propagation instances up");
	}

	instancePruneLexInstance(level_num);
      }

      // print debug information
      //
      if (debug_level_d >= Integral::DETAILED) {
	String output;

	output.assign(L"propagating instances down from level: ");
	output.concat(level_num);
	output.concat(L" in frame: ");
	output.concat(current_frame_d);
	Console::put(output);
      }

      // propagate down
      //
      status = (propagateLexInstancesDown(level_num) || status);
    }
  }

  // print debugging information
  //
  if (debug_level_d >= Integral::ALL) {

    String val;
    val.assign(L"Frame:");
    val.concat(current_frame_d);
    val.concat(L" Instances left:");
    val.concat(getActiveInstances(num_levels - 1));
    Console::put(val);
  }

  // exit gracefully
  //
  return true;
}

// method: expandLexicalTree
//
// arguments:
//  long level_num_a: (input) index of the search level whose sub-graphs
//                    are to be expanded into lexical trees
//
// return: logical error status
//
// expand graphs into lexical trees
//
boolean HierarchicalSearch::expandLexicalTree(long level_num_a) {

  // check that the lexical tree level index is in range
  //
  if ( level_num_a > ( getNumLevels() - 1)
       || level_num_a < 0 ) {
    return Error::handle(name(),
			 L"expandLexicalTree: wrong lexical tree level",
			 HierarchicalSearch::ERR_LEVEL, __FILE__, __LINE__);
  }

  // build the lexical tree from the top down, one sub-graph at a time
  //
  for (long i  = 0; i < search_levels_d(level_num_a).getSubGraphs().length();
       i++ ){
    DiGraph<SearchNode>& word_graph =
      search_levels_d(level_num_a).getSubGraph((long)i);

    //search_levels_d(level_num_a+1).debug(L"search_levels_d");
    //search_levels_d(level_num_a+1).getSubGraphs().debug(L"sub_graphs_d");

    // expand the top level graph to a lexical tree using the sub-graphs
    // of the next level down
    //
    LexicalTree::expandLexicalTree(word_graph,
				   search_levels_d(level_num_a+1).getSubGraphs(),
				   level_num_a);
  }

  // exit gracefully
  //
  return true;
}

// method: getLexHypotheses
//
// arguments:
//  String& output_hyp: (output) the current search hypotheses
//  long level: (input) the level to print hypotheses from
//  float& total_score: (output) the hypothesis total score
//  long& num_frames: (output) frame index of the last instance
//  DoubleLinkedList<Instance>& instance_path: (output) best hypothesis instance path
//
// return: logical error status
//
// build a graph representing the hypotheses and return it
//
boolean HierarchicalSearch::getLexHypotheses(String& output_hyp_a,
					     long level_a,
					     float& total_score_a,
					     long& num_frames_a,
					     DoubleLinkedList<Instance>& instance_path_a) {

  // declare local variables
  //
  Instance* tmp_instance = (Instance*)NULL;
  SearchNode*  prev_node = (SearchNode*)NULL;
  SearchNode*  tmp_node = (SearchNode*)NULL;

  long counter = 0;
  long curr_level = 0;
  long frame_ind = 0;
  long prev_frame_ind = -1;
  long back_count = 0;
  float score = 0.0;
  float prev_score = 0.0;
  long symbol_id = 0;

  String out_str;
  SearchSymbol sym;

  // clear the output and set the allocation mode for the instance path
  // (USER mode: the list does not own the Instance objects it holds)
  //
  output_hyp_a.clear();
  instance_path_a.clear();
  instance_path_a.setAllocationMode(DstrBase::USER);

  // move all instances forward in the search space
  //
  if (!propagateLexInstances()) {
    return false;
  }

  // make sure we have at least one valid hypothesis
  //
  if (instance_valid_hyps_d.length() < 1) {
    return false;
  }

  // loop over all valid instances in the hypothesis list
  //
  instance_valid_hyps_d.gotoFirst();
  Instance* best_end_hyp = instance_valid_hyps_d.getCurr();

  // find the instance with the best score, i.e., the best hypothesis instance
  //
  while (true) {
    if (!instance_valid_hyps_d.gotoNext()) {
      break;
    }
    tmp_instance = instance_valid_hyps_d.getCurr();
    if (tmp_instance->getScore() > best_end_hyp->getScore()) {
      best_end_hyp = tmp_instance;
    }
  }

  // print debugging information
  //
  if (debug_level_d >= Integral::DETAILED) {

    // print the best instance
    //
    printInstance(best_end_hyp, -1, true);
  }

  // backtrack from the best hypothesis instance and generate the
  // instance path (insertFirst reverses the back-pointer order)
  //
  for (tmp_instance = best_end_hyp; tmp_instance != (Instance*)NULL;
       tmp_instance = tmp_instance->getBackPointer()) {
    instance_path_a.insertFirst(tmp_instance);
    back_count++;
  }

  // use the best hypothesis instance path to generate the hypothesis string
  //
  for (boolean more_instances = instance_path_a.gotoFirst(); more_instances;
       more_instances = instance_path_a.gotoNext()) {
    counter++;
    tmp_instance = instance_path_a.getCurr();
    frame_ind = tmp_instance->getFrame();

    // get the central vertex from the top of the history stack
    //
    GraphVertex<SearchNode>* tmp_vertex = (GraphVertex<SearchNode>*)NULL;
    tmp_vertex = tmp_instance->getSymbol()->getCentralVertex();

    // check for a NULL vertex
    //
    if (tmp_vertex == (GraphVertex<SearchNode>*)NULL) {

	prev_score = tmp_instance->getScore();
	score = prev_score;
    }

    // make sure the vertex is neither the dummy
    // start nor terminating node
    //
    else if ((!tmp_vertex->isStart()) &&
	(!tmp_vertex->isTerm())) {
      tmp_node = tmp_vertex->getItem();
      curr_level = tmp_node->getSearchLevel()->getLevelIndex();

      // level_a < 0 selects the default output: level-0 symbols only
      //
      if ((level_a < 0) && (curr_level == 0)) {
	tmp_node->getSymbol(sym);
	score = tmp_instance->getScore();
	symbol_id = tmp_node->getSymbolId();

	// generate the output hypothesis containing only search symbols
	//
	if (!getSearchLevel(curr_level).isExcludeSymbol(symbol_id)) {
	  out_str.assign(sym);
	  out_str.concat(L" ");
	}
	else {
	  out_str.assign(L"");
	}

	// append partial string to the complete hypothesis report
	//
	if ( frame_ind != prev_frame_ind ) {
	  output_hyp_a.concat(out_str);
	}

	prev_node = tmp_node;
	prev_score = score;
	prev_frame_ind = frame_ind;
      }

      // level_a >= 0 selects a specific level: emit one line per symbol
      // with start frame, end frame, symbol, and delta score
      //
      if ((level_a >= 0) && (curr_level == level_a)) {
	String context;
	tmp_instance->getSymbol()->print(context);
	score = tmp_instance->getScore();
	sym.assign(context);

	// generate the output hypothesis
	//
	if (counter > 1) {
	  out_str.assign(L"\n");
	} else {
	  out_str.assign(L"");
	}

	// append previous frame index (frame indices are reported
	// one-based at the lowest level)
	//
	if (level_a != (getNumLevels() - 1)) {
	  out_str.concat(prev_frame_ind);
	}
	else {
	  out_str.concat(prev_frame_ind + 1);
	}
	out_str.concat(L"\t");

	// append current frame index
	//
	if (level_a != (getNumLevels() - 1)) {
	  out_str.concat(frame_ind);
	}
	else {
	  out_str.concat(frame_ind + 1);
	}
	out_str.concat(L"\t");

	// append search symbol
	//
	out_str.concat(sym);
	out_str.concat(L"\t\t");

	// append score (delta against the previous symbol's score)
	//
	out_str.concat(score - prev_score);

	// append partial string to the complete hypothesis report
	//
	if (level_a == (getNumLevels() - 1)) {
	  output_hyp_a.concat(out_str);
	}
	else {
	  if ( frame_ind != prev_frame_ind ) {
	    output_hyp_a.concat(out_str);
	  }
	}

	// NOTE(review): the source is truncated here (page 1 of 5);
	// the remainder of getLexHypotheses is not visible in this chunk.

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -