
hmm_train.cc

This is a program for processing speech signals.

Language: C++
      // find the beam pruning score for state-level pruning
      //
      if (beam_width[HT_STATE_LEVEL] != (float_8)0) {
        beam_thresh[HT_STATE_LEVEL] = max_score[HT_STATE_LEVEL] +
          beam_width[HT_STATE_LEVEL];
      }

      // if mapmi pruning works better make that the state-level beam
      // threshold
      //
      if (beam_thresh[HT_STATE_LEVEL] < mapmi_thresh) {
        beam_thresh[HT_STATE_LEVEL] = mapmi_thresh;
      }

      // perform beam pruning at state tokens
      //
      token_prune_cc(state_toklist, active_phones, num_active_ph,
                     beam_thresh[HT_STATE_LEVEL], num_traces_del);

      // reset the status flags and update the list of lexical trees
      //
      update_trees_cc(lextree_list);

      // store counts
      //
      for (int_4 i = 0; i < num_levels; i++) {
        total_gen[i] += num_traces_gen[i];
        total_del[i] += num_traces_del[i];
        num_traces_total[i] += (num_traces_gen[i] - num_traces_del[i]);
      }

      // reset counts
      //
      for (int_4 i = 0; i < num_levels; i++) {
        num_traces_gen[i] = (int_4)0;
        num_traces_del[i] = (int_4)0;
      }

      num_mapmi = (int_4)0;
      num_hyps = 0;

      // reset word-level marker positions
      //
      for (int_4 i = 0; i < num_active_wd; i++) {
        prev_wdmark[active_words[i]] = wdmarker[active_words[i]];
        wdmarker[active_words[i]] = (Train_Link_node*)NULL;
      }

      // reset phone-level marker positions
      //
      for (int_4 i = 0; i < num_active_ph; i++) {
        prev_phmark[active_phones[i]] = phmarker[active_phones[i]];
        phmarker[active_phones[i]] = (Train_Link_node*)NULL;
      }

      // increment the current time frame
      //
      current_frame++;

    } // end while loop over all frames

    // close file
    //
    fclose(fpi);

    // for phone_level input transcriptions, delete the phn_list and
    // utterance id
    //
    if (mlf_mode == HT_MODEL_TRANS) {
      delete [] phn_list;
      phn_list = (int_4*)NULL;
      delete [] phns;
      phns = (int_4*)NULL;
      utterance = (char_1*)NULL;
    }

    // reset the n-best array
    //
    for (int_4 ff = 0; ff < num_nbest; ff++) {
      n_best_array[ff] = (Train_Trace*)NULL;
    }

    // reset word-level marker positions
    //
    for (int_4 i = 0; i < num_active_wd; i++) {
      wdmarker[active_words[i]] = prev_wdmark[active_words[i]];
      prev_wdmark[active_words[i]] = (Train_Link_node*)NULL;
    }

    // reset phone-level marker positions
    //
    for (int_4 i = 0; i < num_active_ph; i++) {
      phmarker[active_phones[i]] = prev_phmark[active_phones[i]];
      prev_phmark[active_phones[i]] = (Train_Link_node*)NULL;
    }

    // reset the current time frame
    //
    current_frame--;

    // we have now finished the state sequences so we need to backtrace,
    // i.e. find an emitting word level trace with the highest score and
    // follow backpointers to the start trace
    //
    backtrace_cc(wdmarker[HT_SENT_END_WORD], num_nbest, num_hyps,
                 n_best_array);

    // if no sentence end is found print the best word end anyway
    //
    if (num_hyps == 0) {

      // print error message
      //
      fprintf(stdout, "%s :: No sentence-end hypotheses were found.\n",
              mfcc_file);

      // find the best word end
      //
      find_nbest_cc(word_trlist, wdmarker, (int_4)1, active_words,
                    num_active_wd, num_hyps, n_best_array);
    }

    if (op_out == ISIP_TRUE) {

      // print the n-best list of sentence hypotheses to file
      //
      print_hyps_cc(num_hyps, output_file, n_best_array, current_frame,
                    phones, ph_size, total_gen, total_del, num_traces_total);
    }

    if (num_hyps != 0) {

      // count the valid file
      //
      file_count++;

      para_estimate_cc(num_hyps, n_best_array, vectors, states, models,
                       train_mean, train_covar, count, num_features,
                       num_states, num_mix, trans_count, mod_map,
                       st_map, trans_map, sp_phn_ind);
    }

    // free the vectors array TRAIN
    //
    for (int_4 i = 0; i < num_vect; i++) {
      delete [] vectors[i];
    }
    delete [] vectors;
    vectors = (float_8**)NULL;

    // dummy variables for deleting memory
    //
    Train_Link_node* nd = (Train_Link_node*)NULL;
    Train_Link_node* pd = (Train_Link_node*)NULL;

    // free memory in word-level trace lists
    //
    Train_Trace* tr = (Train_Trace*)NULL;
    for (int_4 i = 0; i < num_active_wd; i++) {
      if (word_trlist[active_words[i]] != (Train_Link_list*)NULL) {
        for (nd = word_trlist[active_words[i]]->get_head_cc();
             nd != (Train_Link_node*)NULL; nd = pd) {
          pd = nd->get_next_cc();
          tr = (Train_Trace*)(nd->get_item_cc());
          word_trlist[active_words[i]]->remove_cc(nd);
          if (tr != (Train_Trace*)NULL) {
            manager->delete_cc(tr);
          }
        }
      }
      delete word_trlist[active_words[i]];
    }

    // free memory in phone-level trace lists
    //
    tr = (Train_Trace*)NULL;
    for (int_4 i = 0; i < num_active_ph; i++) {
      if (phone_trlist[active_phones[i]] != (Train_Link_list*)NULL) {
        for (nd = phone_trlist[active_phones[i]]->get_head_cc();
             nd != (Train_Link_node*)NULL; nd = pd) {
          pd = nd->get_next_cc();
          tr = (Train_Trace*)(nd->get_item_cc());
          phone_trlist[active_phones[i]]->remove_cc(nd);
          if (tr != (Train_Trace*)NULL) {
            manager->delete_cc(tr);
          }
        }
      }
      delete phone_trlist[active_phones[i]];
    }

    // free memory in the token lists
    //
    Train_Token* tok = (Train_Token*)NULL;
    for (int_4 i = 0; i < num_active_ph; i++) {
      if (state_toklist[active_phones[i]] != (Train_Link_list*)NULL) {
        for (nd = state_toklist[active_phones[i]]->get_head_cc();
             nd != (Train_Link_node*)NULL; nd = pd) {
          pd = nd->get_next_cc();
          tok = (Train_Token*)(nd->get_item_cc());
          state_toklist[active_phones[i]]->remove_cc(nd);
          if (tok != (Train_Token*)NULL) {
            delete tok;
          }
        }
      }
      delete state_toklist[active_phones[i]];
    }

    // free memory in active lexical tree list
    //
    Train_Lex_tree* tree = (Train_Lex_tree*)NULL;
    for (nd = lextree_list->get_head_cc(); nd != (Train_Link_node*)NULL; nd = pd) {
      pd = nd->get_next_cc();
      tree = (Train_Lex_tree*)(nd->get_item_cc());
      lextree_list->remove_cc(nd);
      if (tree != (Train_Lex_tree*)NULL) {
        delete tree;
      }
    }
    delete lextree_list;
    lextree_list = (Train_Link_list*)NULL;

    // delete the lex tree
    //
    delete traintree;

    // free memory
    //
    delete lattice;
    lattice = (Train_Lattice*)NULL;

  } // end while loop over all files

  // close transcription file
  //
  fclose(fpl);

  // for TRAIN update the states parameter and transition prob.
  //
  if (file_count > (int_4)0 && train_mode == HT_NORMAL) {
    update_cc(transitions, states, train_mean, train_covar, count,
              trans_count, trans_size, num_features, num_states, num_mix,
              num_trans, var_floor);

    // print out the training result
    //
    print_state_cc(new_states_file, states, num_states, num_features,
                   output_mode);
    print_trans_cc(new_trans_file, num_trans, trans_size, transitions);

    // determine if we need to output the occupancy
    //
    if (state_occ_mode == HT_OPT_ON) {

      // open the occupancy file
      //
      FILE* fp_occ_file = fopen((char*)state_occ_file, "w");
      if (fp_occ_file == (FILE*)NULL) {
        fprintf(stdout, "Error: unable to open state occupancy file %s\n",
                state_occ_file);
        exit(ISIP_PROTO_ERROR);
      }

      // call the print method
      //
      print_occ_cc(fp_occ_file, count, num_states,
                   states[1]->get_num_mixtures_cc());

      fclose(fp_occ_file);
    }

    fprintf(stdout, "Number of valid mfcc files: %ld\n", file_count);
  }
  else if (file_count > (int_4)0 && train_mode == HT_BATCH) {

    // open file to dump the stats for this batch
    //
    FILE* fp_batch = fopen((char*)batch_stats_file, "wb");

    // exit if file does not exist
    //
    if (fp_batch == (FILE*)NULL) {
      fprintf(stdout, "Cannot open file %s\n", batch_stats_file);
      exit(ISIP_PROTO_ERROR);
    }

    // dump the stats
    //
    dump_stats_cc(fp_batch, train_mean, train_covar, count, trans_count,
                  trans_size, num_states, num_mix, num_features, num_trans);

    // close file and deallocate memory for the file name
    //
    fclose(fp_batch);
  }
  else {
    fprintf(stdout, "Warning: no valid mfcc file found\n");
  }

  // free memory for TRAIN
  //
  for (int_4 i = 0; i < num_states; i++) {
    for (int_4 j = 0; j < num_mix; j++) {
      delete [] train_mean[i][j];
      delete [] train_covar[i][j];
    }
    delete [] train_mean[i];
    delete [] train_covar[i];
    delete [] count[i];
  }
  delete [] train_mean;
  delete [] train_covar;
  delete [] count;

  for (int_4 i = 0; i < num_trans; i++) {
    for (int_4 j = 0; j < trans_size[i]; j++) {
      delete [] trans_count[i][j];
    }
    delete [] trans_count[i];
  }
  delete [] trans_count;

  for (int_4 i = 0; i < num_trans; i++) {
    for (int_4 j = 0; j < trans_size[i]; j++) {
      delete [] transitions[i][j];
    }
    delete [] transitions[i];
  }
  delete [] transitions;
  delete [] trans_size;

  for (int_4 i = 0; i < num_models; i++) {
    delete [] st_map[i];
  }
  delete [] st_map;
  delete [] trans_map;
  delete [] mod_map;
  delete [] phone_map;

  // free memory
  //
  delete [] max_score;
  delete [] beam_width;
  delete [] beam_thresh;

  // free memory
  //
  delete [] num_traces_total;
  delete [] num_traces_gen;
  delete [] num_traces_del;
  delete [] total_gen;
  delete [] total_del;

  // free memory
  //
  delete [] features;
  delete [] active_phones;
  delete [] active_words;

  // free memory
  //
  delete [] wdmarker;
  delete [] prev_wdmark;
  delete [] word_trlist;
  delete [] phmarker;
  delete [] prev_phmark;
  delete [] phone_trlist;
  delete [] n_best_array;
  delete [] state_toklist;

  // close files
  //
  fclose(flat);
  fclose(fin);
  if (op_out == ISIP_TRUE)
    fclose(fout);

  // free memory in filenames
  //
  delete [] input_file;
  delete [] mfcc_file;
  delete [] output_file;
  delete [] mlf_file;
  delete [] temp_vect;
  delete [] new_states_file;
  delete [] new_trans_file;
  delete [] acc_list_file;
  delete [] batch_stats_file;
  delete [] state_occ_file;
  batch_stats_file = (char_1*)NULL;

  // free memory in the word hash table
  //
  Train_Word* word = (Train_Word*)NULL;
  Train_Hash_cell* next = (Train_Hash_cell*)NULL;
  Train_Hash_cell** hash_cells = word_table->get_cells_cc();
  int_4 hash_size = word_table->get_size_cc();
  for (int_4 k = 0; k < hash_size; k++) {
    for (Train_Hash_cell* cell = hash_cells[k]; cell != (Train_Hash_cell*)NULL;
         cell = next) {
      next = cell->get_next_cc();
      word = (Train_Word*)(cell->get_item_cc());
      delete word;
      manager->delete_cc(cell);
    }
    hash_cells[k] = (Train_Hash_cell*)NULL;
  }
  delete word_table;

  // free memory in data structures
  //
  for (int_4 i = 0; i < num_monophones; i++) {
    delete [] monophones[i];
  }
  delete [] monophones;

  for (int_4 i = 0; i < num_states; i++) {
    delete states[i];
  }
  delete [] states;

  for (int_4 i = 0; i < num_models; i++) {
    delete models[i];
  }
  delete [] models;

  for (int_4 i = 0; i < num_phones; i++) {
    delete phones[i];
  }
  delete [] phones;

  if (num_sph > 0) {
    delete [] sph_index;
  }

  delete [] phn_ind;
  delete [] phn_str;

  // free the memory manager
  //
  delete manager;

  // exit gracefully
  //
  exit(ISIP_NO_ERROR);
}
