lat_read_1.cc
// file: lat_read_1.cc
//
// system include files
//
#include <memory.h>
#include <string.h>
#include <ctype.h>

// isip include files
//
#include "lattice.h"
#include "lattice_constants.h"

// method: read_trans_cc
//
// arguments:
//  char_1* trans_str_a: (input) the transcription for the current mfcc file
//  Hash_table* lexicon_a: (input) the hash table of word indices
//
// return: a logical_1 indicating status
//
// this method builds a linear lattice from the transcription string
//
logical_1 Lattice::read_trans_cc(char_1* trans_str_a, Hash_table* lexicon_a) {

  // get the null word from the lexicon
  //
  if ((lexicon_a->hash_lookup_cc(WRD_NULL)) != (Hash_cell*)NULL) {
    null_word_d = (Word*)((lexicon_a->hash_lookup_cc(WRD_NULL))->get_item_cc());
  }

  // local variables
  //
  int_4 string_length = (int_4)0;

  // find the first non-space character
  //
  char_1* tmp_ptr = trans_str_a;
  while (isspace((char)tmp_ptr[0])) {
    tmp_ptr++;
  }

  // copy the transcription string and strip trailing whitespace
  //
  char_1* str = (char_1*)NULL;
  char_1* buffer = new char_1[strlen((char*)tmp_ptr) + 1];
  strcpy((char*)buffer, (char*)tmp_ptr);
  while (isspace(buffer[strlen((char*)buffer) - 1])) {
    buffer[strlen((char*)buffer) - 1] = '\0';
  }

  // dummy variables
  //
  Word** words = new Word*[LATTICE_MAX_NUM_WORD];
  Hash_cell* hcell = (Hash_cell*)NULL;

  // start with the sentence start word
  //
  int_4 num_words = (int_4)0;
  hcell = lexicon_a->hash_lookup_cc(WRD_SENT_START);
  words[num_words++] = (Word*)(hcell->get_item_cc());

  // break the word sequence out of the input string
  //
  str = (char_1*)strtok((char*)buffer, ISIP_STRING_SPACE);
  hcell = lexicon_a->hash_lookup_cc(str);
  if (hcell == (Hash_cell*)NULL) {
    error_handler_cc((char_1*)"read_trans_cc", (char_1*)"word not in lexicon");
  }
  words[num_words++] = (Word*)(hcell->get_item_cc());

  // get the rest of the words
  //
  while ((str = (char_1*)strtok((char*)NULL, ISIP_STRING_SPACE)) &&
         (str[0] != ISIP_NEWLINE)) {

    // replace the last character of the string with the null terminator
    // if this is the last word of the transcription
    //
    string_length = strlen((char*)str);
    if (str[string_length - 1] == (char_1)'\n') {
      str[string_length - 1] = (char_1)'\0';
    }

    hcell = lexicon_a->hash_lookup_cc(str);
    if (hcell == (Hash_cell*)NULL) {
      error_handler_cc((char_1*)"read_trans_cc", (char_1*)"word not in lexicon");
    }
    words[num_words++] = (Word*)(hcell->get_item_cc());
  }

  // stop with the sentence end word
  //
  hcell = lexicon_a->hash_lookup_cc(WRD_SENT_END);
  words[num_words++] = (Word*)(hcell->get_item_cc());

  // free memory
  //
  delete [] buffer;

  // set the number of nodes and arcs
  //
  num_nodes_d = num_words + 1;
  num_arcs_d = num_words;

  // allocate space for the lattice node list
  //
  Lattice_node** lnodes = new Lattice_node*[num_nodes_d];
  float_4 lm_score = (float_4)0;
  float_4 ac_score = (float_4)0;

  // put the nodes in the lattice hash table
  //
  init_nodes_cc(lnodes);

  // create the lattice start node --- this is a dummy node that is
  // not evaluated, it just serves as a starting point for all the
  // sentence start nodes
  //
  start_node_d = lnodes[0];

  // loop over all words, i.e. lattice nodes
  //
  for (int_4 sn = 0, en = 1; sn < num_words; sn++, en++) {

    // set the item index of the node
    //
    lnodes[en]->set_word_cc(words[sn]);

    // update the prev node and next node pointers for the two nodes
    // corresponding to this arc
    //
    lnodes[sn]->add_next_node_cc(lnodes[en]);
    lnodes[en]->add_prev_node_cc(lnodes[sn]);

    // set the acoustic and language model scores
    //
    lnodes[sn]->add_lm_score_cc(lm_score);
    lnodes[sn]->add_ac_score_cc(ac_score);
  }

  // free memory
  //
  delete [] words;
  delete [] lnodes;

  // exit gracefully
  //
  return ISIP_TRUE;
}
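For readers without the ISIP classes at hand, the sketch below shows the same idea in plain, self-contained C++: a whitespace-delimited transcription is tokenized, bracketed by sentence-start and sentence-end tokens, and turned into a linear chain of nodes with next/prev links, which is the structure read_trans_cc builds. All names here (Node, build_linear_lattice, the "<s>"/"</s>" markers) are hypothetical stand-ins, not the Lattice, Hash_table, or Lattice_node API used above.

// minimal sketch (not the ISIP API) of building a linear lattice
// from a transcription string; assumes standard C++ only
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

struct Node {
  std::string word;        // word attached to this node ("" for the dummy start node)
  std::vector<int> next;   // indices of successor nodes
  std::vector<int> prev;   // indices of predecessor nodes
};

// split the transcription into words, add sentence-start/end markers,
// and link node i -> node i + 1 so the result is a linear chain whose
// node 0 is a dummy start node (mirroring start_node_d above)
std::vector<Node> build_linear_lattice(const std::string& trans) {
  std::vector<std::string> words = {"<s>"};      // sentence start marker (assumed)
  std::istringstream iss(trans);
  for (std::string w; iss >> w; ) {              // operator>> skips all whitespace,
    words.push_back(w);                          // including the trailing newline
  }
  words.push_back("</s>");                       // sentence end marker (assumed)

  std::vector<Node> nodes(words.size() + 1);     // + 1 for the dummy start node
  for (std::size_t i = 0; i < words.size(); ++i) {
    nodes[i + 1].word = words[i];
    nodes[i].next.push_back(static_cast<int>(i + 1));
    nodes[i + 1].prev.push_back(static_cast<int>(i));
  }
  return nodes;
}

int main() {
  std::vector<Node> lat = build_linear_lattice("  the quick brown fox \n");
  for (std::size_t i = 1; i < lat.size(); ++i) {
    std::cout << i << ": " << lat[i].word << '\n';
  }
  return 0;
}

One difference worth noting: the sketch tokenizes with stream extraction instead of strtok, so the leading/trailing whitespace trimming that read_trans_cc does by hand comes for free, and the word lookup against a lexicon (hash_lookup_cc plus the "word not in lexicon" error path) is omitted entirely.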