📄 hsrch_11.cc
Font size:
// NOTE(review): the lines below are the tail of the preceding insert method
// (its head is above this chunk and not visible here)
//
  // check to see if the child trace exists in the map
  //
  ptr = child_a->getReference();
  if (ptr != (BiGraphVertex<TrainNode>*)NULL) {
    dst = ptr;
  }

  // add a transition in the trellis from the parent to the child;
  // note: if either endpoint was not found, the arc is silently skipped
  // and the method still reports success
  //
  if ((src != (BiGraphVertex<TrainNode>*)NULL) &&
      (dst != (BiGraphVertex<TrainNode>*)NULL)) {
    trellis_d.insertArc(src, dst, false, weight_a);

    // print debugging information
    //
    if (debug_level_d >= Integral::DETAILED) {
      GraphVertex<SearchNode>* src_vertex =
        src->getItem()->getReference()->getCentralVertex();
      GraphVertex<SearchNode>* dst_vertex =
        dst->getItem()->getReference()->getCentralVertex();
      long src_frame = src->getItem()->getFrame();
      long dst_frame = dst->getItem()->getFrame();

      // resolve a printable symbol for the source vertex
      //
      SearchSymbol src_sym;
      if (src_vertex->isStart()) {
        src_sym.assign(L"_START_");
      }
      else if (src_vertex->isTerm()) {
        src_sym.assign(L"_TERM_");
      }
      else {
        src->getItem()->getReference()->print(src_sym);
      }

      // resolve a printable symbol for the destination vertex
      //
      SearchSymbol dst_sym;
      if (dst_vertex->isStart()) {
        dst_sym.assign(L"_START_");
      }
      else if (dst_vertex->isTerm()) {
        dst_sym.assign(L"_TERM_");
      }
      else {
        dst->getItem()->getReference()->print(dst_sym);
      }

      // format and emit the transition description
      //
      String val;
      String output(L"\n-> source: ");
      output.concat(src_vertex);
      output.concat(L", destination: ");
      output.concat(dst_vertex);
      output.concat(L"\n [");
      output.concat(src_sym);
      output.concat(L"], frame: ");
      val.assign((long)src_frame);
      output.concat(val);
      output.concat(L" -> [");
      output.concat(dst_sym);
      output.concat(L"], frame: ");
      val.assign((long)dst_frame);
      output.concat(val);
      Console::put(output);
    }
  }

  // exit gracefully
  //
  return true;
}

// method: insertOldPath
//
// arguments:
//  Instance* parent: (input) source instance
//  Instance* child: (input) destination instance
//  float weight: (input) transition probability
//
// return: logical error status
//
// adds a transition between two instances that already have trellis
// vertices attached (no new vertices are created here); returns false
// only when either instance pointer is null
//
boolean HierarchicalSearch::insertOldPath(Instance* parent_a, Instance* child_a, float weight_a) {

  // declare local variables
  //
  BiGraphVertex<TrainNode>* ptr = (BiGraphVertex<TrainNode>*)NULL;
  BiGraphVertex<TrainNode>* src = (BiGraphVertex<TrainNode>*)NULL;
  BiGraphVertex<TrainNode>* dst = (BiGraphVertex<TrainNode>*)NULL;

  // make sure that neither the parent nor the child instances are null
  //
  if ((parent_a == (Instance*)NULL) || (child_a == (Instance*)NULL)) {
    return false;
  }

  // check to see if the parent instance exists in the map
  //
  ptr = parent_a->getReference();
  if (ptr != (BiGraphVertex<TrainNode>*)NULL) {
    src = ptr;
  }

  // check to see if the child instance exists in the map
  //
  ptr = child_a->getReference();
  if (ptr != (BiGraphVertex<TrainNode>*)NULL) {
    dst = ptr;
  }

  // add a transition in the trellis from the parent to the child;
  // note: when either vertex is missing the arc is silently skipped
  //
  if ((src != (BiGraphVertex<TrainNode>*)NULL) &&
      (dst != (BiGraphVertex<TrainNode>*)NULL)) {
    trellis_d.insertArc(src, dst, false, weight_a);

    // print debugging information
    //
    if (debug_level_d >= Integral::DETAILED) {
      GraphVertex<SearchNode>* src_vertex =
        src->getItem()->getReference()->getCentralVertex();
      GraphVertex<SearchNode>* dst_vertex =
        dst->getItem()->getReference()->getCentralVertex();
      long src_frame = src->getItem()->getFrame();
      long dst_frame = dst->getItem()->getFrame();

      // resolve a printable symbol for the source vertex
      //
      SearchSymbol src_sym;
      if (src_vertex->isStart()) {
        src_sym.assign(L"_START_");
      }
      else if (src_vertex->isTerm()) {
        src_sym.assign(L"_TERM_");
      }
      else {
        src->getItem()->getReference()->print(src_sym);
      }

      // resolve a printable symbol for the destination vertex
      //
      SearchSymbol dst_sym;
      if (dst_vertex->isStart()) {
        dst_sym.assign(L"_START_");
      }
      else if (dst_vertex->isTerm()) {
        dst_sym.assign(L"_TERM_");
      }
      else {
        dst->getItem()->getReference()->print(dst_sym);
      }

      // format and emit the transition description
      //
      String val;
      String output(L"\n-> source: ");
      output.concat(src_vertex);
      output.concat(L", destination: ");
      output.concat(dst_vertex);
      output.concat(L"\n [");
      output.concat(src_sym);
      output.concat(L"], frame: ");
      val.assign((long)src_frame);
      output.concat(val);
      output.concat(L" -> [");
      output.concat(dst_sym);
      output.concat(L"], frame: ");
      val.assign((long)dst_frame);
      output.concat(val);
      Console::put(output);
    }
  }

  // exit gracefully
  //
  return true;
}
method: insertTrace//// arguments:// Trace* trace: (input) trace to be added to the trellis//// return: train node vertex//// insert the trace into the trellis//BiGraphVertex<TrainNode>* HierarchicalSearch::insertTrace(Trace* trace_a) { // declare local variables // TrainNode train_node; BiGraphVertex<TrainNode>* vertex = (BiGraphVertex<TrainNode>*)NULL; // initialize the time stamp (t) // if (trace_a != (Trace*)NULL) { train_node.setFrame((long)trace_a->getFrame()); } // initialize the forward/backward probabilities at time (t) // train_node.setAlpha(Integral::DB_LOG_MIN_VALUE); train_node.setBeta(Integral::DB_LOG_MIN_VALUE); // initialize the reference pointers at time (t) // if (trace_a != (Trace*)NULL) { train_node.setReference(trace_a->getSymbol()); } // initialize the statistical model at time (t) // if (trace_a != (Trace*)NULL) { train_node.setStatisticalModel(trace_a->getSymbol()->getCentralVertex()-> getItem()->getStatisticalModel()); } // insert the node in the trellis and return the graph vertex // vertex = trellis_d.insertVertex(&train_node); // initialize the reference pointers for the trace // if (trace_a != (Trace*)NULL) { trace_a->setReference(vertex); } // return the inserted vertex // return vertex;}// method: insertInstance//// arguments:// Instance* instance: (input) instance to be added to the trellis//// return: train node vertex//// insert the instance into the trellis//BiGraphVertex<TrainNode>* HierarchicalSearch::insertInstance(Instance* instance_a) { // declare local variables // TrainNode train_node; BiGraphVertex<TrainNode>* vertex = (BiGraphVertex<TrainNode>*)NULL; // initialize the time stamp (t) // if (instance_a != (Instance*)NULL) { train_node.setFrame((long)instance_a->getFrame()); } // initialize the forward/backward probabilities at time (t) // train_node.setAlpha(Integral::DB_LOG_MIN_VALUE); train_node.setBeta(Integral::DB_LOG_MIN_VALUE); // initialize the reference pointer at time (t) // if (instance_a != (Instance*)NULL) { 
train_node.setReference(instance_a->getSymbol()); } // initialize the statistical model at time (t) // if (instance_a != (Instance*)NULL) { train_node.setStatisticalModel(instance_a->getSymbol()-> getCentralVertex()-> getItem()->getStatisticalModel()); } // insert the node in the trellis and return the graph vertex // vertex = trellis_d.insertVertex(&train_node); // initialize the reference pointers for the instance // if (instance_a != (Instance*)NULL) { instance_a->setReference(vertex); } // return the inserted vertex // return vertex;}// method: computeForwardBackward//// arguments:// Vector<VectorFloat> data: (output) feature vectors// float beta_threshold: (input) beta pruning threshold//// return: logical error status//// traverse the trellis and compute the state occupancies//BiGraph<TrainNode>* HierarchicalSearch::computeForwardBackward(Vector<VectorFloat>& data_a, float beta_threshold_a) { // setup the initial conditions for the forward backward algorithm // initializeForwardBackward(); // traverse the trellis backwards and compute the backward probability // computeBackward(data_a, beta_threshold_a); // traverse the trellis backwards and compute the forward probability // computeForward(data_a); // return the trellis // return &trellis_d; }// method: initializeForwardBackward//// arguments: none//// return: logical error status//// this method setup the initial conditions for the forward backward algorithm//boolean HierarchicalSearch::initializeForwardBackward() { // declare local variables // BiGraphVertex<TrainNode>* vertex = (BiGraphVertex<TrainNode>*)NULL; // get the start node of the trellis // vertex = trellis_d.getStart(); if (vertex != (BiGraphVertex<TrainNode>*)NULL) { vertex->getItem()->setAlpha(0); } // get the term node of the trellis // vertex = trellis_d.getTerm(); if (vertex != (BiGraphVertex<TrainNode>*)NULL) { vertex->getItem()->setBeta(0); } // exit gracefully // return true; }// method: computeForward//// arguments:// 
// method: computeForward
//
// arguments:
//  Vector<VectorFloat> data: (input) feature vectors
//
// return: logical error status
//
// this method computes the forward (alpha) probability of the network
// via a breadth-first sweep from the start vertex
//
boolean HierarchicalSearch::computeForward(Vector<VectorFloat>& data_a) {

  // declare local variables
  //
  BiGraphVertex<TrainNode>* child = (BiGraphVertex<TrainNode>*)NULL;
  BiGraphVertex<TrainNode>* vertex = (BiGraphVertex<TrainNode>*)NULL;
  DoubleLinkedList<BiGraphVertex<TrainNode> > active_list(DstrBase::USER);

  // add the start vertex to the active list
  // (the original comment said "term vertex" -- the code clearly
  // seeds the sweep with getStart())
  //
  active_list.insertLast(trellis_d.getStart());

  // loop until the active list is empty
  //
  while (!active_list.isEmpty()) {

    // remove the first element from the list
    //
    active_list.removeFirst(vertex);

    // compute the forward (alpha) probability of the vertex
    //
    computeAlpha(data_a, vertex);

    // enqueue each child exactly once: the alpha-valid flag marks
    // children that are already on the list
    //
    for (boolean more = vertex->gotoFirstChild(); more;
         more = vertex->gotoNextChild()) {
      child = vertex->getCurrChild()->getVertex();
      if (child != (BiGraphVertex<TrainNode>*)NULL) {
        if ((child->getItem() != (TrainNode*)NULL) &&
            !child->getItem()->isAlphaValid()) {
          active_list.insertLast(child);
          child->getItem()->setAlphaValid(true);
        }
      }
    }
  }

  // clear the list before exiting (USER mode: the list does not own
  // the vertices)
  //
  active_list.clear();

  // exit gracefully
  //
  return true;
}

// method: computeBackward
//
// arguments:
//  Vector<VectorFloat> data: (input) feature vectors
//  float beta_threshold: (input) beta threshold
//
// return: logical error status
//
// this method computes the backward (beta) probability of the network
//
boolean HierarchicalSearch::computeBackward(Vector<VectorFloat>& data_a, float beta_threshold_a) {

  // declare local variables
  //
  long timestamp = 0;
  float beta_value = 0.0;
  BiGraphVertex<TrainNode>* parent = (BiGraphVertex<TrainNode>*)NULL;
  BiGraphVertex<TrainNode>* vertex = (BiGraphVertex<TrainNode>*)NULL;
  VectorFloat beta_bound;
  DoubleLinkedList<BiGraphVertex<TrainNode> > active_list(DstrBase::USER);

  // allocate a [num_frames x num_models] cache of model scores,
  // initialized to the smallest log value
  // NOTE(review): raw new[] -- the matching delete[] is presumably in
  // the part of this method cut off below; verify it is freed on all
  // paths
  //
  long num_frames = data_a.length();
  long num_models =
    getSearchLevel(getNumLevels() - 1).getStatisticalModels().length();
  float** model_cache = new float*[num_frames];
  for (long i=0; i < num_frames; i++) {
    model_cache[i] = new float[num_models];
    for (long j=0; j < num_models; j++) {
      model_cache[i][j] = Integral::DB_LOG_MIN_VALUE;
    }
  }

  // set the length and initialize the beta bound
  //
  beta_bound.setLength(data_a.length());
  beta_bound.assign(Integral::DB_LOG_MIN_VALUE);

  // first pass determines the maximum beta score for each time frame
  //
  // add the term vertex to the active list
  //
  active_list.insertLast(trellis_d.getTerm());

  // loop until the active list is empty
  //
  while (!active_list.isEmpty()) {

    // remove the first element from the list
    //
    active_list.removeFirst(vertex);

    // compute the backward probability of the vertex
    //
    computeBeta(data_a, vertex, model_cache);

    // set the maximum beta value for the time frame if applicable
    // (method continues past the end of this chunk)
    //
    if (vertex->getItem() != (TrainNode*)NULL) {
⌨️ Keyboard shortcuts
Copy code
Ctrl + C
Search code
Ctrl + F
Full-screen mode
F11
Toggle theme
Ctrl + Shift + D
Show shortcuts
?
Increase font size
Ctrl + =
Decrease font size
Ctrl + -