hrec.c
   inst->node=node;
   /* printf("\ncurrent pri->psi->stHeapIdx[n]=%d\n",pri->psi->stHeapIdx[n]); */
   inst->state=(TokenSet*) New(pri->stHeap+pri->psi->stHeapIdx[n],0);
   inst->exit=(TokenSet*) New(pri->stHeap+pri->psi->stHeapIdx[1],0);

   inst->exit->tok=null_token;
   if (pri->nToks>1) {                 /* not taken in the traced run */
      inst->exit->set=(RelToken*) New(&pri->rTokHeap,0);
      inst->exit->n=1;
      inst->exit->set[0]=rmax;
   }
   else {                              /* taken in the traced run */
      inst->exit->n=0;
   }
   for (i=1,cur=inst->state;i<=n;i++,cur++) {
      cur->tok=null_token;
      if (pri->nToks>1) {
         cur->set=(RelToken*) New(&pri->rTokHeap,0);
         cur->n=1;
         cur->set[0]=rmax;
      }
      else {
         cur->n=0;
      }
   }
   inst->max=LZERO;

   /* Add this node's network instance to the inst list held in the
      private recognition info (pri) */
   inst->link=&pri->tail;
   inst->knil=pri->tail.knil;
   inst->link->knil=inst;
   inst->knil->link=inst;

   node->inst=inst;                    /* associate the NetInst with node */
   /* printf("\ncurrent node->type=%d\n",node->type); */

   if (node_wd0(node)) {               /* either branch may be taken */
      inst->wdlk=LikeToWord(inst->node);   /* node->type==3 here; observed inst->wdlk==0.0 */
      /* printf("\ncurrent node->type=%d,n_wd0=%d\n",node->type,n_wd0); */
   }
   else
      inst->wdlk=LZERO;                /* node->type==4 here */

   pri->nact++;

   /* New node needs any currently alive following insts moved */
   /* to be more recent than it to ensure tokens propagated in */
   /* correct order. */
   inst->ooo=TRUE;                     /* Need to keep list in propagation order */

#ifdef SANITY
   inst->ipos=pri->ipos++;
   pri->start_inst=inst;
#endif
   ReOrderList(node);   /* iteratively add the other nodes to pri's NetInst list */
}

static void DetachInst(NetNode *node)
{
   TokenSet *cur;
   NetInst *inst;
   int i,n;

   inst=node->inst;
   pri->nact--;
#ifdef SANITY
   if (inst->node!=node)
      HError(8591,"DetachInst: Node/Inst mismatch");
#endif
   inst->link->knil=inst->knil;
   inst->knil->link=inst->link;

   if (node_hmm(node))
      n=node->info.hmm->numStates-1;
   else
      n=1;
   if (pri->nToks>1) {
      for (i=0,cur=inst->state;i<n;i++,cur++)
         Dispose(&pri->rTokHeap,cur->set);
      Dispose(&pri->rTokHeap,inst->exit->set);
   }
   Dispose(pri->stHeap+pri->psi->stHeapIdx[n],inst->state);
   Dispose(pri->stHeap+pri->psi->stHeapIdx[1],inst->exit);
#ifdef SANITY
   inst->state=NULL;
   inst->exit=NULL;
#endif
   Dispose(&pri->instHeap,inst);
   node->inst=NULL;
}

static void SetEntryState(NetNode *node,TokenSet *src)
/* Propagate the token set src into node */
{
   NetInst *inst;
   TokenSet *res;

   if (node->inst==NULL) {             /* node not yet active, attach on first use */
      AttachInst(node);
   }
   inst=node->inst;
   res=inst->state;
#ifdef SANITY
   if ((res->n==0 && src->n!=0) || (res->n!=0 && src->n==0))
      HError(8590,"SetEntryState: TokenSet size mismatch");
   /*
   if (src->tok.like>LSMALL && src->tok.path!=NULL &&
       src->tok.path->node->info.pron==NULL)
      HError(8590,"SetEntryState: NULL word propagated into path");
   */
#endif
   if (res->n==0) {
      if (src->tok.like > res->tok.like)
         res->tok=src->tok;
   }
   else
      TokSetMerge(res,&src->tok,src);
   if (res->tok.like>inst->max)
      inst->max=res->tok.like;
   if (node->type==n_word &&
       (pri->wordMaxNode==NULL || pri->wordMaxNode->inst==NULL ||
        res->tok.like > pri->wordMaxNode->inst->max))
      pri->wordMaxNode=node;
}

static void StepInst1(NetNode *node)   /* First pass of token propagation (Internal) */
{
   if (node_hmm(node))                 /* this node is an HMM node */
      StepHMM1(node);                  /* Advance tokens within HMM instance t => t-1 */
                                       /* Entry tokens valid for t-1, do states 2..N */
   else
      StepWord1(node);                 /* this node is a word node */
   node->inst->pxd=FALSE;              /* pxd == external propagation done this frame */
}

static void StepInst2(NetNode *node)   /* Second pass of token propagation (External) */
/* Each time a word node is passed a new Path is created to hold the current node;
   if the old path has not yet been moved into the pri->pYesRef path list it is
   moved to the head of that list, and the token is then propagated to every
   destination node linked to this one. */
/* Must be able to survive doing this twice !! */
{
   Token tok;
   TokenSet xtok;
   RelToken rtoks[MAX_TOKS];
   NetLink *dest;
   LogFloat lm;
   int i,k;

   if (node_word(node))                /* taken in the traced run */
      /* Each pass through a word node creates a new path holding the current
         node; if the old path has not yet been moved into pri->pYesRef it is
         moved to the head of that list. */
      StepWord2(node);                 /* Merge tokens and update traceback */
   else if (node_tr0(node) /* && node_hmm(node) */)
      StepHMM2(node);                  /* not taken in the traced run */
                                       /* Advance tokens within HMM instance t => t-1 */
                                       /* Entry token valid for t, only do state N */

   tok=node->inst->exit->tok;
   xtok.tok=tok;
   xtok.n=node->inst->exit->n;
   xtok.set=rtoks;
   for (k=0;k<xtok.n;k++)
      xtok.set[k]=node->inst->exit->set[k];

   if (node_word(node))
      if (tok.like<pri->wordThresh)
         tok=null_token;

   /* The following is done for both model nodes and word nodes */
   if (tok.like>pri->genThresh) {
      for (i=0,dest=node->links;i<node->nlinks;i++,dest++) {
         /* propagate tok to each destination node linked to this one */
         lm=dest->like;
         xtok.tok.like=tok.like+lm*pri->scale;
         xtok.tok.lm=tok.lm+lm;
         for (k=0;k<xtok.n;k++)
            xtok.set[k].lm=node->inst->exit->set[k].lm+lm;
         if (xtok.tok.like>pri->genThresh) {
            SetEntryState(dest->node,&xtok);
            /* Transfer set of tokens to node, activating when necessary */
            /* choosing N most likely after adding transition likelihood */
         }
      }
   }
   node->inst->pxd=TRUE;
}

static void CreateSEIndex(PSetInfo *psi,HLink hmm)
{
   SMatrix trP;
   short **se;          /* Actually (*se)[2] */
   int j,min,max,N;

   trP=hmm->transP;
   N=hmm->numStates;
   se=psi->seIndexes[hmm->tIdx];       /* hmm->tIdx indexes the hmm's transition matrix */
   if (se==NULL) {                     /* taken in the traced run */
      se=(short**) New(&psi->heap,(N-1)*sizeof(short*));
      se-=2;
      for (j=2;j<=N;j++) {
         se[j]=(short*) New(&psi->heap,2*sizeof(short));
         for (min=(j==N)?2:1;min<N;min++)   /* Don't want tee transitions */
            if (trP[min][j]>LSMALL) break;
         for (max=N-1;max>1;max--)
            if (trP[max][j]>LSMALL) break;
#ifdef SANITY
         if (min>max) {                /* not taken in the traced run */
            HError(-8520,"CreateSEIndex: No transitions to state %d",j);
            min=(j==N)?2:1;
            max=N-1;
         }
#endif
         se[j][0]=min;   /* lowest state in this model with a transition into state j */
         se[j][1]=max;   /* highest state in this model with a transition into state j */
      }
      psi->seIndexes[hmm->tIdx]=se;
   }
}

/* Prepare HMMSet for recognition.  Allocates seIndex and preComp from */
/* hmmset heap. */
PSetInfo *InitPSetInfo(HMMSet *hset)
{
   PSetInfo *psi;
   RelToken *rtoks;
   int n,h,i;
   HLink hmm;
   MLink q;
   PreComp *pre;
   char name[80];
   static int psid=0;

   psi=(PSetInfo*) New(&gcheap,sizeof(PSetInfo));
   psi->hset=hset;
   sprintf(name,"PRI-%d Heap",psid++);        /* "PRI-0 Heap" in the traced run */
   CreateHeap(&psi->heap,name,MSTAK,1,1.0,1000,8000);

   psi->max=MaxStatesInSet(hset)-1;           /* psi->max==4 in the traced run */
   psi->tBuf=(Token*) New(&psi->heap,(psi->max-1)*sizeof(Token));
   psi->tBuf-=2;
   psi->sBuf=(TokenSet*) New(&psi->heap,psi->max*sizeof(TokenSet));
   rtoks=(RelToken*) New(&psi->heap,psi->max*sizeof(RelToken)*MAX_TOKS);
   psi->sBuf-=1;
   for (i=0; i<psi->max; i++) {
      psi->sBuf[i+1].set=rtoks; rtoks+=MAX_TOKS;
      psi->sBuf[i+1].tok=null_token;
      psi->sBuf[i+1].n=0;
      psi->sBuf[i+1].set[0]=rmax;
   }

   psi->stHeapIdx=(short*) New(&psi->heap,(psi->max+1)*sizeof(short));
   for (i=0; i<=psi->max; i++)
      psi->stHeapIdx[i]=-1;
   psi->stHeapIdx[1]=0;                       /* For one state word end models */

   psi->ntr=hset->numTransP;
   psi->seIndexes=(short***) New(&psi->heap,sizeof(short**)*psi->ntr);
   psi->seIndexes--;
   for (i=1;i<=psi->ntr;i++)
      psi->seIndexes[i]=NULL;

   for (h=0; h<MACHASHSIZE; h++)
      for (q=hset->mtab[h]; q!=NULL; q=q->next) {
         if (q->type=='h') {
            hmm=(HLink)q->structure;
            n=hmm->numStates-1;
            psi->stHeapIdx[n]=0;
            CreateSEIndex(psi,hmm);
         }
      }

   psi->nsp=hset->numStates;
   psi->sPre=(PreComp*) New(&psi->heap,sizeof(PreComp)*psi->nsp);
   psi->sPre--;
   for (i=1,pre=psi->sPre+1;i<=psi->nsp;i++,pre++)
      pre->id=-1;

   if (hset->numSharedMix>0) {
      psi->mixShared=TRUE;
      psi->nmp=hset->numSharedMix;
      psi->mPre=(PreComp*) New(&psi->heap,sizeof(PreComp)*psi->nmp);
      psi->mPre--;
      for (i=1,pre=psi->mPre+1;i<=psi->nmp;i++,pre++)
         pre->id=-1;
   }
   else
      psi->mixShared=FALSE,psi->nmp=0,psi->mPre=NULL;

   for (n=1,i=0;n<=psi->max;n++)
      if (psi->stHeapIdx[n]>=0)
         psi->stHeapIdx[n]=i++;
   psi->stHeapNum=i;

   return(psi);
}

void FreePSetInfo(PSetInfo *psi)
{
   DeleteHeap(&psi->heap);
   Dispose(&gcheap,psi);
}

/* Generate a lattice from the recognition paths */
static void LatFromPaths(Path *path,int *ln,Lattice *lat)
{
   LNode *ne,*ns;
   LArc *la;
   Word nullwordId;
   NxtPath tmp,*pth;
   Align *align,*al,*pr;
   MLink ml;
   LabId labid,labpr = NULL;
   char buf[80];
   int i,frame;
   double prlk,dur,like,wp;

   nullwordId = GetWord(lat->voc,GetLabId("!NULL",FALSE),FALSE);

   /* Cache the current path in tmp */
   tmp.prev=path->prev;
   tmp.like=path->like;
   tmp.chain=path->chain;
   tmp.lm=path->lm;

   /* Create the lattice node for the current path */
   ne=lat->lnodes-path->usage;   /* -path->usage is the index in the lattice of the node for path */
   ne->time=path->frame*lat->framedur;
   if (path->node->info.pron != NULL)
      ne->word=path->node->info.pron->word;
   else
      ne->word=nullwordId;
   ne->tag=path->node->tag;
   if (path->node->info.pron != NULL)
      ne->v=path->node->info.pron->pnum;
   else
      ne->v=1;
   ne->score=path->like;
   align=path->align;

   for (pth=&tmp;pth!=NULL;pth=pth->chain) {
      la=lat->larcs+(*ln)++;           /* fill in arc number *ln of the lattice */
      if (pth->prev) {                 /* the current path has a predecessor path */
         ns=lat->lnodes-pth->prev->usage,prlk=pth->prev->like;   /* lattice node of the predecessor */
      }
      else {                           /* no predecessor: the arc starts at the lattice's initial node */
         ns=lat->lnodes,prlk=0.0;
      }
      la->start=ns;la->end=ne;         /* arc runs from the predecessor's node to this path's node */
      if (ne->word==NULL || ne->word==nullwordId)   /* no word or NULL node */
         wp=0.0;                       /* No penalty for current word */
      else
         wp=pri->wordpen;              /* Inc penalty for current word */
      la->aclike=pth->like-prlk-pth->lm*pri->scale-wp;
      if (path->node->info.pron != NULL) {
         la->aclike-=path->node->info.pron->prob*pri->pscale;
         la->prlike=path->node->info.pron->prob;
      }
      else
         la->prlike=0.0;
      la->lmlike=pth->lm;
      la->score=pth->like;
      la->farc=ns->foll;la->parc=ne->pred;
      ns->foll=ne->pred=la;
      if (pth->prev!=NULL && ns->word==NULL)
         LatFromPaths(pth->prev,ln,lat);   /* recursively build nodes and arcs for the predecessor path */
      if (align!=NULL) {               /* not executed during plain recognition */
         i=0;
         for (al=align;al!=NULL;al=al->prev) i++;
         la->nAlign=i;
         la->lAlign=(LAlign*) New(lat->heap,sizeof(LAlign)*i);
         frame=path->frame;pr=NULL;
         /* Allow for wp diff between path and align */
         like=path->like-pth->lm*pri->scale-wp;
         for (al=align;al!=NULL;al=al->prev) {
            ml=FindMacroStruct(pri->psi->hset,'h',al->node->info.hmm);
            if (ml==NULL)
               HError(8520,"LatFromPaths: Cannot find hmm in hset");
            if (al->state<0) {
               if (pr==NULL) {
                  pr=al; labpr=ml->id;
                  continue;
               }
               if (labpr==NULL)
                  HError(8522,"LatFromPaths: Align records out of order");
               dur=(pr->frame-al->frame)*lat->framedur;
               like=pr->like-al->like;
               pr=al; labid=labpr; labpr=ml->id;
            }
            else {
               if (pri->models)
                  sprintf(buf,"s%d",al->state);
               else
                  sprintf(buf,"%s[%d]",ml->id->name,al->state);
               labid=GetLabId(buf,TRUE);