hrec.c
         next=inst->link;
         DetachInst(inst->node);
      } else {
         pri->nxtInst=inst;
         StepInst2(inst->node);
         next=pri->nxtInst->link;
      }
}

void ProcessObservation(VRecInfo *vri,Observation *obs,int id, AdaptXForm *xform)
{
   NetInst *inst,*next;
   int j;
   float thresh;

   pri=vri->pri;
   inXForm = xform; /* specifies the transform to use for this observation */
   if (pri==NULL)
      HError(8570,"ProcessObservation: Visible recognition info not initialised");
   if (pri->net==NULL)
      HError(8570,"ProcessObservation: Recognition not started");
   pri->psi->sBuf[1].n=((pri->nToks>1)?1:0); /* Needed every observation */
   pri->frame++;
   pri->obs=obs;
   if (id<0) pri->id=(pri->prid<<20)+pri->frame;
   else pri->id=id;
   if (obs->swidth[0]!=pri->psi->hset->swidth[0])
      HError(8571,"ProcessObservation: incompatible number of streams (%d vs %d)",
             obs->swidth[0],pri->psi->hset->swidth[0]);
   if (pri->psi->mixShared)
      for (j=1;j<=obs->swidth[0];j++)
         if (VectorSize(obs->fv[j])!=pri->psi->hset->swidth[j])
            HError(8571,"ProcessObservation: incompatible stream widths for %d (%d vs %d)",
                   j,VectorSize(obs->fv[j]),pri->psi->hset->swidth[j]);

   /* Max model pruning is done initially in a separate pass */
   if (vri->maxBeam>0 && pri->nact>vri->maxBeam) {
      if (pri->nact>pri->qsn) {
         if (pri->qsn>0)
            Dispose(&vri->heap,pri->qsa);
         pri->qsn=(pri->nact*3)/2;
         pri->qsa=(LogFloat*) New(&vri->heap,pri->qsn*sizeof(LogFloat));
      }
      for (inst=pri->head.link,j=0;inst!=NULL;inst=inst->link,j++)
         pri->qsa[j]=inst->max;
      if (j>=vri->maxBeam) {
         qcksrtM(pri->qsa,0,j-1,vri->maxBeam);
         thresh=pri->qsa[vri->maxBeam];
         if (thresh>LSMALL)
            for (inst=pri->head.link;inst->link!=NULL;inst=next) {
               next=inst->link;
               if (inst->max<thresh)
                  DetachInst(inst->node);
            }
      }
   }

   if (pri->psi->hset->hsKind==TIEDHS)
      PrecomputeTMix(pri->psi->hset,obs,vri->tmBeam,0);

   /* Pass 1 must calculate top of all beams - inc word end !! */
   pri->genMaxTok=pri->wordMaxTok=null_token;
   pri->genMaxNode=pri->wordMaxNode=NULL;
   for (inst=pri->head.link,j=0;inst!=NULL;inst=inst->link,j++)
      if (inst->node)
         StepInst1(inst->node);

   /* Not changing beam width for max model pruning */
   pri->wordThresh=pri->wordMaxTok.like-vri->wordBeam;
   if (pri->wordThresh<LSMALL) pri->wordThresh=LSMALL;
   pri->genThresh=pri->genMaxTok.like-vri->genBeam;
   if (pri->genThresh<LSMALL) pri->genThresh=LSMALL;
   if (pri->nToks>1) {
      pri->nThresh=pri->genMaxTok.like-vri->nBeam;
      if (pri->nThresh<LSMALL/2) pri->nThresh=LSMALL/2;
   }

   /* Pass 2 performs external token propagation and pruning */
   for (inst=pri->head.link,j=0;inst!=NULL && inst->node!=NULL;inst=next,j++)
      if (inst->max<pri->genThresh) {
         next=inst->link;
         DetachInst(inst->node);
      } else {
         pri->nxtInst=inst;
         StepInst2(inst->node);
         next=pri->nxtInst->link;
      }

   if ((pri->npth-pri->cpth) > vri->pCollThresh ||
       (pri->nalign-pri->calign) > vri->aCollThresh)
      CollectPaths();

   pri->tact+=pri->nact;
   vri->frame=pri->frame;
   vri->nact=pri->nact;
   vri->genMaxNode=pri->genMaxNode;
   vri->wordMaxNode=pri->wordMaxNode;
   vri->genMaxTok=pri->genMaxTok;
   vri->wordMaxTok=pri->wordMaxTok;
}
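/* --- Illustrative sketch (editor addition, not part of HRec.c) ------------
   The max model pruning pass above fills pri->qsa with the per-instance max
   scores and uses pri->qsa[vri->maxBeam] after qcksrtM as the detach
   threshold (qcksrtM is assumed to part-sort the array in descending order
   so that index maxBeam holds the score ranked maxBeam from the top).  The
   helper below shows that rank selection in isolation with a plain partial
   selection sort; the function name and the use of malloc are illustrative
   only.                                                                    */
#include <stdlib.h>
#include <string.h>

/* Return the value of rank `rank` from the top (rank 0 = largest) in
   a[0..n-1], without modifying the caller's array                          */
static float RankedScore(const float *a, int n, int rank)
{
   float *tmp,best,t;
   int i,j,bestIdx;

   tmp=(float *) malloc(n*sizeof(float));
   memcpy(tmp,a,n*sizeof(float));
   if (rank>n-1) rank=n-1;
   for (i=0;i<=rank;i++) {                /* place the top rank+1 values    */
      bestIdx=i; best=tmp[i];
      for (j=i+1;j<n;j++)
         if (tmp[j]>best) { best=tmp[j]; bestIdx=j; }
      t=tmp[i]; tmp[i]=tmp[bestIdx]; tmp[bestIdx]=t;
   }
   best=tmp[rank];
   free(tmp);
   return best;
}
/* An instance whose max score falls below RankedScore(scores,nact,maxBeam)
   would be detached, mirroring the thresh=pri->qsa[vri->maxBeam] test above */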
/* EXPORT->TracePath: Summarise word history */
void TracePath(FILE *file,Path *path)
{
   MLink ml;
   Align *align;

   if (path->prev!=NULL)
      TracePath(file,path->prev);
   fprintf(file,"%s ",path->node->info.pron->word->wordName->name);
   if (path->align!=NULL) {
      fprintf(file,"{");
      for (align=path->align;align!=NULL;align=align->prev) {
         ml=FindMacroStruct(pri->psi->hset,'h',align->node->info.hmm);
         if (ml==NULL) fprintf(file," !*!");
         else fprintf(file," %s",ml->id->name);
         if (align->state>0) fprintf(file,"[%d]",align->state);
      }
      fprintf(file," }\n");
   }
}

/* EXPORT->CompleteRecognition: Free unused data and return traceback */
Lattice *CompleteRecognition(VRecInfo *vri,HTime frameDur,MemHeap *heap)
{
   Lattice *lat = NULL;
   NetInst *inst;
   TokenSet dummy;
   RelToken rtok[1];
   int i;

   pri=vri->pri;
   if (pri==NULL)
      HError(8570,"CompleteRecognition: Visible recognition info not initialised");
   if (pri->net==NULL)
      HError(8570,"CompleteRecognition: Recognition not started");
   if (pri->frame==0)
      HError(-8570,"CompleteRecognition: No observations processed");
   vri->frameDur=frameDur;

   /* Should delay this until we have freed everything that we can */
   if (heap!=NULL) {
      lat=NULL; vri->noTokenSurvived=TRUE;
      if (pri->net->final.inst!=NULL)
         if (pri->net->final.inst->exit->tok.path!=NULL)
            lat=CreateLattice(heap,pri->net->final.inst->exit,vri->frameDur),
               vri->noTokenSurvived=FALSE;
      if (lat==NULL && forceOutput) {
         dummy.n=((pri->nToks>1)?1:0);
         dummy.tok=pri->genMaxTok;
         dummy.set=rtok;
         dummy.set[0].like=0.0;
         dummy.set[0].path=dummy.tok.path;
         dummy.set[0].lm=dummy.tok.lm;
         lat=CreateLattice(heap,&dummy,vri->frameDur);
      }
   }

   /* Now dispose of everything apart from the answer */
   for (inst=pri->head.link;inst!=NULL;inst=inst->link)
      if (inst->node)
         inst->node->inst=NULL;

   /* Remove everything from active lists */
   pri->head.link=&pri->tail; pri->tail.knil=&pri->head;
   pri->npth=pri->cpth=0;
   pri->nalign=pri->calign=0;
   pri->nact=pri->frame=0;
   pri->pYesRef.link=&pri->pYesTail; pri->pYesTail.knil=&pri->pYesRef;
   pri->pNoRef.link=&pri->pNoTail; pri->pNoTail.knil=&pri->pNoRef;
   pri->npth=pri->cpth=0;
   pri->aYesRef.link=&pri->aYesTail; pri->aYesTail.knil=&pri->aYesRef;
   pri->aNoRef.link=&pri->aNoTail; pri->aNoTail.knil=&pri->aNoRef;
   pri->nalign=pri->calign=0;
   vri->frame=0;
   vri->nact=0;
   vri->genMaxNode=NULL;
   vri->wordMaxNode=NULL;
   vri->genMaxTok=null_token;
   vri->wordMaxTok=null_token;

   if (pri->nToks>1)
      ResetHeap(&pri->rTokHeap);
   ResetHeap(&pri->instHeap);
   for (i=0;i<pri->psi->stHeapNum;i++)
      ResetHeap(pri->stHeap+i);
   ResetHeap(&pri->alignHeap);
   ResetHeap(&pri->rPthHeap);
   ResetHeap(&pri->pathHeap);

   return(lat);
}
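/* --- Illustrative sketch (editor addition, not part of HRec.c) ------------
   Typical per-utterance use of the exported routines in this file.  It
   assumes vri has already been created and recognition has already been
   started on a network elsewhere, and that prototypes come from HRec.h;
   ReadNextObservation() is a hypothetical front-end helper and the beam
   values are arbitrary examples.  Only SetPruningLevels, ProcessObservation,
   CompleteRecognition and TranscriptionFromLattice are taken from this file */
Observation *ReadNextObservation(void);   /* hypothetical: NULL at end of utterance */

static Transcription *DecodeUtterance(VRecInfo *vri,MemHeap *latHeap,
                                      HTime frameDur,int nBest)
{
   Observation *obs;
   Lattice *lat;

   /* Pruning settings apply to the frames that follow */
   SetPruningLevels(vri,0,250.0,200.0,0.0,10.0);

   /* One call per observation; id<0 lets ProcessObservation number the
      frames itself and a NULL xform means no input transform              */
   while ((obs=ReadNextObservation())!=NULL)
      ProcessObservation(vri,obs,-1,NULL);

   /* Frees per-utterance structures; returns NULL if latHeap is NULL or
      if no token survived (unless forceOutput is set)                     */
   lat=CompleteRecognition(vri,frameDur,latHeap);
   if (lat==NULL) return NULL;

   /* Turn the traceback lattice into an N-best label transcription        */
   return TranscriptionFromLattice(latHeap,lat,nBest);
}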
/* EXPORT->SetPruningLevels: Set pruning levels for following frames */
void SetPruningLevels(VRecInfo *vri,int maxBeam,LogFloat genBeam,
                      LogFloat wordBeam,LogFloat nBeam,LogFloat tmBeam)
{
   vri->maxBeam=maxBeam;
   vri->genBeam=genBeam;
   vri->wordBeam=wordBeam;
   vri->nBeam=nBeam;
   vri->tmBeam=tmBeam;
}

/* Lattice output routines. Note lattices need to be sorted before output */

typedef struct nbestentry NBestEntry;

struct nbestentry {
   NBestEntry *link;
   NBestEntry *knil;
   NBestEntry *prev;
   double score;
   double like;
   LNode *lnode;
   LArc *larc;
};

static void MarkBack(LNode *ln,int *nn)
{
   LArc *la;

   ln->n=-2;
   for (la=ln->pred;la!=NULL;la=la->parc)
      if (la->start->n==-1)
         MarkBack(la->start,nn);
   ln->n=(*nn)++;
}

static Boolean WordMatch(NBestEntry *cmp,NBestEntry *ans)
{
   if (cmp==ans) return(TRUE);
   else if (cmp==NULL || ans==NULL) return(FALSE);
   else if (cmp->larc->end->word!=ans->larc->end->word) return(FALSE);
   else return(WordMatch(cmp->prev,ans->prev));
}
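/* --- Illustrative sketch (editor addition, not part of HRec.c) ------------
   TranscriptionFromLattice below keeps its A* open list as a doubly linked
   list ordered by score, where score = likelihood of the partial path plus
   the backward best-score precomputed for each lattice node.  The helper
   below shows that ordered insertion in isolation with a simplified entry
   type (SketchEntry is not an HRec type); the tail sentinel's score is
   assumed lower than any real entry, as LZERO is in the real code          */
typedef struct sketchentry SketchEntry;
struct sketchentry {
   SketchEntry *link;        /* towards lower scores           */
   SketchEntry *knil;        /* towards higher scores          */
   double score;             /* partial likelihood + heuristic */
};

/* Insert ent so that the list stays sorted best-score first */
static void InsertByScore(SketchEntry *head,SketchEntry *ent)
{
   SketchEntry *pos;

   for (pos=head->link;ent->score<pos->score;pos=pos->link);
   ent->knil=pos->knil; ent->link=pos;
   ent->knil->link=ent; ent->link->knil=ent;
}
/* The expansion loop then repeatedly pops head->link (the best entry) and
   re-inserts its successors, exactly as done below                         */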
/* EXPORT->TranscriptionFromLattice: Generate NBest labels from lattice */
Transcription *TranscriptionFromLattice(MemHeap *heap,Lattice *lat,int N)
{
   Transcription *trans;
   LabList *ll;
   LLink lab,where;
   LabId model;
   Word word, nullWord;
   Pron pron;
   NBestEntry head,tail,**ans,*best,*newNBE,*pos;
   LArc *la;
   LNode *ln;
   LAlign *lal;
   LogFloat lm,modlk;
   double score,like,start,end;
   Boolean states,models;
   int i,j,n,nAux,*order;
   int nexp=0,nent=0;

   ans=(NBestEntry**) New(&gstack,sizeof(NBestEntry*)*N); ans--;
   for (i=0,ln=lat->lnodes;i<lat->nn;i++,ln++) {
      if (ln->foll==NULL) ln->score=0.0;
      else ln->score=LZERO;
      ln->n=-1;
   }
   n=0;
   for (i=0,ln=lat->lnodes;i<lat->nn;i++,ln++)
      if (ln->n==-1)
         MarkBack(ln,&n);
   order=(int*) New(&gstack, sizeof(int)*lat->nn);
   for (i=0,ln=lat->lnodes;i<lat->nn;i++,ln++)
      order[ln->n]=i;
   for (i=0,la=lat->larcs;i<lat->na;i++,la++)
      if (la->start->n>la->end->n)
         HError(8522,"TranscriptionFromLattice: Arcs not properly directed");
   for (i=lat->nn-1;i>0;i--) {
      ln=lat->lnodes+order[i];
      for (la=ln->pred;la!=NULL;la=la->parc) {
         score=ln->score+LArcTotLike(lat,la);
         if (score>la->start->score) la->start->score=score;
      }
   }
   Dispose(&gstack,order);

   /* Then do NBest AStar for real answers */
   head.link=&tail; head.knil=NULL;
   tail.link=NULL; tail.knil=&head;
   tail.score=head.score=LZERO;
   for (i=0,ln=lat->lnodes;i<lat->nn;i++,ln++) {
      if (ln->pred!=NULL) continue;
      if (ln->score<LSMALL)
         HError(8522,"TranscriptionFromLattice: No route through lattice");
      for (la=ln->foll;la!=NULL;la=la->farc) {
         like=LArcTotLike(lat,la);
         score=like+la->end->score;
         if (score<LSMALL) continue;
         newNBE=(NBestEntry*) New(&gstack,sizeof(NBestEntry));
         newNBE->score=score;
         newNBE->like=like;
         newNBE->lnode=la->end;
         newNBE->larc=la;
         newNBE->prev=NULL;
         for (pos=head.link;score<pos->score;pos=pos->link);
         newNBE->knil=pos->knil; newNBE->link=pos;
         newNBE->knil->link=newNBE->link->knil=newNBE;
      }
   }
   for (n=0,best=head.link;n<N && best!=&tail;best=head.link) {
      if (head.link==&tail) break;
      best=head.link;
      best->link->knil=best->knil;
      best->knil->link=best->link;
      nent--;
      if (best->lnode->foll!=NULL) {
         nexp++;
         for (la=best->lnode->foll;la!=NULL;la=la->farc) {
            like=best->like+LArcTotLike(lat,la);
            score=like+la->end->score;
            if (score<LSMALL) continue;
            newNBE=(NBestEntry*) New(&gstack,sizeof(NBestEntry));
            newNBE->score=score;
            newNBE->like=like;
            newNBE->lnode=la->end;
            newNBE->larc=la;
            newNBE->prev=best;
            for (pos=head.link;score<pos->score;pos=pos->link);
            newNBE->knil=pos->knil; newNBE->link=pos;
            newNBE->knil->link=newNBE->link->knil=newNBE;
            nent++;
         }
         continue;
      }
      for (i=1;i<=n;i++)
         if (WordMatch(best,ans[i])) {
            best=NULL;
            break;
         }
      if (best!=NULL) {
         ans[++n]=best;
      }
   }

   nullWord=GetWord(lat->voc, GetLabId("!NULL", FALSE), FALSE);
   trans=CreateTranscription(heap);
   for (i=1;i<=n;i++) {
      states=models=FALSE;
      /* Note initial and final nodes are !NULL so ignore these !! */
      for (pos=ans[i]->prev;pos!=NULL;pos=pos->prev) {
         if (pos->larc->end->word==nullWord) continue;
         if (pos->larc->lAlign==NULL) {
            states=models=FALSE;
            break;
         }
         for (j=0,lal=pos->larc->lAlign;j<pos->larc->nAlign;j++,lal++)
            if (lal->state<0) models=TRUE;
            else if (lal->state>0) states=TRUE;
      }
      nAux=(states?1:0)+(models?1:0);
      ll=CreateLabelList(heap,nAux);
      if (nAux>0) {
         for (pos=ans[i]->prev;pos!=NULL;pos=pos->prev) {
            la=pos->larc;
            lal=la->lAlign;
            word=la->end->word; model=NULL;
            if (word == nullWord) continue;
            lm=LArcTotLMLike(lat,la);
            modlk=0.0;
            start=la->start->time*1.0E7;
            where=ll->head->succ;
            for (j=0,lal=la->lAlign;j<la->nAlign;j++,lal++) {
               if (lal->state<0 && states) {
                  model=lal->label;
                  modlk=lal->like;
                  lab=NULL;
                  continue;
               }
               lab=CreateLabel(heap,ll->maxAuxLab);
               lab->labid=lal->label;
               lab->score=lal->like;
               end=start+lal->dur*1.0E7;
               lab->start=start;
               lab->end=end;
               lab->pred=where->pred; lab->succ=where;
               lab->succ->pred=lab->pred->succ=lab;
               start=end;
               if (word==NULL) lab->auxLab[nAux]=NULL;
               else lab->auxLab[nAux]=word->wordName;
               word=NULL;
               lab->auxScore[nAux]=lm; lm=0.0;
               if (models && states) {
                  lab->auxLab[1]=model; model=NULL;
                  lab->auxScore[1]=modlk; modlk=0.0;
               }
            }
         }
      } else {
         for (pos=ans[i]->prev;pos!=NULL;pos=pos->prev) {
            la=pos->larc;
            for (pron=la->end->word->pron;pron!=NULL;pron=pron->next)
               if (pron->pnum==la->end->v) break;
            if (pron==NULL || pron->outSym==NULL || pron->outSym->name[0]==0) continue;
            if (la->end->word == nullWord) continue;
            lab=CreateLabel(heap,ll->maxAuxLab);
            lab->labid=pron->outSym;
            lab->score=LArcTotLike(lat,la);
            lab->start=la->start->time*1.0E7;
            lab->end=la->end->time*1.0E7;
            lab->succ=ll->head->succ; lab->pred=ll->head;
            lab->succ->pred=lab->pred->succ=lab;
         }
      }
      AddLabelList(ll,trans);
   }
   ans++; Dispose(&gstack,ans);
   if (trace&T_NGEN)
      printf("HLat: %d NBest generation %d exp, %d ent\n",N,nexp,nent);
   return(trans);
}

/* EXPORT->FormatTranscription: Format transcription prior to output */
void